Repository: jensl/critic Branch: stable/1 Commit: c2d962b909ff Files: 730 Total size: 3.9 MB Directory structure: gitextract_90xaov2p/ ├── .gitignore ├── .gitmodules ├── CONTRIBUTORS ├── COPYING ├── INSTALL ├── README.md ├── documentation/ │ ├── concepts.txt │ ├── tutorials.txt │ └── user_faq.md ├── extend.py ├── install.py ├── installation/ │ ├── __init__.py │ ├── admin.py │ ├── config.py │ ├── criticctl.py │ ├── data/ │ │ ├── comments.pgsql │ │ ├── dbschema.base.sql │ │ ├── dbschema.changesets.sql │ │ ├── dbschema.comments.sql │ │ ├── dbschema.extensions.sql │ │ ├── dbschema.filters.sql │ │ ├── dbschema.git.sql │ │ ├── dbschema.news.sql │ │ ├── dbschema.preferences.sql │ │ ├── dbschema.reviews.sql │ │ ├── dbschema.trackedbranches.sql │ │ └── dbschema.users.sql │ ├── database.py │ ├── extensions.py │ ├── externals/ │ │ ├── .gitignore │ │ ├── MIT-LICENSE.Chosen.md │ │ └── MIT-LICENSE.jQuery.txt │ ├── files.py │ ├── git.py │ ├── httpd.py │ ├── initd.py │ ├── input.py │ ├── lifecycle.json │ ├── migrate.py │ ├── migrations/ │ │ ├── dbschema.altertable.branches.add.archived.py │ │ ├── dbschema.altertable.changesets.parent.dropnotnull.py │ │ ├── dbschema.altertable.commentchainchanges.addressed_by.py │ │ ├── dbschema.altertable.commentchainchanges.drop.review.py │ │ ├── dbschema.altertable.commentchainlines.drop.commit.py │ │ ├── dbschema.altertable.comments.time.setdefaultnow.py │ │ ├── dbschema.altertable.previousreachable.rebase.ondeletecascade.py │ │ ├── dbschema.altertable.repositories.drop.branch.py │ │ ├── dbschema.altertable.repositories.drop.relay.py │ │ ├── dbschema.altertable.reviewfilechanges.rename-columns.py │ │ ├── dbschema.altertable.reviewmergeconfirmations.add.tail.py │ │ ├── dbschema.altertable.reviewrebases.add.equivalent_merge.py │ │ ├── dbschema.altertable.reviewrebases.add.replayed_rebase.py │ │ ├── dbschema.altertable.reviewrecipientfilters.uid-can-be-null.py │ │ ├── dbschema.altertable.reviews.add.origin.py │ │ ├── 
dbschema.altertable.systemidentities.add.installed_sha1.installed_at.py │ │ ├── dbschema.altertable.systemidentities.url-prefix.py │ │ ├── dbschema.altertable.usergitemails.py │ │ ├── dbschema.altertable.usersessions.add.labels.py │ │ ├── dbschema.createindex.misc.py │ │ ├── dbschema.createtable.accesstokens.py │ │ ├── dbschema.createtable.knownremotes.py │ │ ├── dbschema.createtable.scheduledreviewbrancharchivals.py │ │ ├── dbschema.createtable.timezones.py │ │ ├── dbschema.createtable.useremails.py │ │ ├── dbschema.createtable.usersessions.py │ │ ├── dbschema.createtables.accesscontrol.py │ │ ├── dbschema.droptable.knownhosts.py │ │ ├── dbschema.extension-filterhook-role.py │ │ ├── dbschema.external-authentication.py │ │ ├── dbschema.files-and-directories.py │ │ ├── dbschema.fixup-extensionroles.py │ │ ├── dbschema.per-repository-or-filter-preferences.py │ │ ├── dbschema.review-constraints-tweaking.py │ │ ├── git.check-keepalive-references.py │ │ ├── git.clean-up-temporary-references.py │ │ ├── git.convert-replays-into-keepalives.py │ │ ├── git.rename-keepalive-chain.py │ │ ├── installation.config-pyc-file-permissions.py │ │ ├── news.filter-system-rewrite.py │ │ ├── news.review-branch-archival.py │ │ ├── news.review-quick-search.py │ │ └── preference.commit.diff.rulerColumn.py │ ├── paths.py │ ├── prefs.py │ ├── prereqs.py │ ├── process.py │ ├── qs/ │ │ ├── __init__.py │ │ ├── data.py │ │ └── sqlite.py │ ├── smtp.py │ ├── system.py │ ├── templates/ │ │ ├── apache/ │ │ │ ├── site.both │ │ │ ├── site.http │ │ │ └── site.https │ │ ├── configuration/ │ │ │ ├── __init__.py │ │ │ ├── auth.py │ │ │ ├── base.py │ │ │ ├── database.py │ │ │ ├── debug.py │ │ │ ├── executables.py │ │ │ ├── extensions.py │ │ │ ├── limits.py │ │ │ ├── mimetypes.py │ │ │ ├── paths.py │ │ │ ├── services.py │ │ │ ├── smtp-credentials.json │ │ │ └── smtp.py │ │ ├── criticctl │ │ ├── initd │ │ ├── nginx/ │ │ │ ├── site.both │ │ │ ├── site.http │ │ │ └── site.https │ │ └── uwsgi/ │ │ ├── 
app.backend.ini │ │ ├── app.frontend.ini.both │ │ ├── app.frontend.ini.http │ │ └── app.frontend.ini.https │ └── utils.py ├── pylint.rc ├── pythonversion.py ├── quickstart.py ├── src/ │ ├── api/ │ │ ├── __init__.py │ │ ├── accesscontrolprofile.py │ │ ├── accesstoken.py │ │ ├── apierror.py │ │ ├── apiobject.py │ │ ├── batch.py │ │ ├── branch.py │ │ ├── changeset.py │ │ ├── comment.py │ │ ├── commit.py │ │ ├── commitset.py │ │ ├── config.py │ │ ├── critic.py │ │ ├── extension.py │ │ ├── file.py │ │ ├── filechange.py │ │ ├── filecontent.py │ │ ├── filediff.py │ │ ├── filters.py │ │ ├── impl/ │ │ │ ├── __init__.py │ │ │ ├── accesscontrolprofile.py │ │ │ ├── accesstoken.py │ │ │ ├── apiobject.py │ │ │ ├── batch.py │ │ │ ├── branch.py │ │ │ ├── branch_unittest.py │ │ │ ├── changeset.py │ │ │ ├── changeset_unittest.py │ │ │ ├── comment.py │ │ │ ├── comment_unittest.py │ │ │ ├── commit.py │ │ │ ├── commit_unittest.py │ │ │ ├── commitset.py │ │ │ ├── commitset_unittest.py │ │ │ ├── config_unittest.py │ │ │ ├── critic.py │ │ │ ├── extension.py │ │ │ ├── file.py │ │ │ ├── filechange.py │ │ │ ├── filechange_unittest.py │ │ │ ├── filecontent.py │ │ │ ├── filediff.py │ │ │ ├── filediff_unittest.py │ │ │ ├── filters.py │ │ │ ├── labeledaccesscontrolprofile.py │ │ │ ├── log/ │ │ │ │ ├── __init__.py │ │ │ │ ├── partition.py │ │ │ │ ├── partition_unittest.py │ │ │ │ ├── rebase.py │ │ │ │ └── rebase_unittest.py │ │ │ ├── reply.py │ │ │ ├── reply_unittest.py │ │ │ ├── repository.py │ │ │ ├── repository_unittest.py │ │ │ ├── review.py │ │ │ ├── review_unittest.py │ │ │ ├── reviewablefilechange.py │ │ │ ├── reviewsummary.py │ │ │ ├── user.py │ │ │ └── user_unittest.py │ │ ├── labeledaccesscontrolprofile.py │ │ ├── log/ │ │ │ ├── __init__.py │ │ │ ├── partition.py │ │ │ └── rebase.py │ │ ├── preference.py │ │ ├── reply.py │ │ ├── repository.py │ │ ├── review.py │ │ ├── reviewablefilechange.py │ │ ├── reviewsummary.py │ │ ├── transaction/ │ │ │ ├── __init__.py │ │ │ ├── 
accesscontrolprofile.py │ │ │ ├── accesstoken.py │ │ │ ├── comment.py │ │ │ ├── filters.py │ │ │ ├── labeledaccesscontrolprofile.py │ │ │ ├── reply.py │ │ │ ├── review.py │ │ │ └── user.py │ │ └── user.py │ ├── auth/ │ │ ├── __init__.py │ │ ├── accesscontrol.py │ │ ├── database.py │ │ ├── databases/ │ │ │ ├── __init__.py │ │ │ ├── accesstokensdb.py │ │ │ ├── internaldb.py │ │ │ └── ldapdb.py │ │ ├── oauth.py │ │ ├── provider.py │ │ ├── providers/ │ │ │ ├── __init__.py │ │ │ ├── dummy.py │ │ │ ├── github.py │ │ │ └── google.py │ │ └── session.py │ ├── background/ │ │ ├── __init__.py │ │ ├── branchtracker.py │ │ ├── branchtrackerhook.py │ │ ├── changeset.py │ │ ├── daemon.py │ │ ├── extensionrunner.py │ │ ├── extensiontasks.py │ │ ├── githook.py │ │ ├── highlight.py │ │ ├── maildelivery.py │ │ ├── maintenance.py │ │ ├── servicemanager.py │ │ ├── utils.py │ │ ├── wait-for-pidfiles.py │ │ └── watchdog.py │ ├── base.py │ ├── base_unittest.py │ ├── changeset/ │ │ ├── __init__.py │ │ ├── client.py │ │ ├── create.py │ │ ├── detectmoves.py │ │ ├── html.py │ │ ├── load.py │ │ ├── process.py │ │ ├── text.py │ │ └── utils.py │ ├── cli.py │ ├── communicate.py │ ├── coverage.py │ ├── critic.py │ ├── data/ │ │ └── preferences.json │ ├── dbaccess.py │ ├── dbutils/ │ │ ├── __init__.py │ │ ├── branch.py │ │ ├── database.py │ │ ├── database_unittest.py │ │ ├── paths.py │ │ ├── review.py │ │ ├── session.py │ │ ├── system.py │ │ ├── timezones.py │ │ ├── unittest.py │ │ └── user.py │ ├── diff/ │ │ ├── __init__.py │ │ ├── analyze.py │ │ ├── context.py │ │ ├── html.py │ │ ├── merge.py │ │ └── parse.py │ ├── diffutils.py │ ├── extensions/ │ │ ├── __init__.py │ │ ├── execute.py │ │ ├── extension.py │ │ ├── installation.py │ │ ├── manifest.py │ │ ├── resource.py │ │ ├── role/ │ │ │ ├── __init__.py │ │ │ ├── filterhook.py │ │ │ ├── inject.py │ │ │ ├── page.py │ │ │ └── processcommits.py │ │ ├── unittest.py │ │ └── utils.py │ ├── gitutils.py │ ├── gitutils_unittest.py │ ├── hooks/ │ │ └── 
pre-receive │ ├── htmlutils.py │ ├── htmlutils_unittest.py │ ├── index.py │ ├── inpututils.py │ ├── jsonapi/ │ │ ├── __init__.py │ │ ├── check.py │ │ ├── documentation.py │ │ └── v1/ │ │ ├── README.txt │ │ ├── __init__.py │ │ ├── accesscontrolprofiles.py │ │ ├── accesstokens.py │ │ ├── batches.py │ │ ├── branches.py │ │ ├── changesets.py │ │ ├── comments.py │ │ ├── commits.py │ │ ├── documentation.py │ │ ├── extensions.py │ │ ├── filechanges.py │ │ ├── filecontents.py │ │ ├── filediffs.py │ │ ├── files.py │ │ ├── labeledaccesscontrolprofiles.py │ │ ├── rebases.py │ │ ├── replies.py │ │ ├── repositories.py │ │ ├── reviewablefilechanges.py │ │ ├── reviews.py │ │ ├── reviewsummaries.py │ │ ├── sessions.py │ │ └── users.py │ ├── library/ │ │ └── js/ │ │ └── v8/ │ │ ├── critic-batch.js │ │ ├── critic-branch.js │ │ ├── critic-changeset.js │ │ ├── critic-cli.js │ │ ├── critic-comment.js │ │ ├── critic-commitset.js │ │ ├── critic-dashboard.js │ │ ├── critic-file.js │ │ ├── critic-filters.js │ │ ├── critic-filterstransaction.js │ │ ├── critic-git.js │ │ ├── critic-html.js │ │ ├── critic-launcher-fork.js │ │ ├── critic-launcher.js │ │ ├── critic-log.js │ │ ├── critic-mail.js │ │ ├── critic-review.js │ │ ├── critic-statistics.js │ │ ├── critic-storage.js │ │ ├── critic-text.js │ │ ├── critic-trackedbranch.js │ │ ├── critic-user.js │ │ └── critic.js │ ├── linkify.py │ ├── log/ │ │ ├── __init__.py │ │ ├── commitset.py │ │ └── html.py │ ├── mailutils.py │ ├── maintenance/ │ │ ├── __init__.py │ │ ├── check-branches.py │ │ ├── check-commits.py │ │ ├── configtest.py │ │ ├── criticctl.py │ │ ├── dumppreferences.py │ │ └── progress.py │ ├── operation/ │ │ ├── __init__.py │ │ ├── addrepository.py │ │ ├── applyfilters.py │ │ ├── autocompletedata.py │ │ ├── basictypes.py │ │ ├── basictypes_unittest.py │ │ ├── blame.py │ │ ├── brancharchiving.py │ │ ├── checkrebase.py │ │ ├── createcomment.py │ │ ├── createreview.py │ │ ├── draftchanges.py │ │ ├── editresource.py │ │ ├── 
extensioninstallation.py │ │ ├── fetchlines.py │ │ ├── manipulateassignments.py │ │ ├── manipulatecomment.py │ │ ├── manipulatefilters.py │ │ ├── manipulatereview.py │ │ ├── manipulateuser.py │ │ ├── markfiles.py │ │ ├── miscellaneous.py │ │ ├── news.py │ │ ├── rebasereview.py │ │ ├── recipientfilter.py │ │ ├── registeruser.py │ │ ├── savesettings.py │ │ ├── searchreview.py │ │ ├── servicemanager.py │ │ ├── trackedbranch.py │ │ ├── typechecker.py │ │ ├── typechecker_unittest.py │ │ ├── unittest.py │ │ └── usersession.py │ ├── page/ │ │ ├── __init__.py │ │ ├── addrepository.py │ │ ├── basic.py │ │ ├── branches.py │ │ ├── checkbranch.py │ │ ├── config.py │ │ ├── confirmmerge.py │ │ ├── createreview.py │ │ ├── createuser.py │ │ ├── dashboard.py │ │ ├── editresource.py │ │ ├── filterchanges.py │ │ ├── home.py │ │ ├── loadmanifest.py │ │ ├── login.py │ │ ├── manageextensions.py │ │ ├── managereviewers.py │ │ ├── news.py │ │ ├── parameters.py │ │ ├── processcommits.py │ │ ├── rebasetrackingreview.py │ │ ├── repositories.py │ │ ├── search.py │ │ ├── services.py │ │ ├── showbatch.py │ │ ├── showbranch.py │ │ ├── showcomment.py │ │ ├── showcommit.py │ │ ├── showfile.py │ │ ├── showfilters.py │ │ ├── showreview.py │ │ ├── showreviewlog.py │ │ ├── showtree.py │ │ ├── statistics.py │ │ ├── tutorial.py │ │ ├── utils.py │ │ └── verifyemail.py │ ├── profiling.py │ ├── request.py │ ├── resources/ │ │ ├── .gitattributes │ │ ├── autocomplete.js │ │ ├── basic.css │ │ ├── basic.js │ │ ├── branches.css │ │ ├── branches.js │ │ ├── changeset.css │ │ ├── changeset.js │ │ ├── checkbranch.css │ │ ├── checkbranch.js │ │ ├── comment.css │ │ ├── comment.js │ │ ├── config.css │ │ ├── config.js │ │ ├── confirmmerge.css │ │ ├── confirmmerge.js │ │ ├── createreview.css │ │ ├── createreview.js │ │ ├── createuser.css │ │ ├── createuser.js │ │ ├── dashboard.css │ │ ├── dashboard.js │ │ ├── diff.css │ │ ├── editresource.css │ │ ├── editresource.js │ │ ├── filterchanges.css │ │ ├── filterchanges.js │ │ 
├── home.css │ │ ├── home.js │ │ ├── log.css │ │ ├── log.js │ │ ├── login.css │ │ ├── login.js │ │ ├── manageextensions.css │ │ ├── manageextensions.js │ │ ├── managereviewers.css │ │ ├── managereviewers.js │ │ ├── message.css │ │ ├── newrepository.css │ │ ├── newrepository.js │ │ ├── news.css │ │ ├── news.js │ │ ├── overrides.css │ │ ├── rebasetrackingreview.css │ │ ├── rebasetrackingreview.js │ │ ├── repositories.css │ │ ├── repositories.js │ │ ├── review.css │ │ ├── review.js │ │ ├── reviewfilters.js │ │ ├── ruler.js │ │ ├── search.css │ │ ├── search.js │ │ ├── services.css │ │ ├── services.js │ │ ├── showbatch.css │ │ ├── showbranch.css │ │ ├── showcomment.js │ │ ├── showfile.css │ │ ├── showfile.js │ │ ├── showreview.css │ │ ├── showreview.js │ │ ├── showreviewlog.css │ │ ├── showtree.css │ │ ├── statistics.css │ │ ├── syntax.css │ │ ├── tabify.css │ │ ├── tabify.js │ │ ├── third-party/ │ │ │ ├── chosen.css │ │ │ ├── chosen.js │ │ │ ├── jquery-ui-1.10.2.custom.css │ │ │ └── jquery-ui-autocomplete-html.js │ │ ├── tutorial.css │ │ ├── tutorial.js │ │ └── whitespace.css │ ├── reviewing/ │ │ ├── __init__.py │ │ ├── comment/ │ │ │ ├── __init__.py │ │ │ └── propagate.py │ │ ├── filters.py │ │ ├── html.py │ │ ├── mail.py │ │ ├── rebase.py │ │ └── utils.py │ ├── run_unittest.py │ ├── syntaxhighlight/ │ │ ├── __init__.py │ │ ├── clexer.py │ │ ├── context.py │ │ ├── cpp.py │ │ ├── generate.py │ │ ├── generic.py │ │ └── request.py │ ├── textformatting.py │ ├── textutils.py │ ├── textutils_unittest.py │ ├── tutorials/ │ │ ├── administration.txt │ │ ├── archival.txt │ │ ├── checkbranch.txt │ │ ├── customization.txt │ │ ├── extensions-api.txt │ │ ├── extensions.txt │ │ ├── external-authentication.txt │ │ ├── filters.txt │ │ ├── rebasing.txt │ │ ├── reconfiguring.txt │ │ ├── repository.txt │ │ ├── requesting.txt │ │ ├── reviewing.txt │ │ └── search.txt │ ├── urlutils.py │ ├── wsgi.py │ └── wsgistartup.py ├── testing/ │ ├── USAGE.md │ ├── __init__.py │ ├── __main__.py │ ├── 
expect.py │ ├── findtests.py │ ├── flags/ │ │ ├── addrepository-has-mirror-parameter.flag │ │ ├── fixed-batch-preview.flag │ │ ├── is-testing.flag │ │ ├── pwd-independence.flag │ │ ├── reliable-admin-newswriter.flag │ │ ├── reliable-git-emails.flag │ │ ├── system-recipients.flag │ │ └── web-server-integration.flag │ ├── frontend.py │ ├── input/ │ │ ├── SystemExtension/ │ │ │ ├── MANIFEST │ │ │ ├── check.js │ │ │ └── resources/ │ │ │ └── HelloWorld.txt │ │ ├── TestExtension/ │ │ │ ├── MANIFEST │ │ │ ├── MailTransaction.js │ │ │ ├── Review.list.js │ │ │ ├── echo.js │ │ │ ├── empty.js │ │ │ ├── error.compilation.js │ │ │ ├── error.runtime.js │ │ │ ├── evaluate.js │ │ │ ├── filterhook.js │ │ │ ├── inject.js │ │ │ ├── nothandled.js │ │ │ ├── processcommits.js │ │ │ ├── resources/ │ │ │ │ ├── hello.world.js │ │ │ │ ├── helloworld.css │ │ │ │ └── helloworld.html │ │ │ ├── restrictions.js │ │ │ └── version.js │ │ ├── binary │ │ ├── customization/ │ │ │ ├── githook.py │ │ │ └── linktypes.py │ │ ├── empty.txt │ │ ├── service_log_filter.py │ │ ├── service_synchronization_helper.py │ │ └── syntaxhighlight/ │ │ └── example.cpp │ ├── local.py │ ├── mailbox.py │ ├── main.py │ ├── password-invalid │ ├── password-testing │ ├── quickstart.py │ ├── repository.py │ ├── tests/ │ │ └── 001-main/ │ │ ├── 000-install.py │ │ ├── 001-empty/ │ │ │ ├── 001-anonymous/ │ │ │ │ ├── 001-dashboard.py │ │ │ │ ├── 002-branches.py │ │ │ │ ├── 003-search.py │ │ │ │ ├── 004-config.py │ │ │ │ ├── 005-tutorial.py │ │ │ │ ├── 006-news.py │ │ │ │ ├── 007-home.py │ │ │ │ ├── 008-repositories.py │ │ │ │ ├── 009-services.py │ │ │ │ ├── 010-createreview.py │ │ │ │ ├── 011-manageextensions.py │ │ │ │ ├── 012-statistics.py │ │ │ │ ├── 013-static-resource.py │ │ │ │ └── 100-preferences/ │ │ │ │ ├── 001-commit.diff.rulerColumn.py │ │ │ │ ├── 002-review.defaultOptOut.py │ │ │ │ ├── 003-timezone.py │ │ │ │ └── __init__.py │ │ │ ├── 002-authenticated/ │ │ │ │ ├── 001-dashboard.py │ │ │ │ ├── 002-branches.py │ │ │ │ 
├── 003-search.py │ │ │ │ ├── 004-config.py │ │ │ │ ├── 005-tutorial.py │ │ │ │ ├── 006-news.py │ │ │ │ ├── 007-home.py │ │ │ │ ├── 008-repositories.py │ │ │ │ ├── 009-services.py │ │ │ │ ├── 010-createreview.py │ │ │ │ ├── 011-manageextensions.py │ │ │ │ ├── 012-statistics.py │ │ │ │ └── 100-preferences/ │ │ │ │ ├── 001-commit.diff.rulerColumn.py │ │ │ │ ├── 002-review.defaultOptOut.py │ │ │ │ ├── 003-timezone.py │ │ │ │ └── __init__.py │ │ │ ├── 003-criticctl/ │ │ │ │ ├── 001-basic.py │ │ │ │ ├── 002-adduser-deluser.py │ │ │ │ ├── 003-addrole-delrole.py │ │ │ │ ├── 004-listusers.py │ │ │ │ ├── 005-configtest.py │ │ │ │ └── 006-restart.py │ │ │ └── 004-mixed/ │ │ │ ├── 001-newswriter.py │ │ │ ├── 002-email.py │ │ │ ├── 003-oauth.py │ │ │ ├── 004-password.py │ │ │ ├── 005-accesstoken.py │ │ │ ├── 006-accesscontrol-http.py │ │ │ ├── 007-json-session.py │ │ │ └── __init__.py │ │ ├── 002-createrepository.py │ │ ├── 003-self/ │ │ │ ├── 001-rulerColumn.py │ │ │ ├── 002-emptyfile.py │ │ │ ├── 003-binaryfile.py │ │ │ ├── 004-createreview.py │ │ │ ├── 004-first-review-created/ │ │ │ │ ├── 001-addreviewfilters-bogus.py │ │ │ │ ├── 002-review-archival.py │ │ │ │ └── __init__.py │ │ │ ├── 005-checkbranch.py │ │ │ ├── 006-showreview-reviewfilter.py │ │ │ ├── 007-http-backend.py │ │ │ ├── 008-initial-commit-diff.py │ │ │ ├── 009-fetchremotebranch.py │ │ │ ├── 010-linkification.py │ │ │ ├── 011-linkification-custom.py │ │ │ ├── 012-createreview-recipients.py │ │ │ ├── 012-replayrebase.py │ │ │ ├── 014-non-ascii-filenames.py │ │ │ ├── 015-non-ascii-line-diff.py │ │ │ ├── 016-showcommit-ranges.py │ │ │ ├── 017-showcommit-merge-replay.py │ │ │ ├── 018-detect-moves-no-moved-code.py │ │ │ ├── 019-showtree-showfile-bogus.py │ │ │ ├── 020-fixup-review-via-push.py │ │ │ ├── 020-reviewrebase.py │ │ │ ├── 021-updatereview-bogus.py │ │ │ ├── 022-removereviewfilter-bogus.py │ │ │ ├── 024-customizations.githook.py │ │ │ ├── 025-trackedbranch.py │ │ │ ├── 026-searchreview.py │ │ │ ├── 
027-whitespace-filenames.py │ │ │ ├── 028-gitemails.py │ │ │ ├── 029-log-bogus.py │ │ │ ├── 030-trackingreview.py │ │ │ ├── 031-fetchlines-bom.py │ │ │ ├── 032-download.py │ │ │ ├── 033-propagation-vs-rebase.py │ │ │ ├── 100-reviewing/ │ │ │ │ ├── 001-comments.basic.py │ │ │ │ └── __init__.py │ │ │ ├── 101-keepalives.py │ │ │ ├── 200-json/ │ │ │ │ ├── 001-users.py │ │ │ │ ├── 002-branches.py │ │ │ │ ├── 003-repositories.py │ │ │ │ ├── 004-review.py │ │ │ │ ├── 005-commits.py │ │ │ │ ├── 006-changesets.py │ │ │ │ ├── 006-comments.py │ │ │ │ ├── 007-filechanges.py │ │ │ │ ├── 007-replies.py │ │ │ │ ├── 008-batches.py │ │ │ │ └── __init__.py │ │ │ └── __init__.py │ │ ├── 004-extensions/ │ │ │ ├── 001-enable.py │ │ │ ├── 002-tests/ │ │ │ │ ├── 001-tutorial.py │ │ │ │ ├── 002-manageextensions.py │ │ │ │ ├── 003-install-TestExtension.py │ │ │ │ ├── 004-TestExtension/ │ │ │ │ │ ├── 001-echo.py │ │ │ │ │ ├── 002-nothandled.py │ │ │ │ │ ├── 003-empty.py │ │ │ │ │ ├── 004-Review.list.py │ │ │ │ │ ├── 005-MailTransaction.py │ │ │ │ │ ├── 006-inject.py │ │ │ │ │ ├── 007-version.py │ │ │ │ │ ├── 008-processcommits.py │ │ │ │ │ ├── 009-error-messages.py │ │ │ │ │ ├── 010-restrictions.py │ │ │ │ │ ├── 011-User.py │ │ │ │ │ ├── 012-resources.py │ │ │ │ │ ├── 013-storage.py │ │ │ │ │ ├── 014-Repository.run.py │ │ │ │ │ ├── 015-filterhook.py │ │ │ │ │ ├── 016-accesscontrol.py │ │ │ │ │ └── 999-missing.py │ │ │ │ ├── 005-install-SystemExtension.py │ │ │ │ └── 006-manifest-checks.py │ │ │ └── __init__.py │ │ ├── 005-unittests/ │ │ │ ├── 001-local/ │ │ │ │ ├── 001-independence.py │ │ │ │ ├── 002-operation.py │ │ │ │ ├── 005-dbutils.database.py │ │ │ │ └── __init__.py │ │ │ ├── 002-api/ │ │ │ │ ├── 001-commit.py │ │ │ │ ├── 002-review.py │ │ │ │ ├── 003-user.py │ │ │ │ ├── 004-config.py │ │ │ │ ├── 005-log.partition.py │ │ │ │ ├── 006-log.rebase.py │ │ │ │ ├── 007-repository.py │ │ │ │ ├── 008-branch.py │ │ │ │ ├── 009-commitset.py │ │ │ │ ├── 010-comment.py │ │ │ │ ├── 011-reply.py │ │ 
│ │ ├── 012-changeset.py │ │ │ │ ├── 013-filechange.py │ │ │ │ └── 014-filediff.py │ │ │ └── 003-other/ │ │ │ ├── 001-dbutils.database.py │ │ │ └── __init__.py │ │ └── 900-uninstall-reinstall.py │ ├── tools/ │ │ ├── __init__.py │ │ ├── install.py │ │ └── upgrade.py │ ├── utils.py │ └── virtualbox.py ├── uninstall.py └── upgrade.py ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitignore ================================================ .install.data .installed *.pyc *.pyo *~ testing/cache ================================================ FILE: .gitmodules ================================================ [submodule "installation/externals/chosen"] path = installation/externals/chosen url = ../chosen.git [submodule "installation/externals/v8-jsshell"] path = installation/externals/v8-jsshell url = ../v8-jsshell.git ================================================ FILE: CONTRIBUTORS ================================================ Author: Jens Lindström Contributors: Rafał Chłodnicki Philip Jägenstedt Leif Arne Storset Alexey Feldgendler Michał Gawron James Graham Martin Olsson Daniel Bratell Odin Hørthe Omdal Fredrik Öhrn Peter Krefting Pengfei Xue Johan Herland Ryan Fowler Jacob Rask Felix Ekblom Andreas Tolfsen ================================================ FILE: COPYING ================================================ Copyright 2012-2014 the Critic contributors, Opera Software ASA The Critic code review system is licensed under the Apache License, version 2.0, with exceptions noted below. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 An list of contributors can be found in the CONTRIBUTORS file. Third-party software ==================== The following third-party software is distributed with Critic (checked into Critic's Git repository.) 
jQuery ------ The jQuery library is licensed under the MIT License, available in the file installation/externals/MIT-LICENSE.jQuery.txt. These files distributed with Critic are part of jQuery: installation/externals/MIT-LICENSE.jQuery.txt resources/jquery-2.0.0.min.js resources/jquery-ui-1.10.2.custom.css resources/jquery-ui-1.10.2.custom.min.js resources/images/ui-*.png For more information, see: http://jquery.org/ Chosen ------ The Chosen library is licensed under the MIT License, available in the file installation/externals/MIT-LICENSE.Chosen.md. These files distributed with Critic are part of Chosen: installation/externals/MIT-LICENSE.Chosen.md resources/chosen*.js resources/chosen*.css resources/chosen-*.png For more information, see: http://harvesthq.github.io/chosen/ Critic's version of Chosen is built from slightly modified fork of the main GitHub repository: https://github.com/jensl/chosen ================================================ FILE: INSTALL ================================================ Installation ============ To install Critic, run the script install.py as root. It will ask a number of question and then perform the installation. In short, what it does is: * Check for and/or install required software packages. * Create a system user (typically named "critic") and group (also typically named "critic"). * Generate system configuration into /etc/critic/. * Install the source code into /usr/share/critic/. * Create a PostgreSQL user and database, both named "critic". * Create a System-V style init script, /etc/init.d/critic-main, and create links to it in the /etc/rcN.d/ directories. * Enable the Apache modules mod_expires and mod_wsgi. * Create an Apache site named "critic-main" and enable it. 
Required Software Packages -------------------------- Critic depends on the following software packages: * PostgreSQL (9.1 or later), both client and server parts * Apache (2.2 or later) * Git * Python (2.7 or later; 3.x not supported) * Non-standard Python modules: - passlib (if Critic does user authentication) - psycopg2 - pygments Note that on Debian/Ubuntu systems, the install.py script can install all of these software packages automatically. ================================================ FILE: README.md ================================================ Critic ====== This is the code review system, Critic. Critic has a few [concepts][concepts] that might be useful to know. Installation ------------ To install Critic, run the script `install.py` as root: # python install.py It will ask a number of questions and then perform the installation. You should probably read the [INSTALL file][install] for all the information. [install]: https://github.com/jensl/critic/blob/master/INSTALL [concepts]: https://github.com/jensl/critic/blob/master/documentation/concepts.txt Adding a repository ------------------- After installing you should be able to navigate to the hostname you specified during installtion and see Critic running. When using the administrator account you will also see 'Repositories' and 'Services' as top level menu items, in addition to the usual menu items. To add a new repository, click the 'Repositories' menu item and then the 'Add Repository' button in the top right corner. If Critic only has ssh access to the upstream of your repository, you must set up the 'critic' system user (or whatever user name was chosen during installation) to have ssh access without the need of a password. You can do that by creating an ssh key without a password and using 'ssh-copy-id' to copy the key across to the server. 
If you need to connect to the upstream server using a different user name, you need to create a 'config' file in the 'critic' system user's '.ssh' directory containing: Host User Make sure to verify that you can access the repository from the 'critic' user by running something like this: su -s /bin/bash -c "ssh -v " critic This should also ensure that the upstream server key is stored in the 'critic' user's 'known_hosts' file. This needs only to be done once per upstream server. Adding users ------------ If you are using the 'host' authentication system, users authenticated by the web server will be added automatically to the Critic user database. If you are using the 'critic' authentication system you can use the 'critcctl' tool to add users beyond the administrative user created by the install.py script: sudo criticctl adduser Only the users added with this method will be able to sign in to the system when using the 'critic' authentication system. Adding push rights ------------------ Before a user can push review branches to the newly created Critic repository their system account must be a member of the 'critic' system group (or whatever group name was chosen during installation). In Debian/Ubuntu this can be done using: usermod -a -G critic Setting up reviewers and requesting a review -------------------------------------------- The developers responsible for performing the code review can subscribe to new review requests either for a specific set of subdirectories or for the entire source tree. This configuration is done by each reviewer under the 'Home' top level menu item. For information about how to request a new code review, click the 'Tutorial' top level menu item, and then select 'Requesting a Review'. See also -------- The [Critic user FAQ][faq] answers some common questions and gives some useful tips on how to use Critic efficiently. 
There is a tutorial on basic system administration tasks available in the installed Critic system; click the 'Tutorial' top level menu item and select 'Administration'. [faq]: https://github.com/jensl/critic/blob/master/documentation/user_faq.md ================================================ FILE: documentation/concepts.txt ================================================ Critic(al) Concepts =================== Branches -------- Critic maintains a view of branches that is slightly different from git's. In addition to knowing the head of the branch, that is, the most recent commit, it also keeps track of (after basically inventing the information itself) a base branch and a "tail" commit. A branch is considered to "contain" all commits that are reachable from its head commit, except all commits that are reachable from the base branch. This view of a branch is useful when the purpose is (possibly) to review the work done on the branch. In this case, one needs to limit the scope to where the branch started, since in git's view, the branch contains everything back to the beginning of time, which is not what one means to review. It's important to note, however, that the "base" of a branch is not something that is stored in git, and thus Critic needs to resort to fairly simple heuristics in determining the base of a branch, and can get it wrong. In particular, it will reverse the relationship between related branches in some cases. If a branch A is created from master, and then later a branch B is created from branch A, and branches A and B diverge, the "correct" relationships are that A's base branch is master and B's base branch is A. However, if B is pushed to Critic's repository before A, Critic's will think that B's base branch is master and A's base branch is B. If branches are pushed to Critic's repository in the order which they are created, then Critic will get it right, though. 
Reviews ------- A review in Critic is a branch in Critic's repository, and the changes to be reviewed are the commits (that, according to Critic, are) contained on that branch. There are two basic ways to start a review: push a branch to Critic's repository and create a review of all the changes on the branch, or select one or more commits and have Critic create a branch containing exactly those commits. In practice, the difference between the two alternatives is quite insignificant. The branch is like any other git branch. It can be fetched from Critic's repository into a local work repository, and additional commits can be pushed back to Critic's repository. Commits pushed to Critic's repository are automatically added to the review, as changes that need reviewing. At this time, non-fast forward updates of review branches in Critic's repository are not possible. Reviewers --------- Reviewers are users of Critic that have registered themselves as reviewers of parts of the source code tree. When changes are scheduled for review, reviewers are automatically assigned, based on this configuration. Thus the user requesting the review needn't assign reviewers manually. Chunks ------ Each commit scheduled to be reviewed as part of a review is split into individual change "chunks" (N lines deleted, M lines added,) individually recorded in Critic's database along with a status. Each such chunk of changes needs to be approved by a registered reviewer for the modified file. The low-level details is typically hidden from the reviewers, however. They don't need to explicitly approve each chunk individually, for instance. "Approval" of chunks of changes is not thought to be final, and thus not standing in conflict with not accepting the changes as-is. (And thus the term "approve" is slightly misleading; "read" or "reviewed" would be perhaps more appropriate.) Any chunk of changes not approved yet blocks the review from being "accepted". 
Comments -------- A vital part of reviewing is commenting specific lines of code. In Critic, there are two types of comments; "issues" and "notes". An "issue" is a comment that must be addressed, one way or another, before the review can be "accepted." A "note" is simply a note; it has no formal significance. When a new version of the file that is commented is created (by adding additional commits to the review,) one of two things may happen to existing comments: they can be transferred to the new version, if all commented lines are identical in the new version of the file, or marked as "addressed," if any commented line was modified. When a comment is automatically marked as "addressed," it could be because it was in fact properly addressed, or it could be because some unrelated change was made to the commented lines. In the latter case, the author of the comment (or anyone else, for that matter) may "reopen" the comment by manually transferring the comment to a sequence of lines in the new version of the file. It may seem as if "issue" comments might easily be "lost" by unrelated changes touching the commented lines, but everyone involved in the comment (its author and anyone who replied to it) will be notified that the comment was marked as addressed, and of course, the new changes in the file have to be reviewed as any other changes; the fact that they "addressed" a comment does not automatically mark the changes themselves as approved. "Issue" comments can also be explicitly closed their authors (or reviewers of the file in which the comment was made) after discussion, if the agreement is that no changes to the commented code needs to be made. Review Progress --------------- The ultimate goal of a review is to close it. A review can only be closed when it is in an "accepted" state, which, in turn, it is when each and every chunk of changes have been approved by reviewers, and every "issue" comment has been marked either as addressed or closed. 
Reviews can also be dropped, meaning the changes are not meant to be merged in their current form.
click the first commit, hold down the mouse and release over the last commit in the range). ### When reviewing code, how can I select some snippet or function name etc from the code and copy it to the clipboard? (i.e. it's not possible to select text because clicking to select triggers the 'New Issue' dialog) ### Hold down the CTRL button while selecting. ### Can Critic provide some statistics for how much is submitted/reviewed by whom etc? ### Yes, there is a basic statistics page available at http://your.critic.domain.tld/statistics ### How can I see a list of users that have registered as reviewers/watchers for a particular directory? ### Navigate to: http://your.critic.domain.tld/showfilters?repository=INSERT_REPO_NAME&path=INSERT_PATH See for example: https://critic-review.org/showfilters?repository=critic&path=/ ================================================ FILE: extend.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import os import sys # To avoid accidentally creating files owned by root. sys.dont_write_bytecode = True # Python version check is done before imports below so that python # 2.6/2.5 users can see the error message. 
import pythonversion pythonversion.check() import argparse import subprocess import multiprocessing import tempfile import pwd from distutils.version import LooseVersion import installation parser = argparse.ArgumentParser(description="Critic extension support installation script", epilog="""\ Critic extension support is activated by simply running (as root): # python extend.py For finer control over the script's operation you can invoke it with one or more of the action arguments: --prereqs, --fetch, --build, --install and --enable This can for instance be used to build the v8-jsshell executable on a system where Critic has not been installed.""", formatter_class=argparse.RawDescriptionHelpFormatter) # Uses default values for everything that has a default value (and isn't # overridden by other command-line arguments) and signals an error for anything # that doesn't have a default value and isn't set by a command-line argument. parser.add_argument("--headless", help=argparse.SUPPRESS, action="store_true") class DefaultBinDir: pass v8_url = "git://github.com/v8/v8.git" depot_tools_url = "https://chromium.googlesource.com/chromium/tools/depot_tools.git" basic = parser.add_argument_group("basic options") basic.add_argument("--etc-dir", help="directory where the Critic system configuration is stored [default=/etc/critic]", action="store", default="/etc/critic") basic.add_argument("--identity", help="system identity to upgrade [default=main]", action="store", default="main") basic.add_argument("--bin-dir", help="directory where the extension host executable is installed [default=/usr/lib/critic/$IDENTITY/bin]", action="store", default=DefaultBinDir) basic.add_argument("--no-compiler-check", help="disable compiler version check", action="store_true") basic.add_argument("--dry-run", "-n", help="produce output but don't modify the system at all", action="store_true") basic.add_argument("--libcurl-flavor", help="libcurl flavor (openssl, gnutls or nss) or install", 
choices=["openssl", "gnutls", "nss"]) actions = parser.add_argument_group("actions") actions.add_argument("--prereqs", help="(check for and) install prerequisite software", action="store_true") actions.add_argument("--fetch", help="fetch the extension host source code", action="store_true") actions.add_argument("--build", help="build the extension host executable", action="store_true") actions.add_argument("--install", help="install the extension host executable", action="store_true") actions.add_argument("--enable", help="enable extension support in Critic's configuration", action="store_true") actions.add_argument("--with-v8-jsshell", help="v8-jsshell repository URL [default=../v8-jsshell.git]", metavar="URL") actions.add_argument("--with-v8", help="v8 repository URL [default=%s]" % v8_url, metavar="URL") actions.add_argument("--with-depot_tools", help="depot_tools repository URL [default=%s]" % depot_tools_url, metavar="URL") # Useful to speed up repeated building from clean repositories; used # by the testing framework. actions.add_argument("--export-v8-dependencies", help=argparse.SUPPRESS) actions.add_argument("--import-v8-dependencies", help=argparse.SUPPRESS) arguments = parser.parse_args() if arguments.headless: installation.input.headless = True import installation is_root = os.getuid() == 0 prereqs = arguments.prereqs fetch = arguments.fetch build = arguments.build install = arguments.install enable = arguments.enable if not any([prereqs, fetch, build, install, enable]) \ and arguments.export_v8_dependencies is None \ and arguments.import_v8_dependencies is None: prereqs = fetch = build = install = enable = True libcurl = False if any([prereqs, install, enable]) and not is_root: print """ ERROR: You need to run this script as root. 
""" sys.exit(1) git = os.environ.get("GIT", "git") if install or enable: data = installation.utils.read_install_data(arguments) if data is not None: git = data["installation.prereqs.git"] installed_sha1 = data["sha1"] current_sha1 = installation.utils.run_git([git, "rev-parse", "HEAD"], cwd=installation.root_dir).strip() if installed_sha1 != current_sha1: print """ ERROR: You should to run upgrade.py to upgrade to the current commit before using this script to enable extension support. """ sys.exit(1) if arguments.bin_dir is DefaultBinDir: bin_dir = os.path.join("/usr/lib/critic", arguments.identity, "bin") else: bin_dir = arguments.bin_dir if "CXX" in os.environ: compiler = os.environ["CXX"] try: subprocess.check_output([compiler, "--help"]) except OSError as error: print """ ERROR: %r (from $CXX) does not appear to be a valid compiler. """ % compiler sys.exit(1) else: compiler = "g++" def check_libcurl(): fd, empty_cc = tempfile.mkstemp(".cc") os.close(fd) try: subprocess.check_output([compiler, "-include", "curl/curl.h", "-c", empty_cc, "-o", "/dev/null"], stderr=subprocess.STDOUT) return True except subprocess.CalledProcessError as error: if "curl/curl.h" in error.output: return False raise finally: os.unlink(empty_cc) def missing_packages(): packages = [] if not installation.prereqs.find_executable("svn"): packages.append("subversion") if not installation.prereqs.find_executable("make"): packages.append("make") if "CXX" not in os.environ and not installation.prereqs.find_executable("g++"): packages.append("g++") pg_config = installation.prereqs.find_executable("pg_config") if pg_config: try: subprocess.check_output(["pg_config"], stderr=subprocess.STDOUT) except subprocess.CalledProcessError: # Just installing the PostgreSQL database server might install # a dummy pg_config that just outputs an error message. 
pg_config = None if not pg_config: packages.append("libpq-dev") return packages if prereqs: packages = missing_packages() if packages: installation.prereqs.install_packages(*packages) if not check_libcurl(): if arguments.libcurl_flavor: installation.prereqs.install_packages( "libcurl4-%s-dev" % arguments.libcurl_flavor) else: print """ No version of libcurl-dev appears to be installed. There are usually multiple versions available to install using different libraries (openssl, gnutls or nss) for secure communication. If curl is already installed, you probably need to install a matching version of libcurl-dev. This script can install any one of them, or build the extension host executable without URL loading support ("none"). Available choices are: "openssl", "gnutls", "nss" Also: "none", "abort" """ def check(string): if string not in ("openssl", "gnutls", "nss", "none", "abort"): return 'please answer "openssl", "gnutls", "nss", "none" or "abort"' choice = installation.input.string("Install libcurl-dev version?", "none") if choice in ("openssl", "gnutls", "nss"): installation.prereqs.install_packages("libcurl4-%s-dev" % choice) elif choice == "abort": print """ ERROR: Installation aborted. """ sys.exit(1) env = os.environ.copy() if build and not arguments.no_compiler_check: version = subprocess.check_output([compiler, "--version"]) if version.startswith("g++"): version = subprocess.check_output([compiler, "-dumpversion"]).strip() if LooseVersion(version) < LooseVersion("4.7"): print """ ERROR: GCC version 4.7 or later required to build v8-jsshell. HINT: Set $CXX to use a different compiler than '%s', or use --no-compiler-check to try to build anyway. """ % compiler sys.exit(1) else: if "clang" in version: note_clang = "NOTE: CLang (version 3.2 and earlier) is known not to work.\n" else: note_clang = "" print """ ERROR: GCC (version 4.7 or later) required to build v8-jsshell. 
%sHINT: Set $CXX to use a different compiler than '%s', or use --no-compiler-check to try to build anyway. """ % (note_clang, compiler) sys.exit(1) env["compiler"] = compiler env["v8static"] = "yes" env["postgresql"] = "yes" if check_libcurl(): env["libcurl"] = "yes" env["PATH"] = (os.path.join(os.getcwd(), "installation/externals/depot_tools") + ":" + os.environ["PATH"]) root = os.path.dirname(os.path.abspath(sys.argv[0])) v8_jsshell = os.path.join(root, "installation/externals/v8-jsshell") def do_unprivileged_work(): global depot_tools_url if is_root: stat = os.stat(sys.argv[0]) os.environ["USER"] = pwd.getpwuid(stat.st_uid).pw_name os.environ["HOME"] = pwd.getpwuid(stat.st_uid).pw_dir os.setgid(stat.st_gid) os.setuid(stat.st_uid) if fetch: if arguments.with_depot_tools: depot_tools_url = arguments.with_depot_tools if os.path.isdir('installation/externals/depot_tools'): subprocess.check_call( [git, "pull"], cwd="installation/externals/depot_tools") else: subprocess.check_call( [git, "clone", depot_tools_url], cwd="installation/externals") def fetch_submodule(cwd, submodule, url=None): subprocess.check_call( [git, "submodule", "init", submodule], cwd=cwd) if url: subprocess.check_call( [git, "config", "submodule.%s.url" % submodule, url], cwd=cwd) subprocess.check_call( [git, "submodule", "update", submodule], cwd=cwd) fetch_submodule(root, "installation/externals/v8-jsshell", arguments.with_v8_jsshell) fetch_submodule(v8_jsshell, "v8", arguments.with_v8) if arguments.import_v8_dependencies or arguments.export_v8_dependencies: argv = ["make", "v8dependencies"] if arguments.import_v8_dependencies: argv.append("v8importdepsfrom=" + arguments.import_v8_dependencies) if arguments.export_v8_dependencies: argv.append("v8exportdepsto=" + arguments.export_v8_dependencies) subprocess.check_call(argv, cwd=v8_jsshell) if build: subprocess.check_call( ["make", "-j%d" % multiprocessing.cpu_count()], cwd=v8_jsshell, env=env) def checked_unprivileged_work(result): try: 
do_unprivileged_work() except: result.put(False) raise else: result.put(True) if fetch or build \ or arguments.import_v8_dependencies \ or arguments.export_v8_dependencies: if is_root: unprivileged_result = multiprocessing.Queue() unprivileged = multiprocessing.Process(target=checked_unprivileged_work, args=(unprivileged_result,)) unprivileged.start() unprivileged.join() if not unprivileged_result.get(): sys.exit(1) else: do_unprivileged_work() if install or enable: etc_path = os.path.join(arguments.etc_dir, arguments.identity) sys.path.insert(0, etc_path) import configuration executable = configuration.extensions.FLAVORS.get("js/v8", {}).get("executable") if not executable or not os.access(executable, os.X_OK): executable = os.path.join(bin_dir, "v8-jsshell") if install: if not os.path.isdir(os.path.dirname(executable)): os.makedirs(os.path.dirname(executable)) subprocess.check_call( ["install", os.path.join(v8_jsshell, "out", "jsshell"), executable]) if enable and not configuration.extensions.ENABLED: try: subprocess.check_output( ["su", "-s", "/bin/bash", "-c", "psql -q -c 'SELECT 1 FROM extensions LIMIT 1'", configuration.base.SYSTEM_USER_NAME], stderr=subprocess.STDOUT) except subprocess.CalledProcessError: installation.database.psql_import( "installation/data/dbschema.extensions.sql", configuration.base.SYSTEM_USER_NAME) data = { "installation.system.username": configuration.base.SYSTEM_USER_NAME, "installation.system.groupname": configuration.base.SYSTEM_GROUP_NAME, "installation.extensions.enabled": True, "installation.extensions.critic_v8_jsshell": executable, "installation.extensions.default_flavor": "js/v8" } installation.system.fetch_uid_gid() installation.paths.mkdir(configuration.extensions.INSTALL_DIR) installation.paths.mkdir(configuration.extensions.WORKCOPY_DIR) compilation_failed = [] if installation.config.update_file(os.path.join(etc_path, "configuration"), "extensions.py", data, arguments, compilation_failed): if compilation_failed: print 
print "ERROR: Update aborted." print installation.config.undo() sys.exit(1) subprocess.check_call(["criticctl", "restart"]) ================================================ FILE: install.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import os import sys import stat # To avoid accidentally creating files owned by root. sys.dont_write_bytecode = True # Python version check is done before imports below so # that python 2.6/2.5 users can see the error message. import pythonversion pythonversion.check("""\ NOTE: This script must be run in the Python interpreter that will be used to run Critic. """) if sys.flags.optimize > 0: print """ ERROR: Please run this script without -O or -OO options. """ sys.exit(1) import argparse import subprocess import traceback import installation parser = argparse.ArgumentParser(description="Critic installation script") # Uses default values for everything that has a default value (and isn't # overridden by other command-line arguments) and signals an error for anything # that doesn't have a default value and isn't set by a command-line argument. 
parser.add_argument("--headless", help=argparse.SUPPRESS, action="store_true") parser.add_argument("--etc-dir", help="directory where the Critic system configuration is stored", action="store") parser.add_argument("--install-dir", help="directory where the Critic source code is installed", action="store") parser.add_argument("--data-dir", help="directory where Critic's persistent data files are stored", action="store") parser.add_argument("--cache-dir", help="directory where Critic's temporary data files are stored", action="store") parser.add_argument("--git-dir", help="directory where the main Git repositories are stored", action="store") parser.add_argument("--log-dir", help="directory where Critic's log files are stored", action="store") parser.add_argument("--run-dir", help="directory where Critic's runtime files are stored", action="store") for module in installation.modules: if hasattr(module, "add_arguments"): module.add_arguments("install", parser) arguments = parser.parse_args() if os.getuid() != 0: print """ ERROR: This script must be run as root. """ sys.exit(1) if os.path.exists(os.path.join(installation.root_dir, ".installed")): print """ ERROR: Found an .installed file in the directory you're installing from. This typically means that Critic is already installed on this system, and if so then the upgrade.py script should be used to upgrade the installation rather than re-running install.py. """ sys.exit(1) if arguments.headless: installation.input.headless = True def abort(): print print "ERROR: Installation aborted." print for module in reversed(installation.modules): try: if hasattr(module, "undo"): module.undo() except: print >>sys.stderr, "FAILED: %s.undo()" % module.__name__ traceback.print_exc() sys.exit(1) try: lifecycle = installation.utils.read_lifecycle() if not lifecycle["stable"]: print """ WARNING: You're about to install an unstable development version of Critic. 
If you're setting up a production server, you're most likely better off installing from the latest stable branch. The latest stable branch is the default branch (i.e. HEAD) in Critic's GitHub repository at https://github.com/jensl/critic.git To interrogate it from the command-line, run $ git ls-remote --symref https://github.com/jensl/critic.git HEAD """ if not installation.input.yes_or_no( "Do you want to continue installing the unstable version?", default=True): print print "Installation aborted." print sys.exit(1) sha1 = "0" * 40 # If Git is already installed, check for local modifications. If Git isn't # installed (no 'git' executable in $PATH) then presumably we're not # installing from a repository clone, but from an exported tree, and in that # case we can't check for local modifications anyway. if installation.prereqs.git.check(): git = installation.prereqs.git.path try: if installation.utils.run_git([git, "status", "--porcelain"], cwd=installation.root_dir).strip(): print """ ERROR: This Git repository has local modifications. Installing from a Git repository with local changes is not supported. Please commit or stash the changes and then try again. """ sys.exit(1) sha1 = installation.utils.run_git([git, "rev-parse", "HEAD"], cwd=installation.root_dir).strip() except subprocess.CalledProcessError: # Probably not a Git repository at all. 
pass data = { "sha1": sha1 } for module in installation.modules: try: if hasattr(module, "prepare") and not module.prepare("install", arguments, data): abort() except KeyboardInterrupt: abort() except SystemExit: raise except: print >>sys.stderr, "FAILED: %s.prepare()" % module.__name__ traceback.print_exc() abort() print installed_file = os.path.join(installation.root_dir, ".installed") with open(installed_file, "w"): pass install_py_stat = os.stat(os.path.join(installation.root_dir, "install.py")) os.chown(installed_file, install_py_stat.st_uid, install_py_stat.st_gid) for module in installation.modules: try: if hasattr(module, "install") and not module.install(data): abort() except KeyboardInterrupt: abort() except SystemExit: raise except: print >>sys.stderr, "FAILED: %s.install()" % module.__name__ traceback.print_exc() abort() for module in installation.modules: try: if hasattr(module, "finish"): module.finish("install", arguments, data) except: print >>sys.stderr, "WARNING: %s.finish() failed" % module.__name__ traceback.print_exc() installation.utils.write_install_data(arguments, data) installation.utils.clean_root_pyc_files() print print "SUCCESS: Installation complete!" print except SystemExit: raise except: traceback.print_exc() abort() ================================================ FILE: installation/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations under # the License. __doc__ = "Installation utilities." import os import sys quiet = False is_quick_start = False root_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")) sys.path.insert(0, os.path.join(root_dir, "src")) # Helpers. import input import process import utils # Modules. import prereqs import system import paths import files import database import smtp import config import httpd import criticctl import admin import initd import prefs import git import migrate import extensions modules = [prereqs, system, paths, files, database, extensions, config, httpd, criticctl, admin, smtp, initd, git, migrate, prefs] ================================================ FILE: installation/admin.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import subprocess import installation username = None email = None fullname = None password = None system_recipients = None def add_arguments(mode, parser): if mode != "install": return parser.add_argument("--admin-username", action="store", help="name of Critic administrator user") parser.add_argument("--admin-email", action="store", help="email address to Critic administrator user") parser.add_argument("--admin-fullname", action="store", help="Critic administrator user's full name") parser.add_argument("--admin-password", action="store", help="Critic administrator user's password") def prepare(mode, arguments, data): global username, email, fullname, password if mode == "install": print """ Critic Installation: Administrator ================================== An administrator user is a Critic user with some special privileges; they can do various things using the Web interface that other users are not allowed to do. Additional administrator users can be added post-installation using the 'criticctl' utility. This user does not need to match a system user on this machine. """ if arguments.admin_username: username = arguments.admin_username else: username = installation.input.string(prompt="Administrator user name:") if arguments.admin_email: email = arguments.admin_email else: email = installation.input.string(prompt="Administrator email address:") if arguments.admin_fullname: fullname = arguments.admin_fullname else: fullname = installation.input.string(prompt="Administrator full name:") if installation.config.auth_mode == "critic": if arguments.admin_password: password = arguments.admin_password else: password = installation.input.password("Password for '%s':" % username) print """ Critic Installation: System Messages ==================================== Critic sends out email notifications when unexpected errors (crashes) occur, and in various other cases when things happen that the system administrators might need to know about right away. 
""" if arguments.system_recipients: system_recipients = arguments.system_recipients else: system_recipient = installation.input.string( prompt="Where should system messages be sent?", default="%s <%s>" % (fullname, email)) system_recipients = [system_recipient] data["installation.admin.email"] = email else: import configuration try: system_recipients = configuration.base.SYSTEM_RECIPIENTS except AttributeError: system_recipients = ["%(fullname)s <%(email)s>" % admin for admin in configuration.base.ADMINISTRATORS] # The --system-recipients argument, on upgrade, is mostly intended to be # used by the testing framework. It is checked after the code above has # run for testing purpose; making sure the code above ever runs while # testing is meaningful. if arguments.system_recipients: system_recipients = arguments.system_recipients data["installation.system.recipients"] = system_recipients return True def install(data): global password try: criticctl_argv = [installation.criticctl.criticctl_path, "adduser", "--name", username, "--email", email, "--fullname", fullname] if not password: criticctl_argv.extend(["--no-password"]) else: criticctl_argv.extend(["--password", password]) subprocess.check_output(criticctl_argv) for role in ["administrator", "repositories", "newswriter"]: subprocess.check_output( [installation.criticctl.criticctl_path, "addrole", "--name", username, "--role", role]) except subprocess.CalledProcessError: return False return True ================================================ FILE: installation/config.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import os import os.path import py_compile import argparse import multiprocessing import installation auth_mode = "host" session_type = None allow_anonymous_user = None web_server_integration = None access_scheme = None repository_url_types = ["http"] allow_user_registration = None verify_email_addresses = True archive_review_branches = True password_hash_schemes = ["pbkdf2_sha256", "bcrypt"] default_password_hash_scheme = "pbkdf2_sha256" minimum_password_hash_time = 0.25 minimum_rounds = {} auth_database = "internal" enable_access_tokens = True ldap_url = "ldap://ldap.example.com:389" ldap_search_base = "dc=example,dc=com" ldap_create_user = True ldap_username_attribute = "uid" ldap_fullname_attribute = "cn" ldap_email_attribute = "mail" ldap_cache_max_age = 600 is_development = False is_testing = False coverage_dir = None class Provider(object): def __init__(self, name): self.name = name self.enabled = False self.allow_user_registration = False self.verify_email_addresses = False self.client_id = None self.client_secret = None self.redirect_uri = None self.bypass_createuser = False def load(self, settings): if self.name not in settings: return settings = settings[self.name] self.enabled = settings.get("enabled", self.enabled) self.allow_user_registration = settings.get("allow_user_registration", self.allow_user_registration) self.verify_email_addresses = settings.get("verify_email_addresses", self.verify_email_addresses) self.client_id = settings.get("client_id", self.client_id) self.client_secret = settings.get("client_secret", self.client_secret) 
self.redirect_uri = settings.get("redirect_uri", self.redirect_uri) self.bypass_createuser = settings.get("bypass_createuser", self.bypass_createuser) def readargs(self, arguments): def getarg(name, default): value = getattr(arguments, name, None) if value is None: return default return value self.enabled = getarg( "provider_%s_enabled" % self.name, self.enabled) self.allow_user_registration = getarg( "provider_%s_user_registration" % self.name, self.allow_user_registration) self.verify_email_addresses = getarg( "provider_%s_verify_email_addresses" % self.name, self.verify_email_addresses) self.client_id = getarg( "provider_%s_client_id" % self.name, self.client_id) self.client_secret = getarg( "provider_%s_client_secret" % self.name, self.client_secret) self.redirect_uri = getarg( "provider_%s_redirect_uri" % self.name, self.redirect_uri) def store(self, data): base = "installation.config.provider_%s." % self.name data[base + "enabled"] = self.enabled data[base + "allow_user_registration"] = self.allow_user_registration data[base + "verify_email_addresses"] = self.verify_email_addresses data[base + "client_id"] = self.client_id data[base + "client_secret"] = self.client_secret data[base + "redirect_uri"] = self.redirect_uri data[base + "bypass_createuser"] = self.bypass_createuser def scrub(self, data): base = "installation.config.provider_%s." 
% self.name del data[base + "client_id"] del data[base + "client_secret"] providers = [] default_provider_names = ["github", "google"] def calibrate_minimum_rounds(): import time import passlib.context min_rounds_name = "%s__min_rounds" % default_password_hash_scheme min_rounds_value = 100 while True: calibration_context = passlib.context.CryptContext( schemes=[default_password_hash_scheme], default=default_password_hash_scheme, **{ min_rounds_name: min_rounds_value }) before = time.time() calibration_context.encrypt("password") # It's possible encryption was fast enough to measure as zero, or some # other ridiculously small number. "Round" it up to at least one # millisecond for sanity. hash_time = max(0.001, time.time() - before) if hash_time >= minimum_password_hash_time: break # Multiplication factor. Make it at least 1.2, to ensure we actually # ever finish this loop, and at most 10, to ensure we don't over-shoot # by too much. factor = max(1.2, min(10.0, minimum_password_hash_time / hash_time)) min_rounds_value = int(factor * min_rounds_value) # If we're upgrading and have a current calibrated value, only change it if # the new value is significantly higher, indicating that the system's # performance has increased, or the hash implementation has gotten faster. if default_password_hash_scheme in minimum_rounds: current_value = minimum_rounds[default_password_hash_scheme] if current_value * 1.5 > min_rounds_value: return minimum_rounds[default_password_hash_scheme] = min_rounds_value def add_arguments(mode, parser): def H(help_string): # Wrapper to hide arguments when upgrading, but still supporting them. # Primarily we need to support arguments on upgrade for testing, which # might upgrade from a commit that doesn't support an argument, and thus # needs to provide the argument when upgrading to the tested commit. 
if mode == "install": return help_string else: return argparse.SUPPRESS parser.add_argument( "--auth-mode", choices=["host", "critic"], help=H("user authentication mode")) parser.add_argument( "--session-type", choices=["httpauth", "cookie"], help=H("session type")) parser.add_argument( "--allow-anonymous-user", dest="anonymous", action="store_const", const=True, help=H("allow limited unauthenticated access")) parser.add_argument( "--no-allow-anonymous-user", dest="anonymous", action="store_const", const=False, help=H("do not allow unauthenticated access")) parser.add_argument( "--allow-user-registration", dest="user_registration", action="store_const", const=True, help=H("allow unattended user registration")) parser.add_argument( "--no-allow-user-registration", dest="user_registration", action="store_const", const=False, help=H("do not allow unattended user registration")) parser.add_argument( "--web-server-integration", choices=["apache", "nginx+uwsgi", "uwsgi", "none"], help=H("web server to set up and integrate with")) parser.add_argument( "--access-scheme", choices=["http", "https", "both"], help=H("scheme used to access Critic")) parser.add_argument( "--repository-url-types", default="http", help=H("comma-separated list of supported repository URL types " "(valid types: git, http, ssh and host)")) for provider_name in default_provider_names: if mode == "install": group = parser.add_argument_group( "'%s' authentication provider" % provider_name) else: group = parser group.add_argument( "--provider-%s-enabled" % provider_name, action="store_const", const=True, help=H("enable authentication provider")) group.add_argument( "--provider-%s-disabled" % provider_name, action="store_const", const=False, dest="provider_%s_enabled" % provider_name, help=H("disable authentication provider")) group.add_argument( "--provider-%s-user-registration" % provider_name, action="store_const", const=True, help=H("enable new user registration")) group.add_argument( 
"--provider-%s-no-user-registration" % provider_name, action="store_const", const=False, dest="provider_%s_user_registration" % provider_name, help=H("disable new user registration")) group.add_argument( "--provider-%s-client-id" % provider_name, action="store", help=H("OAuth2 client id")) group.add_argument( "--provider-%s-client-secret" % provider_name, action="store", help=H("OAuth2 client secret")) group.add_argument( "--provider-%s-redirect-uri" % provider_name, action="store", help=H("OAuth2 authentication callback URI")) parser.add_argument( "--minimum-password-hash-time", help=H("approximate minimum time to spend hashing a single password")) # Using argparse.SUPPRESS to not include these in --help output; they are # not something a typical installer ought to want to use. parser.add_argument( "--is-development", action="store_true", help=argparse.SUPPRESS) parser.add_argument( "--is-testing", action="store_true", help=argparse.SUPPRESS) parser.add_argument( "--coverage-dir", help=argparse.SUPPRESS) default_encodings = ["utf-8", "latin-1"] def prepare(mode, arguments, data): global auth_mode, session_type, allow_anonymous_user global web_server_integration, access_scheme global repository_url_types, default_encodings, allow_user_registration global verify_email_addresses, archive_review_branches global password_hash_schemes, default_password_hash_scheme global minimum_password_hash_time, minimum_rounds, auth_database global enable_access_tokens global is_development, is_testing, coverage_dir global ldap_url, ldap_search_base, ldap_create_user, ldap_username_attribute global ldap_fullname_attribute, ldap_email_attribute, ldap_cache_max_age header_printed = False if mode == "install": if arguments.minimum_password_hash_time is not None: try: minimum_password_hash_time = float(arguments.minimum_password_hash_time) except ValueError: print ("Invalid --minimum-password-hash-time argument: %s (must be a number)." 
% arguments.minimum_password_hash_time) return False if arguments.repository_url_types: repository_url_types = filter( None, arguments.repository_url_types.split(",")) invalid_url_types = [] for url_type in repository_url_types: if url_type not in ["git", "http", "ssh", "host"]: invalid_url_types.append(url_type) if invalid_url_types or not repository_url_types: print ("Invalid --repository-url-types argument: %s" % arguments.repository_url_types) if invalid_url_types: print ("These types are invalid: %s" % ",".join(invalid_url_types)) if not repository_url_types: print "No URL types specified!" return False def check_auth_mode(value): if value.strip() not in ("host", "critic"): return "must be one of 'host' and 'critic'" if arguments.auth_mode: error = check_auth_mode(arguments.auth_mode) if error: print "Invalid --auth-mode argument: %s." % arguments.auth_mode return False auth_mode = arguments.auth_mode else: header_printed = True print """ Critic Installation: Authentication =================================== Critic needs to identify (via HTTP authentication) users who access the Web front-end. This can be handled in two different ways: host The Web server (Apache) handles authentication and Critic only makes use of the user name that it reports via the WSGI API. critic Critic implements HTTP authentication itself using passwords stored (encrypted) in its database. 
""" auth_mode = installation.input.string( "Which authentication mode should be used?", default="critic", check=check_auth_mode) is_development = arguments.is_development is_testing = arguments.is_testing coverage_dir = arguments.coverage_dir else: import configuration auth_mode = configuration.base.AUTHENTICATION_MODE try: session_type = configuration.base.SESSION_TYPE except AttributeError: pass try: allow_anonymous_user = configuration.base.ALLOW_ANONYMOUS_USER except AttributeError: pass try: web_server_integration = configuration.base.WEB_SERVER_INTEGRATION except AttributeError: web_server_integration = "apache" try: access_scheme = configuration.base.ACCESS_SCHEME except AttributeError: pass try: repository_url_types = configuration.base.REPOSITORY_URL_TYPES except AttributeError: pass try: default_encodings = configuration.base.DEFAULT_ENCODINGS except AttributeError: pass try: password_hash_schemes = configuration.auth.PASSWORD_HASH_SCHEMES default_password_hash_scheme = configuration.auth.DEFAULT_PASSWORD_HASH_SCHEME minimum_password_hash_time = configuration.auth.MINIMUM_PASSWORD_HASH_TIME minimum_rounds = configuration.auth.MINIMUM_ROUNDS except AttributeError: pass try: auth_database = configuration.auth.DATABASE except AttributeError: pass try: enable_access_tokens = configuration.auth.ENABLE_ACCESS_TOKENS except AttributeError: pass try: is_development = configuration.debug.IS_DEVELOPMENT except AttributeError: # Was moved from configuration.base to configuration.debug. 
try: is_development = configuration.base.IS_DEVELOPMENT except AttributeError: pass try: is_testing = configuration.debug.IS_TESTING except AttributeError: is_testing = arguments.is_testing try: coverage_dir = configuration.debug.COVERAGE_DIR except AttributeError: pass try: allow_user_registration = configuration.base.ALLOW_USER_REGISTRATION except AttributeError: pass try: verify_email_addresses = configuration.base.VERIFY_EMAIL_ADDRESSES except AttributeError: pass try: archive_review_branches = configuration.base.ARCHIVE_REVIEW_BRANCHES except AttributeError: pass try: ldap = configuration.auth.DATABASES["ldap"] except (AttributeError, KeyError): pass else: ldap_url = ldap.get("ldap_url", ldap_url) ldap_search_base = ldap.get("ldap_search_base", ldap_search_base) ldap_create_user = ldap.get("ldap_create_user", ldap_create_user) ldap_username_attribute = ldap.get("ldap_username_attribute", ldap_username_attribute) ldap_fullname_attribute = ldap.get("ldap_fullname_attribute", ldap_fullname_attribute) ldap_email_attribute = ldap.get("ldap_email_attribute", ldap_email_attribute) ldap_cache_max_age = ldap.get("ldap_cache_max_age", ldap_cache_max_age) if auth_mode == "critic": if session_type is None: def check_session_type(value): if value.strip() not in ("httpauth", "cookie"): return "must be one of 'http' and 'cookie'" if arguments.session_type: error = check_session_type(arguments.session_type) if error: print "Invalid --session_type argument: %s." % arguments.session_type return False session_type = arguments.session_type else: if not header_printed: header_printed = True print """ Critic Installation: Authentication ===================================""" print """ Critic can authenticate users either via HTTP authentication or via a "Sign in" form and session cookies. The major difference is that HTTP authentication requires a valid login to access any page whereas the other type of authentication supports limited anonymous access. 
httpauth Use HTTP authentication. cookie Use session cookie based authentication. """ session_type = installation.input.string( "Which session type should be used?", default="cookie", check=check_session_type) if allow_anonymous_user is None: if session_type == "httpauth": allow_anonymous_user = False elif arguments.anonymous is not None: allow_anonymous_user = arguments.anonymous else: if not header_printed: header_printed = True print """ Critic Installation: Authentication ===================================""" print """ With cookie based authentication, Critic can support anonymous access. Users still have to sign in in order to make any changes (such as write comments in reviews) but will be able to view most information in the system without signin in. """ allow_anonymous_user = installation.input.yes_or_no( "Do you want to allow anonymous access?", default=True) if allow_user_registration is None: if session_type == "httpauth": allow_user_registration = False elif arguments.user_registration is not None: allow_user_registration = arguments.user_registration else: if not header_printed: header_printed = True print """ Critic Installation: Authentication ===================================""" print """ With cookie based authentication, Critic can support unattended user registration. With this enabled, the "Sign in" page has a link to a page where a new user can register a Critic user without needing to contact the system administrator(s). """ allow_user_registration = installation.input.yes_or_no( "Do you want to allow user registration?", default=False) else: session_type = "cookie" if web_server_integration is None: if arguments.web_server_integration: web_server_integration = arguments.web_server_integration else: print """ Critic Installation: Web Server Integration =========================================== This installation script can install and do basic configuration of a few different host web servers. 
Supported web servers are: 1) nginx + uWSGI Use the nginx web server together with uWSGI as the WSGI application server to actually run Critic. This is the recommended option for new installs. 2) uWSGI Use uWSGI as both HTTP(S) front-end and as the WSGI application server to actually run Critic. 3) Apache + mod_wsgi Use the Apache web server and its third-party WSGI module (mod_wsgi) to actually run Critic. This is the traditional configuration used to run Critic, but mod_wsgi is not actively maintained, and has some known issues. 4) no integration Don't configure any web server. The installation performed by this script will be incomplete and the system administrator will need to set the integration up themselves. """ def check_web_server_integration(value): if value not in ("1", "nginx+uwsgi", "2", "uwsgi", "3", "apache", "4", "none"): return ("must be one of '1'/'nginx+uwsgi', '2'/'uwsgi', " "'3'/'apache' and '4'/'none'") web_server_integration = installation.input.string( "What web server should be set up?", default="nginx+uwsgi", check=check_web_server_integration) aliases = { "1": "nginx+uwsgi", "2": "uwsgi", "3": "apache", "4": "none" } if web_server_integration in aliases: web_server_integration = aliases[web_server_integration] if access_scheme is None: if arguments.access_scheme: access_scheme = arguments.access_scheme else: print """ Critic Installation: Scheme =========================== Critic can be set up to be accessed over HTTP, HTTPS, or both. This installation script will not do the actual configuration of the host web server (Apache) necessary for it to support the desired schemes (in particular HTTPS, which is non-trivial,) but can at least set up Critic's Apache site declaration appropriately. You have three choices: http Critic will be accessible only over HTTP. https Critic will be accessible only over HTTPS. both Critic will be accessible over both HTTP and HTTPS. 
If you choose "both", Critic will redirect all authenticated accesses to HTTPS, to avoid sending credentials over plain text connections.""" if allow_anonymous_user: print """\ Anonymous users will be allowed to access the site over HTTP, though. If this is not desirable, you should select "https" and configure the web server to redirect all HTTP accesses to HTTPS. """ else: print def check_access_scheme(value): if value not in ("http", "https", "both"): return "must be one of 'http', 'https' and 'both'" access_scheme = installation.input.string( "How will Critic be accessed?", default="http", check=check_access_scheme) if mode == "upgrade" \ and hasattr(configuration, "auth") \ and hasattr(configuration.auth, "PROVIDERS"): for provider_name in configuration.auth.PROVIDERS: provider = Provider(provider_name) provider.load(configuration.auth.PROVIDERS) providers.append(provider) else: providers.extend(Provider(provider_name) for provider_name in default_provider_names) if access_scheme == "http": base_url = "http" else: base_url = "https" base_url += "://%s/oauth/" % installation.system.hostname for provider in providers: provider.readargs(arguments) if provider.redirect_uri is None: provider.redirect_uri = base_url + provider.name data["installation.config.auth_mode"] = auth_mode data["installation.config.session_type"] = session_type data["installation.config.allow_anonymous_user"] = allow_anonymous_user data["installation.config.web_server_integration"] = web_server_integration data["installation.config.access_scheme"] = access_scheme data["installation.config.repository_url_types"] = repository_url_types data["installation.config.default_encodings"] = default_encodings data["installation.config.allow_user_registration"] = allow_user_registration data["installation.config.verify_email_addresses"] = verify_email_addresses data["installation.config.archive_review_branches"] = archive_review_branches data["installation.config.password_hash_schemes"] = 
password_hash_schemes data["installation.config.default_password_hash_scheme"] = default_password_hash_scheme data["installation.config.minimum_password_hash_time"] = minimum_password_hash_time data["installation.config.auth_database"] = auth_database data["installation.config.enable_access_tokens"] = enable_access_tokens data["installation.config.is_quickstart"] = False data["installation.config.is_development"] = is_development data["installation.config.is_testing"] = is_testing data["installation.config.coverage_dir"] = coverage_dir if mode == "upgrade": data["installation.config.highlight.max_workers"] = \ configuration.services.HIGHLIGHT["max_workers"] data["installation.config.changeset.max_workers"] = \ configuration.services.CHANGESET["max_workers"] else: cpu_count = multiprocessing.cpu_count() data["installation.config.highlight.max_workers"] = cpu_count data["installation.config.changeset.max_workers"] = max(1, cpu_count / 2) for provider in providers: provider.store(data) data["installation.config.ldap_url"] = ldap_url data["installation.config.ldap_search_base"] = ldap_search_base data["installation.config.ldap_create_user"] = ldap_create_user data["installation.config.ldap_username_attribute"] = ldap_username_attribute data["installation.config.ldap_fullname_attribute"] = ldap_fullname_attribute data["installation.config.ldap_email_attribute"] = ldap_email_attribute data["installation.config.ldap_cache_max_age"] = ldap_cache_max_age return True created_file = [] created_dir = [] renamed = [] modified_files = 0 def compile_file(filename): global created_file try: path = os.path.join(installation.paths.etc_dir, "main", filename) with installation.utils.as_critic_system_user(): py_compile.compile(path, doraise=True) except py_compile.PyCompileError as error: print """ ERROR: Failed to compile %s:\n%s """ % (filename, error) return False else: created_file.append(path + "c") return True def set_file_mode_and_owner(path): uid = installation.system.uid gid = 
installation.system.gid filename = os.path.basename(path) if filename in ("database.py", "auth.py", "smtp-credentials.json"): # May contain sensitive information. mode = 0600 if filename == "smtp-credentials.json": uid = gid = 0 else: mode = 0640 os.chmod(path, mode) if not installation.is_quick_start: os.chown(path, uid, gid) def copy_file_mode_and_owner(src_path, dst_path): status = os.stat(src_path) os.chmod(dst_path, status.st_mode) os.chown(dst_path, status.st_uid, status.st_gid) def install(data): if auth_mode == "critic": calibrate_minimum_rounds() data["installation.config.minimum_rounds"] = minimum_rounds source_dir = os.path.join(installation.root_dir, "installation", "templates", "configuration") target_dir = os.path.join(installation.paths.etc_dir, "main", "configuration") compilation_failed = False os.mkdir(target_dir, 0750) created_dir.append(target_dir) os.chown(target_dir, installation.system.uid, installation.system.gid) for entry in os.listdir(source_dir): source_path = os.path.join(source_dir, entry) target_path = os.path.join(target_dir, entry) with open(target_path, "w") as target: created_file.append(target_path) with open(source_path, "r") as source: target.write((source.read().decode("utf-8") % data).encode("utf-8")) set_file_mode_and_owner(target_path) if entry.endswith(".py"): path = os.path.join("configuration", entry) if not compile_file(path): compilation_failed = True else: copy_file_mode_and_owner(target_path, target_path + "c") if compilation_failed: return False # Make the newly written 'configuration' module available to the rest of the # installation script(s). 
sys.path.insert(0, os.path.join(installation.paths.etc_dir, "main")) return True def update_file(target_dir, entry, data, arguments, compilation_failed): global modified_files import configuration source_dir = os.path.join(installation.root_dir, "installation", "templates", "configuration") compilation_failed = False source_path = os.path.join(source_dir, entry) target_path = os.path.join(target_dir, entry) backup_path = os.path.join(target_dir, "_" + entry) source = open(source_path, "r").read().decode("utf-8") % data if not os.path.isfile(target_path): write_target = True else: if open(target_path).read().decode("utf-8") == source: return False def generateVersion(label, path): if label == "updated": with open(path, "w") as target: target.write(source.encode("utf-8")) update_query = installation.utils.UpdateModifiedFile( arguments, message="""\ A configuration file is about to be updated. Please check that no local modifications are being overwritten. Current version: %(current)s Updated version: %(updated)s Please note that if any configuration options were added in the updated version, the system will most likely break if you do not either install the updated version or manually transfer the new configuration options to the existing version. 
""", versions={ "current": target_path, "updated": target_path + ".new" }, options=[ ("i", "install the updated version"), ("k", "keep the current version"), ("d", ("current", "updated")) ], generateVersion=generateVersion) write_target = update_query.prompt() == "i" if write_target: print "Updated file: %s" % target_path if not arguments.dry_run: if os.path.isfile(target_path): os.rename(target_path, backup_path) renamed.append((target_path, backup_path)) with open(target_path, "w") as target: created_file.append(target_path) target.write(source.encode("utf-8")) set_file_mode_and_owner(target_path) if target_path.endswith(".py"): path = os.path.join("configuration", entry) if not compile_file(path): compilation_failed.append(path) else: copy_file_mode_and_owner(target_path, target_path + "c") # The module's name (relative the 'configuration' package) # is the base name minus the trailing ".py". module_name = os.path.basename(target_path)[:-3] if module_name != "__init__" \ and hasattr(configuration, module_name): # Reload the updated module so that code executing later # sees added configuration options. (It will also see # removed configuration options, but that is unlikely to # be a problem.) reload(getattr(configuration, module_name)) modified_files += 1 return True def upgrade(arguments, data): global modified_files import configuration if auth_mode == "critic": calibrate_minimum_rounds() data["installation.config.minimum_rounds"] = minimum_rounds source_dir = os.path.join(installation.root_dir, "installation", "templates", "configuration") target_dir = os.path.join(data["installation.paths.etc_dir"], arguments.identity, "configuration") compilation_failed = [] no_changes = True for entry in os.listdir(source_dir): if update_file(target_dir, entry, data, arguments, compilation_failed): no_changes = False if compilation_failed: return False if no_changes: print "No changed configuration files." 
if modified_files: reload(configuration) return True def undo(): map(os.unlink, reversed(created_file)) map(os.rmdir, reversed(created_dir)) for target, backup in renamed: os.rename(backup, target) def finish(mode, arguments, data): for target, backup in renamed: os.unlink(backup) for provider in providers: provider.scrub(data) ================================================ FILE: installation/criticctl.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import installation import os import os.path criticctl_path = None created_file = [] renamed = [] def install(data): global criticctl_path source_path = os.path.join(installation.root_dir, "installation", "templates", "criticctl") target_path = criticctl_path = os.path.join(installation.paths.bin_dir, "criticctl") with open(target_path, "w") as target: created_file.append(target_path) os.chmod(target_path, 0755) with open(source_path, "r") as source: target.write((source.read().decode("utf-8") % data).encode("utf-8")) return True def upgrade(arguments, data): target_path = os.path.join(installation.paths.bin_dir, "criticctl") backup_path = installation.utils.update_from_template( arguments, data, template_path="installation/templates/criticctl", target_path=target_path, message="""\ The criticctl utility is about to be updated. Please check that no local modifications are being overwritten. 
%(versions)s Please note that if the modifications are not installed, the criticctl utility is likely to stop working. """) if backup_path: created_file.append(target_path) renamed.append((target_path, backup_path)) return True def undo(): map(os.unlink, created_file) for target, backup in renamed: os.rename(backup, target) def finish(mode, arguments, data): for target, backup in renamed: os.unlink(backup) ================================================ FILE: installation/data/comments.pgsql ================================================ -- -*- mode: sql -*- -- -- Copyright 2012 Jens Lindström, Opera Software ASA -- -- Licensed under the Apache License, Version 2.0 (the "License"); you may not -- use this file except in compliance with the License. You may obtain a copy of -- the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -- License for the specific language governing permissions and limitations under -- the License. 
-- Number of comments in the given chain that are in state 'current'
-- (i.e. actually displayed; drafts/edited/deleted comments excluded).
CREATE OR REPLACE FUNCTION chaincomments(chain_id INTEGER)
  RETURNS INTEGER
  AS $$
DECLARE
  result INTEGER;
BEGIN
  -- INTO STRICT: COUNT(*) always yields exactly one row.
  SELECT COUNT(*) INTO STRICT result
    FROM comments
   WHERE chain=chain_id
     AND state='current';
  RETURN result;
END;
$$ LANGUAGE 'plpgsql';

-- Number of 'current' comments in the given chain that the given user
-- has not read yet (per the commentstoread table).
CREATE OR REPLACE FUNCTION chainunread(chain_id INTEGER, user_id INTEGER)
  RETURNS INTEGER
  AS $$
DECLARE
  result INTEGER;
BEGIN
  SELECT COUNT(*) INTO STRICT result
    FROM commentstoread
    JOIN comments ON (comments.id=commentstoread.comment)
   WHERE comments.chain=chain_id
     AND comments.state='current'
     AND commentstoread.uid=user_id;
  RETURN result;
END;
$$ LANGUAGE 'plpgsql';



================================================
FILE: installation/data/dbschema.base.sql
================================================
-- -*- mode: sql -*-
--
-- Copyright 2015 the Critic contributors, Opera Software ASA
--
-- Licensed under the Apache License, Version 2.0 (the "License"); you may not
-- use this file except in compliance with the License.  You may obtain a copy of
-- the License at
--
--   http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-- License for the specific language governing permissions and limitations under
-- the License.

-- Disable notices about implicitly created indexes and sequences.
SET client_min_messages TO WARNING; CREATE TABLE systemidentities ( key VARCHAR(32) PRIMARY KEY, name VARCHAR(64) UNIQUE, anonymous_scheme VARCHAR(5) NOT NULL, authenticated_scheme VARCHAR(5) NOT NULL, hostname VARCHAR(265) NOT NULL, description VARCHAR(256) NOT NULL, installed_sha1 CHAR(40) NOT NULL, installed_at TIMESTAMP DEFAULT NOW() NOT NULL ); CREATE TABLE files ( id SERIAL PRIMARY KEY, path TEXT NOT NULL ); -- Index used to enforce uniqueness, and for quick lookup of single -- paths (using "SELECT id FROM files WHERE MD5(path)=MD5(...)". CREATE UNIQUE INDEX files_path_md5 ON files (MD5(path)); -- Index used for path searches, for instance when searching for -- reviews that touch files in a certain directory. CREATE INDEX files_path_gin ON files USING gin (STRING_TO_ARRAY(path, '/')); CREATE TABLE knownremotes ( url VARCHAR(256) PRIMARY KEY, -- True if this remote has a post-update hook (or similar) that contacts the -- branchtrackerhook service and triggers immediate updates of tracked -- branches. pushing BOOLEAN NOT NULL ); CREATE TABLE timezones ( name VARCHAR(256) PRIMARY KEY, abbrev VARCHAR(16) NOT NULL, utc_offset INTERVAL NOT NULL ); INSERT INTO timezones (name, abbrev, utc_offset) VALUES ('Universal/UTC', 'UTC', INTERVAL '0'); ================================================ FILE: installation/data/dbschema.changesets.sql ================================================ -- -*- mode: sql -*- -- -- Copyright 2015 the Critic contributors, Opera Software ASA -- -- Licensed under the Apache License, Version 2.0 (the "License"); you may not -- use this file except in compliance with the License. You may obtain a copy of -- the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -- License for the specific language governing permissions and limitations under -- the License. -- Disable notices about implicitly created indexes and sequences. SET client_min_messages TO WARNING; CREATE TYPE changesettype AS ENUM ( 'direct', -- Plain diff between immediate parent and child (including -- cases where the child commit has other parents.) 'custom', -- Plain diff between any other two commits. 'merge', -- Relevance filtered merge diff between immediate parent and -- child where child has other parents. 'conflicts'); -- Diff between two merge commits, one automatically generated -- and one "real." The automatically generated merge commit -- is created without resolving any conflicts (the conflict -- markers inserted by "git merge" are committed as-is.) CREATE TABLE changesets ( id SERIAL PRIMARY KEY, parent INTEGER REFERENCES commits ON DELETE CASCADE, child INTEGER NOT NULL REFERENCES commits ON DELETE CASCADE, type changesettype NOT NULL, UNIQUE (parent, child, type) ); CREATE INDEX changesets_child ON changesets (child); CREATE TABLE customchangesets ( changeset INTEGER PRIMARY KEY REFERENCES changesets ON DELETE CASCADE, time TIMESTAMP ); CREATE TABLE mergereplays ( original INTEGER PRIMARY KEY REFERENCES commits ON DELETE CASCADE, replay INTEGER NOT NULL REFERENCES commits ON DELETE CASCADE ); CREATE TABLE fileversions ( changeset INTEGER NOT NULL REFERENCES changesets ON DELETE CASCADE, file INTEGER NOT NULL REFERENCES files, old_sha1 CHAR(40), new_sha1 CHAR(40), old_mode CHAR(6), new_mode CHAR(6), PRIMARY KEY (changeset, file) ); CREATE INDEX fileversions_old_sha1 ON fileversions (file, old_sha1); CREATE INDEX fileversions_new_sha1 ON fileversions (file, new_sha1); CREATE TABLE chunks ( id SERIAL PRIMARY KEY, changeset INTEGER NOT NULL REFERENCES changesets ON DELETE CASCADE, file INTEGER NOT NULL REFERENCES files, deleteOffset INTEGER NOT NULL, deleteCount INTEGER NOT NULL, insertOffset INTEGER NOT NULL, insertCount INTEGER NOT NULL, 
analysis TEXT, whitespace INTEGER NOT NULL ); CREATE INDEX chunks_changeset_file ON chunks (changeset, file); CREATE TABLE codecontexts ( sha1 CHAR(40), context VARCHAR(256) NOT NULL, first_line INTEGER NOT NULL, last_line INTEGER NOT NULL ); CREATE INDEX codecontexts_sha1_first_last ON codecontexts (sha1, first_line, last_line); ================================================ FILE: installation/data/dbschema.comments.sql ================================================ -- -*- mode: sql -*- -- -- Copyright 2012 Jens Lindström, Opera Software ASA -- -- Licensed under the Apache License, Version 2.0 (the "License"); you may not -- use this file except in compliance with the License. You may obtain a copy of -- the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -- License for the specific language governing permissions and limitations under -- the License. -- Disable notices about implicitly created indexes and sequences. SET client_min_messages TO WARNING; CREATE TYPE commentchaintype AS ENUM ( 'issue', -- The comment chain, while open, blocks the review. 'note' -- The comment chain doesn't block the review. ); CREATE TYPE commentchainstate AS ENUM ( 'draft', -- The comment chain (and all it's comments) are drafts. 'open', -- The comment chain is open. 'addressed',-- The commented code was changed by a later commit. 'closed', -- The comment chain is closed. 'empty' -- The comment chain has no comments. ); CREATE TYPE commentchainorigin AS ENUM ( 'old', -- The user commented the old/left-hand side in a diff. 'new' -- The user commented the new/right-hand side in a diff. 
); CREATE TABLE commentchains ( id SERIAL PRIMARY KEY, review INTEGER NOT NULL REFERENCES reviews, batch INTEGER REFERENCES batches ON DELETE CASCADE, uid INTEGER NOT NULL REFERENCES users, time TIMESTAMP NOT NULL DEFAULT NOW(), type commentchaintype NOT NULL DEFAULT 'issue', state commentchainstate NOT NULL DEFAULT 'draft', origin commentchainorigin, file INTEGER REFERENCES files, first_commit INTEGER REFERENCES commits, last_commit INTEGER REFERENCES commits, closed_by INTEGER REFERENCES users, addressed_by INTEGER REFERENCES commits, first_comment INTEGER ); -- Foreign key constraint "REFERENCES comments" set up later. CREATE INDEX commentchains_review_file ON commentchains(review, file); CREATE INDEX commentchains_review_type_state ON commentchains(review, type, state); CREATE INDEX commentchains_batch ON commentchains(batch); -- FIXME: This circular relation is unnecessary. Should have a separate table -- for mapping batches to comments intead. ALTER TABLE batches ADD CONSTRAINT batches_comment_fkey FOREIGN KEY (comment) REFERENCES commentchains ON DELETE CASCADE; CREATE TYPE commentchainchangestate AS ENUM ( 'draft', -- This change hasn't been performed yet. 'performed', -- The change has been performed. 'rejected' -- The change was rejected; affected comment chain wasn't in -- expected state. 
); CREATE TABLE commentchainchanges ( batch INTEGER REFERENCES batches ON DELETE CASCADE, uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE, chain INTEGER NOT NULL REFERENCES commentchains ON DELETE CASCADE, time TIMESTAMP NOT NULL DEFAULT NOW(), state commentchainchangestate NOT NULL DEFAULT 'draft', from_type commentchaintype, to_type commentchaintype, from_state commentchainstate, to_state commentchainstate, from_last_commit INTEGER REFERENCES commits, to_last_commit INTEGER REFERENCES commits, from_addressed_by INTEGER REFERENCES commits, to_addressed_by INTEGER REFERENCES commits ); CREATE INDEX commentchainchanges_batch ON commentchainchanges(batch); CREATE INDEX commentchainchanges_chain ON commentchainchanges(chain); CREATE TYPE commentchainlinesstate AS ENUM ( 'draft', 'current' ); CREATE TABLE commentchainlines ( chain INTEGER NOT NULL REFERENCES commentchains ON DELETE CASCADE, uid INTEGER REFERENCES users, time TIMESTAMP NOT NULL DEFAULT NOW(), state commentchainlinesstate NOT NULL DEFAULT 'draft', sha1 CHAR(40) NOT NULL, first_line INTEGER NOT NULL, last_line INTEGER NOT NULL, -- This UNIQUE constraint is a bit fishy; it means two different users -- can't have a draft "reopening" of the commentchain at the same time, -- which strictly speaking wouldn't necessarily be a problem. UNIQUE (chain, sha1) ); CREATE INDEX commentchainlines_chain_sha1 ON commentchainlines(chain, sha1); CREATE TABLE commentchainusers ( chain INTEGER NOT NULL REFERENCES commentchains ON DELETE CASCADE, uid INTEGER NOT NULL REFERENCES users, PRIMARY KEY (chain, uid) ); CREATE TYPE commentstate AS ENUM ( 'draft', -- The comment is a draft. 'current', -- The comment is currently displayed. 'edited', -- The comment was edited (that is, replaced by another -- comment whose 'edit_of' field references this.) 'deleted' -- The comment was deleted and is not displayed. 
); CREATE TABLE comments ( id SERIAL PRIMARY KEY, chain INTEGER NOT NULL REFERENCES commentchains ON DELETE CASCADE, batch INTEGER REFERENCES batches ON DELETE CASCADE, uid INTEGER NOT NULL REFERENCES users, time TIMESTAMP NOT NULL DEFAULT NOW(), state commentstate NOT NULL, comment TEXT, code TEXT ); CREATE INDEX comments_chain_uid_state ON comments (chain, uid, state); CREATE INDEX comments_batch ON comments(batch); CREATE INDEX comments_id_chain ON comments(id, chain); -- FIXME: This is an unfortunate circular relation. It's here to optimize -- accessing a group of comment chains and their first comment (i.e. accessing -- comments but not their replies.) This matters (supposedly) when loading -- review front-pages, but it's questionable whether this is really necessary. ALTER TABLE commentchains ADD CONSTRAINT commentchains_first_comment_fkey FOREIGN KEY (first_comment) REFERENCES comments; CREATE TABLE commentstoread ( uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE, comment INTEGER NOT NULL REFERENCES comments ON DELETE CASCADE, PRIMARY KEY (uid, comment) ); CREATE INDEX commentstoread_comment ON commentstoread(comment); CREATE TABLE commentmessageids ( uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE, comment INTEGER NOT NULL REFERENCES comments ON DELETE CASCADE, messageid CHAR(24) NOT NULL, PRIMARY KEY (uid, comment) ); CREATE INDEX commentmessageids_comment ON commentmessageids(comment); ================================================ FILE: installation/data/dbschema.extensions.sql ================================================ -- -*- mode: sql -*- -- -- Copyright 2012 Jens Lindström, Opera Software ASA -- -- Licensed under the Apache License, Version 2.0 (the "License"); you may not -- use this file except in compliance with the License. 
You may obtain a copy of -- the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -- License for the specific language governing permissions and limitations under -- the License. -- Disable notices about implicitly created indexes and sequences. SET client_min_messages TO WARNING; CREATE TABLE extensions ( id SERIAL PRIMARY KEY, author INTEGER REFERENCES users, -- NULL means system extension name VARCHAR(64) NOT NULL, UNIQUE (author, name) ); CREATE TABLE extensionversions ( id SERIAL PRIMARY KEY, extension INTEGER NOT NULL REFERENCES extensions, name VARCHAR(256) NOT NULL, sha1 CHAR(40) NOT NULL, UNIQUE (sha1) ); -- Installed extensions. -- If uid=NULL, it is a "universal install" (affecting all users.) -- If version=NULL, the "LIVE" version is installed. 
CREATE TABLE extensioninstalls ( id SERIAL PRIMARY KEY, uid INTEGER REFERENCES users, extension INTEGER NOT NULL REFERENCES extensions, version INTEGER REFERENCES extensionversions, UNIQUE (uid, extension) ); CREATE TABLE extensionroles ( id SERIAL PRIMARY KEY, version INTEGER NOT NULL REFERENCES extensionversions, script VARCHAR(64) NOT NULL, function VARCHAR(64) NOT NULL ); CREATE TABLE extensionpageroles ( role INTEGER NOT NULL REFERENCES extensionroles ON DELETE CASCADE, path VARCHAR(64) NOT NULL ); CREATE VIEW extensionroles_page AS SELECT version, path, script, function FROM extensionroles JOIN extensionpageroles ON (role=id); CREATE TABLE extensioninjectroles ( role INTEGER NOT NULL REFERENCES extensionroles ON DELETE CASCADE, path VARCHAR(64) NOT NULL ); CREATE VIEW extensionroles_inject AS SELECT version, path, script, function FROM extensionroles JOIN extensioninjectroles ON (role=id); CREATE TABLE extensionprocesscommitsroles ( role INTEGER NOT NULL REFERENCES extensionroles ON DELETE CASCADE ); CREATE TABLE extensionfilterhookroles ( role INTEGER NOT NULL REFERENCES extensionroles ON DELETE CASCADE, name VARCHAR(64) NOT NULL, title VARCHAR(64) NOT NULL, role_description TEXT, data_description TEXT ); CREATE TABLE extensionhookfilters ( id SERIAL PRIMARY KEY, uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE, extension INTEGER NOT NULL REFERENCES extensions ON DELETE CASCADE, repository INTEGER NOT NULL REFERENCES repositories ON DELETE CASCADE, name VARCHAR(64) NOT NULL, path TEXT NOT NULL, data TEXT ); CREATE INDEX extensionhookfilters_uid_extension ON extensionhookfilters (uid, extension); CREATE INDEX extensionhookfilters_repository ON extensionhookfilters (repository); CREATE TABLE extensionfilterhookevents ( id SERIAL PRIMARY KEY, filter INTEGER NOT NULL REFERENCES extensionhookfilters ON DELETE CASCADE, review INTEGER NOT NULL REFERENCES reviews ON DELETE CASCADE, uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE, data TEXT ); CREATE 
TABLE extensionfilterhookcommits ( event INTEGER NOT NULL REFERENCES extensionfilterhookevents ON DELETE CASCADE, commit INTEGER NOT NULL REFERENCES commits ); CREATE INDEX extensionfilterhookcommits_event ON extensionfilterhookcommits (event); CREATE TABLE extensionfilterhookfiles ( event INTEGER NOT NULL REFERENCES extensionfilterhookevents ON DELETE CASCADE, file INTEGER NOT NULL REFERENCES files ); CREATE INDEX extensionfilterhookfiles_event ON extensionfilterhookfiles (event); CREATE TABLE extensionstorage ( extension INTEGER NOT NULL REFERENCES extensions, uid INTEGER NOT NULL REFERENCES users, key VARCHAR(64) NOT NULL, text TEXT NOT NULL, PRIMARY KEY (extension, uid, key) ); CREATE TABLE extensionlog ( extension INTEGER NOT NULL REFERENCES extensions, uid INTEGER NOT NULL REFERENCES users, category VARCHAR(64) NOT NULL DEFAULT 'default', time TIMESTAMP NOT NULL DEFAULT NOW(), text TEXT NOT NULL ); CREATE INDEX extensionlog_extension_uid_category ON extensionlog(extension, uid, category); CREATE TYPE extensionaccesstype AS ENUM ( 'install', 'execute' ); CREATE TABLE accesscontrol_extensions ( id SERIAL PRIMARY KEY, -- The profile this exception belongs to. profile INTEGER NOT NULL REFERENCES accesscontrolprofiles ON DELETE CASCADE, -- Type of extension access. NULL means "any type". access_type extensionaccesstype, -- Extension key: / for user extensions and -- for system extensions. NULL means "any extension". extension_key TEXT ); CREATE INDEX accesscontrol_extensions_profile ON accesscontrol_extensions (profile); ================================================ FILE: installation/data/dbschema.filters.sql ================================================ -- -*- mode: sql -*- -- -- Copyright 2015 the Critic contributors, Opera Software ASA -- -- Licensed under the Apache License, Version 2.0 (the "License"); you may not -- use this file except in compliance with the License. 
You may obtain a copy of -- the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -- License for the specific language governing permissions and limitations under -- the License. -- Disable notices about implicitly created indexes and sequences. SET client_min_messages TO WARNING; CREATE TYPE filtertype AS ENUM ( 'reviewer', 'watcher', 'ignored' ); CREATE TABLE filters ( id SERIAL PRIMARY KEY, uid INTEGER NOT NULL REFERENCES users, repository INTEGER NOT NULL REFERENCES repositories ON DELETE CASCADE, path TEXT NOT NULL, type filtertype NOT NULL, delegate TEXT ); -- Index used to enforce uniqueness. CREATE UNIQUE INDEX filters_uid_repository_path_md5 ON filters (uid, repository, MD5(path)); ================================================ FILE: installation/data/dbschema.git.sql ================================================ -- -*- mode: sql -*- -- -- Copyright 2015 the Critic contributors, Opera Software ASA -- -- Licensed under the Apache License, Version 2.0 (the "License"); you may not -- use this file except in compliance with the License. You may obtain a copy of -- the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -- License for the specific language governing permissions and limitations under -- the License. -- Disable notices about implicitly created indexes and sequences. 
SET client_min_messages TO WARNING; CREATE TABLE repositories ( id SERIAL PRIMARY KEY, parent INTEGER REFERENCES repositories, name VARCHAR(64) NOT NULL UNIQUE, path VARCHAR(256) NOT NULL UNIQUE ); CREATE TABLE gitusers ( id SERIAL PRIMARY KEY, email VARCHAR(256) NOT NULL, fullname VARCHAR(256) NOT NULL, UNIQUE (email, fullname) ); CREATE TABLE commits ( id SERIAL PRIMARY KEY, sha1 CHAR(40) NOT NULL UNIQUE, author_gituser INTEGER NOT NULL REFERENCES gitusers, commit_gituser INTEGER NOT NULL REFERENCES gitusers, author_time TIMESTAMP NOT NULL, commit_time TIMESTAMP NOT NULL ); CREATE TABLE edges ( parent INTEGER NOT NULL REFERENCES commits ON DELETE CASCADE, child INTEGER NOT NULL REFERENCES commits ON DELETE CASCADE ); CREATE INDEX edges_parent ON edges (parent); CREATE INDEX edges_child ON edges (child); CREATE TYPE branchtype AS ENUM ( 'normal', 'review' ); CREATE TABLE branches ( id SERIAL PRIMARY KEY, name VARCHAR(256) NOT NULL, repository INTEGER NOT NULL REFERENCES repositories ON DELETE CASCADE, head INTEGER NOT NULL REFERENCES commits, base INTEGER REFERENCES branches, tail INTEGER REFERENCES commits, type branchtype NOT NULL DEFAULT 'normal', archived BOOLEAN NOT NULL DEFAULT FALSE, UNIQUE (repository, name) ); CREATE TABLE reachable ( branch INTEGER NOT NULL REFERENCES branches ON DELETE CASCADE, commit INTEGER NOT NULL REFERENCES commits, PRIMARY KEY (branch, commit) ); CREATE INDEX reachable_branch ON reachable (branch); CREATE INDEX reachable_commit ON reachable (commit); CREATE TABLE tags ( id SERIAL PRIMARY KEY, name VARCHAR(256) NOT NULL, repository INTEGER NOT NULL REFERENCES repositories ON DELETE CASCADE, sha1 CHAR(40) NOT NULL, UNIQUE (repository, name) ); CREATE INDEX tags_repository_sha1 ON tags (repository, sha1); -- Cached result of 'git merge-base ' for commits. 
CREATE TABLE mergebases ( commit INTEGER PRIMARY KEY REFERENCES commits ON DELETE CASCADE, mergebase CHAR(40) ); -- Cached per-file-and-parent "relevant" commits, for a merge commit. -- -- Each row says that for the merge commit |commit|'s |parent|th parent and the -- file |file|, |relevant| is a commit between the merge-base and the merge that -- also modifies the file, and that isn't an ancestor of that parent. CREATE TABLE relevantcommits ( commit INTEGER REFERENCES commits ON DELETE CASCADE, parent SMALLINT NOT NULL, file INTEGER REFERENCES files, relevant INTEGER REFERENCES commits ON DELETE CASCADE, PRIMARY KEY (commit, parent, file, relevant) ); CREATE TYPE repositoryaccesstype AS ENUM ( 'read', 'modify' ); CREATE TABLE accesscontrol_repositories ( id SERIAL PRIMARY KEY, -- The profile this exception belongs to. profile INTEGER NOT NULL REFERENCES accesscontrolprofiles ON DELETE CASCADE, -- Type of access. NULL means "any type". access_type repositoryaccesstype, -- Repository to access. NULL means "any repository". repository INTEGER REFERENCES repositories ON DELETE CASCADE ); CREATE INDEX accesscontrol_repositories_profile ON accesscontrol_repositories (profile); ================================================ FILE: installation/data/dbschema.news.sql ================================================ -- -*- mode: sql -*- -- -- Copyright 2015 the Critic contributors, Opera Software ASA -- -- Licensed under the Apache License, Version 2.0 (the "License"); you may not -- use this file except in compliance with the License. You may obtain a copy of -- the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -- License for the specific language governing permissions and limitations under -- the License. 
-- Disable notices about implicitly created indexes and sequences. SET client_min_messages TO WARNING; CREATE TABLE newsitems ( id SERIAL PRIMARY KEY, date DATE DEFAULT NOW(), text TEXT NOT NULL ); CREATE TABLE newsread ( item INTEGER NOT NULL REFERENCES newsitems ON DELETE CASCADE, uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE ); ================================================ FILE: installation/data/dbschema.preferences.sql ================================================ -- -*- mode: sql -*- -- -- Copyright 2015 the Critic contributors, Opera Software ASA -- -- Licensed under the Apache License, Version 2.0 (the "License"); you may not -- use this file except in compliance with the License. You may obtain a copy of -- the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -- License for the specific language governing permissions and limitations under -- the License. -- Disable notices about implicitly created indexes and sequences. SET client_min_messages TO WARNING; CREATE TYPE preferencetype AS ENUM ( 'boolean', 'integer', 'string' ); CREATE TABLE preferences ( item VARCHAR(64) PRIMARY KEY, type preferencetype NOT NULL, description TEXT NOT NULL, -- If TRUE, this preference is relevant to configure per system (IOW -- globally), per user, per repository and/or per filter. This controls -- whether the preference is displayed on the corresponding /config page -- variant. 
per_system BOOLEAN NOT NULL DEFAULT TRUE, per_user BOOLEAN NOT NULL DEFAULT TRUE, per_repository BOOLEAN NOT NULL DEFAULT FALSE, per_filter BOOLEAN NOT NULL DEFAULT FALSE ); CREATE TABLE userpreferences ( item VARCHAR(64) NOT NULL REFERENCES preferences, uid INTEGER REFERENCES users ON DELETE CASCADE, repository INTEGER REFERENCES repositories ON DELETE CASCADE, filter INTEGER REFERENCES filters ON DELETE CASCADE, integer INTEGER, string TEXT, -- Invariant: If 'filter' is not NULL, then 'uid' must not be NULL. CONSTRAINT check_uid_filter CHECK (filter IS NULL OR uid IS NOT NULL), -- Invariant: At least one of 'repository' and 'filter' must be NULL. CONSTRAINT check_repository_filter CHECK (repository IS NULL OR filter IS NULL) ); -- These indexes are primarily used to enforce uniqueness. The three columns -- 'uid', 'repository' and 'filter' can all be NULL (in various configurations) -- and from a uniqueness point of view, we want those NULL to behave as if they -- compared equal. CREATE UNIQUE INDEX userpreferences_item ON userpreferences (item) WHERE uid IS NULL AND repository IS NULL AND filter IS NULL; CREATE UNIQUE INDEX userpreferences_item_uid ON userpreferences (item, uid) WHERE uid IS NOT NULL AND repository IS NULL AND filter IS NULL; CREATE UNIQUE INDEX userpreferences_item_repository ON userpreferences (item, repository) WHERE uid IS NULL AND repository IS NOT NULL AND filter IS NULL; CREATE UNIQUE INDEX userpreferences_item_uid_repository ON userpreferences (item, uid, repository) WHERE uid IS NOT NULL AND repository IS NOT NULL AND filter IS NULL; CREATE UNIQUE INDEX userpreferences_item_uid_filter ON userpreferences (item, uid, filter) WHERE uid IS NOT NULL AND repository IS NULL AND filter IS NOT NULL; ================================================ FILE: installation/data/dbschema.reviews.sql ================================================ -- -*- mode: sql -*- -- -- Copyright 2015 the Critic contributors, Opera Software ASA -- -- Licensed under 
the Apache License, Version 2.0 (the "License"); you may not -- use this file except in compliance with the License. You may obtain a copy of -- the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -- License for the specific language governing permissions and limitations under -- the License. -- Disable notices about implicitly created indexes and sequences. SET client_min_messages TO WARNING; CREATE TYPE reviewtype AS ENUM ( 'official', 'rfc', 'ad-hoc' ); CREATE TYPE reviewstate AS ENUM ( 'draft', 'open', 'closed', 'dropped' ); CREATE TABLE reviews ( id SERIAL PRIMARY KEY, type reviewtype NOT NULL, -- The review branch. branch INTEGER NOT NULL REFERENCES branches, -- The (non-review) branch from which this review was created, if any. origin INTEGER REFERENCES branches ON DELETE SET NULL, state reviewstate NOT NULL, serial INTEGER NOT NULL DEFAULT 0, closed_by INTEGER REFERENCES users, dropped_by INTEGER REFERENCES users, applyfilters BOOLEAN NOT NULL, applyparentfilters BOOLEAN NOT NULL, summary TEXT, description TEXT ); CREATE INDEX reviews_branch ON reviews (branch); CREATE TABLE scheduledreviewbrancharchivals ( review INTEGER PRIMARY KEY REFERENCES reviews (id), deadline TIMESTAMP NOT NULL ); CREATE TABLE reviewfilters ( id SERIAL PRIMARY KEY, review INTEGER NOT NULL REFERENCES reviews ON DELETE CASCADE, uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE, path TEXT NOT NULL, type filtertype NOT NULL, creator INTEGER NOT NULL REFERENCES users ON DELETE CASCADE ); -- Index used to enforce uniqueness. 
CREATE UNIQUE INDEX reviewfilters_review_uid_path_md5 ON reviewfilters (review, uid, MD5(path)); CREATE TABLE batches ( id SERIAL PRIMARY KEY, review INTEGER NOT NULL REFERENCES reviews ON DELETE CASCADE, uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE, comment INTEGER, -- REFERENCES commentchains, time TIMESTAMP NOT NULL DEFAULT NOW() ); CREATE INDEX batches_review_uid ON batches (review, uid); CREATE TYPE reviewusertype AS ENUM ( 'automatic', 'manual' ); CREATE TABLE reviewusers ( review INTEGER NOT NULL, uid INTEGER NOT NULL, owner BOOLEAN NOT NULL DEFAULT FALSE, type reviewusertype NOT NULL DEFAULT 'automatic', PRIMARY KEY (review, uid), FOREIGN KEY (review) REFERENCES reviews(id) ON DELETE CASCADE, FOREIGN KEY (uid) REFERENCES users(id) ); CREATE INDEX reviewusers_uid ON reviewusers (uid); CREATE TABLE reviewchangesets ( review INTEGER NOT NULL REFERENCES reviews ON DELETE CASCADE, changeset INTEGER NOT NULL REFERENCES changesets, PRIMARY KEY (review, changeset) ); CREATE TABLE reviewrebases ( id SERIAL PRIMARY KEY, review INTEGER NOT NULL REFERENCES reviews ON DELETE CASCADE, old_head INTEGER NOT NULL REFERENCES commits, new_head INTEGER REFERENCES commits, old_upstream INTEGER REFERENCES commits, new_upstream INTEGER REFERENCES commits, equivalent_merge INTEGER REFERENCES commits, replayed_rebase INTEGER REFERENCES commits, uid INTEGER NOT NULL REFERENCES users, branch VARCHAR(256), UNIQUE (review, old_head) ); CREATE TABLE previousreachable ( rebase INTEGER NOT NULL REFERENCES reviewrebases ON DELETE CASCADE, commit INTEGER NOT NULL REFERENCES commits ); CREATE INDEX previousreachable_rebase ON previousreachable (rebase); CREATE TYPE reviewfilestate AS ENUM ( 'pending', -- No one has said anything. 'reviewed' -- The file has been reviewed. 
); CREATE TABLE reviewfiles ( id SERIAL PRIMARY KEY, review INTEGER NOT NULL REFERENCES reviews ON DELETE CASCADE, changeset INTEGER NOT NULL REFERENCES changesets ON DELETE CASCADE, file INTEGER NOT NULL REFERENCES files ON DELETE CASCADE, deleted INTEGER NOT NULL, inserted INTEGER NOT NULL, state reviewfilestate NOT NULL DEFAULT 'pending', reviewer INTEGER REFERENCES users ON DELETE SET NULL, time TIMESTAMP, FOREIGN KEY (review, changeset) REFERENCES reviewchangesets ON DELETE CASCADE, FOREIGN KEY (changeset, file) REFERENCES fileversions ON DELETE CASCADE ); CREATE INDEX reviewfiles_review_changeset ON reviewfiles (review, changeset); CREATE INDEX reviewfiles_review_state ON reviewfiles (review, state); CREATE TABLE reviewassignmentstransactions ( id SERIAL PRIMARY KEY, review INTEGER NOT NULL REFERENCES reviews ON DELETE CASCADE, assigner INTEGER NOT NULL REFERENCES users, note TEXT, time TIMESTAMP DEFAULT NOW() ); CREATE TABLE reviewassignmentchanges ( transaction INTEGER NOT NULL REFERENCES reviewassignmentstransactions, file INTEGER NOT NULL REFERENCES reviewfiles ON DELETE CASCADE, uid INTEGER NOT NULL REFERENCES users, assigned BOOLEAN NOT NULL, PRIMARY KEY (transaction, file, uid) ); CREATE TABLE reviewfilterchanges ( transaction INTEGER NOT NULL REFERENCES reviewassignmentstransactions ON DELETE CASCADE, uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE, path TEXT NOT NULL, type filtertype NOT NULL, created BOOLEAN NOT NULL ); CREATE TABLE reviewuserfiles ( file INTEGER NOT NULL REFERENCES reviewfiles ON DELETE CASCADE, uid INTEGER NOT NULL REFERENCES users, time TIMESTAMP DEFAULT NOW(), PRIMARY KEY (file, uid) ); CREATE INDEX reviewuserfiles_uid ON reviewuserfiles (uid); CREATE VIEW reviewfilesharing AS SELECT reviewfiles.review AS review, reviewfiles.id AS file, COUNT(reviewuserfiles.uid) AS reviewers FROM reviewfiles JOIN reviewuserfiles ON (reviewfiles.id=reviewuserfiles.file) JOIN users ON (users.id=reviewuserfiles.uid) WHERE 
users.status='current' GROUP BY reviewfiles.review, reviewfiles.id; CREATE TYPE reviewfilechangestate AS ENUM ( 'draft', -- This change hasn't been performed yet. 'performed', -- The change has been performed. 'rejected' -- The change was rejected; affected file wasn't in expected -- state (concurrent update.) ); CREATE TABLE reviewfilechanges ( batch INTEGER REFERENCES batches, file INTEGER NOT NULL REFERENCES reviewfiles, uid INTEGER NOT NULL REFERENCES users, time TIMESTAMP NOT NULL DEFAULT NOW(), state reviewfilechangestate NOT NULL DEFAULT 'draft', from_state reviewfilestate NOT NULL, to_state reviewfilestate NOT NULL, FOREIGN KEY (file, uid) REFERENCES reviewuserfiles ON DELETE CASCADE ); CREATE INDEX reviewfilechanges_batch ON reviewfilechanges (batch); CREATE INDEX reviewfilechanges_file ON reviewfilechanges (file); CREATE INDEX reviewfilechanges_uid_state ON reviewfilechanges (uid, state); CREATE INDEX reviewfilechanges_time ON reviewfilechanges (time); CREATE TABLE lockedreviews ( review INTEGER PRIMARY KEY REFERENCES reviews ); CREATE VIEW fullreviewuserfiles AS SELECT reviewfiles.review as review, reviewfiles.changeset as changeset, reviewfiles.file as file, reviewfiles.deleted as deleted, reviewfiles.inserted as inserted, reviewfiles.state as state, reviewfiles.reviewer as reviewer, reviewuserfiles.uid as assignee FROM reviewfiles JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id); CREATE TABLE reviewmessageids ( uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE, review INTEGER NOT NULL REFERENCES reviews ON DELETE CASCADE, messageid CHAR(24) NOT NULL, PRIMARY KEY (uid, review) ); CREATE INDEX reviewmessageids_review ON reviewmessageids (review); CREATE TABLE reviewmergeconfirmations ( id SERIAL PRIMARY KEY, review INTEGER NOT NULL REFERENCES reviews ON DELETE CASCADE, uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE, merge INTEGER NOT NULL REFERENCES commits ON DELETE CASCADE, tail INTEGER REFERENCES commits ON DELETE CASCADE, 
confirmed BOOLEAN NOT NULL DEFAULT FALSE, UNIQUE (review, uid, merge) ); CREATE TABLE reviewmergecontributions ( id INTEGER NOT NULL REFERENCES reviewmergeconfirmations ON DELETE CASCADE, merged INTEGER NOT NULL REFERENCES commits ON DELETE CASCADE, PRIMARY KEY (id, merged) ); CREATE TABLE reviewrecipientfilters ( review INTEGER NOT NULL REFERENCES reviews, uid INTEGER REFERENCES users, include BOOLEAN NOT NULL, UNIQUE (review, uid) ); CREATE TABLE checkbranchnotes ( repository INTEGER NOT NULL REFERENCES repositories ON DELETE CASCADE, branch VARCHAR(256) NOT NULL, upstream VARCHAR(256) NOT NULL, sha1 CHAR(40) NOT NULL, uid INTEGER NOT NULL REFERENCES users, review INTEGER REFERENCES reviews ON DELETE SET NULL, text TEXT, PRIMARY KEY (repository, branch, upstream, sha1) ); ================================================ FILE: installation/data/dbschema.trackedbranches.sql ================================================ -- -*- mode: sql -*- -- -- Copyright 2015 the Critic contributors, Opera Software ASA -- -- Licensed under the Apache License, Version 2.0 (the "License"); you may not -- use this file except in compliance with the License. You may obtain a copy of -- the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -- License for the specific language governing permissions and limitations under -- the License. -- Disable notices about implicitly created indexes and sequences. 
SET client_min_messages TO WARNING; CREATE TABLE trackedbranches ( id SERIAL PRIMARY KEY, repository INTEGER NOT NULL REFERENCES repositories, local_name VARCHAR(256) NOT NULL, remote VARCHAR(256) NOT NULL, remote_name VARCHAR(256) NOT NULL, forced BOOLEAN NOT NULL, disabled BOOLEAN NOT NULL DEFAULT FALSE, updating BOOLEAN NOT NULL DEFAULT FALSE, delay INTERVAL NOT NULL, previous TIMESTAMP, next TIMESTAMP, UNIQUE (repository, local_name) ); CREATE TABLE trackedbranchusers ( branch INTEGER NOT NULL REFERENCES trackedbranches ON DELETE CASCADE, uid INTEGER NOT NULL REFERENCES users, PRIMARY KEY (branch, uid) ); CREATE TABLE trackedbranchlog ( branch INTEGER NOT NULL REFERENCES trackedbranches ON DELETE CASCADE, time TIMESTAMP NOT NULL DEFAULT NOW(), from_sha1 CHAR(40), to_sha1 CHAR(40) NOT NULL, hook_output TEXT NOT NULL, successful BOOLEAN NOT NULL ); CREATE INDEX trackedbranchlog_branch ON trackedbranchlog (branch); ================================================ FILE: installation/data/dbschema.users.sql ================================================ -- -*- mode: sql -*- -- -- Copyright 2015 the Critic contributors, Opera Software ASA -- -- Licensed under the Apache License, Version 2.0 (the "License"); you may not -- use this file except in compliance with the License. You may obtain a copy of -- the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -- License for the specific language governing permissions and limitations under -- the License. -- Disable notices about implicitly created indexes and sequences. 
SET client_min_messages TO WARNING; CREATE TABLE roles ( name VARCHAR(64) PRIMARY KEY, description TEXT ); INSERT INTO roles (name, description) VALUES ('administrator', 'Almighty system administrator.'), ('repositories', 'Allowed to add and configure repositories.'), ('developer', 'System developer.'), ('newswriter', 'Allowed to add and edit news items.'); CREATE TYPE userstatus AS ENUM ( 'unknown', 'current', 'absent', 'retired' ); CREATE TABLE users ( id SERIAL PRIMARY KEY, name VARCHAR(64) NOT NULL UNIQUE, fullname VARCHAR(256), password VARCHAR(256), email INTEGER, -- Foreign key constraint "REFERENCES useremails" set up later. status userstatus NOT NULL DEFAULT 'unknown' ); CREATE TABLE useremails ( id SERIAL PRIMARY KEY, uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE, email VARCHAR(256) NOT NULL, verified BOOLEAN, verification_token VARCHAR(256), UNIQUE (uid, email) ); -- FIXME: This circular relation is unnecessary. Should have a separate table -- for mapping a user's selected email, or just store it as a boolean in the -- useremails table instead. 
ALTER TABLE users ADD CONSTRAINT users_email_fkey FOREIGN KEY (email) REFERENCES useremails; CREATE TABLE usersessions ( key CHAR(28) PRIMARY KEY, uid INTEGER NOT NULL REFERENCES users, labels VARCHAR(256), atime TIMESTAMP DEFAULT NOW() ); CREATE TABLE usergitemails ( email VARCHAR(256), uid INTEGER REFERENCES users ON DELETE CASCADE, PRIMARY KEY (email, uid) ); CREATE INDEX usergitemails_uid ON usergitemails (uid); CREATE TABLE userabsence ( uid INTEGER NOT NULL REFERENCES users, until DATE ); CREATE INDEX userabsence_uid_until ON userabsence (uid, until); CREATE TABLE userroles ( uid INTEGER NOT NULL REFERENCES users, role VARCHAR(64) NOT NULL REFERENCES roles ); CREATE TABLE userresources ( uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE, name VARCHAR(32) NOT NULL, revision INTEGER NOT NULL DEFAULT 0, source TEXT NOT NULL, PRIMARY KEY (uid, name, revision) ); CREATE TABLE externalusers ( id SERIAL PRIMARY KEY, uid INTEGER REFERENCES users, provider VARCHAR(16) NOT NULL, account VARCHAR(256) NOT NULL, email VARCHAR(256), token VARCHAR(256), UNIQUE (provider, account) ); CREATE TABLE oauthstates ( state VARCHAR(64) PRIMARY KEY, url TEXT, time TIMESTAMP NOT NULL DEFAULT NOW() ); CREATE TYPE systemaccesstype AS ENUM ( -- The system is accessed as a named user. 'user', -- The system is accessed by a system service or similar. 'system', -- The system is accessed anonymously. 'anonymous' ); CREATE TABLE accesstokens ( id SERIAL PRIMARY KEY, -- The type of access granted by this access token. access_type systemaccesstype NOT NULL DEFAULT 'user', -- The user (when access_type='user') or NULL. uid INTEGER REFERENCES users ON DELETE CASCADE, -- First part of access token ("username"). part1 VARCHAR(32) NOT NULL, -- Second part of access token ("password"). part2 VARCHAR(32) NOT NULL, -- Access token title. 
title VARCHAR(256), UNIQUE (part1, part2), CONSTRAINT valid_user CHECK ((access_type='user' AND uid IS NOT NULL) OR (access_type!='user' AND uid IS NULL)) ); CREATE TYPE accesscontrolrule AS ENUM ( 'allow', 'deny' ); CREATE TABLE accesscontrolprofiles ( id SERIAL PRIMARY KEY, title TEXT, -- Access token that this profile belongs to. access_token INTEGER REFERENCES accesstokens ON DELETE CASCADE, http accesscontrolrule NOT NULL DEFAULT 'allow', repositories accesscontrolrule NOT NULL DEFAULT 'allow', extensions accesscontrolrule NOT NULL DEFAULT 'allow', UNIQUE (access_token) ); CREATE TYPE httprequestmethod AS ENUM ( 'GET', 'HEAD', 'OPTIONS', 'POST', 'PUT', 'DELETE' ); -- Exceptions for HTTP requests. CREATE TABLE accesscontrol_http ( id SERIAL PRIMARY KEY, -- The profile this exception belongs to. profile INTEGER NOT NULL REFERENCES accesscontrolprofiles ON DELETE CASCADE, -- HTTP request method. NULL means "all methods". request_method httprequestmethod, -- Python regular expression that must match the entire path. NULL means -- "all paths". path_pattern TEXT ); CREATE INDEX accesscontrol_http_profile ON accesscontrol_http (profile); CREATE TABLE useraccesscontrolprofiles ( -- The type of access that is controlled. access_type systemaccesstype NOT NULL DEFAULT 'user', -- The user (when access_type='user') or NULL. If access_type='user' and -- this is NULL, then this is the default profile association. uid INTEGER REFERENCES users, -- Access control profile. profile INTEGER NOT NULL REFERENCES accesscontrolprofiles ON DELETE CASCADE, CONSTRAINT valid_user CHECK (access_type='user' OR uid IS NULL) ); CREATE INDEX useraccesscontrolprofiles_uid ON useraccesscontrolprofiles (uid); CREATE TABLE labeledaccesscontrolprofiles ( -- Authentication labels from user authentication, typically indicating some -- type of group memberships. Labels should be sorted lexicographically and -- separated by pipe ('|') characters. 
labels VARCHAR(256) PRIMARY KEY, -- Access control profile. profile INTEGER NOT NULL REFERENCES accesscontrolprofiles ON DELETE CASCADE ); ================================================ FILE: installation/database.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import tempfile import shutil import os import time import errno import subprocess import installation user_created = False database_created = False language_created = False def psql_import(sql_file, as_user=None): if as_user is None: as_user = installation.system.username temp_file = tempfile.mkstemp()[1] shutil.copy(os.path.join(installation.root_dir, sql_file), temp_file) # Make sure file is readable by postgres user os.chmod(temp_file, 0644) subprocess.check_output( ["su", "-s", "/bin/sh", "-c", "psql -v ON_ERROR_STOP=1 -f %s" % temp_file, as_user]) os.unlink(temp_file) def add_arguments(mode, parser): if mode == "upgrade": parser.add_argument("--backup-database", dest="database_backup", action="store_const", const=True, help="backup database to default location without asking") parser.add_argument("--no-backup-database", dest="database_backup", action="store_const", const=False, help="do not backup database before upgrading") def prepare(mode, arguments, data): if mode == "upgrade": default_path = os.path.join(data["installation.paths.data_dir"], "backups", 
time.strftime("%Y%m%d_%H%M.dump", time.localtime())) if arguments.database_backup is False: backup_database = False elif arguments.database_backup is True: backup_database = True backup_path = default_path else: if installation.migrate.will_modify_dbschema(data): print """ The database schema will be modified by the upgrade. Creating a backup of the database first is strongly recommended. """ default_backup = True else: default_backup = False if installation.input.yes_or_no("Do you want to create a backup of the database?", default=default_backup): backup_database = True backup_path = installation.input.string("Where should the backup be stored?", default=default_path) else: backup_database = False if backup_database: try: os.makedirs(os.path.dirname(backup_path), 0750) except OSError as error: if error.errno == errno.EEXIST: pass else: raise print print "Dumping database ..." with open(backup_path, "w") as output_file: subprocess.check_call( ["su", "-s", "/bin/sh", "-c", "pg_dump -Fc critic", data["installation.system.username"]], stdout=output_file) data["installation.database.driver"] = "postgresql" data["installation.database.parameters"] = { "database": "critic", "user": data["installation.system.username"] } return True SCHEMA_FILES = [ # No dependencies. "installation/data/dbschema.base.sql", "installation/data/dbschema.users.sql", # Depends on: base[files]. "installation/data/dbschema.git.sql", # Depends on: users. "installation/data/dbschema.news.sql", # Depends on: git, users. "installation/data/dbschema.trackedbranches.sql", # Depends on: base[files], git. "installation/data/dbschema.changesets.sql", # Depends on: git, users. "installation/data/dbschema.filters.sql", # Depends on: git, users, filters. "installation/data/dbschema.preferences.sql", # Depends on: base[files], git, users, changesets. "installation/data/dbschema.reviews.sql", # Depends on: base[files], git, users, reviews. 
"installation/data/dbschema.comments.sql", # Depends on: base[files], git, users, reviews. "installation/data/dbschema.extensions.sql", ] PGSQL_FILES = ["installation/data/comments.pgsql"] def install(data): global user_created, database_created, language_created postgresql_version_output = subprocess.check_output( [installation.prereqs.psql.path, "--version"]) postgresql_version = postgresql_version_output.splitlines()[0].split()[-1] postgresql_version_components = postgresql_version.split(".") postgresql_major = postgresql_version_components[0] postgresql_minor = postgresql_version_components[1] if postgresql_major < 9 or (postgresql_major == 9 and postgresql_minor < 1): print print """\ Unsupported PostgreSQL version: %s ERROR: Critic requires PostgreSQL 9.1.x or later! """ % postgresql_version return False print "Creating database ..." # Several subsequent commands will run as Critic system user or "postgres" # user, and these users typically don't have read access to the installation # 'root_dir', so set cwd to something that Critic system / "postgres" users # has access to. with installation.utils.temporary_cwd(): subprocess.check_output(["su", "-c", "psql -v ON_ERROR_STOP=1 -c 'CREATE USER \"%s\";'" % installation.system.username, "postgres"]) user_created = True subprocess.check_output(["su", "-c", "psql -v ON_ERROR_STOP=1 -c 'CREATE DATABASE \"critic\";'", "postgres"]) database_created = True try: subprocess.check_output(["su", "-c", "createlang plpgsql critic", "postgres"], stderr=subprocess.STDOUT) language_created = True except subprocess.CalledProcessError: # The 'createlang' command fails if the language is already enabled # in the database, and we want to ignore such failures. It might # also fail for other reasons, that we really don't mean to ignore, # but in that case importing the *.pgsql files below would fail, # since they define PL/pgSQL functions. 
pass subprocess.check_output(["su", "-c", "psql -v ON_ERROR_STOP=1 -c 'GRANT ALL ON DATABASE \"critic\" TO \"%s\";'" % installation.system.username, "postgres"]) for schema_file in SCHEMA_FILES: psql_import(schema_file) for pgsql_file in PGSQL_FILES: psql_import(pgsql_file) import psycopg2 def adapt(value): return psycopg2.extensions.adapt(value).getquoted() if installation.config.access_scheme in ("http", "https"): anonymous_scheme = authenticated_scheme = installation.config.access_scheme else: anonymous_scheme = "http" authenticated_scheme = "https" add_systemidentity_query = ( """INSERT INTO systemidentities (key, name, anonymous_scheme, authenticated_scheme, hostname, description, installed_sha1) VALUES ('main', 'main', %s, %s, %s, 'Main', %s);""" % (adapt(anonymous_scheme), adapt(authenticated_scheme), adapt(installation.system.hostname), adapt(data["sha1"]))) installation.process.check_input( ["su", "-s", "/bin/sh", "-c", "psql -q -v ON_ERROR_STOP=1 -f -", installation.system.username], stdin=add_systemidentity_query) return True def upgrade(arguments, data): git = data["installation.prereqs.git"] old_sha1 = data["sha1"] new_sha1 = installation.utils.run_git([git, "rev-parse", "HEAD"], cwd=installation.root_dir).strip() for pgsql_file in PGSQL_FILES: old_file_sha1 = installation.utils.get_file_sha1( git, old_sha1, pgsql_file) new_file_sha1 = installation.utils.get_file_sha1( git, new_sha1, pgsql_file) if old_file_sha1 == new_file_sha1: continue with installation.utils.temporary_cwd(): # We assume that these files use CREATE OR REPLACE syntax, so that # we can simply re-import them when they change, and they'll update. # If they need more than that to update (for instance if a function # is removed) we'll need to use a migration script for that. 
print "Reloading: %s" % pgsql_file if not arguments.dry_run: psql_import(pgsql_file) return True def undo(): if language_created: subprocess.check_output(["su", "-c", "droplang plpgsql critic", "postgres"]) if database_created: subprocess.check_output(["su", "-c", "psql -v ON_ERROR_STOP=1 -c 'DROP DATABASE \"critic\";'", "postgres"]) if user_created: subprocess.check_output(["su", "-c", "psql -v ON_ERROR_STOP=1 -c 'DROP USER \"%s\";'" % installation.system.username, "postgres"]) ================================================ FILE: installation/extensions.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
def prepare(mode, arguments, data): data["installation.extensions.enabled"] = False data["installation.extensions.critic_v8_jsshell"] = "NOT_INSTALLED" data["installation.extensions.default_flavor"] = "js/v8" if mode == "upgrade": import configuration data["installation.extensions.enabled"] = \ configuration.extensions.ENABLED try: data["installation.extensions.critic_v8_jsshell"] = \ configuration.extensions.FLAVORS["js/v8"]["executable"] except (KeyError, AttributeError): pass try: data["installation.extensions.default_flavor"] = \ configuration.extensions.DEFAULT_FLAVOR except AttributeError: pass return True ================================================ FILE: installation/externals/.gitignore ================================================ depot_tools/ ================================================ FILE: installation/externals/MIT-LICENSE.Chosen.md ================================================ #### Chosen - by Patrick Filler for [Harvest](http://getharvest.com) - Copyright (c) 2011-2013 by Harvest Available for use under the [MIT License](http://en.wikipedia.org/wiki/MIT_License) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: installation/externals/MIT-LICENSE.jQuery.txt ================================================ Copyright 2012 jQuery Foundation and other contributors http://jquery.com/ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: installation/files.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import sys
import os
import shutil
import errno
import py_compile

import installation

# Undo/finish bookkeeping for everything this module creates or renames.
created_dir = []
created_file = []
renamed = []
copied_files = 0
modified_files = 0
sources_modified = False
resources_modified = False

def compile_file(filename):
    """Byte-compile an installed .py file as the Critic system user.

    Returns True on success (or for non-Python files); False if compilation
    failed, after printing the error.  The generated .pyc is recorded in
    created_file so undo() can remove it.
    """
    global created_file
    if not filename.endswith(".py"):
        return True
    try:
        path = os.path.join(installation.paths.install_dir, filename)
        # Compile as the system user so the .pyc gets the right ownership.
        with installation.utils.as_critic_system_user():
            py_compile.compile(path, doraise=True)
    except py_compile.PyCompileError as error:
        print """
ERROR: Failed to compile %s:\n%s
""" % (filename, error)
        return False
    else:
        created_file.append(path + "c")
        return True

def copyfile(source, destination):
    """Copy |source| to |destination|, preserving symlinks as symlinks."""
    if os.path.islink(source):
        if os.path.lexists(destination):
            os.unlink(destination)
        os.symlink(os.readlink(source), destination)
    else:
        shutil.copyfile(source, destination)

def skip(path):
    """Return True if |path| should not be installed.

    Unit test files are skipped unless this is a testing installation.
    """
    filename = os.path.basename(path)
    if filename == "unittest.py" or filename.endswith("_unittest.py"):
        return not installation.config.is_testing
    return False

def install(data):
    """Copy the source tree (src/) into the installation directory.

    Returns False if any copied Python file failed to byte-compile.
    """
    source_dir = os.path.join(installation.root_dir, "src")
    target_dir = installation.paths.install_dir

    # Note: this is an array since it's modified in a nested scope.
    compilation_failed = []

    def copy(path):
        # Copy one entry; returns True for directories (so the caller
        # recurses into them), False when byte-compilation failed.
        global copied_files

        source = os.path.join(source_dir, path)
        target = os.path.join(target_dir, path)

        if os.path.isdir(source):
            os.mkdir(target, 0755)
            os.chown(target, installation.system.uid, installation.system.gid)
            created_dir.append(target)
            return True
        else:
            copyfile(source, target)
            created_file.append(target)
            if not os.path.islink(target):
                # Hook scripts must be executable; everything else read-only.
                if path.startswith("hooks/"):
                    mode = 0755
                else:
                    mode = 0644
                os.chmod(target, mode)
            os.lchown(target, installation.system.uid, installation.system.gid)
            copied_files += 1
            if not compile_file(path):
                compilation_failed.append(path)
                return False

    def process(path=""):
        # Recursively walk the source tree, copying as we go.
        for entry in os.listdir(os.path.join(source_dir, path)):
            name = os.path.join(path, entry)
            if skip(name):
                continue
            if copy(name):
                process(name)

    process()

    # Make the freshly installed tree importable for later install steps.
    sys.path.insert(0, installation.paths.install_dir)

    if compilation_failed:
        return False

    print "Copied %d files into %s ..." % (copied_files, target_dir)

    return True

def upgrade(arguments, data):
    """Update the installed tree to match the new source tree.

    Compares the installed commit (data["sha1"]) with HEAD, and for each
    changed path installs/updates/removes the target file, prompting the
    administrator when a file was locally modified.
    """
    source_dir = installation.root_dir
    target_dir = data["installation.paths.install_dir"]

    # Note: this is an array since it's modified in a nested scope.
    compilation_failed = []

    uid = installation.system.uid
    gid = installation.system.gid

    def chown(directory):
        # Normalize ownership of the installed tree; drop stale .pyo files.
        os.chown(directory, uid, gid)
        for name in os.listdir(directory):
            path = os.path.join(directory, name)
            if os.path.isdir(path):
                chown(path)
            elif path.endswith(".pyc"):
                os.chown(path, uid, gid)
            elif path.endswith(".pyo"):
                os.unlink(path)

    chown(target_dir)

    git = data["installation.prereqs.git"]

    old_sha1 = data["sha1"]
    new_sha1 = installation.utils.run_git([git, "rev-parse", "HEAD"],
                                          cwd=installation.root_dir).strip()

    # Older installations didn't have a top-level src/ directory; detect that
    # so old paths can be mapped correctly.
    old_has_src = installation.utils.get_tree_sha1(git, old_sha1, "src")

    def isResource(path):
        return path.endswith(".css") or path.endswith(".js") \
            or path.endswith(".txt")

    def remove(old_source_path, target_path):
        # Remove an installed file that no longer exists in the new tree,
        # prompting if it was locally modified since installation.
        full_target_path = os.path.join(target_dir, target_path)
        backup_path = os.path.join(os.path.dirname(full_target_path),
                                   "_" + os.path.basename(target_path))

        if not os.path.isfile(full_target_path):
            return

        old_file_sha1 = installation.utils.get_file_sha1(
            git, old_sha1, old_source_path)
        current_file_sha1 = installation.utils.hash_file(
            git, full_target_path)

        assert old_file_sha1 is not None

        if old_file_sha1 != current_file_sha1:
            # Locally modified: ask before removing.
            def generateVersion(label, path):
                if label == "installed":
                    source = installation.utils.run_git(
                        [git, "cat-file", "blob", old_file_sha1],
                        cwd=installation.root_dir)
                    with open(path, "w") as target:
                        target.write(source)

            update_query = installation.utils.UpdateModifiedFile(
                arguments,
                message="""\
A source file is about to be removed, but the existing source file
appears to have been edited since it was installed.

  Installed version: %(installed)s
  Current version  : %(current)s

Not removing the file can cause unpredictable results.
""",
                versions={ "installed": full_target_path + ".org",
                           "current": full_target_path },
                options=[ ("r", "remove the file"),
                          ("k", "keep the file"),
                          ("d", ("installed", "current")) ],
                generateVersion=generateVersion)

            if update_query.prompt() == "r":
                remove_file = True
            else:
                remove_file = False
        else:
            remove_file = True

        if remove_file:
            print "Removing file: %s" % target_path
            if not arguments.dry_run:
                # Rename rather than delete so undo()/finish() can restore
                # or discard the backup later.
                os.rename(full_target_path, backup_path)
                renamed.append((full_target_path, backup_path))
                if target_path.endswith(".py"):
                    if os.path.isfile(full_target_path + "c"):
                        os.unlink(full_target_path + "c")
                    if os.path.isfile(full_target_path + "o"):
                        os.unlink(full_target_path + "o")

    def copy(old_source_path, new_source_path, target_path):
        # Install or update one file; returns False to abort the upgrade.
        global copied_files, modified_files
        global resources_modified, sources_modified

        full_source_path = os.path.join(source_dir, new_source_path)
        full_target_path = os.path.join(target_dir, target_path)
        backup_path = os.path.join(os.path.dirname(full_target_path),
                                   "_" + os.path.basename(target_path))

        if skip(new_source_path) or not os.path.isfile(full_source_path):
            # Gone from (or skipped in) the new tree: remove the installed copy.
            remove(old_source_path, target_path)
            return

        if os.path.isfile(full_source_path) \
                and os.path.isdir(full_target_path):
            print """
The directory %s is about to be deleted because a file is
about to be installed in its place.  Please make sure it doesn't contain
anything that shouldn't be deleted.
""" % full_target_path

            if not installation.input.yes_or_no(
                    "Do you want to delete the directory?", default=False):
                return False

            print "Removing directory: %s" % target_path
            if not arguments.dry_run:
                os.rename(full_target_path, backup_path)
                renamed.append((full_target_path, backup_path))

        if not os.path.isfile(full_target_path):
            print "New file: %s" % target_path
            if not arguments.dry_run:
                try:
                    os.makedirs(os.path.dirname(full_target_path), 0755)
                except OSError as error:
                    if error.errno == errno.EEXIST:
                        pass
                    else:
                        raise
                copyfile(full_source_path, full_target_path)
                created_file.append(full_target_path)
            copied_files += 1
            if isResource(target_path):
                resources_modified = True
            else:
                sources_modified = True
        else:
            old_file_sha1 = installation.utils.get_file_sha1(
                git, old_sha1, old_source_path)
            new_file_sha1 = installation.utils.get_file_sha1(
                git, new_sha1, new_source_path)

            assert old_file_sha1 is not None
            assert new_file_sha1 is not None

            current_file_sha1 = installation.utils.hash_file(
                git, full_target_path)

            if current_file_sha1 != new_file_sha1:
                if current_file_sha1 != old_file_sha1:
                    # Locally modified and upstream changed: three-way prompt.
                    def generateVersion(label, path):
                        if label == "installed":
                            source = installation.utils.run_git(
                                [git, "cat-file", "blob", old_file_sha1],
                                cwd=installation.root_dir)
                            with open(full_target_path + ".org", "w") as target:
                                target.write(source)
                        elif label == "updated":
                            copyfile(full_source_path,
                                     full_target_path + ".new")

                    update_query = installation.utils.UpdateModifiedFile(
                        arguments,
                        message="""\
A source file is about to be updated, but the existing source file
appears to have been edited since it was installed.

  Installed version: %(installed)s
  Current version  : %(current)s
  Updated version  : %(updated)s

Not installing the updated version can cause unpredictable results.
""",
                        versions={ "installed": full_target_path + ".org",
                                   "current": full_target_path,
                                   "updated": full_target_path + ".new" },
                        options=[ ("i", "install the updated version"),
                                  ("k", "keep the current version"),
                                  ("do", ("installed", "current")),
                                  ("dn", ("current", "updated")) ],
                        generateVersion=generateVersion)

                    install_file = update_query.prompt() == "i"
                else:
                    install_file = True

                if install_file:
                    print "Updated file: %s" % target_path
                    if not arguments.dry_run:
                        os.rename(full_target_path, backup_path)
                        renamed.append((full_target_path, backup_path))
                        copyfile(full_source_path, full_target_path)
                        created_file.append(full_target_path)
                        if not compile_file(target_path):
                            compilation_failed.append(target_path)
                    modified_files += 1
                    if isResource(target_path):
                        resources_modified = True
                    else:
                        sources_modified = True

        if target_path.startswith("hooks/"):
            mode = 0755
        else:
            mode = 0644

        if not arguments.dry_run:
            if not os.path.islink(full_target_path):
                os.chmod(full_target_path, mode)
            os.lchown(full_target_path, installation.system.uid,
                      installation.system.gid)

    # Ask git which paths changed between the installed and new versions.
    differences = installation.utils.run_git(
        [git, "diff", "--numstat", "%s..%s" % (old_sha1, new_sha1)],
        cwd=installation.root_dir)

    changed_paths = set()

    for line in differences.splitlines():
        _, _, path = map(str.strip, line.split(None, 2))
        if path.startswith("src/"):
            changed_paths.add(path[len("src/"):])
        elif not old_has_src:
            # Pre-src/ layout: only consider paths that are actually installed.
            if os.path.isfile(os.path.join(target_dir, path)):
                changed_paths.add(path)

    for path in sorted(changed_paths):
        if old_has_src:
            old_source_path = os.path.join("src", path)
        else:
            old_source_path = path
        if copy(old_source_path=old_source_path,
                new_source_path=os.path.join("src", path),
                target_path=path) is False:
            return False

    if compilation_failed:
        return False

    if copied_files == 0 and modified_files == 0:
        print "No new or modified source files."
return True def undo(): map(os.unlink, reversed(created_file)) map(os.rmdir, reversed(created_dir)) for target, backup in renamed: os.rename(backup, target) def finish(mode, arguments, data): for target, backup in renamed: if os.path.isdir(backup): shutil.rmtree(backup) else: os.unlink(backup) ================================================ FILE: installation/git.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import os import subprocess import installation def install(data): socket_path = os.path.join(installation.paths.run_dir, "main", "sockets", "githook.unix") subprocess.check_call([installation.prereqs.git.path, "config", "--system", "critic.socket", socket_path]) return True ================================================ FILE: installation/httpd.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the
# License for the specific language governing permissions and limitations under
# the License.

import os
import re
import subprocess
import time

import installation

# Set by the installer driver before these hooks run.
arguments = None
# The Service instance (Apache or nginx) selected for this system.
instance = None

# Undo bookkeeping for configuration files written/renamed by this module.
created_file = []
renamed = []

def backup_path(path):
    """Return the "_"-prefixed sibling path used for backups of |path|."""
    return os.path.join(os.path.dirname(path), "_" + os.path.basename(path))

def undoable_remove(path):
    """Move |path| aside to its backup path and record it for undo()."""
    os.rename(path, backup_path(path))
    renamed.append((path, backup_path(path)))

def process_configuration_file(
        mode, data, template_path, target_path, message=None):
    """Render |template_path| with |data| and write it to |target_path|.

    In "install" mode the target is always written.  Otherwise the rendered
    text is compared against the existing file, and if it differs the
    administrator is asked (via UpdateModifiedFile, with |message| as the
    prompt text) whether to install the updated version.
    """
    global created_file, renamed

    with open(template_path, "r") as template_file:
        template = template_file.read().decode("utf-8")

    source = template % data

    if mode == "install":
        write_target = True
    else:
        with open(target_path, "r") as target_file:
            target = target_file.read().decode("utf-8")

        if source != target:
            def generateVersion(label, path):
                if label == "updated":
                    with open(path, "w") as target:
                        target.write(source.encode("utf-8"))

            update_query = installation.utils.UpdateModifiedFile(
                arguments,
                message=message,
                versions={ "current": target_path,
                           "updated": target_path + ".new" },
                options=[ ("i", "install the updated version"),
                          ("k", "keep the current version"),
                          ("d", ("current", "updated")) ],
                generateVersion=generateVersion)

            write_target = update_query.prompt() == "i"
        else:
            write_target = False

        if write_target:
            if not getattr(arguments, "dry_run", False):
                # Keep the old file around so undo() can restore it.
                undoable_remove(target_path)
            print "Updated file: %s" % target_path

    if write_target and not getattr(arguments, "dry_run", False):
        with open(target_path, "w") as target_file:
            created_file.append(target_path)
            os.chmod(target_path, 0640)
            target_file.write(source.encode("utf-8"))

class Service(object):
    """Base class for the web-server services Critic can sit behind.

    Subclasses (Apache, nginx) define display_name/service_name and override
    the install/upgrade hooks.
    """

    def __init__(self):
        self.stopped = False

    def service_command(self, command, errors_are_fatal):
        # Run `service <name> <command>`.  Returns False only when the
        # command failed, errors_are_fatal is set, and the administrator
        # chose to abort.
        print
        try:
            subprocess.check_call(["service", self.service_name, command])
        except subprocess.CalledProcessError:
            print "WARNING: The %s service failed to %s." \
                % (self.display_name, command)
            if errors_are_fatal:
                print """
You can now either abort this Critic installation/upgrade, or you can go ahead
anyway, fix the configuration problem manually (now or later), and then make
sure the %(name)s service is running yourself using the command

  service %(name)s (start|restart)

Note that if you don't abort, the Critic system will most likely not be
accessible until the configuration problem has been fixed.
""" % { "name": self.service_name }
                return not installation.input.yes_or_no(
                    "Do you want to abort this Critic installation/upgrade?")
        return True

    def start(self, errors_are_fatal=True):
        print
        if not self.service_command("start", errors_are_fatal):
            return False
        self.stopped = False
        return True

    def stop(self, errors_are_fatal=False):
        print
        if not self.service_command("stop", errors_are_fatal):
            return False
        self.stopped = True
        return True

    def restart(self):
        print
        if not self.stop():
            return False
        return self.start()

    def prepare(self, mode, arguments, data):
        return True

    def install(self, data):
        return True

    def upgrade(self, arguments, data):
        return True

    def undo(self):
        # If we stopped the service during a failed operation, bring it back.
        if self.stopped:
            self.start()

class Apache(Service):
    display_name = "Apache"
    service_name = "apache2"
    etc_dir = "/etc/apache2"
    template_dir = "installation/templates/apache"

    def __init__(self):
        self.template_path = os.path.join(
            installation.root_dir, self.template_dir,
            "site.%s" % installation.config.access_scheme)
        self.site_enabled = False
        self.default_site_disabled = False

    def get_version(self):
        # Parse "Server version: Apache/X.Y.Z" from `apache2ctl -v`;
        # returns None if the output is unrecognized.
        output = subprocess.check_output(
            [installation.prereqs.apache2ctl.path, "-v"])
        match = re.search("Server version:\s*Apache/([^\s\n]*)", output, re.M)
        if not match:
            return None
        return match.group(1)

    def prepare(self, mode, arguments, data):
        if installation.config.auth_mode == "critic":
            pass_auth = "On"
        else:
            pass_auth = "Off"
        data["installation.apache.pass_auth"] = pass_auth
        return True

    def restart(self):
        if not self.stop():
            return False
        # Give Apache a moment to release its sockets before starting again.
        time.sleep(1)
        return self.start()

    def setup(self):
        # Apache 2.2 and 2.4 use different site file naming conventions.
        version = self.get_version()
        if version and version.startswith("2.2."):
            self.site_suffix = ""
            self.default_site = "default"
        else:
            self.site_suffix = ".conf"
            self.default_site = "000-default"
        self.target_path = os.path.join(
            self.etc_dir, "sites-available",
            "critic-main%s" % self.site_suffix)

    def install(self, data):
        self.setup()
        process_configuration_file(
            "install", data, self.template_path, self.target_path)
        subprocess.check_call([installation.prereqs.a2enmod.path, "expires"])
        subprocess.check_call([installation.prereqs.a2enmod.path, "rewrite"])
        subprocess.check_call([installation.prereqs.a2enmod.path, "wsgi"])
        subprocess.check_call([installation.prereqs.a2ensite.path,
                               "critic-main"])
        self.site_enabled = True
        output = subprocess.check_output(
            [installation.prereqs.a2dissite.path, self.default_site],
            env={ "LANG": "C" })
        if ("Site %s disabled." % self.default_site) in output:
            self.default_site_disabled = True
        return self.restart()

    def upgrade(self, arguments, data):
        self.setup()
        # If the configuration file doesn't exist, we're probably migrating the
        # system from one web server to another, so run the whole installation
        # procedure instead.
        # NOTE(review): 'install' is unqualified here; presumably a
        # module-level install() wrapper exists elsewhere in this file —
        # verify this isn't a NameError / shouldn't be self.install(data).
        if not os.path.isfile(self.target_path):
            return install(data)
        process_configuration_file(
            "upgrade", data, self.template_path, self.target_path, """\
The Apache site definition is about to be updated.  Please check
that no local modifications are being overwritten.

  Current version: %(current)s
  Updated version: %(updated)s

Please note that if the modifications are not installed, the system
is likely to break.
""")
        return True

    def undo(self):
        if self.site_enabled:
            subprocess.check_call(
                [installation.prereqs.a2dissite.path, "critic-main"])
            if self.default_site_disabled:
                subprocess.check_call(
                    [installation.prereqs.a2ensite.path, self.default_site])
            self.restart()

class nginx(Service):
    display_name = service_name = "nginx"
    etc_dir = "/etc/nginx"
    template_dir = "installation/templates/nginx"

    def __init__(self):
        self.site_enabled = False
        self.default_site_disabled = False
        self.template_path = os.path.join(
            installation.root_dir, self.template_dir,
            "site.%s" % installation.config.access_scheme)
        self.target_path = os.path.join(
            self.etc_dir, "sites-available/critic-main")
        self.enabled_path = os.path.join(
            self.etc_dir, "sites-enabled/critic-main")
        self.default_site_path = os.path.join(
            self.etc_dir, "sites-enabled/default")

    def install(self, data):
        process_configuration_file(
            "install", data, self.template_path, self.target_path)
        # Enable our site and disable the distribution default site.
        os.symlink(self.target_path, self.enabled_path)
        self.site_enabled = True
        if os.path.islink(self.default_site_path):
            os.unlink(self.default_site_path)
            self.default_site_disabled = True
        return self.restart()

    def upgrade(self, arguments, data):
        # If the configuration file doesn't exist, we're probably migrating the
        # system from one web server to another, so run the whole installation
        # procedure instead.
        # NOTE(review): unqualified 'install' — see the matching note in
        # Apache.upgrade above; verify against the rest of this module.
        if not os.path.isfile(self.target_path):
            return install(data)
        process_configuration_file(
            "upgrade", data, self.template_path, self.target_path, """\
The nginx site definition is about to be updated.  Please check
that no local modifications are being overwritten.

  Current version: %(current)s
  Updated version: %(updated)s

Please note that if the modifications are not installed, the system
is likely to break.
""") return True def undo(self): if self.site_enabled: os.unlink(self.enabled_path) if self.default_site_disabled: os.symlink( os.path.join(self.etc_dir, "sites-available/default"), self.default_site_path) self.restart() class uWSGIBackend(Service): display_name = "uWSGI" service_name = "uwsgi" etc_dir = "/etc/uwsgi" template_dir = "installation/templates/uwsgi" def __init__(self): self.app_enabled = False self.template_path = os.path.join( installation.root_dir, self.template_dir, "app.backend.ini") self.target_path = os.path.join( self.etc_dir, "apps-available/critic-backend-main.ini") self.enabled_path = os.path.join( self.etc_dir, "apps-enabled/critic-backend-main.ini") def install(self, data): process_configuration_file( "install", data, self.template_path, self.target_path) os.symlink(self.target_path, self.enabled_path) self.app_enabled = True return self.restart() def upgrade(self, arguments, data): # If the configuration file doesn't exist, we're probably migrating the # system from one web server to another, so run the whole installation # procedure instead. if not os.path.isfile(self.target_path): return install(data) process_configuration_file( "upgrade", data, self.template_path, self.target_path, """\ The uWSGI back-end app definition is about to be updated. Please check that no local modifications are being overwritten. Current version: %(current)s Updated version: %(updated)s Please note that if the modifications are not installed, the system is likely to break. 
""") return True def undo(self): if self.app_enabled: os.unlink(self.enabled_path) self.restart() class uWSGIFrontend(Service): display_name = "uWSGI" service_name = "uwsgi" etc_dir = "/etc/uwsgi" template_dir = "installation/templates/uwsgi" def __init__(self): self.app_enabled = False self.template_path = os.path.join( installation.root_dir, self.template_dir, "app.frontend.ini.%s" % installation.config.access_scheme) self.target_path = os.path.join( self.etc_dir, "apps-available/critic-frontend-main.ini") self.enabled_path = os.path.join( self.etc_dir, "apps-enabled/critic-frontend-main.ini") def install(self, data): process_configuration_file( "install", data, self.template_path, self.target_path) os.symlink(self.target_path, self.enabled_path) self.app_enabled = True return self.restart() def upgrade(self, arguments, data): # If the configuration file doesn't exist, we're probably migrating the # system from one web server to another, so run the whole installation # procedure instead. if not os.path.isfile(self.target_path): return install(data) process_configuration_file( "upgrade", data, self.template_path, self.target_path, """\ The uWSGI front-end app definition is about to be updated. Please check that no local modifications are being overwritten. Current version: %(current)s Updated version: %(updated)s Please note that if the modifications are not installed, the system is likely to break. 
""") return True def undo(self): if self.app_enabled: os.unlink(self.enabled_path) self.restart() class Multiple(): def __init__(self, *services): self.services = services def prepare(self, *args): return all(service.prepare(*args) for service in self.services) def install(self, *args): return all(service.install(*args) for service in self.services) def upgrade(self, *args): return all(service.upgrade(*args) for service in self.services) def undo(self): for service in self.services: service.undo() def start(self): return all(service.start() for service in self.services) def stop(self): return all(service.stop() for service in self.services) def restart(self): return all(service.restart() for service in self.services) def prepare(mode, args, data): global arguments, instance arguments = args data["installation.httpd.username"] = "www-data" data["installation.httpd.groupname"] = "www-data" if installation.config.web_server_integration == "apache": instance = Apache() backend_service = Apache.service_name elif installation.config.web_server_integration == "uwsgi": instance = Multiple(uWSGIFrontend(), uWSGIBackend()) backend_service = uWSGIBackend.service_name elif installation.config.web_server_integration == "nginx+uwsgi": instance = Multiple(nginx(), uWSGIBackend()) backend_service = uWSGIBackend.service_name else: return True data["installation.httpd.backend_service"] = backend_service return instance.prepare(mode, arguments, data) def install(data): if instance: return instance.install(data) return True def upgrade(arguments, data): if instance: return instance.upgrade(arguments, data) return True def undo(): if instance: instance.undo() map(os.unlink, created_file) for target, backup in renamed: os.rename(backup, target) def finish(mode, arguments, data): for target, backup in renamed: os.unlink(backup) def start(): if instance: return instance.start() return True def stop(): if instance: return instance.stop() return True 
================================================
FILE: installation/initd.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

# Management of the "critic-main" SysV init script (/etc/init.d/critic-main).

import os
import os.path
import pwd
import grp
import subprocess

import installation

created_file = []               # files created; deleted again by undo()
renamed = []                    # (original, backup) pairs; restored by undo()
rclinks_added = False           # whether update-rc.d links were added
servicemanager_started = False  # whether this run started the service
servicemanager_stopped = False  # whether this run stopped the service

def stop(identity="main"):
    # Stop the "critic-<identity>" service; recorded so undo() can restart it.
    global servicemanager_stopped
    servicemanager_stopped = True
    print
    try:
        subprocess.check_call(["service", "critic-%s" % identity, "stop"])
    except subprocess.CalledProcessError:
        return False
    return True

def start(identity="main"):
    # Start the "critic-<identity>" service; recorded so undo() can stop it.
    print
    try:
        subprocess.check_call(["service", "critic-%s" % identity, "start"])
    except subprocess.CalledProcessError:
        return False
    global servicemanager_started
    servicemanager_started = True
    return True

def restart(identity="main"):
    print
    try:
        subprocess.check_call(["service", "critic-%s" % identity, "restart"])
    except subprocess.CalledProcessError:
        return False
    return True

def install(data):
    # Render the init-script template into /etc/init.d/critic-main, register
    # it with update-rc.d, and start the service.
    global servicemanager_started, rclinks_added

    source_path = os.path.join(installation.root_dir, "installation", "templates", "initd")
    target_path = os.path.join("/etc", "init.d", "critic-main")

    with open(target_path, "w") as target:
        created_file.append(target_path)
        # Init scripts must be executable.
        os.chmod(target_path, 0755)
        os.chown(target_path, installation.system.uid, installation.system.gid)
        with open(source_path, "r") as source:
            target.write((source.read().decode("utf-8") % data).encode("utf-8"))

    subprocess.check_call(["update-rc.d", "critic-main", "defaults"])
    rclinks_added = True

    start()

    return True

def upgrade(arguments, data):
    # Re-render the init-script template and, if it differs from the installed
    # script, ask the user (via UpdateModifiedFile) whether to replace it.
    source_path = os.path.join(installation.root_dir, "installation", "templates", "initd")
    target_path = os.path.join("/etc", "init.d", "critic-main")
    backup_path = os.path.join(os.path.dirname(target_path), "_" + os.path.basename(target_path))

    source = open(source_path, "r").read().decode("utf-8") % data
    target = open(target_path, "r").read().decode("utf-8")

    system_uid = pwd.getpwnam(data["installation.system.username"]).pw_uid
    system_gid = grp.getgrnam(data["installation.system.groupname"]).gr_gid

    if source != target:
        def generateVersion(label, path):
            # Callback for UpdateModifiedFile: materialize the "updated"
            # version so the user can inspect/diff it.
            if label == "updated":
                with open(path, "w") as target:
                    target.write(source.encode("utf-8"))

        update_query = installation.utils.UpdateModifiedFile(
            arguments,
            message="""\
The SysV init script is about to be updated.  Please check that no local
modifications are being overwritten.

  Current version: %(current)s
  Updated version: %(updated)s

Please note that if the modifications are not installed, the system is likely
to break.
""",
            versions={ "current": target_path,
                       "updated": target_path + ".new" },
            options=[ ("i", "install the updated version"),
                      ("k", "keep the current version"),
                      ("d", ("current", "updated")) ],
            generateVersion=generateVersion)

        write_target = update_query.prompt() == "i"
    else:
        write_target = False

    if write_target:
        print "Updated file: %s" % target_path

        if not arguments.dry_run:
            os.rename(target_path, backup_path)
            renamed.append((target_path, backup_path))

            with open(target_path, "w") as target:
                created_file.append(target_path)
                os.chmod(target_path, 0755)
                os.chown(target_path, system_uid, system_gid)
                target.write(source.encode("utf-8"))

    return True

def undo():
    # Roll back everything this module did: restore the service's previous
    # running state, delete created files, restore backups, drop rc links.
    if servicemanager_started:
        stop()
    elif servicemanager_stopped:
        start()
    map(os.unlink, created_file)
    for target, backup in renamed:
        os.rename(backup, target)
    if rclinks_added:
        subprocess.check_call(["update-rc.d", "critic-main", "remove"])

def finish(mode, arguments, data):
    # Commit: the replaced originals are no longer needed.
    for target, backup in renamed:
        os.unlink(backup)


================================================
FILE: installation/input.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

# Wrappers around inpututils that support a non-interactive ("headless")
# installation mode: with |headless| set, prompts with a default are answered
# automatically, and prompts without one abort the installation.

import sys

import inpututils

headless = False

def yes_or_no(prompt, default=None):
    # Ask a yes/no question.  In headless mode, the default (which must be
    # provided) is echoed and returned without prompting.
    if headless:
        if default is None:
            print """
ERROR: yes/no input requested in headless mode!

Prompt: %s
""" % prompt
            sys.exit(1)
        else:
            print "%s %s" % (prompt, "y" if default else "n")
            return default
    return inpututils.yes_or_no(prompt, default)

def string(prompt, default=None, check=None):
    # Ask for a string.  In headless mode the default is used, but it must
    # still pass the |check| callback (if one is given).
    if headless:
        if default is None:
            print """
ERROR: string input requested in headless mode!

Prompt: %s
""" % prompt
            sys.exit(1)
        else:
            print "%s %s" % (prompt, default)
            if not check or inpututils.apply_check(check, default):
                return default
            else:
                sys.exit(1)
    return inpututils.string(prompt, default, check)

def password(prompt, default=None, twice=True):
    # Ask for a password.  In headless mode the default is used; it is never
    # echoed (masked as "****").
    if headless:
        if default is None:
            print """
ERROR: password input requested in headless mode!

Prompt: %s
""" % prompt
            sys.exit(1)
        else:
            print "%s %s" % (prompt, "****")
            return default
    return inpututils.password(prompt, default, twice)


================================================
FILE: installation/lifecycle.json
================================================
{
    "branch": "stable/1",
    "stable": true
}


================================================
FILE: installation/migrate.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import os import sys import json import installation def scripts_to_run(data): git = data["installation.prereqs.git"] old_sha1 = data["sha1"] performed_migrations = data.get("migrations", []) scripts = [] if os.path.exists("installation/migrations"): for script in os.listdir("installation/migrations"): if not script.endswith(".py"): continue if script in performed_migrations: continue script_path = os.path.join("installation/migrations", script) if installation.utils.get_file_sha1(git, old_sha1, script_path) is not None: # The migration script already existed when Critic was installed # and there's thus no point in running it now. continue date_added = installation.utils.get_initial_commit_date(git, script_path) scripts.append((date_added, script)) scripts.sort() scripts = [script for (date_added, script) in scripts] return scripts def will_modify_dbschema(data): for script in scripts_to_run(data): if script.startswith("dbschema."): return True return False def upgrade(arguments, data): if "migrations" not in data: data["migrations"] = [] for script in scripts_to_run(data): script_path = os.path.join("installation/migrations", script) print print "Running %s ..." % script if arguments.dry_run: continue env = os.environ.copy() # This is "/etc/critic/main", set by upgrade.py, or something else # if the --etc-dir/--identity arguments were used. 
env["PYTHONPATH"] = sys.path[0] + ":" + installation.root_dir installation.process.check_input([sys.executable, script_path, "--uid=%s" % installation.system.uid, "--gid=%d" % installation.system.gid], stdin=json.dumps(data), env=env) data["migrations"].append(script) return True ================================================ FILE: installation/migrations/dbschema.altertable.branches.add.archived.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import psycopg2 import argparse import os parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) import configuration db = psycopg2.connect(**configuration.database.PARAMETERS) cursor = db.cursor() try: # Check if the 'archived' column already exists. 
cursor.execute("SELECT archived FROM branches") except psycopg2.ProgrammingError: db.rollback() cursor.execute("""ALTER TABLE branches ADD archived BOOLEAN NOT NULL DEFAULT FALSE""") db.commit() db.close() ================================================ FILE: installation/migrations/dbschema.altertable.changesets.parent.dropnotnull.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import psycopg2 import json import argparse import os parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) data = json.load(sys.stdin) import configuration db = psycopg2.connect(**configuration.database.PARAMETERS) cursor = db.cursor() # This command doesn't fail if the column already doesn't have a NOT # NULL constraint, so no reason to catch errors or try to determine # whether the constraint is there. 
cursor.execute("ALTER TABLE changesets ALTER parent DROP NOT NULL") db.commit() db.close() ================================================ FILE: installation/migrations/dbschema.altertable.commentchainchanges.addressed_by.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import psycopg2 import json import argparse import os parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) data = json.load(sys.stdin) db = psycopg2.connect(database="critic") cursor = db.cursor() try: # Make sure the columns don't already exist. cursor.execute("SELECT from_addressed_by, to_addressed_by FROM commentchainchanges") # Above statement should have thrown a psycopg2.ProgrammingError, but it # didn't, so just exit. 
sys.exit(0) except psycopg2.ProgrammingError: db.rollback() except: raise cursor.execute("""ALTER TABLE commentchainchanges ADD from_addressed_by INTEGER REFERENCES commits, ADD to_addressed_by INTEGER REFERENCES commits""") db.commit() db.close() ================================================ FILE: installation/migrations/dbschema.altertable.commentchainchanges.drop.review.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import psycopg2 import json import argparse import os parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) data = json.load(sys.stdin) db = psycopg2.connect(database="critic") cursor = db.cursor() cursor.execute("""ALTER TABLE commentchainchanges DROP COLUMN IF EXISTS review CASCADE""") db.commit() db.close() ================================================ FILE: installation/migrations/dbschema.altertable.commentchainlines.drop.commit.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import psycopg2 import json import argparse import os parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) data = json.load(sys.stdin) db = psycopg2.connect(database="critic") cursor = db.cursor() try: # Check if the 'commit' column already doesn't exist. cursor.execute("SELECT commit FROM commentchainlines") except psycopg2.ProgrammingError: # Seems it doesn't, so just exit. sys.exit(0) cursor.execute("""ALTER TABLE commentchainlines DROP commit""") db.commit() db.close() ================================================ FILE: installation/migrations/dbschema.altertable.comments.time.setdefaultnow.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2016 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import sys import psycopg2 import json import argparse import os parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) data = json.load(sys.stdin) import configuration db = psycopg2.connect(**configuration.database.PARAMETERS) cursor = db.cursor() # This command doesn't fail if the column already has a DEFAULT, so no reason to # catch errors or try to determine whether the constraint is there. cursor.execute("ALTER TABLE comments ALTER time SET DEFAULT NOW()") db.commit() db.close() ================================================ FILE: installation/migrations/dbschema.altertable.previousreachable.rebase.ondeletecascade.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import psycopg2 import json import argparse import os parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) data = json.load(sys.stdin) db = psycopg2.connect(database="critic") cursor = db.cursor() # This command doesn't fail if the foreign key constraint already has # "on delete cascade", and there's really no reason to try to figure # if it has; easier to just drop it and re-add it. 
cursor.execute("""ALTER TABLE previousreachable DROP CONSTRAINT previousreachable_rebase_fkey, ADD FOREIGN KEY (rebase) REFERENCES reviewrebases (id) ON DELETE CASCADE""") db.commit() db.close() ================================================ FILE: installation/migrations/dbschema.altertable.repositories.drop.branch.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import psycopg2 import json import argparse import os parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) data = json.load(sys.stdin) db = psycopg2.connect(database="critic") cursor = db.cursor() try: # Check if the 'branch' column already doesn't exist. cursor.execute("SELECT branch FROM repositories") except psycopg2.ProgrammingError: # Seems it doesn't, so just exit. 
sys.exit(0) cursor.execute("""ALTER TABLE repositories DROP branch""") db.commit() db.close() ================================================ FILE: installation/migrations/dbschema.altertable.repositories.drop.relay.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import psycopg2 import argparse import os import shutil parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) import configuration db = psycopg2.connect(**configuration.database.PARAMETERS) cursor = db.cursor() try: # Check if the 'relay' column already doesn't exist (and also fetch all the # relay paths for use below.) cursor.execute("SELECT relay FROM repositories") except psycopg2.ProgrammingError: # Seems it doesn't exist, so just exit. sys.exit(0) failed = False for (relay_path,) in cursor: try: shutil.rmtree(relay_path) except OSError as error: print ("WARNING: Failed to remove directory: %s\n Error: %s" % (relay_path, error)) failed = True if failed: print """ Some obsolete directories could not be removed. They will no longer be used by Critic, so you probably want to look into deleting them manually. 
""" cursor.execute("ALTER TABLE repositories DROP relay") db.commit() db.close() ================================================ FILE: installation/migrations/dbschema.altertable.reviewfilechanges.rename-columns.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import psycopg2 import json import argparse import os parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) data = json.load(sys.stdin) db = psycopg2.connect(database="critic") cursor = db.cursor() try: # Make sure the columns don't already exist. cursor.execute("SELECT from_state, to_state FROM reviewfilechanges") # Above statement should have thrown a psycopg2.ProgrammingError, but it # didn't, so just exit. 
sys.exit(0) except psycopg2.ProgrammingError: db.rollback() except: raise cursor.execute("""ALTER TABLE reviewfilechanges RENAME "from" TO from_state""") cursor.execute("""ALTER TABLE reviewfilechanges RENAME "to" TO to_state""") db.commit() db.close() ================================================ FILE: installation/migrations/dbschema.altertable.reviewmergeconfirmations.add.tail.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import psycopg2 import argparse import os parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) import configuration db = psycopg2.connect(**configuration.database.PARAMETERS) cursor = db.cursor() try: # Check if the 'tail' column already exists. 
# Migration: add reviewrebases.equivalent_merge, then for each existing
# "fast-forward" rebase move the equivalent-merge commit previously stored in
# |old_head| into the new column and restore |old_head| to the branch's real
# pre-rebase head (the merge's first parent).

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

# Drop root privileges; group first, while still root.
os.setgid(arguments.gid)
os.setuid(arguments.uid)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

try:
    # Check if the 'equivalent_merge' column already exists.
    cursor.execute("SELECT equivalent_merge FROM reviewrebases")
except psycopg2.ProgrammingError:
    db.rollback()
else:
    # No error; change appears to have been made already.
    db.close()
    sys.exit(0)

def fetch_commit_sha1(commit_id):
    """Return the SHA-1 recorded for |commit_id| in the 'commits' table."""
    cursor.execute("SELECT sha1 FROM commits WHERE id=%s", (commit_id,))
    (sha1,) = cursor.fetchone()
    return sha1

def get_parent_sha1s(repository_path, sha1):
    """Return the list of parent SHA-1s of |sha1| via 'git log -1 --format=%P'."""
    output = subprocess.check_output(
        [configuration.executables.GIT, "log", "-1", "--format=%P", sha1],
        cwd=repository_path)
    return output.strip().split()

def is_ancestor_of(repository_path, ancestor_sha1, descendant_sha1):
    """True if |ancestor_sha1| is an ancestor of |descendant_sha1|.

    Uses 'git merge-base': the merge base equals the ancestor exactly when
    the ancestry relation holds.  A CalledProcessError is treated as "no"."""
    try:
        merge_base_sha1 = subprocess.check_output(
            [configuration.executables.GIT, "merge-base",
             ancestor_sha1, descendant_sha1],
            cwd=repository_path).strip()
    except subprocess.CalledProcessError:
        return False
    else:
        return merge_base_sha1 == ancestor_sha1

cursor.execute("""ALTER TABLE reviewrebases
                    ADD equivalent_merge INTEGER REFERENCES commits""")

#
# Move all references to equivalent merges stored in the |old_head| column of
# existing review rebases over to the new |equivalent_merge| column, and restore
# the value of the |old_head| to be the actual head of the review branch before
# the rebase.
#

cursor.execute("""SELECT repositories.path, reviewrebases.id, reviewrebases.old_head,
                         reviewrebases.old_upstream, reviewrebases.new_upstream
                    FROM reviewrebases
                    JOIN reviews ON (reviews.id=reviewrebases.review)
                    JOIN branches ON (branches.id=reviews.branch)
                    JOIN repositories ON (repositories.id=branches.repository)
                   WHERE new_head IS NOT NULL
                     AND old_upstream IS NOT NULL
                     AND new_upstream IS NOT NULL""")

for repository_path, rebase_id, old_head_id, old_upstream_id, new_upstream_id \
        in cursor.fetchall():
    old_head_sha1 = fetch_commit_sha1(old_head_id)
    old_head_parent_sha1s = get_parent_sha1s(repository_path, old_head_sha1)

    if len(old_head_parent_sha1s) != 2:
        # Old head is not a merge commit (or is an 3-or-more-way merge,) so
        # can't be an equivalent merge.
        continue

    old_upstream_sha1 = fetch_commit_sha1(old_upstream_id)
    new_upstream_sha1 = fetch_commit_sha1(new_upstream_id)

    if old_head_parent_sha1s[1] != new_upstream_sha1:
        # An equivalent merge should be a merge with the real old head as the
        # first parent and the new upstream as the second parent.  We can't
        # really check the first parent in a meaningful way, but if the second
        # parent is "wrong", then this can't be an equivalent merge.
        continue

    if not is_ancestor_of(repository_path, old_upstream_sha1, new_upstream_sha1):
        # Old upstream is not an ancestor of the new upstream, meaning this is
        # not a "fast-forward" rebase.  Such rebases don't have an equivalent
        # merge, but rather a "replayed rebase".  The replayed rebase however
        # isn't stored in the |old_head| column, so there is nothing to restore.
        continue

    # Alright, we're pretty sure that the old head is in fact an equivalent
    # merge commit.  Store it in the new |equivalent_merge| column and restore
    # the |old_head| column to the equivalent merge's first parent.

    cursor.execute("SELECT id FROM commits WHERE sha1=%s",
                   (old_head_parent_sha1s[0],))
    (real_old_head_id,) = cursor.fetchone()

    cursor.execute("""UPDATE reviewrebases
                         SET old_head=%s,
                             equivalent_merge=%s
                       WHERE id=%s""",
                   (real_old_head_id, old_head_id, rebase_id))

db.commit()
db.close()
# Migration: add reviewrebases.replayed_rebase and populate it for existing
# rebases by locating 'conflicts' changesets whose child is a rebase's new
# head; the parent side of such a changeset is the replayed rebase commit.

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

# Drop root privileges; group first, while still root.
os.setgid(arguments.gid)
os.setuid(arguments.uid)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

try:
    # Check if the 'replayed_rebase' column already exists.
    cursor.execute("SELECT replayed_rebase FROM reviewrebases")
except psycopg2.ProgrammingError:
    db.rollback()
else:
    # No error; change appears to have been made already.
    db.close()
    sys.exit(0)

cursor.execute("""ALTER TABLE reviewrebases
                    ADD replayed_rebase INTEGER REFERENCES commits""")

#
# Find all replayed rebases and store them in the new |replayed_rebase| column.
# We identify them via 'conflicts' changesets added for review, whose child
# (right-hand side) is the new head of a rebase.  The parent (left-hand side) of
# such a changeset will be the replayed rebase commit.
#
# Note: It is theoretically possible for such a 'conflicts' changeset to exist
# that is not actually indicative of a replayed rebase, if the rebase's new head
# is a merge commit, and that merge commit is an equivalent merge commit created
# for an earlier rebase of the same review.
#
# Also note: While theoretically possible, the aforementioned possibility is not
# likely to have happened in practice.
#

cursor.execute("""SELECT DISTINCT changesets.parent, reviewrebases.id
                    FROM reviewrebases
                    JOIN reviewchangesets ON (reviewchangesets.review=reviewrebases.review)
                    JOIN changesets ON (changesets.id=reviewchangesets.changeset
                                    AND changesets.child=reviewrebases.new_head)
                   WHERE changesets.type='conflicts'""")

# Rows are (replayed rebase commit id, rebase id), matching the placeholders
# below.  fetchall() materializes the rows before executemany reuses the
# cursor.
cursor.executemany("""UPDATE reviewrebases
                         SET replayed_rebase=%s
                       WHERE id=%s""",
                   cursor.fetchall())

db.commit()
db.close()
# Migration: allow NULL in reviewrecipientfilters.uid by replacing the old
# primary key / unique constraint with a plain UNIQUE (review, uid).

argument_parser = argparse.ArgumentParser()
argument_parser.add_argument("--uid", type=int)
argument_parser.add_argument("--gid", type=int)

options = argument_parser.parse_args()

# Shed root privileges; the group has to be changed first, while still root.
os.setgid(options.gid)
os.setuid(options.uid)

import configuration

connection = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = connection.cursor()

# DROP CONSTRAINT IF EXISTS keeps this statement safe to re-run.
cursor.execute("""ALTER TABLE reviewrecipientfilters
                    DROP CONSTRAINT IF EXISTS reviewrecipientfilters_pkey,
                    DROP CONSTRAINT IF EXISTS reviewrecipientfilters_review_uid_key,
                    ALTER uid DROP NOT NULL,
                    ADD UNIQUE (review, uid)""")

connection.commit()
connection.close()
# Migration: add reviews.origin (the branch a review was created from), copy
# branches.review over into it, and drop the old branches.review column.

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

# Drop root privileges; group first, while still root.
os.setgid(arguments.gid)
os.setuid(arguments.uid)

# Read (and discard) the migration data the parent writes on stdin.
data = json.load(sys.stdin)

db = psycopg2.connect(database="critic")
cursor = db.cursor()

try:
    # Check if the 'origin' column already exists.
    cursor.execute("SELECT origin FROM reviews")
except psycopg2.ProgrammingError:
    # Seems it doesn't.
    db.rollback()
else:
    # Column already present: migration has run before.  Close the connection
    # (previously left open, with a pending transaction) before exiting.
    db.close()
    sys.exit(0)

# Add the reviews.origin column.
cursor.execute(
    """ALTER TABLE reviews
            ADD origin INTEGER REFERENCES branches ON DELETE SET NULL""")

# Copy the information in branches.review over to reviews.origin.  Each row
# is (branch id, review id), matching the (origin, id) placeholders below.
cursor.execute("""SELECT id, review
                    FROM branches
                   WHERE review IS NOT NULL""")
rows = cursor.fetchall()

cursor.executemany("""UPDATE reviews
                         SET origin=%s
                       WHERE id=%s""",
                   rows)

# Drop the old branches.review column.
cursor.execute("""ALTER TABLE branches
                   DROP review""")

db.commit()
db.close()
# Migration: add systemidentities.installed_sha1 and .installed_at.  Each
# column addition is guarded by its own existence probe and committed
# separately, so the script is safe to re-run.

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

# Drop root privileges; group first, while still root.
os.setgid(arguments.gid)
os.setuid(arguments.uid)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

try:
    # Check if the 'installed_sha1' column already exists.
    cursor.execute("SELECT installed_sha1 FROM systemidentities")
except psycopg2.ProgrammingError:
    db.rollback()

    # Add as nullable, backfill existing rows with '', then tighten to
    # NOT NULL.
    cursor.execute("ALTER TABLE systemidentities ADD installed_sha1 CHAR(40)")
    cursor.execute("UPDATE systemidentities SET installed_sha1=''")
    cursor.execute("ALTER TABLE systemidentities ALTER installed_sha1 SET NOT NULL")

db.commit()

try:
    # Check if the 'installed_at' column already exists.
    cursor.execute("SELECT installed_at FROM systemidentities")
except psycopg2.ProgrammingError:
    db.rollback()

    # DEFAULT NOW() fills existing rows on ADD COLUMN, so NOT NULL can be set
    # immediately afterwards.
    cursor.execute("ALTER TABLE systemidentities ADD installed_at TIMESTAMP DEFAULT NOW()")
    cursor.execute("ALTER TABLE systemidentities ALTER installed_at SET NOT NULL")

db.commit()
db.close()
# Migration: replace systemidentities.url_prefix with three columns:
# anonymous_scheme, authenticated_scheme and hostname.  (Python 2 script:
# uses the print statement.)

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

# Drop root privileges; group first, while still root.
os.setgid(arguments.gid)
os.setuid(arguments.uid)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

try:
    cursor.execute("SELECT key, url_prefix FROM systemidentities")
except psycopg2.ProgrammingError:
    # We seem to have converted the table already, so just exit.
    sys.exit(0)

# Snapshot the old values before the column is dropped below.
url_prefixes = cursor.fetchall()

cursor.execute("""ALTER TABLE systemidentities
                    DROP url_prefix,
                    ADD anonymous_scheme VARCHAR(5),
                    ADD authenticated_scheme VARCHAR(5),
                    ADD hostname VARCHAR(256)""")

# With a single fixed access scheme, use it for both kinds of access;
# otherwise (presumably the "both" setting -- confirm against
# installation/config.py) use http anonymously and https authenticated.
if configuration.base.ACCESS_SCHEME in ("http", "https"):
    anonymous_scheme = authenticated_scheme = configuration.base.ACCESS_SCHEME
else:
    anonymous_scheme = "http"
    authenticated_scheme = "https"

for key, url_prefix in url_prefixes:
    # Strip the scheme from the stored prefix to obtain the bare hostname.
    if url_prefix.lower().startswith("https://"):
        hostname = url_prefix[len("https://"):]
    elif url_prefix.lower().startswith("http://"):
        hostname = url_prefix[len("http://"):]
    else:
        # This would only happen if the system administrator manually
        # modified the 'systemidentities' table, and any URL constructed
        # with this URL prefix in the past would most likely have been
        # broken already.
        print """\
WARNING: System identity %s's URL prefix was not recognized as either
HTTP or HTTPS. It's assumed to be a plain hostname.

The URL prefix was: %r""" % (key, url_prefix)
        hostname = url_prefix

    cursor.execute("""UPDATE systemidentities
                         SET anonymous_scheme=%s,
                             authenticated_scheme=%s,
                             hostname=%s
                       WHERE key=%s""",
                   (anonymous_scheme, authenticated_scheme, hostname, key))

cursor.execute("""ALTER TABLE systemidentities
                   ALTER anonymous_scheme SET NOT NULL,
                   ALTER authenticated_scheme SET NOT NULL,
                   ALTER hostname SET NOT NULL""")

db.commit()
db.close()
# Migration: make (email, uid) the primary key of 'usergitemails' and add an
# index over the uid column.

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

# Drop root privileges; group first, while still root.
os.setgid(arguments.gid)
os.setuid(arguments.uid)

# Stdin carries migration data; consumed even though unused here.
data = json.load(sys.stdin)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

# It's tricky to check whether constraints and indexes already exist,
# so this script simply attempts to run commands that won't fail even
# if run multiple times.

cursor.execute("""ALTER TABLE usergitemails
                    DROP CONSTRAINT IF EXISTS usergitemails_pkey,
                    ADD PRIMARY KEY (email, uid)""")

cursor.execute("DROP INDEX IF EXISTS usergitemails_uid")
cursor.execute("CREATE INDEX usergitemails_uid ON usergitemails (uid)")

db.commit()
db.close()
# Migration: (re)create assorted secondary indexes.  Older-style migration:
# connects with a hard-coded database name instead of configuration
# parameters.

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

# Drop root privileges; group first, while still root.
os.setgid(arguments.gid)
os.setuid(arguments.uid)

# Stdin carries migration data; consumed even though unused here.
data = json.load(sys.stdin)

db = psycopg2.connect(database="critic")
cursor = db.cursor()

def create_index(table, columns):
    """Drop and recreate the index named <table>_<col>_<col>... on |table|.

    The identifiers are interpolated directly into the SQL text; callers
    only pass constant table/column names, never external input."""
    name = "%s_%s" % (table, "_".join(columns))
    cursor.execute("DROP INDEX IF EXISTS %s" % name)
    cursor.execute("CREATE INDEX %s ON %s (%s)" % (name, table, ", ".join(columns)))

# Replaced by index over 'uid' and 'state'.
cursor.execute("DROP INDEX IF EXISTS reviewfilechanges_uid")

create_index("reviewfiles", ["review", "state"])
create_index("reviewfilechanges", ["uid", "state"])
create_index("commentchains", ["review", "type", "state"])
create_index("comments", ["id", "chain"])

db.commit()
db.close()
# Migration: create the 'systemaccesstype' enum and the 'accesstokens' table.
# Newer-style migration using the installation.utils helpers.

# Handles command line arguments and sets uid/gid.
installation.utils.start_migration()

dbschema = installation.utils.DatabaseSchema()

# New definitions in dbschema.user.git.
# NOTE(review): the file name above looks off -- presumably
# installation/data/dbschema.users.sql is meant; confirm.
dbschema.update("""

CREATE TYPE systemaccesstype AS ENUM (
    -- The system is accessed as a named user.
    'user',
    -- The system is accessed by a system service or similar.
    'system',
    -- The system is accessed anonymously.
    'anonymous'
  );

CREATE TABLE accesstokens (
    id SERIAL PRIMARY KEY,

    -- The type of access granted by this access token.
    access_type systemaccesstype NOT NULL DEFAULT 'user',
    -- The user (when access_type='user') or NULL.
    uid INTEGER REFERENCES users ON DELETE CASCADE,

    -- First part of access token ("username").
    part1 VARCHAR(32) NOT NULL,
    -- Second part of access token ("password").
    part2 VARCHAR(32) NOT NULL,

    -- Access token title.
    title VARCHAR(256),

    UNIQUE (part1, part2),

    CONSTRAINT valid_user CHECK ((access_type='user' AND uid IS NOT NULL) OR
                                 (access_type!='user' AND uid IS NULL))
  );

""")
# Migration: create the 'knownremotes' table (remote repository URLs with a
# flag; the exact semantics of 'pushing' are defined by the consumers of the
# table, not visible here).

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

# Drop root privileges; group first, while still root.
os.setgid(arguments.gid)
os.setuid(arguments.uid)

# Stdin carries migration data; consumed even though unused here.
data = json.load(sys.stdin)

db = psycopg2.connect(database="critic")
cursor = db.cursor()

try:
    # Make sure the table doesn't already exist.
    cursor.execute("SELECT 1 FROM knownremotes")

    # Above statement should have thrown a psycopg2.ProgrammingError, but it
    # didn't, so just exit.
    sys.exit(0)
except psycopg2.ProgrammingError:
    db.rollback()
except:
    raise

cursor.execute("""CREATE TABLE knownremotes
                    ( url VARCHAR(256) PRIMARY KEY,
                      pushing BOOLEAN NOT NULL )""")

db.commit()
db.close()
# Migration: create 'scheduledreviewbrancharchivals' and seed it with an
# archival deadline for every already closed/dropped review.

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

# Drop root privileges; group first, while still root.
os.setgid(arguments.gid)
os.setuid(arguments.uid)

# Stdin carries migration data; consumed even though unused here.
data = json.load(sys.stdin)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

try:
    # Make sure the table doesn't already exist.
    cursor.execute("SELECT 1 FROM scheduledreviewbrancharchivals")

    # Above statement should have thrown a psycopg2.ProgrammingError, but it
    # didn't, so just exit.
    sys.exit(0)
except psycopg2.ProgrammingError:
    db.rollback()
except:
    raise

# Create the table.
cursor.execute("""

CREATE TABLE scheduledreviewbrancharchivals (
    review INTEGER PRIMARY KEY REFERENCES reviews (id),
    deadline TIMESTAMP NOT NULL
  );

""")

# For each closed or dropped review, schedule an archival of the review branch.
# These archivals may end up being ignored, for instance because review branch
# archiving was disabled altogether.
#
# The archiving is randomly distributed over a four week period starting two
# weeks from now.
cursor.execute("""INSERT INTO scheduledreviewbrancharchivals (review, deadline)
                       SELECT id, NOW() + INTERVAL '2 weeks' + random() * INTERVAL '4 weeks'
                         FROM reviews
                        WHERE state IN ('closed', 'dropped')""")

db.commit()
db.close()
# Migration: create the 'timezones' table and seed it with a single UTC row.

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

# Drop root privileges; group first, while still root.
os.setgid(arguments.gid)
os.setuid(arguments.uid)

# Stdin carries migration data; consumed even though unused here.
data = json.load(sys.stdin)

db = psycopg2.connect(database="critic")
cursor = db.cursor()

try:
    # Make sure the table doesn't already exist.
    cursor.execute("SELECT 1 FROM timezones")

    # Above statement should have thrown a psycopg2.ProgrammingError, but it
    # didn't, so just exit.
    sys.exit(0)
except psycopg2.ProgrammingError:
    db.rollback()

cursor.execute("""CREATE TABLE timezones
                    ( name VARCHAR(256) PRIMARY KEY,
                      abbrev VARCHAR(16) NOT NULL,
                      utc_offset INTERVAL NOT NULL )""")

# Additional timezones are copied from 'pg_timezone_names' by the Watchdog
# service on startup.  A zero timedelta maps to a zero SQL INTERVAL.
cursor.execute("INSERT INTO timezones (name, abbrev, utc_offset) VALUES (%s, %s, %s)",
               ("Universal/UTC", "UTC", datetime.timedelta()))

db.commit()
db.close()
# Migration: move addresses out of users.email into the new 'useremails'
# table, then turn users.email into a foreign key into that table.

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

# Drop root privileges; group first, while still root.
os.setgid(arguments.gid)
os.setuid(arguments.uid)

# Stdin carries migration data; consumed even though unused here.
data = json.load(sys.stdin)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

try:
    # Make sure the table doesn't already exist.
    cursor.execute("SELECT 1 FROM useremails")

    # Above statement should have thrown a psycopg2.ProgrammingError, but it
    # didn't, so just exit.
    sys.exit(0)
except psycopg2.ProgrammingError:
    db.rollback()
except:
    raise

# Create the table.
cursor.execute("""CREATE TABLE useremails
                    ( id SERIAL PRIMARY KEY,
                      uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE,
                      email VARCHAR(256) NOT NULL,
                      verified BOOLEAN,
                      verification_token VARCHAR(256),

                      UNIQUE (uid, email) )""")

# Create records for all current email addresses in the system. Set verified to
# NULL which means the addresses can be used, but that they haven't gone through
# the verification process.
cursor.execute("""INSERT INTO useremails (uid, email)
                       SELECT id, email
                         FROM users
                        WHERE email IS NOT NULL""")

# Drop the old 'users.email' column.
cursor.execute("ALTER TABLE users DROP email")

# And create a new one based on the information we copied over to the new table.
cursor.execute("ALTER TABLE users ADD email INTEGER REFERENCES useremails")

# Point each user's 'email' at their row in 'useremails'; rows are
# (useremails.id, uid), matching the (email, id) placeholders below.
cursor.execute("SELECT id, uid FROM useremails")
cursor.executemany("UPDATE users SET email=%s WHERE id=%s", cursor.fetchall())

db.commit()
db.close()
# Migration: create the 'usersessions' table (session key, user, access
# timestamp) if it is not present already.

argument_parser = argparse.ArgumentParser()
argument_parser.add_argument("--uid", type=int)
argument_parser.add_argument("--gid", type=int)

options = argument_parser.parse_args()

# Shed root privileges; the group has to be changed first, while still root.
os.setgid(options.gid)
os.setuid(options.uid)

# Stdin carries migration data; read it even though it is unused here.
data = json.load(sys.stdin)

import configuration

connection = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = connection.cursor()

try:
    # Probe for the table; a psycopg2.ProgrammingError means it is missing.
    cursor.execute("SELECT 1 FROM usersessions")

    # The probe succeeded, so the table already exists and there is nothing
    # left to do.
    sys.exit(0)
except psycopg2.ProgrammingError:
    connection.rollback()
except:
    raise

cursor.execute("""CREATE TABLE usersessions
                    ( key CHAR(28) PRIMARY KEY,
                      uid INTEGER NOT NULL REFERENCES users,
                      atime TIMESTAMP DEFAULT NOW() )""")

connection.commit()
connection.close()
profile INTEGER NOT NULL REFERENCES accesscontrolprofiles ON DELETE CASCADE, -- HTTP request method. NULL means "all methods". request_method httprequestmethod, -- Python regular expression that must match the entire path. NULL means -- "all paths". path_pattern TEXT ); CREATE INDEX accesscontrol_http_profile ON accesscontrol_http (profile); CREATE TABLE useraccesscontrolprofiles ( -- The type of access that is controlled. access_type systemaccesstype NOT NULL DEFAULT 'user', -- The user (when access_type='user') or NULL. If access_type='user' and -- this is NULL, then this is the default profile association. uid INTEGER REFERENCES users, -- Access control profile. profile INTEGER NOT NULL REFERENCES accesscontrolprofiles ON DELETE CASCADE, CONSTRAINT valid_user CHECK (access_type='user' OR uid IS NULL) ); CREATE INDEX useraccesscontrolprofiles_uid ON useraccesscontrolprofiles (uid); CREATE TABLE labeledaccesscontrolprofiles ( -- Authentication labels from user authentication, typically indicating some -- type of group memberships. Labels should be sorted lexicographically and -- separated by pipe ('|') characters. labels VARCHAR(256) PRIMARY KEY, -- Access control profile. profile INTEGER NOT NULL REFERENCES accesscontrolprofiles ON DELETE CASCADE ); """) # New definitions in dbschema.git.sql. dbschema.update(""" CREATE TYPE repositoryaccesstype AS ENUM ( 'read', 'modify' ); CREATE TABLE accesscontrol_repositories ( id SERIAL PRIMARY KEY, -- The profile this exception belongs to. profile INTEGER NOT NULL REFERENCES accesscontrolprofiles ON DELETE CASCADE, -- Type of access. NULL means "any type". access_type repositoryaccesstype, -- Repository to access. NULL means "any repository". repository INTEGER REFERENCES repositories ON DELETE CASCADE ); CREATE INDEX accesscontrol_repositories_profile ON accesscontrol_repositories (profile); """) # Check if dbschema.extensions.sql has been loaded at all. 
It wasn't until # extension support (the extend.py script) was fully added. If the 'extensions' # table doesn't exist, it obviously hasn't, and the tables below would be added # along with everything else when dbschema.extensions.sql is loaded by # extend.py. if dbschema.table_exists("extensions"): # New definitions in dbschema.extensions.sql. dbschema.update(""" CREATE TYPE extensionaccesstype AS ENUM ( 'install', 'execute' ); CREATE TABLE accesscontrol_extensions ( id SERIAL PRIMARY KEY, -- The profile this exception belongs to. profile INTEGER NOT NULL REFERENCES accesscontrolprofiles ON DELETE CASCADE, -- Type of extension access. NULL means "any type". access_type extensionaccesstype, -- Extension key: / for user extensions and -- for system extensions. NULL means "any extension". extension_key TEXT ); CREATE INDEX accesscontrol_extensions_profile ON accesscontrol_extensions (profile); """) ================================================ FILE: installation/migrations/dbschema.droptable.knownhosts.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
# Migration: drop the obsolete 'knownhosts' table.

import sys
import psycopg2
import argparse
import os

# Drop root privileges to the Critic system user/group (see upgrade.py).
parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

os.setgid(arguments.gid)
os.setuid(arguments.uid)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

try:
    # Probe for the table; a ProgrammingError means it is already gone.
    cursor.execute("SELECT 1 FROM knownhosts")
except psycopg2.ProgrammingError:
    # Seems it doesn't exist, so just exit.
    sys.exit(0)

cursor.execute("DROP TABLE knownhosts")

db.commit()
db.close()

================================================
FILE: installation/migrations/dbschema.extension-filterhook-role.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

# Migration: add the extension "filterhook" role tables, idempotently.

import sys
import psycopg2
import argparse
import os
import re

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

os.setgid(arguments.gid)
os.setuid(arguments.uid)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

def table_exists(table_name):
    # Existence check via a probing SELECT: the failed statement aborts the
    # current transaction, hence the rollback in the except branch.
    try:
        cursor.execute("SELECT 1 FROM %s" % table_name)
        # Above statement would have thrown a psycopg2.ProgrammingError if the
        # table didn't exist, but it didn't, so the table must exist.
        return True
    except psycopg2.ProgrammingError:
        db.rollback()
        return False

def createtable(statement):
    # Extract the table name from the DDL so the creation can be skipped when
    # a previous (partial) run already created it.
    # NOTE(review): the patterns here would conventionally be raw strings
    # (r"CREATE TABLE (\w+)"); they work as-is because "\w" has no meaning as
    # a string escape.
    (table_name,) = re.search("CREATE TABLE (\w+)", statement).groups()

    # Make sure the table doesn't already exist.
    if not table_exists(table_name):
        cursor.execute(statement)
        db.commit()

def createindex(statement):
    # Indexes are simply dropped and re-created to make re-runs safe.
    (index_name,) = re.search("CREATE INDEX (\w+)", statement).groups()

    cursor.execute("DROP INDEX IF EXISTS %s" % index_name)
    cursor.execute(statement)
    db.commit()

def run_statements(statements):
    # Only the two statement kinds used by this migration's SQL are handled;
    # anything else is a programming error in the script itself.
    for statement in statements.split(";"):
        statement = statement.strip()
        if not statement:
            pass
        elif statement.startswith("CREATE TABLE"):
            createtable(statement)
        elif statement.startswith("CREATE INDEX"):
            createindex(statement)
        else:
            print >>sys.stderr, "Unexpected SQL statement: %r" % statement
            sys.exit(1)

# First check if dbschema.extensions.sql has been loaded at all.  It wasn't
# until extension support (the extend.py script) was fully added.  If the
# 'extensions' table doesn't exist, it obviously hasn't, and the tables below
# would be added along with everything else when dbschema.extensions.sql is
# loaded by extend.py.
#
# Also, the statements below depend on the basic extensions tables existing due
# to foreign keys they set up.
if not table_exists("extensions"):
    sys.exit(0)

run_statements("""

CREATE TABLE extensionfilterhookroles
  ( role INTEGER NOT NULL REFERENCES extensionroles ON DELETE CASCADE,
    name VARCHAR(64) NOT NULL,
    title VARCHAR(64) NOT NULL,
    role_description TEXT,
    data_description TEXT );

CREATE TABLE extensionhookfilters
  ( id SERIAL PRIMARY KEY,
    uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE,
    extension INTEGER NOT NULL REFERENCES extensions ON DELETE CASCADE,
    repository INTEGER NOT NULL REFERENCES repositories ON DELETE CASCADE,
    name VARCHAR(64) NOT NULL,
    path TEXT NOT NULL,
    data TEXT );
CREATE INDEX extensionhookfilters_uid_extension
          ON extensionhookfilters (uid, extension);
CREATE INDEX extensionhookfilters_repository
          ON extensionhookfilters (repository);

CREATE TABLE extensionfilterhookevents
  ( id SERIAL PRIMARY KEY,
    filter INTEGER NOT NULL REFERENCES extensionhookfilters ON DELETE CASCADE,
    review INTEGER NOT NULL REFERENCES reviews ON DELETE CASCADE,
    uid INTEGER NOT NULL REFERENCES users ON DELETE CASCADE,
    data TEXT );

CREATE TABLE extensionfilterhookcommits
  ( event INTEGER NOT NULL REFERENCES extensionfilterhookevents ON DELETE CASCADE,
    commit INTEGER NOT NULL REFERENCES commits );
CREATE INDEX extensionfilterhookcommits_event
          ON extensionfilterhookcommits (event);

CREATE TABLE extensionfilterhookfiles
  ( event INTEGER NOT NULL REFERENCES extensionfilterhookevents ON DELETE CASCADE,
    file INTEGER NOT NULL REFERENCES files );
CREATE INDEX extensionfilterhookfiles_event
          ON extensionfilterhookfiles (event);

""")

db.close()

================================================
FILE: installation/migrations/dbschema.external-authentication.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

# Migration: create tables used by external (OAuth) authentication.

import psycopg2
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

os.setgid(arguments.gid)
os.setuid(arguments.uid)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

def create(table_name, statement):
    # Idempotent CREATE TABLE: skip when the table is already present.
    try:
        # Make sure the table doesn't already exist.
        cursor.execute("SELECT 1 FROM %s" % table_name)

        # Above statement would have thrown a psycopg2.ProgrammingError if the
        # table didn't exist, but it didn't, so assume the table doesn't need to
        # be added.
        return
    except psycopg2.ProgrammingError:
        # Failed probe aborted the transaction; recover before creating.
        db.rollback()

    cursor.execute(statement)
    db.commit()

create("externalusers", """

CREATE TABLE externalusers
  ( id SERIAL PRIMARY KEY,
    uid INTEGER REFERENCES users,
    provider VARCHAR(16) NOT NULL,
    account VARCHAR(256) NOT NULL,
    email VARCHAR(256),
    token VARCHAR(256),

    UNIQUE (provider, account) );

""")

create("oauthstates", """

CREATE TABLE oauthstates
  ( state VARCHAR(64) PRIMARY KEY,
    url TEXT,
    time TIMESTAMP NOT NULL DEFAULT NOW() );

""")

db.commit()
db.close()

================================================
FILE: installation/migrations/dbschema.files-and-directories.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

# Migration: replace the (directory, file) pair columns in files/filters
# tables with a single 'path' column, and drop the 'directories' table.

import sys
import psycopg2
import json
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

os.setgid(arguments.gid)
os.setuid(arguments.uid)

data = json.load(sys.stdin)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

def column_exists(table, column):
    # Probe with a LIMIT 1 SELECT; a ProgrammingError means table or column is
    # missing, and aborts the transaction (hence the rollback).
    try:
        cursor.execute("SELECT %s FROM %s LIMIT 1" % (column, table))
        return True
    except psycopg2.ProgrammingError:
        db.rollback()
        return False

# Columns this migration adds ...
added = [column_exists("files", "path"),
         column_exists("filters", "path"),
         column_exists("reviewfilters", "path")]

# ... and columns/tables it removes.
removed = [column_exists("files", "directory"),
           column_exists("files", "name"),
           column_exists("filters", "directory"),
           column_exists("filters", "file"),
           column_exists("reviewfilters", "directory"),
           column_exists("reviewfilters", "file"),
           column_exists("directories", "id")]

if all(added) and not any(removed):
    # All expected modifications appear to have taken place already.
    sys.exit(0)
elif any(added) or not all(removed):
    # Some modifications appear to have taken place already, but not
    # all.  This is bad, and possibly unrecoverable.  It's probably
    # not a good idea to just run the commands below.
    sys.stderr.write("""\
The database schema appears to be in an inconsistent state!

Please see
  installation/migrations/dbschema.files-and-directories.py
and try to figure out which of the commands in it to run.

Alternatively, restore a database backup from before the previous upgrade
attempt, and then try running upgrade.py again.
""")
    sys.exit(1)

# Add 'path' column to 'files' table.  fullfilename() is a legacy SQL helper
# (dropped at the end of this migration) that joins directory and file name.
cursor.execute("ALTER TABLE files ADD path TEXT")
cursor.execute("UPDATE files SET path=fullfilename(id)")
cursor.execute("ALTER TABLE files ALTER path SET NOT NULL")
cursor.execute("CREATE UNIQUE INDEX files_path_md5 ON files (MD5(path))")
cursor.execute("CREATE INDEX files_path_gin ON files USING gin (STRING_TO_ARRAY(path, '/'))")

# Modify 'filters' table similarly.  file=0 marks a directory filter; its path
# is the directory path, with '/' standing in for the repository root.
cursor.execute("ALTER TABLE filters ADD path TEXT")
cursor.execute("UPDATE filters SET path=fullfilename(file) WHERE file>0")
cursor.execute("UPDATE filters SET path=COALESCE(NULLIF(fulldirectoryname(directory), ''), '/') WHERE file=0")
cursor.execute("ALTER TABLE filters ALTER path SET NOT NULL")
cursor.execute("CREATE UNIQUE INDEX filters_repository_uid_path_md5 ON filters (repository, uid, MD5(path))")

# Modify 'reviewfilters' table similarly.
cursor.execute("ALTER TABLE reviewfilters ADD path TEXT")
cursor.execute("UPDATE reviewfilters SET path=fullfilename(file) WHERE file>0")
cursor.execute("UPDATE reviewfilters SET path=COALESCE(NULLIF(fulldirectoryname(directory), ''), '/') WHERE file=0")
cursor.execute("ALTER TABLE reviewfilters ALTER path SET NOT NULL")
cursor.execute("CREATE UNIQUE INDEX reviewfilters_review_uid_path_md5 ON reviewfilters (review, uid, MD5(path))")

# Modify 'reviewfilterchanges' table similarly.
cursor.execute("ALTER TABLE reviewfilterchanges ADD path TEXT")
cursor.execute("UPDATE reviewfilterchanges SET path=fullfilename(file) WHERE file>0")
cursor.execute("UPDATE reviewfilterchanges SET path=fulldirectoryname(directory) WHERE file=0")
cursor.execute("ALTER TABLE reviewfilterchanges ALTER path SET NOT NULL")

# Drop the now redundant 'directories' table.
cursor.execute("ALTER TABLE files DROP directory, DROP name") cursor.execute("ALTER TABLE filters DROP directory, DROP file, DROP specificity") cursor.execute("ALTER TABLE reviewfilters DROP directory, DROP file") cursor.execute("ALTER TABLE reviewfilterchanges DROP directory, DROP file") cursor.execute("DROP TABLE directories") # Drop various utility functions that are no longer necessary. cursor.execute("DROP FUNCTION IF EXISTS filepath()") cursor.execute("DROP FUNCTION IF EXISTS directorypath()") cursor.execute("DROP FUNCTION IF EXISTS subdirectories()") cursor.execute("DROP FUNCTION IF EXISTS containedfiles()") cursor.execute("DROP FUNCTION IF EXISTS fullfilename()") cursor.execute("DROP FUNCTION IF EXISTS fulldirectoryname()") cursor.execute("DROP FUNCTION IF EXISTS findfile()") cursor.execute("DROP FUNCTION IF EXISTS finddirectory()") db.commit() # ALTER TYPE ... ADD VALUE cannot be executed inside a transaction block. db.autocommit = True # Add filter type "ignored". cursor.execute("ALTER TYPE filtertype ADD VALUE 'ignored'") db.close() ================================================ FILE: installation/migrations/dbschema.fixup-extensionroles.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
# Migration: delete duplicate rows from 'extensionroles'.

import sys
import psycopg2
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

os.setgid(arguments.gid)
os.setuid(arguments.uid)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

try:
    # The extensions part of the database schema might not have been loaded at
    # all; it isn't until extend.py is used to enable extensions support.
    cursor.execute("SELECT 1 FROM extensions")
except psycopg2.ProgrammingError:
    sys.exit(0)

# Fetch every role together with its per-kind details; two rows with identical
# values in every column except 'id' are duplicates.
cursor.execute("""SELECT id, version, script, function,
                         extensionpageroles.path,
                         extensioninjectroles.path,
                         extensionprocesscommitsroles.role IS NULL
                    FROM extensionroles
         LEFT OUTER JOIN extensionpageroles ON (extensionpageroles.role=id)
         LEFT OUTER JOIN extensioninjectroles ON (extensioninjectroles.role=id)
         LEFT OUTER JOIN extensionprocesscommitsroles ON (extensionprocesscommitsroles.role=id)""")

roles = set()
duplicates = []

for row in cursor:
    role_id = row[0]
    # Everything but the id forms the identity key of the role.
    role_key = row[1:]
    if role_key in roles:
        duplicates.append(role_id)
    else:
        roles.add(role_key)

if duplicates:
    print "Removing %d duplicate rows from extensionroles." % len(duplicates)

    cursor.execute("DELETE FROM extensionroles WHERE id=ANY (%s)", (duplicates,))

    db.commit()

db.close()

================================================
FILE: installation/migrations/dbschema.per-repository-or-filter-preferences.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

# Migration: allow user preferences to be set per repository or per filter,
# and move system-wide defaults into 'userpreferences' rows with uid=NULL.

import sys
import psycopg2
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

os.setgid(arguments.gid)
os.setuid(arguments.uid)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

def column_exists(table, column):
    # Probe with a LIMIT 1 SELECT; failure aborts the transaction, so roll
    # back before returning.
    try:
        cursor.execute("SELECT %s FROM %s LIMIT 1" % (column, table))
        return True
    except psycopg2.ProgrammingError:
        db.rollback()
        return False

# Columns this migration adds ...
added = [column_exists("preferences", "per_system"),
         column_exists("preferences", "per_user"),
         column_exists("preferences", "per_repository"),
         column_exists("preferences", "per_filter"),
         column_exists("userpreferences", "repository"),
         column_exists("userpreferences", "filter")]

# ... and columns it removes.
removed = [column_exists("preferences", "default_string"),
           column_exists("preferences", "default_integer")]

if all(added) and not any(removed):
    # All expected modifications appear to have taken place already.
    sys.exit(0)
elif any(added) or not all(removed):
    # Some modifications appear to have taken place already, but not
    # all.  This is bad, and possibly unrecoverable.  It's probably
    # not a good idea to just run the commands below.
    sys.stderr.write("""\
The database schema appears to be in an inconsistent state!

Please see
  installation/migrations/dbschema.per-repository-or-filter-preferences.py
and try to figure out which of the commands in it to run.

Alternatively, restore a database backup from before the previous upgrade
attempt, and then try running upgrade.py again.
""")
    sys.exit(1)

# Drop the exiting 'userpreferences' PRIMARY KEY, since it conflicts with having
# multiple settings for different repositories and filters.
cursor.execute("""ALTER TABLE userpreferences
                         DROP CONSTRAINT userpreferences_pkey""")

# Add new columns to 'preferences'.
cursor.execute("""ALTER TABLE preferences
                          ADD per_system BOOLEAN NOT NULL DEFAULT TRUE,
                          ADD per_user BOOLEAN NOT NULL DEFAULT TRUE,
                          ADD per_repository BOOLEAN NOT NULL DEFAULT FALSE,
                          ADD per_filter BOOLEAN NOT NULL DEFAULT FALSE""")

# Add new columns to 'userpreferences'.
cursor.execute("""ALTER TABLE userpreferences
                        ALTER uid DROP NOT NULL,
                          ADD repository INTEGER REFERENCES repositories ON DELETE CASCADE,
                          ADD filter INTEGER REFERENCES filters ON DELETE CASCADE""")

# Move current system-wide default values over to the 'userpreferences' table as
# rows with uid=NULL.
cursor.execute("""INSERT INTO userpreferences (item, integer, string)
                       SELECT item, default_integer, default_string
                         FROM preferences""")

# Drop old default value columns from 'preferences'.
cursor.execute("""ALTER TABLE preferences
                         DROP default_string,
                         DROP default_integer""")

# Add new constraints to 'userpreferences'.
cursor.execute("""ALTER TABLE userpreferences
                          ADD CONSTRAINT check_uid_filter CHECK (filter IS NULL OR uid IS NOT NULL),
                          ADD CONSTRAINT check_repository_filter CHECK (repository IS NULL OR filter IS NULL)""")

# Add indexes used to check various uniqueness requirements involving NULL
# values.
cursor.execute("""CREATE UNIQUE INDEX userpreferences_item ON userpreferences (item) WHERE uid IS NULL AND repository IS NULL AND filter IS NULL""") cursor.execute("""CREATE UNIQUE INDEX userpreferences_item_uid ON userpreferences (item, uid) WHERE uid IS NOT NULL AND repository IS NULL AND filter IS NULL""") cursor.execute("""CREATE UNIQUE INDEX userpreferences_item_repository ON userpreferences (item, repository) WHERE uid IS NULL AND repository IS NOT NULL AND filter IS NULL""") cursor.execute("""CREATE UNIQUE INDEX userpreferences_item_uid_repository ON userpreferences (item, uid, repository) WHERE uid IS NOT NULL AND repository IS NOT NULL AND filter IS NULL""") cursor.execute("""CREATE UNIQUE INDEX userpreferences_item_uid_filter ON userpreferences (item, uid, filter) WHERE uid IS NOT NULL AND repository IS NULL AND filter IS NOT NULL""") db.commit() db.close() ================================================ FILE: installation/migrations/dbschema.review-constraints-tweaking.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import psycopg2 import argparse import os parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) import configuration db = psycopg2.connect(**configuration.database.PARAMETERS) cursor = db.cursor() try: cursor.execute("CREATE INDEX reviewmessageids_review ON reviewmessageids (review)") except psycopg2.ProgrammingError: # The index probably already exists. db.rollback() else: db.commit() cursor.execute("""ALTER TABLE branches DROP CONSTRAINT IF EXISTS branches_review_fkey, ADD CONSTRAINT branches_review_fkey FOREIGN KEY (review) REFERENCES reviews ON DELETE CASCADE""") cursor.execute("""ALTER TABLE checkbranchnotes DROP CONSTRAINT IF EXISTS checkbranchnotes_review_fkey, ADD CONSTRAINT checkbranchnotes_review_fkey FOREIGN KEY (review) REFERENCES reviews ON DELETE CASCADE""") db.commit() db.close() ================================================ FILE: installation/migrations/git.check-keepalive-references.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
# Migration: verify that every review's important commits (current head and
# all pre-rebase heads) are reachable in git, adding refs/keepalive/* refs
# where they are not.

import sys
import psycopg2
import json
import argparse
import os
import subprocess

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

os.setgid(arguments.gid)
os.setuid(arguments.uid)

data = json.load(sys.stdin)

os.environ["HOME"] = data["installation.paths.data_dir"]
os.chdir(os.environ["HOME"])

db = psycopg2.connect(database="critic")
cursor = db.cursor()

# All reviews with their repository, branch and current head commit, ordered
# per repository so the per-repository git state can be fetched once.
cursor.execute("""SELECT repositories.path, reviews.id, branches.id, branches.name, commits.sha1
                    FROM repositories
                    JOIN branches ON (branches.repository=repositories.id)
                    JOIN reviews ON (reviews.branch=branches.id)
                    JOIN commits ON (commits.id=branches.head)
                ORDER BY repositories.id, reviews.id""")

current_repository_path = None
keepalive_refs = None
branch_heads = None

sys.stdout.write("Verifying keepalive references ...\n")
sys.stdout.flush()

for repository_path, review_id, branch_id, branch_name, head_sha1 in cursor.fetchall():
    if repository_path != current_repository_path:
        # New repository: snapshot its keepalive refs and branch heads once.
        keepalive_refs = set(subprocess.check_output(
            [data["installation.prereqs.git"], "--git-dir=" + repository_path,
             "for-each-ref", "--format=%(objectname)",
             "refs/keepalive/"]).splitlines())

        # rsplit on the last ':' since "%(refname):%(objectname)" output may
        # in principle contain ':' in the ref name part.
        branch_heads = dict(
            line.rsplit(":", 1)
            for line in subprocess.check_output(
                [data["installation.prereqs.git"], "--git-dir=" + repository_path,
                 "for-each-ref", "--format=%(refname):%(objectname)",
                 "refs/heads/"]).splitlines())

        current_repository_path = repository_path

        sys.stdout.write("\r\x1b[K %s\n" % current_repository_path)

    sys.stdout.write("\r\x1b[K r/%d" % review_id)
    sys.stdout.flush()

    def add_keepalive(sha1, message):
        # Record locally so later checks in this repository see the new ref.
        keepalive_refs.add(sha1)

        try:
            # The trailing "0"*40 is the expected old value: the ref must not
            # already exist, so a concurrent change makes this fail.
            subprocess.check_output(
                [data["installation.prereqs.git"], "--git-dir=" + repository_path,
                 "update-ref", "refs/keepalive/" + sha1, sha1, "0" * 40],
                stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            what = "failed to add"
        else:
            what = "added"

        sys.stdout.write("\r\x1b[Kr/%d: %s keepalive ref: %s (%s)\n"
                         % (review_id, what, sha1, message))
        sys.stdout.flush()

    ####################################################################
    # Make sure the review branch in the Git repository references the #
    # expected commit, or that the commit it ought to reference is at  #
    # least kept alive.                                                #
    ####################################################################

    if branch_heads.get("refs/heads/" + branch_name) != head_sha1:
        if head_sha1 not in keepalive_refs:
            if "refs/heads/" + branch_name not in branch_heads:
                message = "missing review branch"
            else:
                message = "incorrect review branch"
            add_keepalive(head_sha1, message)

    ####################################################################
    # Make sure all "old head" commits from all past rebases of the    #
    # review are properly referenced by a keepalive ref.               #
    ####################################################################

    cursor.execute("""SELECT commits.id, commits.sha1
                        FROM commits
                        JOIN reviewrebases ON (reviewrebases.old_head=commits.id)
                       WHERE reviewrebases.review=%s
                         AND reviewrebases.new_head IS NOT NULL""",
                   (review_id,))

    for old_head_id, old_head_sha1 in cursor.fetchall():
        if old_head_sha1 not in keepalive_refs:
            # There might exist an "equivalent merge commit" that has
            # the recorded old head commit as one of its parent, and
            # that is kept alive.  Normally, that merge commit would be
            # recorded as the old head instead, but in old reviews this
            # is not the case if the merge was "clean".
            cursor.execute("""SELECT commits.sha1
                                FROM commits
                                JOIN edges ON (edges.child=commits.id)
                               WHERE edges.parent=%s""",
                           (old_head_id,))

            for candidate_sha1, in cursor:
                if candidate_sha1 in keepalive_refs:
                    # Note: Won't bother verifying that this actually is
                    # an equivalent merge commit, and not some random
                    # other commit with our old head as its parent.  If
                    # it's kept alive it's kept alive.
                    break
            else:
                # for/else: no kept-alive child found, so keep the old head
                # alive directly.
                add_keepalive(old_head_sha1, "rebase old head")

sys.stdout.write("\r\x1b[K")
sys.stdout.flush()

================================================
FILE: installation/migrations/git.clean-up-temporary-references.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

# Migration: delete left-over refs/temporary/* and refs/commit/* refs from
# every repository.

import sys
import psycopg2
import json
import argparse
import os
import subprocess

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

os.setgid(arguments.gid)
os.setuid(arguments.uid)

data = json.load(sys.stdin)

os.environ["HOME"] = data["installation.paths.data_dir"]
os.chdir(os.environ["HOME"])

db = psycopg2.connect(database="critic")
cursor = db.cursor()

cursor.execute("SELECT path FROM repositories")

for (path,) in cursor:
    temporary_refs = subprocess.check_output(
        [data["installation.prereqs.git"], "--git-dir=%s" % path,
         "for-each-ref", "--format=%(refname)",
         "refs/temporary/", "refs/commit/"]).splitlines()

    for temporary_ref in temporary_refs:
        subprocess.check_call(
            [data["installation.prereqs.git"], "--git-dir=%s" % path,
             "update-ref", "-d", temporary_ref])

    if temporary_refs:
        print "%s: purged %d temporary refs" % (path, len(temporary_refs))

================================================
FILE: installation/migrations/git.convert-replays-into-keepalives.py
================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import psycopg2 import json import argparse import os import subprocess parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) data = json.load(sys.stdin) os.environ["HOME"] = data["installation.paths.data_dir"] os.chdir(os.environ["HOME"]) db = psycopg2.connect(database="critic") cursor = db.cursor() cursor.execute("SELECT path FROM repositories") for (path,) in cursor: output = subprocess.check_output( [data["installation.prereqs.git"], "--git-dir=%s" % path, "for-each-ref", "--format=%(objectname):%(refname)", "refs/replays/"]) replay_refs = [line.split(":") for line in output.splitlines()] for sha1, ref_name in replay_refs: subprocess.check_call( [data["installation.prereqs.git"], "--git-dir=%s" % path, "update-ref", "refs/keepalive/" + sha1, sha1]) subprocess.check_call( [data["installation.prereqs.git"], "--git-dir=%s" % path, "update-ref", "-d", ref_name, sha1]) if replay_refs: print ("%s: converted %d replay refs into keepalives" % (path, len(replay_refs))) ================================================ FILE: installation/migrations/git.rename-keepalive-chain.py ================================================ # -*- mode: python; 
# encoding: utf-8 -*-
#
# Copyright 2014 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

# Migration script: moves the keepalive chain ref from refs/keepalive-chain
# to refs/internal/keepalive-chain in every repository.

import sys
import psycopg2
import json
import argparse
import os
import subprocess

OLD_KEEPALIVE_REF_CHAIN = "refs/keepalive-chain"
NEW_KEEPALIVE_REF_CHAIN = "refs/internal/keepalive-chain"

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

# Drop privileges before touching repositories or the database.
os.setgid(arguments.gid)
os.setuid(arguments.uid)

data = json.load(sys.stdin)

os.environ["HOME"] = data["installation.paths.data_dir"]
os.chdir(os.environ["HOME"])

db = psycopg2.connect(database="critic")
cursor = db.cursor()

cursor.execute("SELECT path FROM repositories")

for (path,) in cursor:
    try:
        # Create the new keepalive chain ref.
        subprocess.check_output(
            [data["installation.prereqs.git"], "--git-dir=%s" % path,
             "update-ref", NEW_KEEPALIVE_REF_CHAIN, OLD_KEEPALIVE_REF_CHAIN],
            stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError:
        # Assume this was because OLD_KEEPALIVE_REF_CHAIN didn't exist.
        pass
    else:
        # Delete the old keepalive chain ref.  This command fails if the old
        # ref's value changed between this command and the previous one.
        subprocess.check_call(
            [data["installation.prereqs.git"], "--git-dir=%s" % path,
             "update-ref", "-d", OLD_KEEPALIVE_REF_CHAIN,
             NEW_KEEPALIVE_REF_CHAIN])


================================================
FILE: installation/migrations/installation.config-pyc-file-permissions.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Martin Olsson
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

# Migration script: copies each configuration .py file's permission bits onto
# its compiled .pyc sibling, so compiled files are no more readable than their
# sources.

import os
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

os.setgid(arguments.gid)
os.setuid(arguments.uid)

import configuration

config_dir = os.path.dirname(configuration.__file__)

for entry in os.listdir(config_dir):
    if entry.endswith(".py"):
        if entry.startswith("_") and os.path.exists(os.path.join(config_dir, entry[1:])):
            # If the upgrade modifies a configuration file, say file.py, it
            # will keep a backup of the file stored as _file.py (also in the
            # configuration directory) and there won't be a pyc file for the
            # backup, so skip ahead to avoid unnecessarily printing the below warning.
            continue
        config_file = os.path.join(config_dir, entry)
        pyc_file = config_file + "c"
        try:
            # Mirror the .py file's mode bits onto the .pyc file.
            os.chmod(pyc_file, os.stat(config_file).st_mode)
        except Exception as e:
            # Best-effort: warn instead of aborting the whole upgrade.
            print("WARNING: installation.config-pyc-file-permissions.py "
                  "failed to restrict file permissions for '%s'. Please make "
                  "sure all .pyc files in the Critic configuration directory "
                  "exists, belongs to critic:critic and are chmod'd similar "
                  "to the corresponding .py file. The specific error "
                  "reported was: %s" % (pyc_file, e))


================================================
FILE: installation/migrations/news.filter-system-rewrite.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

# Migration script: inserts a news item describing the improved filter
# system, unless an identical item already exists (making it idempotent).

import sys
import psycopg2
import json
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

os.setgid(arguments.gid)
os.setuid(arguments.uid)

data = json.load(sys.stdin)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

# NOTE(review): line wrapping inside this text is reconstructed from a
# collapsed extraction; verify against the original file before relying on
# exact-match deduplication across versions.
text = """\
Improved Filters
================

Critic's Filters mechanism has been improved, in two significant ways:

* filter paths can now contain wildcards, and

* a third filter type, Ignored, has been added, that can be used to
  exclude some files or directories otherwise matched by other filters.

For more details, see the (new) tutorial on the subject of filters.

The UI for managing filters on your Home page has also been
significantly changed; now displaying all filter in all repositories
instead of only filters in a selected repository."""

cursor.execute("SELECT id FROM newsitems WHERE text=%s", (text,))

if cursor.fetchone():
    # Identical news item already exists.
    sys.exit(0)

cursor.execute("INSERT INTO newsitems (text) VALUES (%s)", (text,))

db.commit()
db.close()


================================================
FILE: installation/migrations/news.review-branch-archival.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 Jens Widell, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

# Migration script: inserts a news item announcing review branch archival,
# and prints an upgrade-time notice to the administrator.  Idempotent via the
# exact-text lookup below.

import sys
import psycopg2
import json
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

os.setgid(arguments.gid)
os.setuid(arguments.uid)

data = json.load(sys.stdin)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

# NOTE(review): line wrapping reconstructed; see note in the sibling news
# migrations.
text = """\
Review branch archival
======================

This Critic system now supports automatic archival of obsolete review
branches.  This means that review branch refs that belong to reviews
that are finished and closed, or dropped, are eventually deleted from
the repository.

For more information, see the [Review branch archival][tutorial]
tutorial.

From now on, archival of review branches is scheduled when reviews are
closed or dropped.  For each existing already closed or dropped reviews
in this system, archival will have been scheduled at a random time 2-6
weeks after the upgrade.  This news item's timestamp indicates when the
upgrade took place.

[tutorial]: /tutorial?item=archival
"""

cursor.execute("SELECT id FROM newsitems WHERE text=%s", (text,))

if cursor.fetchone():
    # Identical news item already exists.
    sys.exit(0)

cursor.execute("INSERT INTO newsitems (text) VALUES (%s)", (text,))

db.commit()
db.close()

# Also print a "news" bulletin to the system administrator that
# performs the upgrade:

print """
NOTE: This update adds a review branch archival mechanism, enabled by
default.  To find out more about it, including how to disable it,
please see the administration tutorial:

  http:///tutorial?item=administration#review_branch_archival
"""


================================================
FILE: installation/migrations/news.review-quick-search.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
# Migration script: inserts a news item announcing the review quick-search
# feature, unless an identical item already exists (idempotent).

import sys
import psycopg2
import json
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

os.setgid(arguments.gid)
os.setuid(arguments.uid)

data = json.load(sys.stdin)

import configuration

db = psycopg2.connect(**configuration.database.PARAMETERS)
cursor = db.cursor()

# NOTE(review): line wrapping inside this text reconstructed from a collapsed
# extraction; verify against the original file.
text = """\
Review Quick Search
===================

Critic's mechanism for searching for reviews has been upgraded.  The
existing search page has been made somewhat more user-friendly and
capable.

More significantly, a new "quick search" feature has been added, which
is a search dialog activated by pressing the F key on any Critic page
(for instance this one.)  This dialog allows input of a search query
and can be used to perform the same searches as the main search page.

For more details, see the (new) tutorial on the subject of searching."""

cursor.execute("SELECT id FROM newsitems WHERE text=%s", (text,))

if cursor.fetchone():
    # Identical news item already exists.
    sys.exit(0)

cursor.execute("INSERT INTO newsitems (text) VALUES (%s)", (text,))

db.commit()
db.close()


================================================
FILE: installation/migrations/preference.commit.diff.rulerColumn.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Rafał Chłodnicki, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
# Migration script: adds the commit.diff.rulerColumn integer preference
# (default 0 = ruler disabled).  Idempotent: exits early if present.

import sys
import psycopg2
import json
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("--uid", type=int)
parser.add_argument("--gid", type=int)

arguments = parser.parse_args()

os.setgid(arguments.gid)
os.setuid(arguments.uid)

data = json.load(sys.stdin)

db = psycopg2.connect(database="critic")
cursor = db.cursor()

# Make sure the preference doesn't already exist.
cursor.execute("SELECT 1 FROM preferences WHERE item=%s",
               ("commit.diff.rulerColumn",))

if cursor.fetchone():
    sys.exit(0)

cursor.execute("INSERT INTO preferences (item, type, default_integer, description) VALUES (%s, %s, %s, %s)",
               ("commit.diff.rulerColumn", "integer", 0,
                "The column at which a ruler is shown. Can be set to 0 to disable the ruler."))

db.commit()
db.close()


================================================
FILE: installation/paths.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
# installation/paths.py: decides and creates the directory layout Critic is
# installed into.  prepare() fills the module-level *_dir globals (from
# command-line arguments, interactive prompts, or — on upgrade — the existing
# configuration) and records them in the shared `data` dict; mkdirs() creates
# the directories; undo() removes whatever mkdir() created.

import os
import shutil

import installation

# Default locations, overridden by prepare().
etc_dir = "/etc/critic"
bin_dir = "/usr/bin"
install_dir = "/usr/share/critic"
data_dir = "/var/lib/critic"
cache_dir = "/var/cache/critic"
git_dir = "/var/git"
log_dir = "/var/log/critic"
run_dir = "/var/run/critic"

def prepare(mode, arguments, data):
    # Determine all installation paths and store them into `data`.  Returns
    # False (aborting installation) on an invalid command-line path argument.
    global etc_dir, install_dir, data_dir, cache_dir, git_dir, log_dir, run_dir

    if mode == "install":
        all_ok = True

        print """
Critic Installation: Paths
==========================
"""

        def is_good_dir(path):
            # Returns an error string, or None (falsy) if the path is usable.
            if not path:
                return "empty path"
            elif not path.startswith("/"):
                return "must be an absolute path"
            elif os.path.exists(path) and not os.path.isdir(path):
                return "exists and is not a directory"

        def is_new_dir(path):
            # Like is_good_dir(), but also rejects existing directories, to
            # catch attempts to re-install over an existing installation.
            error = is_good_dir(path)
            if error:
                return error
            if os.path.exists(path):
                return "directory already exists (NOTE: if Critic is already " \
                       "installed and you want to upgrade to the latest " \
                       "version of Critic, then run upgrade.py rather than " \
                       "re-running install.py)"

        # For each directory: a command-line argument is validated and used
        # as-is; otherwise the user is prompted (and all_ok is cleared so the
        # "All okay." shortcut message is skipped).
        if arguments.etc_dir:
            error = is_new_dir(arguments.etc_dir)
            if error:
                print "Invalid --etc-dir argument: %s." % error
                return False
            etc_dir = arguments.etc_dir
        else:
            all_ok = False
            etc_dir = installation.input.string(prompt="Where should Critic's configuration files be installed?",
                                                default=etc_dir,
                                                check=is_new_dir)

        if arguments.install_dir:
            error = is_new_dir(arguments.install_dir)
            if error:
                print "Invalid --install-dir argument: %s." % error
                return False
            install_dir = arguments.install_dir
        else:
            all_ok = False
            install_dir = installation.input.string(prompt="Where should Critic's source code be installed?",
                                                    default=install_dir,
                                                    check=is_new_dir)

        if arguments.data_dir:
            error = is_new_dir(arguments.data_dir)
            if error:
                print "Invalid --data-dir argument: %s." % error
                return False
            data_dir = arguments.data_dir
        else:
            all_ok = False
            data_dir = installation.input.string(prompt="Where should Critic's persistent data files live?",
                                                 default=data_dir,
                                                 check=is_new_dir)

        if arguments.cache_dir:
            error = is_new_dir(arguments.cache_dir)
            if error:
                print "Invalid --cache-dir argument: %s." % error
                return False
            cache_dir = arguments.cache_dir
        else:
            all_ok = False
            cache_dir = installation.input.string(prompt="Where should Critic's temporary data files live?",
                                                  default=cache_dir,
                                                  check=is_new_dir)

        if arguments.git_dir:
            error = is_new_dir(arguments.git_dir)
            if error:
                print "Invalid --git-dir argument: %s." % error
                return False
            git_dir = arguments.git_dir
        else:
            all_ok = False
            git_dir = installation.input.string(prompt="Where should Critic's Git repositories live?",
                                                default=git_dir,
                                                check=is_new_dir)

        if arguments.log_dir:
            error = is_new_dir(arguments.log_dir)
            if error:
                print "Invalid --log-dir argument: %s." % error
                return False
            log_dir = arguments.log_dir
        else:
            all_ok = False
            # Note: log/run dirs may pre-exist, hence is_good_dir here.
            log_dir = installation.input.string(prompt="Where should Critic's log files live?",
                                                default=log_dir,
                                                check=is_good_dir)

        if arguments.run_dir:
            error = is_new_dir(arguments.run_dir)
            if error:
                print "Invalid --run-dir argument: %s." % error
                return False
            run_dir = arguments.run_dir
        else:
            all_ok = False
            run_dir = installation.input.string(prompt="Where should Critic's runtime files live?",
                                                default=run_dir,
                                                check=is_good_dir)

        if all_ok:
            print "All okay."
    else:
        # Upgrade: read the paths back out of the existing installed
        # configuration instead of asking.
        import configuration

        def strip_identity(path):
            # Some configured paths have the system identity ("main")
            # appended; strip it to recover the base directory.
            if os.path.basename(path) == configuration.base.SYSTEM_IDENTITY:
                return os.path.dirname(path)
            else:
                return path

        etc_dir = strip_identity(configuration.paths.CONFIG_DIR)
        install_dir = configuration.paths.INSTALL_DIR
        data_dir = configuration.paths.DATA_DIR
        cache_dir = strip_identity(configuration.paths.CACHE_DIR)
        git_dir = configuration.paths.GIT_DIR
        log_dir = strip_identity(configuration.paths.LOG_DIR)
        run_dir = strip_identity(configuration.paths.RUN_DIR)

    data["installation.paths.etc_dir"] = etc_dir
    data["installation.paths.install_dir"] = install_dir
    data["installation.paths.data_dir"] = data_dir
    data["installation.paths.cache_dir"] = cache_dir
    data["installation.paths.git_dir"] = git_dir
    data["installation.paths.log_dir"] = log_dir
    data["installation.paths.run_dir"] = run_dir

    return True

# Directories created by mkdir(), in creation order, for undo().
created = []

def mkdir(path, mode=0750):
    # Create `path` (and missing parents, recursively) with `mode`, record it
    # in `created`, and always chown it to the critic system user/group.
    global created
    if not os.path.isdir(path):
        if not os.path.isdir(os.path.dirname(path)):
            mkdir(os.path.dirname(path), mode)
        if not installation.quiet:
            print "Creating directory '%s' ..." % path
        os.mkdir(path, mode)
        created.append(path)
    os.chown(path, installation.system.uid, installation.system.gid)

def mkdirs():
    # Create the whole directory tree used by a Critic installation.
    import stat
    mkdir(os.path.join(etc_dir, "main"))
    mkdir(bin_dir)
    mkdir(install_dir, 0755)
    mkdir(os.path.join(data_dir, "relay"))
    mkdir(os.path.join(data_dir, "temporary"))
    mkdir(os.path.join(data_dir, "outbox", "sent"), mode=0700)
    mkdir(os.path.join(cache_dir, "main", "highlight"))
    mkdir(git_dir)
    mkdir(os.path.join(log_dir, "main"))
    mkdir(os.path.join(run_dir, "main", "sockets"), mode=0755)
    mkdir(os.path.join(run_dir, "main", "wsgi"))
    if installation.config.coverage_dir:
        mkdir(installation.config.coverage_dir)
    # Group-writable plus setuid/setgid bits so repositories created under
    # git_dir inherit ownership.
    os.chmod(git_dir, 0770 | stat.S_ISUID | stat.S_ISGID)

def install(data):
    mkdirs()
    return True

def upgrade(arguments, data):
    mkdirs()
    return True

def undo():
    # Remove created directories in reverse order (children before parents).
    map(shutil.rmtree, reversed(created))


================================================
FILE: installation/prefs.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
# installation/prefs.py: installs and upgrades Critic's user preference
# definitions (src/data/preferences.json) in the database.  String-typed
# defaults live in userpreferences.string, everything else (ints, booleans)
# in userpreferences.integer; the system-wide default row has NULL uid and
# repository.

import os
import json
import textwrap
import subprocess

import installation

def add_preference(db, item, data, silent=False):
    # Insert a new preference definition plus its system-wide default value.
    # `data` is one entry of preferences.json: type/description/default and
    # an optional "relevance" dict of per-scope flags.
    relevance = data.get("relevance", {})
    cursor = db.cursor()
    cursor.execute("""INSERT INTO preferences (item, type, description,
                                               per_system, per_user,
                                               per_repository, per_filter)
                           VALUES (%s, %s, %s, %s, %s, %s, %s)""",
                   (item, data["type"], data["description"],
                    relevance.get("system", True),
                    relevance.get("user", True),
                    relevance.get("repository", False),
                    relevance.get("filter", False)))
    if data["type"] == "string":
        cursor.execute("""INSERT INTO userpreferences (item, string)
                               VALUES (%s, %s)""",
                       (item, data["default"]))
    else:
        # Booleans are stored as integers too.
        cursor.execute("""INSERT INTO userpreferences (item, integer)
                               VALUES (%s, %s)""",
                       (item, int(data["default"])))
    if not silent and not installation.quiet:
        print "Added preference: '%s'" % item

def update_preference(db, item, data, type_changed):
    # Update an existing preference's definition and its system-wide default
    # (the userpreferences row with NULL uid and repository).
    relevance = data.get("relevance", {})
    cursor = db.cursor()
    cursor.execute("""UPDATE preferences
                         SET type=%s,
                             description=%s,
                             per_system=%s,
                             per_user=%s,
                             per_repository=%s,
                             per_filter=%s
                       WHERE item=%s""",
                   (data["type"], data["description"],
                    relevance.get("system", True),
                    relevance.get("user", True),
                    relevance.get("repository", False),
                    relevance.get("filter", False),
                    item))
    if data["type"] == "string":
        cursor.execute("""UPDATE userpreferences
                             SET integer=NULL,
                                 string=%s
                           WHERE item=%s
                             AND uid IS NULL
                             AND repository IS NULL""",
                       (data["default"], item))
    else:
        cursor.execute("""UPDATE userpreferences
                             SET integer=%s,
                                 string=NULL
                           WHERE item=%s
                             AND uid IS NULL
                             AND repository IS NULL""",
                       (int(data["default"]), item))
    if type_changed:
        # Delete all per-user or per-repository overrides; they will be of an
        # incorrect type.
        cursor.execute("""DELETE FROM userpreferences
                           WHERE item=%s
                             AND (uid IS NOT NULL OR repository IS NOT NULL)""",
                       (item,))

def remove_preference(db, item):
    # Delete a preference definition and every stored value for it.
    cursor = db.cursor()
    cursor.execute("DELETE FROM userpreferences WHERE item=%s", (item,))
    cursor.execute("DELETE FROM preferences WHERE item=%s", (item,))

def load_preferences(db):
    # Read all preferences plus their system-wide defaults back out of the
    # database, in the same dict shape preferences.json uses (minus
    # "relevance").
    cursor = db.cursor()
    cursor.execute("""SELECT preferences.item, type, integer, string, description
                        FROM preferences
                        JOIN userpreferences USING (item)
                       WHERE uid IS NULL
                         AND repository IS NULL""")
    preferences = {}
    for item, item_type, default_integer, default_string, description in cursor:
        data = { "type": item_type,
                 "description": description }
        if item_type == "string":
            data["default"] = default_string
        elif item_type == "boolean":
            data["default"] = bool(default_integer)
        else:
            data["default"] = default_integer
        preferences[item] = data
    return preferences

def install(data):
    # Fresh install: load preferences.json and insert every preference.
    path = os.path.join(installation.root_dir, "src", "data", "preferences.json")
    with open(path) as preferences_file:
        preferences = json.load(preferences_file)

    import dbutils

    with installation.utils.as_critic_system_user():
        with dbutils.Database() as db:
            for item in sorted(preferences.keys()):
                add_preference(db, item, preferences[item], silent=True)
            db.commit()

    if not installation.quiet:
        print "Added %d preferences." % len(preferences)

    return True

def upgrade(arguments, data):
    # Upgrade: three-way merge between the preferences shipped with the old
    # version (read from git via the recorded commit), the new version's
    # preferences.json, and what is currently in the database.  Asks the
    # administrator before overwriting values that appear locally modified.
    git = data["installation.prereqs.git"]
    path = "src/data/preferences.json"

    old_sha1 = data["sha1"]
    old_file_sha1 = installation.utils.get_file_sha1(git, old_sha1, path)

    new_sha1 = installation.utils.run_git([git, "rev-parse", "HEAD"],
                                          cwd=installation.root_dir).strip()
    new_file_sha1 = installation.utils.get_file_sha1(git, new_sha1, path)

    if old_file_sha1:
        old_source = installation.utils.run_git([git, "cat-file", "blob", old_file_sha1],
                                                cwd=installation.root_dir)
        old_preferences = json.loads(old_source)
    else:
        # Old enough version: preferences.json didn't exist yet.
        old_preferences = {}

    preferences_path = os.path.join(installation.root_dir, path)
    with open(preferences_path) as preferences_file:
        new_preferences = json.load(preferences_file)

    def update_preferences(old_preferences, new_preferences, db_preferences):
        # `db` is bound later, by the with-statement at the bottom of
        # upgrade(), before this function is called.
        for item in new_preferences.keys():
            if item not in db_preferences:
                add_preference(db, item, new_preferences[item])
            elif db_preferences[item] != new_preferences[item]:
                type_changed = False

                if db_preferences[item]["type"] != new_preferences[item]["type"]:
                    # If the type has changed, we really have to update it; code
                    # will depend on it having the right type.
                    update = True
                    type_changed = True
                elif item in old_preferences \
                        and db_preferences[item] == old_preferences[item]:
                    # The preference in the database is identical to what we
                    # originally installed; there should be no harm in updating
                    # it.
                    update = True
                elif db_preferences[item]["default"] == new_preferences[item]["default"]:
                    # The default value is the same => only description or flags
                    # has changed.  Probably safe to silently update.
                    update = True
                else:
                    if item in old_preferences \
                            and db_preferences[item]["default"] != old_preferences[item]["default"]:
                        # The default value appears to have been changed in the
                        # database.  Ask the user before overwriting it with an
                        # updated default value.
                        print
                        print textwrap.fill(
                            "The default value for the preference '%s' has been "
                            "changed in this version of Critic, but it appears to "
                            "also have been modified in the database." % item)
                        default = False
                    else:
                        # The default value has changed, and we don't know if
                        # the value is what was originally installed, because we
                        # don't know what was originally installed.  Ask the
                        # user before overwriting the current value.
                        print
                        print textwrap.fill(
                            "The default value for the preference '%s' has been "
                            "changed in this version of Critic." % item)
                        default = True

                    print
                    print "  Value in database: %r" % db_preferences[item]["default"]
                    print "  New/updated value: %r" % new_preferences[item]["default"]
                    print

                    update = installation.input.yes_or_no(
                        "Would you like to update the database with the new value?",
                        default=default)

                if update:
                    update_preference(db, item, new_preferences[item], type_changed)

        # Only check for preferences to remove if the preference data has
        # changed.  Otherwise, every upgrade would ask to remove any extra
        # preferences in the database.
        if old_file_sha1 != new_file_sha1:
            for item in db_preferences.keys():
                if item not in new_preferences:
                    if item in old_preferences \
                            and db_preferences[item] == old_preferences[item]:
                        # The preference in the database is identical to what we
                        # originally installed; there should be no harm in
                        # updating it.
                        remove = True
                    else:
                        print
                        print textwrap.fill(
                            "The preference '%s' exists in the database but "
                            "not in the installation data, meaning it would "
                            "not have been added to the database if this "
                            "version of Critic was installed from scratch."
                            % item)
                        print

                        remove = installation.input.yes_or_no(
                            "Would you like to remove it from the database?",
                            default=True)
                    if remove:
                        remove_preference(db, item)

        db.commit()

    import dbutils

    with installation.utils.as_critic_system_user():
        with dbutils.Database() as db:
            update_preferences(old_preferences, new_preferences,
                               load_preferences(db))

    return True


================================================
FILE: installation/prereqs.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

# installation/prereqs.py: checks for (and optionally apt-get installs) the
# executables and Python libraries Critic needs.  Each Prerequisite registers
# itself as a module attribute under its own name (e.g. prereqs.git), which
# install()/upgrade() use to record paths into the shared `data` dict.

import sys
import os
import os.path
import re
import subprocess

import installation

# Used by Prerequisite.__init__ to self-register as a module attribute.
this_module = sys.modules[__name__]

def find_executable(name):
    # Search $PATH for an executable file named `name`; return its full path
    # or None.
    for search_path in os.environ["PATH"].split(":"):
        path = os.path.join(search_path, name)
        if os.path.isfile(path) and os.access(path, os.X_OK):
            return path
    return None

headless = False
aptget = None            # path to apt-get, False if declined/unusable
aptget_approved = False  # user said yes to using apt-get
aptget_updated = False   # "apt-get update" has been run this session
need_blankline = False
installed_packages = []  # (name, version) of everything we installed

def blankline():
    global need_blankline
    if need_blankline:
        print
        need_blankline = False

def install_packages(*packages):
    # Try to install `packages` via apt-get (asking for permission the first
    # time).  Returns a {package: version} dict of what got installed, or
    # False if apt-get is unavailable or was declined.
    global aptget, aptget_approved, aptget_updated, need_blankline, all_ok

    if aptget is None:
        aptget = find_executable("apt-get")
        if aptget and not aptget_approved:
            all_ok = False
            print """\
Found 'apt-get' executable in your $PATH.  This script can attempt to
install missing software using it.
"""
            aptget_approved = installation.input.yes_or_no(
                prompt="Do you want to use 'apt-get' to install missing packages?",
                default=True)
            if not aptget_approved:
                aptget = False

    if aptget:
        aptget_env = os.environ.copy()
        if headless:
            aptget_env["DEBIAN_FRONTEND"] = "noninteractive"
        if not aptget_updated:
            subprocess.check_output(
                [aptget, "-qq", "update"],
                env=aptget_env)
            aptget_updated = True
        aptget_output = subprocess.check_output(
            [aptget, "-qq", "-y", "install"] + list(packages),
            env=aptget_env)
        installed = {}
        # Parse apt-get's "Setting up <pkg> (<version>) ..." lines to find
        # out what actually got installed.
        for line in aptget_output.splitlines():
            match = re.match(r"^Setting up ([^ ]+) \(([^)]+)\) \.\.\.", line)
            if match:
                package_name, version = match.groups()
                if package_name in packages:
                    need_blankline = True
                    installed_packages.append((package_name, version))
                    installed[package_name] = version
                    print "Installed: %s (%s)" % (package_name, version)
        return installed
    else:
        return False

class Prerequisite(object):
    # Base class: a named prerequisite with the apt packages that provide it
    # and a message to print when it is missing.  Registers itself as a
    # module attribute named after the prerequisite.
    def __init__(self, name, packages, message):
        self.name = name
        self.packages = packages
        self.message = message
        setattr(this_module, name, self)

    def install(self):
        # check() is defined by subclasses.  Try apt-get (if already
        # approved) first, then print the help message and try again (which
        # may trigger the approval prompt).
        if self.check():
            return True
        if self.packages is None:
            print "ERROR: Installing '%s' is not supported!" % self.name
            return False
        if aptget_approved and install_packages(*self.packages):
            if self.check():
                return True
        blankline()
        print self.message
        print
        if not aptget_approved:
            install_packages(*self.packages)
            if self.check():
                return True
        print "ERROR: Installing '%s' failed!" % self.name
        return False

class Executable(Prerequisite):
    # A prerequisite satisfied by an executable in $PATH.
    def __init__(self, name, packages, message):
        super(Executable, self).__init__(name, packages, message)
        self.path = None

    def check(self):
        if not self.path:
            self.path = find_executable(self.name)
        return bool(self.path)

    def install(self):
        if self.check():
            return True
        blankline()
        print "No '%s' executable found in $PATH" % self.name
        print
        return super(Executable, self).install()

class PythonLibrary(Prerequisite):
    # A prerequisite satisfied by an importable Python module.
    def __init__(self, name, packages, message):
        super(PythonLibrary, self).__init__(name, packages, message)
        self.available = False

    def check(self):
        if not self.available:
            try:
                # Import in a child interpreter so a fresh sys.path is used
                # and a failed import can't pollute this process.
                subprocess.check_output(
                    [sys.executable, "-c", "import " + self.name],
                    stderr=subprocess.STDOUT)
                self.available = True
            except subprocess.CalledProcessError:
                pass
        return self.available

    def install(self):
        if self.check():
            return True
        blankline()
        print "Failed to import '%s'" % self.name
        print
        return super(PythonLibrary, self).install()

class CustomCheck(Prerequisite):
    """Perform a custom check, and otherwise install packages"""

    def __init__(self, callback, name, packages, message):
        super(CustomCheck, self).__init__(name, packages, message)
        self.callback = callback
        self.available = False

    def check(self):
        if not self.available:
            if self.callback():
                self.available = True
        return self.available

    def install(self):
        if self.check():
            return True
        return super(CustomCheck, self).install()

# This one is hardcoded to the running interpreter (rather than what we might
# find in the search path.)
Executable("python", None, None).path = sys.executable

prerequisites = [
    # We won't bother trying to install this; it won't be missing.
    Executable("tar", None, None),

    Executable("git", ["git-core"], """\
Make sure the Git version control system is installed.  Is Debian/Ubuntu the
package you need to install is 'git-core' (or 'git' in newer versions, but
'git-core' typically still works.)

The source code can be downloaded here:

  https://github.com/git/git"""),

    Executable("psql", ["postgresql", "postgresql-client"], """\
Make sure the PostgreSQL database server and its client utilities are
installed.  In Debian/Ubuntu, the packages you need to install are
'postgresql' and 'postgresql-client'."""),

    PythonLibrary("psycopg2", ["python-psycopg2"], """\
Failed to import the 'psycopg2' module, which is used to access the PostgreSQL
database from Python.  In Debian/Ubuntu, the module is provided by the
'python-psycopg2' package.

The source code can be downloaded here:

  http://www.initd.org/psycopg/download/"""),

    PythonLibrary("requests", ["python-requests"], """\
Failed to import the 'requests' module, which is used to perform URL requests.
In Debian/Ubuntu, the module is provided by the 'python-requests' package.

The source code can be downloaded here:

  https://github.com/kennethreitz/requests"""),

    PythonLibrary("pygments", ["python-pygments"], """\
Failed to import the 'pygments' module, which is used for syntax highlighting.
In Debian/Ubuntu, the module is provided by the 'python-pygments' package.

The source code can be downloaded here:

  http://pygments.org/download/"""),
]

# The passlib library is only needed if Critic is configured to do
# authentication, so doesn't go into the list above yet.
passlib_library = PythonLibrary("passlib", ["python-passlib"], """\
Failed to import the 'passlib' module, which is required when Critic is
configured to handle user authentication itself.  In Debian/Ubuntu, the module
is provided by the 'python-passlib' package.

The source code can be downloaded here:

  https://pypi.python.org/pypi/passlib""")

def check_mod_wsgi():
    return os.path.isfile("/etc/apache2/mods-available/wsgi.load")

# Things to check and install if Apache is to be used.
apache_prerequisites = [
    Executable("apache2ctl", ["apache2", "libapache2-mod-wsgi"], """\
Make sure the Apache web server is installed.  In Debian/Ubuntu, the package
you need to install is 'apache2'.

In addition, the mod_wsgi Apache module needs to be installed.  In
Debian/Ubuntu, the package you need to install is 'libapache2-mod-wsgi'."""),

    # Additional executables that we use but that should have been installed
    # along with apache2ctl.
    Executable("a2enmod", None, None),
    Executable("a2ensite", None, None),
    Executable("a2dismod", None, None),
    Executable("a2dissite", None, None),

    # This extra check is really only needed if Apache was already installed
    # (and thus not installed by the prerequisite above).
    CustomCheck(check_mod_wsgi, "mod_wsgi", ["libapache2-mod-wsgi"], """\
The WSGI Apache module (mod_wsgi) doesn't appear to be installed.  Make sure
it's installed.  In Debian/Ubuntu, the package you need to install is
'libapache2-mod-wsgi'.

The source code can be downloaded here:

  http://code.google.com/p/modwsgi/wiki/DownloadTheSoftware?tm=2"""),
]

# Things to check and install if nginx is to be used.
nginx_prerequisites = [
    Executable("nginx", ["nginx"], """\
Make sure the nginx web server is installed.  In Debian/Ubuntu, the package
you need to install is 'nginx'."""),
]

# Things to check and install if uWSGI is to be used.
uwsgi_prerequisites = [
    Executable("uwsgi", ["uwsgi", "uwsgi-plugin-python"], """\
Make sure the uWSGI application container server is installed.  In
Debian/Ubuntu, the package you need to install is 'uwsgi'.

In addition, the uWSGI Python plugin needs to be installed.  In Debian/Ubuntu,
the package you need to install is 'uwsgi-plugin-python'."""),

    # This extra check is really only needed if uWSGI was already installed (and
    # thus not installed by the prerequisite above).
    Executable("uwsgi_python", ["uwsgi-plugin-python"], """\
The uWSGI Python plugin doesn't appear to be installed.  Make sure it's
installed.  In Debian/Ubuntu, the package you need to install is
'uwsgi-plugin-python'."""),
]

def resolve_prerequisites():
    # Extend the base prerequisite list according to the chosen configuration
    # (authentication mode and web server integration).
    if installation.config.auth_mode == "critic":
        prerequisites.append(passlib_library)
    if installation.config.web_server_integration == "apache":
        prerequisites.extend(apache_prerequisites)
    if "nginx" in installation.config.web_server_integration:
        prerequisites.extend(nginx_prerequisites)
    if "uwsgi" in installation.config.web_server_integration:
        prerequisites.extend(uwsgi_prerequisites)

def prepare(mode, arguments, data):
    global headless
    headless = arguments.headless
    return True

def install(data):
    resolve_prerequisites()

    print """
Critic Installation: Prerequisites
==================================
"""

    if not all(prerequisite.install() for prerequisite in prerequisites):
        return False

    if installed_packages:
        blankline()
        print "Installed %d packages." % len(installed_packages)
        print
    else:
        print "All prerequisites available."

    # Record the resolved executable paths for later installation steps.
    data["installation.prereqs.python"] = python.path
    data["installation.prereqs.git"] = git.path
    data["installation.prereqs.tar"] = tar.path

    return True

def upgrade(arguments, data):
    import configuration

    # On upgrade, start from the executables recorded in the installed
    # configuration rather than searching $PATH.
    python.path = configuration.executables.PYTHON
    git.path = configuration.executables.GIT
    tar.path = configuration.executables.TAR

    resolve_prerequisites()

    if not all(prerequisite.install() for prerequisite in prerequisites):
        return False

    data["installation.prereqs.python"] = python.path
    data["installation.prereqs.git"] = git.path
    data["installation.prereqs.tar"] = tar.path

    return True


================================================
FILE: installation/process.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import subprocess def check_input(args, stdin, **kwargs): assert isinstance(stdin, str) child = subprocess.Popen(args, stdin=subprocess.PIPE, **kwargs) stdout, stderr = child.communicate(stdin) if child.returncode != 0: raise subprocess.CalledProcessError(child.returncode, args, None) return stdout ================================================ FILE: installation/qs/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. __doc__ = "Quick-start utilities." import sqlite import data ================================================ FILE: installation/qs/data.py ================================================ import sys import os import pwd import grp import subprocess import json import multiprocessing import installation def config(key): return "installation.config." + key def system(key): return "installation.system." + key def admin(key): return "installation.admin." + key def database(key): return "installation.database." 
+ key def prereqs(key): return "installation.prereqs." + key def paths(key): return "installation.paths." + key def smtp(key): return "installation.smtp." + key def extensions(key): return "installation.extensions." + key def username(): return pwd.getpwuid(os.getuid()).pw_name def groupname(): return grp.getgrgid(pwd.getpwuid(os.getuid()).pw_gid).gr_name def which(name): return subprocess.check_output("which " + name, shell=True).strip() def generate(arguments, database_path): data = { config("password_hash_schemes"): [installation.config.default_password_hash_scheme], config("default_password_hash_scheme"): installation.config.default_password_hash_scheme, config("minimum_password_hash_time"): installation.config.minimum_password_hash_time, config("minimum_rounds"): { installation.config.default_password_hash_scheme: 100 }, config("auth_database"): "internal", system("username"): username(), system("email"): username() + "@localhost", system("groupname"): groupname(), admin("username"): username(), admin("email"): username() + "@localhost", admin("fullname"): username(), system("hostname"): "localhost", system("recipients"): arguments.system_recipient or [username() + "@localhost"], config("auth_mode"): "critic", config("session_type"): "cookie", config("allow_anonymous_user"): True, config("allow_user_registration"): True, config("verify_email_addresses"): arguments.testing, config("access_scheme"): "http", config("enable_access_tokens"): True, config("repository_url_types"): ["http"], config("default_encodings"): ["utf-8", "latin-1"], database("driver"): "sqlite", database("parameters"): { "database": database_path }, config("is_development"): True, config("coverage_dir"): None, prereqs("python"): sys.executable, prereqs("git"): which("git"), prereqs("tar"): which("tar"), paths("etc_dir"): installation.paths.etc_dir, paths("install_dir"): installation.paths.install_dir, paths("data_dir"): installation.paths.data_dir, paths("cache_dir"): 
installation.paths.cache_dir, paths("log_dir"): installation.paths.log_dir, paths("run_dir"): installation.paths.run_dir, paths("git_dir"): installation.paths.git_dir, smtp("host"): arguments.smtp_host, smtp("port"): arguments.smtp_port, smtp("username"): json.dumps(arguments.smtp_username), smtp("password"): json.dumps(arguments.smtp_password), smtp("use_ssl"): False, smtp("use_starttls"): False, config("is_quickstart"): True, config("is_testing"): arguments.testing, config("ldap_url"): "", config("ldap_search_base"): "", config("ldap_create_user"): False, config("ldap_username_attribute"): "", config("ldap_fullname_attribute"): "", config("ldap_email_attribute"): "", config("ldap_cache_max_age"): 600, extensions("enabled"): False, extensions("critic_v8_jsshell"): "NOT_INSTALLED", extensions("default_flavor"): "js/v8", config("highlight.max_workers"): multiprocessing.cpu_count(), # Setting changeset.max_workers to 1 is a workaround for some race # conditions causing duplicate rows in (at least) the files table. config("changeset.max_workers"): 1, config("archive_review_branches"): True, config("web_server_integration"): "none" } def provider(name): prefix = "provider_%s." 
% name return { config(prefix + "enabled"): False, config(prefix + "allow_user_registration"): False, config(prefix + "verify_email_addresses"): False, config(prefix + "client_id"): None, config(prefix + "client_secret"): None, config(prefix + "bypass_createuser"): False, config(prefix + "redirect_uri"): None } data.update(provider("github")) data.update(provider("google")) return data ================================================ FILE: installation/qs/sqlite.py ================================================ import sqlite3 import os import re import datetime import installation IntegrityError = sqlite3.IntegrityError ProgrammingError = sqlite3.ProgrammingError OperationalError = sqlite3.OperationalError def convert_date(value): try: return datetime.datetime.fromtimestamp(int(value)) except ValueError: return datetime.datetime.strptime(value, "%Y-%m-%d") def convert_datetime(value): try: return datetime.datetime.fromtimestamp(int(value)) except ValueError: return datetime.datetime.strptime(value, "%Y-%m-%d %H:%M:%S") def convert_interval(value): try: return datetime.timedelta(seconds=int(value)) except ValueError: return 0 def convert_boolean(value): return bool(int(value)) sqlite3.register_converter("DATE", convert_date) sqlite3.register_converter("TIMESTAMP", convert_datetime) sqlite3.register_converter("INTERVAL", convert_interval) sqlite3.register_converter("BOOLEAN", convert_boolean) def sqltokens(command): return re.findall(r"""\$\d+|!=|<>|<=|>=|'(?:''|[^'])*'|"(?:[^"])*"|\w+|[^\s]""", command) def sqlcommands(filename): path = os.path.join(installation.root_dir, filename) script = [] with open(path) as script_file: for line in script_file: fragment, _, comment = line.strip().partition("--") fragment = fragment.strip() if fragment: script.append(fragment) script = " ".join(script) return filter(None, map(str.strip, script.split(";"))) def replace(query, old, new): tokens = query if isinstance(query, list) else sqltokens(query) old = sqltokens(old) new = 
sqltokens(new) start = 0 try: while True: for anchor_offset, anchor_token in enumerate(old): if anchor_token[0] != "$": break offset = map(str.upper, tokens).index(old[anchor_offset].upper(), start) - anchor_offset data = {} for index in range(len(old)): if old[index][0] == "$": data[old[index]] = tokens[offset + index] elif tokens[offset + index].upper() != old[index].upper(): start = offset + 1 break else: if data: use_new = map(lambda token: data.get(token, token), new) else: use_new = new tokens[offset:offset + len(old)] = use_new start = offset + len(use_new) except (IndexError, ValueError): return " ".join(tokens) class Cursor(object): def __init__(self, connection): self.cursor = connection.cursor() def massage(self, query, parameters): self.flags = set() while "=ANY (%s)" in query: for index, parameter in enumerate(parameters): if isinstance(parameter, (list, set, tuple)): query = query.replace("=ANY (%s)", " IN (%s)" % ", ".join(["?"] * len(parameter)), 1) parameters[index:index + 1] = parameter break else: assert False, "Failed to translate all occurrences of '=ANY (%s)' in query!" 
if query.endswith(" RETURNING id"): self.flags.add("returning_id") query = query[:-len(" RETURNING id")] tokens = sqltokens(query.replace("%s", "?")) replace( tokens, "EXTRACT ('epoch' FROM NOW() - $1)", "strftime('%s', 'now') - $1") replace( tokens, "EXTRACT ('epoch' FROM (MIN($1) - NOW()))", "MIN($1) - strftime('%s', 'now')") replace(tokens, "NOW()", "cast(strftime('%s', 'now') as integer)") replace(tokens, "TRUE", "1") replace(tokens, "FALSE", "0") replace(tokens, "'1 day'", str(24 * 60 * 60)) replace(tokens, "next::text", "datetime(next, 'unixepoch')") replace(tokens, "commit", '"commit"') replace(tokens, "transaction", '"transaction"') replace(tokens, "MD5($1)", "$1") replace(tokens, "FETCH FIRST ROW ONLY", "") replace(tokens, "ASC NULLS FIRST", "ASC") replace(tokens, "DESC NULLS LAST", "DESC") replace(tokens, "chaincomments(commentchains.id)", """(SELECT COUNT(*) FROM comments WHERE chain=commentchains.id AND state='current')""") replace(tokens, "chainunread(commentchains.id, ?)", """(SELECT COUNT(*) FROM commentstoread JOIN comments ON (comments.id=commentstoread.comment) WHERE comments.chain=commentchains.id AND comments.state='current' AND commentstoread.uid=?)""") replace(tokens, "character_length(", "length(") replace(tokens, "FOR UPDATE NOWAIT", "") replace(tokens, "FOR UPDATE", "") replace(tokens, "~", "regexp") replace(tokens, "INTERVAL ?", "interval_seconds(?)") return " ".join(tokens) def execute(self, query, parameters=()): parameters = list(parameters) query = self.massage(query, parameters) try: self.cursor.execute(query, parameters) except sqlite3.OperationalError as error: raise Exception("Invalid query: %r %r" % (error.message, query)) except sqlite3.InterfaceError as error: raise Exception("Invalid parameters: %r %r for %r" % (error.message, parameters, query)) if "returning_id" in self.flags: self.cursor.execute("SELECT last_insert_rowid()") def executemany(self, query, parameters=()): if "=ANY (%s)" in query: # We'll rewrite the query 
string depending on the parameters, so # must use execute() for each set of parameters. for values in parameters: self.execute(query, values) return parameters = [list(values) for values in parameters] query = self.massage(query, None) self.cursor.executemany(query, parameters) def fetchone(self): return self.cursor.fetchone() def fetchall(self): return self.cursor.fetchall() def __iter__(self): return iter(self.cursor) def regexp(pattern, string): return re.search(pattern, string) is not None def interval_seconds(string): match = re.match(r"(?:(-?\d+)\s+days?)?\s*" r"(?:(-?\d+)\s+hours?)?\s*" r"(?:(-?\d+)\s+minutes?)?\s*" r"(?:(-?\d+)\s+seconds?)?", string, re.I) days, hours, minutes, seconds = match.groups() result = 0 if days is not None: result += 86400 * int(days) if hours is not None: result += 3600 * int(hours) if minutes is not None: result += 60 * int(minutes) if seconds is not None: result += int(seconds) return result class Connection(object): def __init__(self, **parameters): self.connection = sqlite3.connect( detect_types=sqlite3.PARSE_DECLTYPES, **parameters) self.connection.create_function("regexp", 2, regexp) self.connection.create_function("interval_seconds", 1, interval_seconds) self.connection.text_factory = str # Foreign keys are disabled by default by SQLite; this enables them. # This is a safe-guard against incorrect inserts or updates, but most # importantly, it makes cascading deletes work, which we depend on. 
self.connection.execute("PRAGMA foreign_keys=ON") def cursor(self): return Cursor(self.connection) def commit(self): return self.connection.commit() def rollback(self): return self.connection.rollback() def close(self): return self.connection.close() def connect(**parameters): return Connection(**parameters) def import_schema(database_path, filenames, quiet=False, verbose=False): failed = False enumerations = {} commands = [] db = sqlite3.connect(database_path) for filename in filenames: commands.extend(sqlcommands(filename)) for command in commands: if command.startswith("SET "): # Skip SET; only used to control the output from psql. continue elif re.match(r"CREATE (?:UNIQUE )?INDEX \w+_(?:md5|gin)", command) \ or re.match(r"CREATE (?:UNIQUE )?INDEX .* WHERE ", command): # Fancy index stuff not supported by sqlite. Since they are # optional (sans performance requirements) we just skip them. continue elif command.startswith("CREATE TABLE ") \ or command.startswith("CREATE INDEX ") \ or command.startswith("CREATE UNIQUE INDEX ") \ or command.startswith("CREATE VIEW ") \ or command.startswith("INSERT INTO "): tokens = sqltokens(command) replace(tokens, "DEFAULT NOW()", "DEFAULT (cast(strftime('%s', 'now') as integer))") replace(tokens, "TRUE", "1") replace(tokens, "FALSE", "0") replace(tokens, "INTERVAL '0'", "0") replace(tokens, "SERIAL PRIMARY KEY", "INTEGER PRIMARY KEY") replace(tokens, "commit", '"commit"') replace(tokens, "transaction", '"transaction"') for name, values in enumerations.items(): replace(tokens, "$1 " + name, "$1 text check ($1 in (%s))" % ", ".join(values)) command = " ".join(tokens) elif re.match(r"CREATE TYPE \w+ AS ENUM", command): tokens = sqltokens(command) name = tokens[2] values = filter(lambda token: re.match("'.*'$", token), tokens[tokens.index("(") + 1:tokens.index(")")]) enumerations[name] = values continue elif command.startswith("ALTER TABLE "): # Used to add constraints after table creation, which sqlite doesn't # support. 
continue else: print "Unrecognized:", command failed = True if verbose: words = command.split() for word in words: if word.upper() != word: print word break print word, try: db.execute(command) except Exception as error: print "Failed:", command print " " + str(error) failed = True if not failed: db.commit() return not failed ================================================ FILE: installation/smtp.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import os import json import installation host = None port = None username = None password = None use_ssl = None use_starttls = None def add_arguments(mode, parser): if mode == "install": parser.add_argument("--smtp-host", help="SMTP server hostname (or IP)") parser.add_argument("--smtp-port", help="SMTP server port") parser.add_argument("--smtp-no-auth", action="store_true", help="no SMTP authentication required") parser.add_argument("--smtp-username", help="SMTP authentication username") parser.add_argument("--smtp-password", help="SMTP authentication password") # Using smtplib.SMTP_SSL() parser.add_argument("--smtp-ssl", dest="smtp_use_ssl", action="store_const", const=True, help="use SSL(/TLS) when connecting to SMTP server") parser.add_argument("--smtp-no-ssl-tls", dest="smtp_use_ssl", action="store_const", const=False, help="don't use SSL(/TLS) when connecting to SMTP server") # Using smtplib.SMTP() + starttls() parser.add_argument("--smtp-starttls", dest="smtp_use_starttls", action="store_const", const=True, help="use STARTTLS when connecting to SMTP server") parser.add_argument("--smtp-no-starttls", dest="smtp_use_starttls", action="store_const", const=False, help="don't use STARTTLS when connecting to SMTP server") parser.add_argument("--skip-testmail", action="store_true", help="do not send a test e-mail to verify that given SMTP settings actually work") parser.add_argument("--skip-testmail-check", action="store_true", help="do not ask whether the test e-mail arrived correctly") def prepare(mode, arguments, data): global host, port, username, password, use_ssl, use_starttls if mode == "install" or "installation.smtp.host" not in data: print """ Critic Installation: SMTP ========================= Critic needs an SMTP server to use for outgoing email traffic. Emails are sent to regular Critic users to notify about changes in reviews, as well as to the system administrator to alert about problems. 
""" host = "localhost" use_ssl = False use_starttls = False def valid_port(value): try: if not (0 < int(value) < 65536): raise ValueError except ValueError: return "must be a valid TCP port number" if mode == "install": if arguments.smtp_use_ssl and arguments.smtp_use_starttls: print "Invalid arguments: only one of --smtp-ssl and --smtp-starttls can be enabled." return False first = True else: # This case, an upgrade where installation.smtp.host is not recorded # in "data"; happens when upgrading from a pre-5f0389f commit to # 5f0389f or later. Since upgrade.py doesn't have --smtp-* command # line arguments, ignore "arguments" variable and go straight to # manual input. first = False while True: if first and arguments.smtp_use_ssl is not None: use_ssl = arguments.smtp_use_ssl else: use_ssl = installation.input.yes_or_no("Use SSL when connecting to the SMTP server?", default=use_ssl) if not use_ssl: if first and arguments.smtp_use_starttls is not None: use_starttls = arguments.smtp_use_starttls else: use_starttls = installation.input.yes_or_no("Use STARTTLS when connecting to the SMTP server?", default=use_starttls) if first and arguments.smtp_host: host = arguments.smtp_host else: host = installation.input.string("SMTP host:", default=host) if first and arguments.smtp_port: error = valid_port(arguments.smtp_port) if error: print "Invalid --smtp-port argument: %s." 
% error return False port = arguments.smtp_port else: if port is None: if use_ssl: port = "465" else: port = "25" port = installation.input.string("SMTP port:", default=port, check=valid_port) need_password = False if first and arguments.smtp_username: username = arguments.smtp_username need_password = True elif (not first or not arguments.smtp_no_auth) \ and installation.input.yes_or_no("Does the SMTP server require authentication?", default=username is not None): username = installation.input.string("SMTP username:", default=username) need_password = True if need_password: if first and arguments.smtp_password: password = arguments.smtp_password else: password = installation.input.password("SMTP password:", default=password, twice=False) print if (not first or not arguments.skip_testmail) \ and installation.input.yes_or_no("Do you want to send a test email to verify the SMTP configuration?", default=True if first else None): import smtplib import email.mime.text import email.header recipient = installation.input.string("To which email address?", default=installation.admin.email) failed = None try: try: if use_ssl: connection = smtplib.SMTP_SSL(host, port, timeout=5) else: connection = smtplib.SMTP(host, port, timeout=5) except: failed = "Couldn't connect to the SMTP server." raise if use_starttls: try: connection.starttls() except: failed = "Failed to start TLS." raise if username is not None: try: connection.login(username, password) except: failed = "Failed to login." raise message = email.mime.text.MIMEText("This is the configuration test email from Critic.", "plain", "us-ascii") message["From"] = email.header.Header("Critic System <%s>" % installation.system.email) message["To"] = email.header.Header(recipient) message["Subject"] = email.header.Header("Test email from Critic") try: connection.sendmail(installation.system.email, [recipient], message.as_string()) except: failed = "Failed to send the email." 
raise try: connection.quit() except: failed = "Failed to close connection." raise print print "Test email sent to %s." % recipient print except Exception as exception: if not failed: failed = str(exception) if failed: print """ Couldn't send the test email: %s Please check the configuration! """ % failed elif (first and arguments.skip_testmail_check) \ or installation.input.yes_or_no("Did the test email arrive correctly?") \ or not installation.input.yes_or_no("Do you want to modify the configuration?", default=True): break else: break first = False port = int(port) else: import configuration host = configuration.smtp.HOST port = configuration.smtp.PORT use_ssl = configuration.smtp.USE_SSL use_starttls = configuration.smtp.USE_STARTTLS credentials_path = os.path.join(configuration.paths.CONFIG_DIR, "configuration/smtp-credentials.json") try: with open(credentials_path) as credentials_file: credentials = json.load(credentials_file) username = credentials["username"] password = credentials["password"] except: username = getattr(configuration.smtp, "USERNAME") password = getattr(configuration.smtp, "PASSWORD") data["installation.smtp.host"] = host data["installation.smtp.port"] = port data["installation.smtp.username"] = json.dumps(username) data["installation.smtp.password"] = json.dumps(password) data["installation.smtp.use_ssl"] = use_ssl data["installation.smtp.use_starttls"] = use_starttls return True def finish(mode, arguments, data): del data["installation.smtp.username"] del data["installation.smtp.password"] ================================================ FILE: installation/system.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import pwd import grp import subprocess import argparse import installation hostname = None username = "critic" email = None uid = None groupname = "critic" gid = None create_system_user = None created_system_user = False create_system_group = None created_system_group = False def fetch_uid_gid(): global uid, gid uid = pwd.getpwnam(username).pw_uid gid = grp.getgrnam(groupname).gr_gid def add_arguments(mode, parser): if mode != "install": parser.add_argument("--system-recipient", action="append", dest="system_recipients", help=argparse.SUPPRESS) return parser.add_argument("--system-hostname", action="store", help="FQDN of the system") parser.add_argument("--system-username", action="store", help="name of system user to run as") parser.add_argument("--force-create-system-user", action="store_true", help=("don't prompt for permission to create a new " "system user if doesn't exist")) parser.add_argument("--system-email", action="store", help="address used as sender of emails") parser.add_argument("--system-groupname", action="store", help="name of system group to run as") parser.add_argument("--force-create-system-group", action="store_true", help=("don't prompt for permission to create a new " "system group if it doesn't exist")) parser.add_argument("--system-recipient", action="append", dest="system_recipients", metavar="SYSTEM_RECIPIENT", help=("email recipient of automatic messages from " "the system")) def prepare(mode, arguments, data): global hostname, username, email, create_system_user global groupname, create_system_group global uid, gid if mode == 
"install": print """ Critic Installation: System =========================== """ if arguments.system_hostname: hostname = arguments.system_hostname else: try: hostname = subprocess.check_output(["hostname", "--fqdn"]).strip() except: pass hostname = installation.input.string(prompt="What is the machine's FQDN?", default=hostname) while True: if arguments.system_username: username = arguments.system_username else: username = installation.input.string(prompt="What system user should Critic run as?", default=username) try: pwd.getpwnam(username) user_exists = True except: user_exists = False if user_exists: print """ The system user '%s' already exists. """ % username if installation.input.yes_or_no(prompt="Use the existing system user '%s'?" % username, default=True): create_system_user = False break else: print """ The system user '%s' doesn't exists. """ % username if arguments.force_create_system_user or installation.input.yes_or_no(prompt="Create a system user named '%s'?" % username, default=True): create_system_user = True break while True: if arguments.system_groupname: groupname = arguments.system_groupname else: groupname = installation.input.string(prompt="What system group should Critic run as?", default=groupname) try: grp.getgrnam(groupname) group_exists = True except: group_exists = False if group_exists: print """ The system group '%s' already exists. """ % groupname if installation.input.yes_or_no(prompt="Use the existing system group '%s'?" % groupname, default=True): create_system_group = False break else: print """ The system group '%s' doesn't exists. """ % groupname if arguments.force_create_system_group or installation.input.yes_or_no(prompt="Create a system group named '%s'?" 
% groupname, default=True): create_system_group = True break if arguments.system_email: email = arguments.system_email else: email = installation.input.string(prompt="What address should be used as the sender of emails from the system?", default=("%s@%s" % (username, hostname))) else: import configuration hostname = configuration.base.HOSTNAME username = configuration.base.SYSTEM_USER_NAME email = configuration.base.SYSTEM_USER_EMAIL try: groupname = configuration.base.SYSTEM_GROUP_NAME except AttributeError: groupname = data["installation.system.groupname"] fetch_uid_gid() data["installation.system.hostname"] = hostname data["installation.system.username"] = username data["installation.system.email"] = email data["installation.system.groupname"] = groupname return True def install(data): global uid, gid if create_system_group: print "Creating group '%s' ..." % groupname subprocess.check_call(["addgroup", "--quiet", "--system", groupname]) if create_system_user: print "Creating user '%s' ..." % username subprocess.check_call( ["adduser", "--quiet", "--system", "--ingroup=%s" % groupname, "--home=%s" % installation.paths.data_dir, "--disabled-login", username]) uid = pwd.getpwnam(username).pw_uid gid = grp.getgrnam(groupname).gr_gid return True def undo(): if created_system_user: print "Deleting user '%s' ..." % username subprocess.check_call(["deluser", "--system", username]) if created_system_group: print "Deleting group '%s' ..." 
% groupname subprocess.check_call(["delgroup", "--system", groupname]) ================================================ FILE: installation/templates/apache/site.both ================================================ WSGIApplicationGroup %%{GLOBAL} WSGIProcessGroup critic-main WSGIDaemonProcess critic-main processes=2 \ threads=25 \ home=%(installation.paths.install_dir)s \ python-path=%(installation.paths.etc_dir)s/main:%(installation.paths.install_dir)s \ user=%(installation.system.username)s \ group=%(installation.system.groupname)s ServerAdmin %(installation.admin.email)s ServerName %(installation.system.hostname)s WSGIImportScript %(installation.paths.install_dir)s/wsgistartup.py \ process-group=critic-main \ application-group=%%{GLOBAL} WSGIScriptAlias / %(installation.paths.install_dir)s/wsgi.py WSGIPassAuthorization %(installation.apache.pass_auth)s # Possible values include: debug, info, notice, warn, error, crit, # alert, emerg. LogLevel warn ErrorLog %(installation.paths.log_dir)s/main/error.log CustomLog %(installation.paths.log_dir)s/main/access.log combined Alias /static-resource/ "%(installation.paths.install_dir)s/resources/" Options FollowSymLinks AllowOverride None Order allow,deny Allow from all ExpiresActive On ExpiresDefault A2592000 ServerAdmin %(installation.admin.email)s ServerName %(installation.system.hostname)s # Uncomment these lines, and create a suitable ssl.conf file, to # actually enable SSL support. #SSLEngine on #Include /etc/apache2/ssl/ssl.conf WSGIImportScript %(installation.paths.install_dir)s/wsgistartup.py \ process-group=critic-main \ application-group=%%{GLOBAL} WSGIScriptAlias / %(installation.paths.install_dir)s/wsgi.py WSGIPassAuthorization %(installation.apache.pass_auth)s # Possible values include: debug, info, notice, warn, error, crit, # alert, emerg. 
LogLevel warn ErrorLog %(installation.paths.log_dir)s/main/error.log CustomLog %(installation.paths.log_dir)s/main/access.log combined Alias /static-resource/ "%(installation.paths.install_dir)s/resources/" Options FollowSymLinks AllowOverride None Order allow,deny Allow from all ExpiresActive On ExpiresDefault A2592000 ================================================ FILE: installation/templates/apache/site.http ================================================ ServerAdmin %(installation.admin.email)s ServerName %(installation.system.hostname)s WSGIApplicationGroup %%{GLOBAL} WSGIProcessGroup critic-main WSGIDaemonProcess critic-main processes=2 \ threads=25 \ home=%(installation.paths.install_dir)s \ python-path=%(installation.paths.etc_dir)s/main:%(installation.paths.install_dir)s \ user=%(installation.system.username)s \ group=%(installation.system.groupname)s WSGIImportScript %(installation.paths.install_dir)s/wsgistartup.py \ process-group=critic-main \ application-group=%%{GLOBAL} WSGIScriptAlias / %(installation.paths.install_dir)s/wsgi.py WSGIPassAuthorization %(installation.apache.pass_auth)s # Possible values include: debug, info, notice, warn, error, crit, # alert, emerg. 
LogLevel warn ErrorLog %(installation.paths.log_dir)s/main/error.log CustomLog %(installation.paths.log_dir)s/main/access.log combined Alias /static-resource/ "%(installation.paths.install_dir)s/resources/" Options FollowSymLinks AllowOverride None Order allow,deny Allow from all ExpiresActive On ExpiresDefault A2592000 ================================================ FILE: installation/templates/apache/site.https ================================================ ServerAdmin %(installation.admin.email)s ServerName %(installation.system.hostname)s RewriteEngine on RewriteCond %%{HTTP_HOST} (.*) RewriteRule ^(.*) https://%%1$1 [L,R,NE] ServerAdmin %(installation.admin.email)s ServerName %(installation.system.hostname)s # Uncomment these lines, and create a suitable ssl.conf file, to # actually enable SSL support. #SSLEngine on #Include /etc/apache2/ssl/ssl.conf WSGIApplicationGroup %%{GLOBAL} WSGIProcessGroup critic-main WSGIDaemonProcess critic-main processes=2 \ threads=25 \ home=%(installation.paths.install_dir)s \ python-path=%(installation.paths.etc_dir)s/main:%(installation.paths.install_dir)s \ user=%(installation.system.username)s \ group=%(installation.system.groupname)s WSGIImportScript %(installation.paths.install_dir)s/wsgistartup.py \ process-group=critic-main \ application-group=%%{GLOBAL} WSGIScriptAlias / %(installation.paths.install_dir)s/wsgi.py WSGIPassAuthorization %(installation.apache.pass_auth)s # Possible values include: debug, info, notice, warn, error, crit, # alert, emerg. 
LogLevel warn ErrorLog %(installation.paths.log_dir)s/main/error.log CustomLog %(installation.paths.log_dir)s/main/access.log combined Alias /static-resource/ "%(installation.paths.install_dir)s/resources/" Options FollowSymLinks AllowOverride None Order allow,deny Allow from all ExpiresActive On ExpiresDefault A2592000 ================================================ FILE: installation/templates/configuration/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import base import auth import paths import services import mimetypes import executables import limits import extensions import database import smtp import debug ================================================ FILE: installation/templates/configuration/auth.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations under # the License. # Accepted password hash schemes. They need to be supported by the passlib # Python package; see http://packages.python.org/passlib for details. PASSWORD_HASH_SCHEMES = %(installation.config.password_hash_schemes)r # Default password hash scheme. Must be included in PASSWORD_HASH_SCHEMES. DEFAULT_PASSWORD_HASH_SCHEME = %(installation.config.default_password_hash_scheme)r # (Approximate) minimum password hash time in seconds. Higher means safer # passwords (more difficult to decrypt using brute-force) but slower sign-in # operation. MINIMUM_PASSWORD_HASH_TIME = %(installation.config.minimum_password_hash_time)r # Calibrated minimum rounds per password hash scheme. MINIMUM_ROUNDS = %(installation.config.minimum_rounds)r # External authentication providers. PROVIDERS = { # GitHub OAuth-based authentication. "github": { "enabled": %(installation.config.provider_github.enabled)r, # Allow authenticated user to create a Critic user. "allow_user_registration": %(installation.config.provider_github.allow_user_registration)r, # Verify user email addresses provided by GitHub. "verify_email_addresses": %(installation.config.provider_github.verify_email_addresses)r, # Client ID and secret. These are generated by registering an # application at https://github.com/settings/applications/new. "client_id": %(installation.config.provider_github.client_id)r, "client_secret": %(installation.config.provider_github.client_secret)r, # Bypass /createuser on first sign in, creating a user automatically. "bypass_createuser": %(installation.config.provider_github.bypass_createuser)r, # Authentication callback URI. This same URI must be provided # to GitHub when registering the application. The path # component must be "/oauth/github". "redirect_uri": %(installation.config.provider_github.redirect_uri)r }, # Google OAuth-based authentication. 
"google": { "enabled": %(installation.config.provider_google.enabled)r, # Allow authenticated user to create a Critic user. "allow_user_registration": %(installation.config.provider_google.allow_user_registration)r, # Verify user email addresses provided by Google. "verify_email_addresses": %(installation.config.provider_google.verify_email_addresses)r, # Client ID and secret. These are generated by creating a project at # https://cloud.google.com/console/project, and then creating an OAuth2 # client id using the project administration UI. "client_id": %(installation.config.provider_google.client_id)r, "client_secret": %(installation.config.provider_google.client_secret)r, # Bypass /createuser on first sign in, creating a user automatically. "bypass_createuser": %(installation.config.provider_google.bypass_createuser)r, # Authentication callback URI. This same URI must be provided # to Google when creating the OAuth2 client id. The path # component must be "/oauth/google". "redirect_uri": %(installation.config.provider_google.redirect_uri)r }, } # Authentication databases. DATABASES = { # Using Critic's own user database for authentication. "internal": {}, # Using an LDAP database for authentication. "ldap": { # Input fields. # # Each element is a tuple containing: # [0]: True if the field should use # [1]: Internal field identifier # [2]: Field label # [3]: (Optional) Longer description / help text "fields": [ (False, "username", "Username:"), (True, "password", "Password:"), ], # LDAP server URL. "url": "%(installation.config.ldap_url)s", # Use TLS when connecting to LDAP server. "use_tls": True, # Credentials field. # # Identifier of the field whose value will be used as the credentials # (e.g. password) in the bind request used for authentication. "credentials": "password", # The following two values are all interpreted as Python format strings # that can reference field values, e.g. using "%%(username)s". 
The input # values will have been escaped for safe usage in LDAP expressions. # LDAP search base. "search_base": "%(installation.config.ldap_search_base)s", # LDAP search filter. "search_filter": "(uid=%%(username)s)", # The following settings control if and how Critic user records are # created after successful authentication of a user. # If true, Critic user records are created automatically if # authentication succeeds but a matching record is not found. "create_user": %(installation.config.ldap_create_user)r, # User name LDAP attribute. # # This is the LDAP attribute whose value is used as the Critic username, # both when looking for an existing user record and when creating a new # one (if one isn't found.) # # If the attribute is missing or empty it will be considered an # authentication error. "username_attribute": "%(installation.config.ldap_username_attribute)s", # Full name LDAP attribute. # # This is the LDAP attribute to use as the (initial) full name when # creating a new Critic user record. It is not used if an existing user # record is found. # # If the attribute is missing or empty, the user is created with the # username as full name. "fullname_attribute": "%(installation.config.ldap_fullname_attribute)s", # Email LDAP attribute. # # This is the LDAP attribute to use as the (initial) primary email # address when creating a new Critic user record. It is not used if an # existing user record is found. # # If the attribute is missing or empty, the user is created with no # primary email address. "email_attribute": "%(installation.config.ldap_email_attribute)s", # List of required LDAP groups. # # If the list is empty, no group membership is required. "require_groups": [ # { # # Distinguished name of the required group. # "dn": "cn=SomeGroup,ou=Groups,dc=example,dc=com", # # # Group attribute containing the list of members. # "members_attribute": "memberUid", # # # Value to search for in the list of members. 
# # # # The value is interpreted as a Python format string, and can # # reference field values. It can also reference the # # distinguished name of the user signing in as "%%(dn)s". # "member_value": "%%(username)s", # }, ], # Maximum age of cached successful authentication attempts, in seconds. # If set to zero, caching is disabled altogether. "cache_max_age": %(installation.config.ldap_cache_max_age)r, }, } DATABASE = %(installation.config.auth_database)r ENABLE_ACCESS_TOKENS = %(installation.config.enable_access_tokens)r ================================================ FILE: installation/templates/configuration/base.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. # The name of the system identity which this configuration applies to. SYSTEM_IDENTITY = "main" # The name of the system user that Critic runs as. SYSTEM_USER_NAME = "%(installation.system.username)s" # The email address to use in the "Sender:" header in all generated # emails, and in the "From:" header in emails unless there's a real # user whose email address it makes sense to use instead. SYSTEM_USER_EMAIL = "%(installation.system.email)s" # The name of the system group that Critic runs as. SYSTEM_GROUP_NAME = "%(installation.system.groupname)s" # List of recipients of system messages such as automatic error # reports generated when unexpected errors occur. 
SYSTEM_RECIPIENTS = %(installation.system.recipients)r # The primary FQDN of the server. This is used when generating message IDs for # emails, and should *not* be different in different system identities, since # then email threading will not work properly. HOSTNAME = "%(installation.system.hostname)s" # The way Critic identifies/authenticates users: "host", "critic" or the name of # one of the supported external authentication providers. AUTHENTICATION_MODE = "%(installation.config.auth_mode)s" # If AUTHENTICATION_MODE="critic", type of session: "httpauth" or "cookie". If # AUTHENTICATION_MODE=, this must be "cookie". SESSION_TYPE = "%(installation.config.session_type)s" # If AUTHENTICATION_MODE!="host" and SESSION_TYPE="cookie", maximum age of # session in seconds. Zero means no maximum age; session is valid until user # logs out. SESSION_MAX_AGE = 0 # Allow (restricted) anonymous access to the system. Only supported if # AUTHENTICATION_MODE!="host" and SESSION_TYPE="cookie". ALLOW_ANONYMOUS_USER = %(installation.config.allow_anonymous_user)r # Web server (HTTPD) integration. # # Supported alternatives: # # "apache" => Apache (2.2 or 2.4) with mod_wsgi. # "nginx+uwsgi" => nginx with uWSGI as WSGI back-end. # "uwsgi" => uWSGI as both HTTP front-end and WSGI back-end. # "none" => no supported integration (manual by system administrator). WEB_SERVER_INTEGRATION = "%(installation.config.web_server_integration)s" # Access scheme: "http", "https" or "both". ACCESS_SCHEME = "%(installation.config.access_scheme)s" # Supported repository URL types (when displayed in UI and in emails): # # "git" => "git://hostname/path.git" # "http" => "http://hostname/path.git" or "https://hostname/path.git" # "ssh" => "ssh://hostname/path.git" # "host" => "hostname:/path.git" # # where 'hostname' is the system's FQDN and 'path.git' is the repository's path # relative configuration.paths.GIT_DIR. 
#
# The 'http' choice means HTTP or HTTPS depending on the ACCESS_SCHEME setting
# and whether the user is anonymous or not.
#
# Note: Only 'http' is currently supported natively by Critic. For 'git' to
# work, the system administrator must configure 'git daemon' to run manually.
# For 'ssh' and 'host' to work (they mean the same thing, only with different
# syntax) system user accounts must be created, and SSH access provided. See
# the system administration tutorial for more information.
REPOSITORY_URL_TYPES = %(installation.config.repository_url_types)r

# Default encodings to attempt to decode text (such as source code)
# as, in order of decreasing precedence. The encoding names should be
# valid for use as the encoding argument to Python's str.decode()
# function.
DEFAULT_ENCODINGS = %(installation.config.default_encodings)r

# Allow (unattended) user registration. If False, user registration can still
# be enabled for a specific external user authentication provider; see auth.py.
ALLOW_USER_REGISTRATION = %(installation.config.allow_user_registration)r

# Regular expression (source) that user names provided by new users must match.
# A None value is equivalent to a pattern that matches all strings. Empty user
# names or user names containing only white-space characters are not allowed,
# regardless of this setting.
#
# Note that users created by the system administrator using criticctl are
# not subjected to this restriction.
USER_NAME_PATTERN = r"^\w[-\._\w]*\w$"

# Description of above pattern, shown to the user if a provided user name fails
# to match the pattern.
USER_NAME_PATTERN_DESCRIPTION = (
    "Must contain only alpha-numerics, periods, underscores or dashes, and "
    "must start and end with alpha-numerics, and be at least two characters "
    "long.")

# Require verification of email addresses provided by users before sending
# emails to them.
This does not affect email addresses set by the system # administrator via the 'criticctl' utility or via the web interface. VERIFY_EMAIL_ADDRESSES = %(installation.config.verify_email_addresses)r # Archive review branches belonging to closed or dropped reviews a configurable # amount of time after the review was closed or dropped. Archiving a review # branch means deleting the review branch ref, but the commits on the branch are # kept alive for all eternity. An archived branch can be resurrected at any # time, and is resurrected automatically should the review be reopened. # # Archiving branches avoids an ever increasing number of refs in repositories # that over time leads to performance degradation. ARCHIVE_REVIEW_BRANCHES = %(installation.config.archive_review_branches)r ================================================ FILE: installation/templates/configuration/database.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. # The database server: "postgresql" for a real install, "sqlite" for a quick- # start. DRIVER = %(installation.database.driver)r # Dictionary whose members are passed as keyword arguments to # psycopg2.connect(). 
PARAMETERS = %(installation.database.parameters)r ================================================ FILE: installation/templates/configuration/debug.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. # True if this is a development installation of the system; False if it is a # production installation. Causes stack traces from unexpected exceptions to be # displayed to all users rather than only to those with the "developer" role. # Also changes the favicon and the color of the "Opera" text in page headers # from red to black. IS_DEVELOPMENT = %(installation.config.is_development)r # True if this is an installation by the automatic testing framework. IS_TESTING = %(installation.config.is_testing)r # True if this is an instance started using the installation/quickstart.py # script. IS_QUICKSTART = %(installation.config.is_quickstart)r # Directory to write code coverage results to. If None, code coverage is not # written, and more importantly, not measured in the first place. 
COVERAGE_DIR = %(installation.config.coverage_dir)r ================================================ FILE: installation/templates/configuration/executables.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. # Python executable. PYTHON = "%(installation.prereqs.python)s" # Git executable. GIT = "%(installation.prereqs.git)s" # Add these to the environment when running Git commands GIT_ENV = { } # Tar executable. TAR = "%(installation.prereqs.tar)s" # JSShell executable (only needed for extensions support.) JSSHELL = None ================================================ FILE: installation/templates/configuration/extensions.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import configuration import os.path # Whether extension support is enabled. If False, the rest of the # configuration in this file is irrelevant. ENABLED = %(installation.extensions.enabled)s # Where to search for system extensions. SYSTEM_EXTENSIONS_DIR = os.path.join(configuration.paths.DATA_DIR, "extensions") # Name of directory under users' $HOME in which to search for user extensions. # If set to None, user extensions support is disabled. USER_EXTENSIONS_DIR = "CriticExtensions" FLAVORS = { "js/v8": { "executable": "%(installation.extensions.critic_v8_jsshell)s", "library": os.path.join(configuration.paths.INSTALL_DIR, "library", "js", "v8") } } DEFAULT_FLAVOR = "%(installation.extensions.default_flavor)s" # Directory into which extension version snapshots are installed. INSTALL_DIR = os.path.join(configuration.paths.DATA_DIR, "extension-snapshots") # Directory into which extension repository work copies are created. WORKCOPY_DIR = os.path.join(configuration.paths.DATA_DIR, "temporary", "EXTENSIONS") # Long timeout, in seconds. Used for extension "Page" roles. LONG_TIMEOUT = 300 # Short timeout, in seconds. Used for all other roles. SHORT_TIMEOUT = 5 ================================================ FILE: installation/templates/configuration/limits.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
# When a file is added, and it has more lines than these limits, # output a "File was added." placeholder instead of the entire file, # with an option to fetch all the lines. The _RECOGNIZED option is # for a file in a recognized (and syntax highlighted) language, the # _UNRECOGNIZED option is for other files. MAXIMUM_ADDED_LINES_RECOGNIZED = 8000 MAXIMUM_ADDED_LINES_UNRECOGNIZED = 2000 # Reject any single ref update that causes more than this many new # commits to be added to the repository. The likely cause for hitting # this limit is pushing a branch to the wrong repository, which just # causes bloat in the receiving repository. PUSH_COMMIT_LIMIT = 10000 # For branches containing more commits than this, fall back to simpler # branch log rendering for performance reasons. MAXIMUM_REACHABLE_COMMITS = 4000 # Maximum number of commits when /createreview is loaded with the # 'branch' URI parameter to create a review of all commits on a branch. MAXIMUM_REVIEW_COMMITS = 2000 ================================================ FILE: installation/templates/configuration/mimetypes.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
MIMETYPES = { "txt": "text/plain", "html": "text/html", "xml": "application/xml", "xsl": "application/xml", "svg": "image/svg+xml", "css": "text/css", "js": "text/javascript", "json": "application/json", "png": "image/png", "gif": "image/gif", "jpg": "image/jpeg", "ico": "image/vnd.microsoft.icon", "bmp": "image/x-bmp" } ================================================ FILE: installation/templates/configuration/paths.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import configuration import os.path # Directory where system configuration is stored. CONFIG_DIR = os.path.join("%(installation.paths.etc_dir)s", configuration.base.SYSTEM_IDENTITY) # Directory where the main system is installed. INSTALL_DIR = "%(installation.paths.install_dir)s" # Directory under which data files of a more permanent nature are # stored. DATA_DIR = "%(installation.paths.data_dir)s" # Directory under which cache files that can be discarded at any time # are stored. CACHE_DIR = os.path.join("%(installation.paths.cache_dir)s", configuration.base.SYSTEM_IDENTITY) # Directory in which log files are stored. LOG_DIR = os.path.join("%(installation.paths.log_dir)s", configuration.base.SYSTEM_IDENTITY) # Directory in which pid files are stored. 
RUN_DIR = os.path.join("%(installation.paths.run_dir)s", configuration.base.SYSTEM_IDENTITY) # Directory in which WSGI daemon process pid files are stored. WSGI_PIDFILE_DIR = os.path.join(RUN_DIR, "wsgi") # Directory in which Unix socket files are created. SOCKETS_DIR = os.path.join(RUN_DIR, "sockets") # Directory where the main (public) git repositories are stored. GIT_DIR = "%(installation.paths.git_dir)s" # Directory in which emails are stored pending delivery. OUTBOX = os.path.join(DATA_DIR, "outbox") ================================================ FILE: installation/templates/configuration/services.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import configuration import os.path def service(name, address=0, module=0, pidfile_path=0, logfile_path=0, loglevel=0): if address == 0: address = os.path.join(configuration.paths.SOCKETS_DIR, name + ".unix") if module == 0: module = "background." 
+ name if pidfile_path == 0: pidfile_path = os.path.join(configuration.paths.RUN_DIR, name + ".pid") if logfile_path == 0: logfile_path = os.path.join(configuration.paths.LOG_DIR, name + ".log") if loglevel == 0: loglevel = "info" return { "name": name, "address": address, "module": module, "pidfile_path": pidfile_path, "logfile_path": logfile_path, "loglevel": loglevel } HIGHLIGHT = service(name="highlight") CHANGESET = service(name="changeset") GITHOOK = service(name="githook") BRANCHTRACKER = service(name="branchtracker", address=None) MAILDELIVERY = service(name="maildelivery", address=None) WATCHDOG = service(name="watchdog", address=None) MAINTENANCE = service(name="maintenance", address=None) EXTENSIONTASKS = service(name="extensiontasks", address=None) EXTENSIONRUNNER = service(name="extensionrunner") SERVICEMANAGER = service(name="servicemanager") HIGHLIGHT["cache_dir"] = os.path.join(configuration.paths.CACHE_DIR, "highlight") HIGHLIGHT["min_context_length"] = 5 HIGHLIGHT["max_context_length"] = 256 HIGHLIGHT["max_workers"] = %(installation.config.highlight.max_workers)d HIGHLIGHT["compact_at"] = (3, 15) CHANGESET["max_workers"] = %(installation.config.changeset.max_workers)d CHANGESET["rss_limit"] = 1024 ** 3 CHANGESET["purge_at"] = (2, 15) # Timeout (in seconds) passed to smtplib.SMTP(). 
MAILDELIVERY["timeout"] = 10 WATCHDOG["rss_soft_limit"] = 1024 ** 3 WATCHDOG["rss_hard_limit"] = 2 * WATCHDOG["rss_soft_limit"] MAINTENANCE["maintenance_at"] = (4, 0) EXTENSIONRUNNER["cached_processes"] = 5 SERVICEMANAGER["services"] = [HIGHLIGHT, CHANGESET, GITHOOK, BRANCHTRACKER, MAILDELIVERY, WATCHDOG, MAINTENANCE, EXTENSIONTASKS, EXTENSIONRUNNER] ================================================ FILE: installation/templates/configuration/smtp-credentials.json ================================================ { "username": %(installation.smtp.username)s, "password": %(installation.smtp.password)s } ================================================ FILE: installation/templates/configuration/smtp.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. HOST = %(installation.smtp.host)r PORT = %(installation.smtp.port)r USE_SSL = %(installation.smtp.use_ssl)r USE_STARTTLS = %(installation.smtp.use_starttls)r MAX_ATTEMPTS = 10 ================================================ FILE: installation/templates/criticctl ================================================ #!%(installation.prereqs.python)s # -*- mode: python -*- import sys import argparse import os import pwd import grp # To avoid accidentally creating files owned by root. 
sys.dont_write_bytecode = True system_user_uid = pwd.getpwnam("%(installation.system.username)s").pw_uid system_user_gid = grp.getgrnam("%(installation.system.groupname)s").gr_gid try: os.setegid(system_user_gid) os.seteuid(system_user_uid) except OSError: print >>sys.stderr, "ERROR: Failed to set UID = %(installation.system.username)s. Run as root?" sys.exit(1) argv = sys.argv[1:] global_argv = [] command = None command_argv = [] parser = argparse.ArgumentParser( description="Critic administration interface", usage="%%(prog)s [-h] [--etc-dir ETC_DIR] [--identify IDENTITY] COMMAND [options]", add_help=False) parser.add_argument("--help", "-h", action="store_true", help="show this help message and exit") parser.add_argument("--etc-dir", "-e", default="%(installation.paths.etc_dir)s", help="Critic configuration directory [default=%(installation.paths.etc_dir)s]") parser.add_argument("--identity", "-i", default="main", help="system identity to manage [default=main]") while argv: argument = argv[0] if argument in ("--help", "-h"): global_argv.append(argument) del argv[0] continue elif argument in ("--etc-dir", "-e", "--identity", "-i"): global_argv.extend(argv[:2]) del argv[:2] continue elif argument.startswith("--etc-dir=") \ or argument.startswith("-e") \ or argument.startswith("--identity=") \ or argument.startswith("-i"): global_argv.append(argument) del argv[0] continue elif argument.startswith("-"): # Invalid argument; add it to global_argv so that parser.parse_args() # below fails. 
global_argv.append(argument) break if argv: command = argv[0] command_argv = argv[1:] arguments = parser.parse_args(global_argv) etc_path = os.path.join(arguments.etc_dir, arguments.identity) class Error(Exception): pass try: if not os.access(arguments.etc_dir, os.R_OK | os.X_OK): raise Error("Directory is inaccessible: %%s" %% arguments.etc_dir) if not os.path.isdir(etc_path): raise Error("Invalid identity: %%s" %% arguments.identity) sys.path.insert(0, etc_path) try: import configuration except ImportError: raise Error("Failed to import: configuration") sys.path.insert(1, configuration.paths.INSTALL_DIR) sys.path.insert(2, configuration.paths.DATA_DIR) try: import maintenance.criticctl except ImportError: raise Error("Failed to import: maintenance.criticctl") sys.exit(maintenance.criticctl.main(parser, arguments.help, command, command_argv)) except Error as error: if arguments.help: parser.print_help() print print >>sys.stderr, "ERROR: %%s" %% error.message sys.exit(1) ================================================ FILE: installation/templates/initd ================================================ #!/bin/sh set -e ### BEGIN INIT INFO # Provides: critic-main # Required-Start: postgresql $local_fs $remote_fs $network $time # Required-Stop: postgresql $local_fs $remote_fs $network $time # Should-Start: # Should-Stop: # X-Start-Before: %(installation.httpd.backend_service)s # X-Stop-After: %(installation.httpd.backend_service)s # Default-Start: 2 3 4 5 # Default-Stop: 0 1 6 # Short-Description: Critic code review system (main) ### END INIT INFO critic_etc=%(installation.paths.etc_dir)s/main critic_root=%(installation.paths.install_dir)s critic_run=%(installation.paths.run_dir)s pidfile=%(installation.paths.run_dir)s/main/servicemanager.pid . /lib/lsb/init-functions start () { log_daemon_msg "Starting Critic service manager" "servicemanager.py" PYTHONPATH=$critic_etc:$critic_root %(installation.prereqs.python)s -m background.servicemanager log_end_msg $? 
} stop () { log_daemon_msg "Stopping Critic service manager" "servicemanager.py" if test -f $pidfile then pid=$(cat $pidfile) if kill -TERM $pid then while test -f $pidfile do sleep 0.1 done else rm $pidfile fi fi log_end_msg 0 } case "$1" in start) start ;; stop) stop ;; restart) stop start ;; *) echo "Usage: $0 {start|stop|restart}" exit 1 ;; esac exit 0 ================================================ FILE: installation/templates/nginx/site.both ================================================ server { listen 80; listen [::]:80; listen 443 ssl; listen [::]:443 ssl; server_name %(installation.system.hostname)s; # Further SSL configuration is required! # #ssl_certificate ???.crt #ssl_certificate_key ???.key #ssl_protocols ??? #ssl_ciphers ??? location / { uwsgi_pass unix://%(installation.paths.run_dir)s/main/sockets/uwsgi.unix; include uwsgi_params; } location /static-resource/ { alias %(installation.paths.install_dir)s/resources/; expires 30d; types { text/css css; text/javascript js; image/png png; } } } ================================================ FILE: installation/templates/nginx/site.http ================================================ server { listen 80; listen [::]:80; server_name %(installation.system.hostname)s; location / { uwsgi_pass unix://%(installation.paths.run_dir)s/main/sockets/uwsgi.unix; include uwsgi_params; } location /static-resource/ { alias %(installation.paths.install_dir)s/resources/; expires 30d; types { text/css css; text/javascript js; image/png png; } } } ================================================ FILE: installation/templates/nginx/site.https ================================================ server { listen 80; server_name %(installation.system.hostname)s; return 301 https://$server_name$request_uri; } server { listen 443 ssl; listen [::]:443 ssl; server_name %(installation.system.hostname)s; # Further SSL configuration is required! # #ssl_certificate ???.crt #ssl_certificate_key ???.key #ssl_protocols ??? #ssl_ciphers ??? 
location / { uwsgi_pass unix://%(installation.paths.run_dir)s/main/sockets/uwsgi.unix; include uwsgi_params; } location /static-resource/ { alias %(installation.paths.install_dir)s/resources/; expires 30d; types { text/css css; text/javascript js; image/png png; } } } ================================================ FILE: installation/templates/uwsgi/app.backend.ini ================================================ [uwsgi] plugins = python master = true socket = %(installation.paths.run_dir)s/main/sockets/uwsgi.unix # Make %(installation.httpd.username)s the owner of the socket, so that it can connect. chown-socket = %(installation.httpd.username)s:%(installation.system.groupname)s chmod-socket = 660 python-path = %(installation.paths.etc_dir)s/main python-path = %(installation.paths.install_dir)s wsgi-file = %(installation.paths.install_dir)s/wsgi.py processes = 2 threads = 25 # Run as the Critic system user/group. uid = %(installation.system.username)s gid = %(installation.system.groupname)s ================================================ FILE: installation/templates/uwsgi/app.frontend.ini.both ================================================ [uwsgi] master = true # Use a shared socket to allow binding to a privileged port without running as # root. shared-socket = :80 http = =0 # Further SSL configuration is required! #shared-socket = :443 #https = =1,???.crt,???.key,HIGH # Redirect to the Critic backend. http-to = %(installation.paths.run_dir)s/main/sockets/uwsgi.unix # Run as the "web server" user/group. uid = %(installation.httpd.username)s gid = %(installation.httpd.groupname)s ================================================ FILE: installation/templates/uwsgi/app.frontend.ini.http ================================================ [uwsgi] master = true # Use a shared socket to allow binding to a privileged port without running as # root. shared-socket = :80 http = =0 # Redirect to the Critic backend. 
http-to = %(installation.paths.run_dir)s/main/sockets/uwsgi.unix # Run as the "web server" user/group. uid = %(installation.httpd.username)s gid = %(installation.httpd.groupname)s ================================================ FILE: installation/templates/uwsgi/app.frontend.ini.https ================================================ [uwsgi] master = true # Use a shared socket to allow binding to a privileged port without running as # root. shared-socket = :80 http-to-https = =0 # Further SSL configuration is required! #shared-socket = :443 #https = =1,???.crt,???.key,HIGH # Redirect to the Critic backend. http-to = %(installation.paths.run_dir)s/main/sockets/uwsgi.unix # Run as the "web server" user/group. uid = %(installation.httpd.username)s gid = %(installation.httpd.groupname)s ================================================ FILE: installation/utils.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import os import sys import textwrap import subprocess import tempfile import datetime import hashlib import contextlib import json import installation class UpdateModifiedFile: def __init__(self, arguments, message, versions, options, generateVersion): """\ Constructor. Arguments: arguments Command-line arguments. message Printed once. versions Dictionary (label => path) of file versions involved. 
options List of (key, action) pairs to present to the user. generateVersion Callback invoked as generateVersion(label, path) to create a missing version of the file.
return self.__options[0][0] try: for label, path in self.__versions.items(): if not os.path.exists(path): self.__generateVersion(label, path) self.__generated.append(path) self.printMessage() while True: self.printOptions() def validResponse(value): if value not in self.__option_keys: return "please answer %s or %s" % (", ".join(self.__option_keys[:-1]), self.__option_keys[-1]) response = installation.input.string("What do you want to do?", check=validResponse) action = self.__option_map[response] if isinstance(action, str): print return response from_version, to_version = action self.displayDifferences(from_version, to_version) finally: for path in self.__generated: os.unlink(path) def run_git(args, **kwargs): with installation.utils.as_effective_user_from_path( os.path.join(installation.root_dir, ".git")): return subprocess.check_output(args, **kwargs) def update_from_template(arguments, data, template_path, target_path, message): git = data["installation.prereqs.git"] old_commit_sha1 = data["sha1"] new_commit_sha1 = run_git([git, "rev-parse", "HEAD"], cwd=installation.root_dir).strip() old_template = read_file(git, old_commit_sha1, template_path) new_template = read_file(git, new_commit_sha1, template_path) old_source = old_template.decode("utf-8") % data new_source = new_template.decode("utf-8") % data with open(target_path) as target_file: current_source = target_file.read().decode("utf-8") if current_source == new_source: # The current version is what we would install now. Nothing to do. return elif old_source == current_source: # The current version is what we installed (or would have installed with # the old template and current settings.) Update the target file # without asking. 
write_target = True else: def generate_version(label, path): if label == "installed": source = old_source elif label == "updated": source = new_source else: return write_file(path, source.encode("utf-8")) versions = """\ Installed version: %(installed)s Current version: %(current)s Updated version: %(updated)s""" update_query = UpdateModifiedFile( arguments, message=message % { "versions": versions }, versions={ "installed": target_path + ".org", "current": target_path, "updated": target_path + ".new" }, options=[ ("i", "install the updated version"), ("k", "keep the current version"), ("do", ("installed", "current")), ("dn", ("current", "updated")) ], generateVersion=generate_version) write_target = update_query.prompt() == "i" if write_target: print "Updated file: %s" % target_path if not arguments.dry_run: backup_path = os.path.join(os.path.dirname(target_path), ".%s.org" % os.path.basename(target_path)) copy_file(target_path, backup_path) with open(target_path, "w") as target_file: target_file.write(new_source.encode("utf-8")) return backup_path def write_file(path, source): # Use os.open() with O_EXCL to avoid trampling some existing file. fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_EXCL) with os.fdopen(fd, "w") as target: target.write(source) def copy_file(source_path, target_path): with open(source_path) as source: stat = os.fstat(source.fileno()) # Use os.open() with O_EXCL to avoid trampling some existing file. 
fd = os.open(target_path, os.O_WRONLY | os.O_CREAT | os.O_EXCL) with os.fdopen(fd, "w") as target: target.write(source.read()) os.fchmod(target.fileno(), stat.st_mode) os.fchown(target.fileno(), stat.st_uid, stat.st_gid) def hash_file(git, path): if os.path.islink(path): value = os.readlink(path) else: with open(path) as file: value = file.read() return hashlib.sha1("blob %d\0%s" % (len(value), value)).hexdigest() def get_entry_sha1(git, commit_sha1, path, entry_type): lstree = run_git([git, "ls-tree", commit_sha1, path], cwd=installation.root_dir).strip() if lstree: lstree_mode, lstree_type, lstree_sha1, lstree_path = lstree.split() assert lstree_type == entry_type assert lstree_path == path return lstree_sha1 else: return None def get_file_sha1(git, commit_sha1, path): return get_entry_sha1(git, commit_sha1, path, "blob") def get_tree_sha1(git, commit_sha1, path): return get_entry_sha1(git, commit_sha1, path, "tree") def read_file(git, commit_sha1, path): file_sha1 = get_file_sha1(git, commit_sha1, path) if file_sha1 is None: return None return run_git([git, "cat-file", "blob", file_sha1], cwd=installation.root_dir) def get_initial_commit_date(git, path): initial_commit_timestamp = run_git([git, "log", "--oneline", "--format=%ct", "--", path], cwd=installation.root_dir).splitlines()[-1] return datetime.datetime.fromtimestamp(int(initial_commit_timestamp)) def clean_root_pyc_files(): print "Cleaning up .pyc files owned by root ..." 
for root, _, files in os.walk(installation.root_dir): for file in files: file = os.path.join(root, file) if file.endswith(".pyc") and os.stat(file).st_uid == 0: os.unlink(file) @contextlib.contextmanager def temporary_cwd(): saved_cwd = os.getcwd() os.chdir(tempfile.gettempdir()) try: yield finally: os.chdir(saved_cwd) @contextlib.contextmanager def as_critic_system_user(): if installation.is_quick_start: yield return saved_cwd = os.getcwd() os.chdir(tempfile.gettempdir()) os.setegid(installation.system.gid) os.seteuid(installation.system.uid) try: yield finally: os.seteuid(os.getresuid()[0]) os.setegid(os.getresgid()[0]) os.chdir(saved_cwd) @contextlib.contextmanager def as_effective_user_from_path(path): stat = os.stat(path) os.setegid(stat.st_gid) os.seteuid(stat.st_uid) try: yield finally: os.seteuid(os.getresuid()[0]) os.setegid(os.getresgid()[0]) def deunicode(v): if type(v) == unicode: return v.encode("utf-8") elif type(v) == list: return map(deunicode, v) elif type(v) == dict: return dict([(deunicode(a), deunicode(b)) for a, b in v.items()]) else: return v def read_install_data(arguments, fail_softly=False): etc_path = os.path.join(arguments.etc_dir, arguments.identity) if not os.path.isdir(etc_path): if fail_softly: return None print """ ERROR: %s: no such directory HINT: Make sure the --etc-dir[=%s] and --identity[=%s] options correctly define where the installed system's configuration is stored.""" % (etc_path, arguments.etc_dir, arguments.identity) sys.exit(1) sys.path.insert(0, etc_path) try: import configuration except ImportError: if fail_softly: return None print """ ERROR: Failed to import 'configuration' module. 
HINT: Make sure the --etc-dir[=%s] and --identity[=%s] options correctly define where the installed system's configuration is stored.""" % (arguments.etc_dir, arguments.identity) sys.exit(1) install_data_path = os.path.join(configuration.paths.INSTALL_DIR, ".install.data") if not os.path.isfile(install_data_path): if fail_softly: return None print """\ %s: no such file This installation of Critic appears to be incomplete or corrupt.""" % install_data_path sys.exit(1) try: with open(install_data_path, "r") as install_data_file: install_data = deunicode(json.load(install_data_file)) if not isinstance(install_data, dict): raise ValueError except ValueError: if fail_softly: return None print """\ %s: failed to parse JSON object to dictionary This installation of Critic appears to be incomplete or corrupt.""" % install_data_path sys.exit(1) return install_data def write_install_data(arguments, install_data): install_data_path = os.path.join(installation.paths.install_dir, ".install.data") if not getattr(arguments, "dry_run", False): with open(install_data_path, "w") as install_data_file: json.dump(install_data, install_data_file) os.chown(install_data_path, installation.system.uid, installation.system.gid) os.chmod(install_data_path, 0640) def start_migration(): import sys import argparse import os parser = argparse.ArgumentParser() parser.add_argument("--uid", type=int) parser.add_argument("--gid", type=int) arguments = parser.parse_args() os.setgid(arguments.gid) os.setuid(arguments.uid) class DatabaseSchema(object): """Database schema updating utility class This class is primarily intended for use in migration scripts.""" def __init__(self): import configuration import psycopg2 self.db = psycopg2.connect(**configuration.database.PARAMETERS) def table_exists(self, table_name): import psycopg2 try: self.db.cursor().execute("SELECT 1 FROM %s LIMIT 1" % table_name) except psycopg2.ProgrammingError: self.db.rollback() return False else: # Above statement would have thrown 
a psycopg2.ProgrammingError if the # table didn't exist, but it didn't, so the table must exist. return True def column_exists(self, table_name, column_name): import psycopg2 try: self.db.cursor().execute("SELECT %s FROM %s LIMIT 1" % (column_name, table_name)) except psycopg2.ProgrammingError: self.db.rollback() return False else: # Above statement would have thrown a psycopg2.ProgrammingError if the # table didn't exist, but it didn't, so the table must exist. return True def create_table(self, statement): import re (table_name,) = re.search("CREATE TABLE (\w+)", statement).groups() # Make sure the table doesn't already exist. if not self.table_exists(table_name): self.db.cursor().execute(statement) self.db.commit() def create_index(self, statement): import re (index_name,) = re.search("CREATE INDEX (\w+)", statement).groups() cursor = self.db.cursor() cursor.execute("DROP INDEX IF EXISTS %s" % index_name) cursor.execute(statement) self.db.commit() def create_column(self, table_name, column_name, column_definition): if not self.column_exists(table_name, column_name): self.db.cursor().execute( "ALTER TABLE %s ADD %s %s" % (table_name, column_name, column_definition)) self.db.commit() def type_exists(self, type_name): import psycopg2 try: self.db.cursor().execute("SELECT NULL::%s" % type_name) except psycopg2.ProgrammingError: self.db.rollback() return False else: # Above statement would have thrown a psycopg2.ProgrammingError if the # type didn't exist, but it didn't, so the table must exist. return True def create_type(self, statement): import re (type_name,) = re.search("CREATE TYPE (\w+)", statement).groups() # Make sure the type doesn't already exist. if not self.type_exists(type_name): self.db.cursor().execute(statement) self.db.commit() def update(self, statements): # Remove top-level comments; they interfere with out very simple # statement identification below. Other comments are fine. 
lines = [line for line in statements.splitlines() if not line.startswith("--")] statements = "\n".join(lines) for statement in statements.split(";"): statement = statement.strip() if not statement: continue if statement.startswith("CREATE TABLE"): self.create_table(statement) elif statement.startswith("CREATE INDEX"): self.create_index(statement) elif statement.startswith("CREATE TYPE"): self.create_type(statement) else: print >>sys.stderr, "Unexpected SQL statement: %r" % statement sys.exit(1) def read_lifecycle(git=None, sha1=None): filename = "installation/lifecycle.json" if sha1 is None: path = os.path.join(installation.root_dir, filename) with open(path, "r") as lifecycle_file: lifecycle_source = lifecycle_file.read() else: lifecycle_source = read_file(git, sha1, filename) if lifecycle_source is None: # For systems installed before the lifecycle.json file was # introduced, hard-code the file's initial content. return { "branch": "version/1", "stable": True } return deunicode(json.loads(lifecycle_source)) ================================================ FILE: pylint.rc ================================================ [MASTER] # Specify a configuration file. #rcfile= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Profiled execution. profile=no # Add files or directories to the blacklist. They should be base names, not # paths. ignore=CVS # Pickle collected data for later comparisons. persistent=yes # List of plugins (as comma separated values of python modules names) to load, # usually to register additional checkers. load-plugins= [MESSAGES CONTROL] # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time. #enable= # Disable the message, report, category or checker with the given id(s). 
You # can either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). # C0111 = Missing docstring # C0301 = Line too long # C0302 = Too many lines in module # C0321 = More than one statement on a single line # R0911 = Too many return statements # R0912 = Too many branches # R0913 = Too many arguments # R0914 = Too many local variables # R0915 = Too many statements disable=C0111,C0301,C0302,C0321,R0911,R0912,R0913,R0914,R0915 [REPORTS] # Set the output format. Available formats are text, parseable, colorized, msvs # (visual studio) and html output-format=text # Include message's id in output include-ids=no # Put messages in a separate file for each module / package specified on the # command line instead of printing them on stdout. Reports (if any) will be # written in a file name "pylint_global.[txt|html]". files-output=no # Tells whether to display a full report or only the messages reports=yes # Python expression which should return a note less than 10 (10 is the highest # note). You have access to the variables errors warning, statement which # respectively contain the number of errors / warnings messages and the total # number of statements analyzed. This is used by the global evaluation report # (RP0004). evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) # Add a comment according to your evaluation note. This is used by the global # evaluation report (RP0004). 
comment=no [BASIC] # Required attributes for module, separated by a comma required-attributes= # List of builtins function names that should not be used, separated by a comma bad-functions= # Regular expression which should only match correct module names module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Regular expression which should only match correct module level names const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ # Regular expression which should only match correct class names class-rgx=[A-Z_][a-zA-Z0-9]+$ # Regular expression which should only match correct function names function-rgx=[a-z][a-z0-9_]*$|[a-z][a-z0-9]*([A-Z][a-z0-9]*)*$ # Regular expression which should only match correct method names method-rgx=[a-z_][a-z0-9_]{2,30}$ # Regular expression which should only match correct instance attribute names attr-rgx=[a-z_][a-z0-9_]{2,30}$ # Regular expression which should only match correct argument names argument-rgx=[a-z_][a-z0-9_]+$ # Regular expression which should only match correct variable names variable-rgx=[a-z_][a-z0-9_]+$ # Regular expression which should only match correct list comprehension / # generator expression variable names inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ # Good variable names which should always be accepted, separated by a comma good-names=i,j,k,ex,Run,_ # Bad variable names which should always be refused, separated by a comma bad-names=foo,bar,baz,toto,tutu,tata # Regular expression which should only match functions or classes name which do # not require a docstring no-docstring-rgx=__.*__ [SIMILARITIES] # Minimum lines number of a similarity. min-similarity-lines=4 # Ignore comments when computing similarities. ignore-comments=yes # Ignore docstrings when computing similarities. ignore-docstrings=yes [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME,XXX,TODO [FORMAT] # Maximum number of characters on a single line. 
max-line-length=80 # Maximum number of lines in a module max-module-lines=1000 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' [VARIABLES] # Tells whether we should check for unused import in __init__ files. init-import=no # A regular expression matching the beginning of the name of dummy variables # (i.e. not used). dummy-variables-rgx=_|dummy # List of additional names supposed to be defined in builtins. Remember that # you should avoid to define new builtins when possible. additional-builtins= [TYPECHECK] # Tells whether missing members accessed in mixin class should be ignored. A # mixin class is detected if its name ends with "mixin" (case insensitive). ignore-mixin-members=yes # List of classes names for which member attributes should not be checked # (useful for classes with attributes dynamically set). ignored-classes=SQLObject # When zope mode is activated, add a predefined set of Zope acquired attributes # to generated-members. zope=no # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E0201 when accessed. Python regular # expressions are accepted. generated-members=REQUEST,acl_users,aq_parent [IMPORTS] # Deprecated modules which should not be used, separated by a comma deprecated-modules=regsub,string,TERMIOS,Bastion,rexec # Create a graph of every (i.e. internal and external) dependencies in the # given file (report RP0402 must not be disabled) import-graph= # Create a graph of external dependencies in the given file (report RP0402 must # not be disabled) ext-import-graph= # Create a graph of internal dependencies in the given file (report RP0402 must # not be disabled) int-import-graph= [CLASSES] # List of interface methods to ignore, separated by a comma. This is used for # instance to not check methods defines in Zope's Interface base class. 
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__,__new__,setUp # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls [DESIGN] # Maximum number of arguments for function / method max-args=5 # Argument names that match this expression will be ignored. Default to name # with leading underscore ignored-argument-names=_.* # Maximum number of locals for function / method body max-locals=15 # Maximum number of return / yield for function / method body max-returns=6 # Maximum number of branch for function / method body max-branchs=12 # Maximum number of statements in function / method body max-statements=50 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of attributes for a class (see R0902). max-attributes=7 # Minimum number of public methods for a class (see R0903). min-public-methods=2 # Maximum number of public methods for a class (see R0904). max-public-methods=20 [EXCEPTIONS] # Exceptions that will emit a warning when being caught. Defaults to # "Exception" overgeneral-exceptions=Exception ================================================ FILE: pythonversion.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys def check(message=None): if sys.version_info[0] != 2 or sys.version_info[1] < 7: print """ ERROR: Unsupported Python version! Critic requires Python 2.7.x or later, and does not support Python 3.x. """ if message: print message sys.exit(2) ================================================ FILE: quickstart.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import sys import os import argparse import tempfile import shutil import wsgiref.simple_server import subprocess import threading import requests import json import signal import time import py_compile import contextlib sys.path.insert(0, os.path.dirname(__file__)) admin_username = os.environ.get("LOGNAME", "admin") parser = argparse.ArgumentParser("python quickstart.py", description="Critic instance quick-start utility script.") parser.add_argument("--quiet", action="store_true", help="Suppress most output") parser.add_argument("--testing", action="store_true", help=argparse.SUPPRESS) parser.add_argument("--admin-username", default=admin_username, help=argparse.SUPPRESS) parser.add_argument("--admin-fullname", default=admin_username, help=argparse.SUPPRESS) parser.add_argument("--admin-email", default=admin_username + "@localhost", help=argparse.SUPPRESS) parser.add_argument("--admin-password", default="1234", help=argparse.SUPPRESS) parser.add_argument("--system-recipient", action="append", help=argparse.SUPPRESS) parser.add_argument("--state-dir", "-s", help="State directory [default=temporary dir]") parser.add_argument("--http-host", default="", help="Hostname the HTTP server listens at [default=ANY]") parser.add_argument("--http-port", "-p", default=8080, type=int, help="Port the HTTP server listens at [default=8080]") parser.add_argument("--smtp-host", default="localhost", help="Hostname of SMTP server to use [default=localhost]") parser.add_argument("--smtp-port", default=25, type=int, help="Port of SMTP server to use [default=25]") parser.add_argument("--smtp-username", help="SMTP username [default=none]") parser.add_argument("--smtp-password", help="SMTP password [default=none]") parser.add_argument("--run", action="store_true", help=argparse.SUPPRESS) parser.add_argument("--run-state-dir", help=argparse.SUPPRESS) parser.add_argument("--run-http-port", type=int, help=argparse.SUPPRESS) arguments = parser.parse_args() quiet = arguments.quiet or 
arguments.testing if arguments.run: import critic def handle_interrupt(signum, frame): pid_filename = os.path.join(arguments.run_state_dir, "run", "main", "servicemanager.pid") if os.path.isfile(pid_filename): with open(pid_filename) as pid_file: servicemanager_pid = int(pid_file.read().strip()) os.kill(servicemanager_pid, signal.SIGTERM) while os.path.isfile(pid_filename): time.sleep(0.1) os._exit(0) signal.signal(signal.SIGINT, handle_interrupt) subprocess.check_call([sys.executable, "-m", "background.servicemanager"]) class CriticWSGIRequestHandler(wsgiref.simple_server.WSGIRequestHandler): def log_message(self, *args, **kwargs): if not quiet: wsgiref.simple_server.WSGIRequestHandler.log_message( self, *args, **kwargs) server = wsgiref.simple_server.make_server( host=arguments.http_host, port=arguments.run_http_port, app=critic.main, handler_class=CriticWSGIRequestHandler) server_address_path = os.path.join(arguments.run_state_dir, "server_address") with open(server_address_path, "w") as server_address_file: server_address_file.write("%s:%d" % (server.server_name, server.server_port)) # This call will never return. This is fine; we just want to block # forever (or until we receive a SIGINT.) server.serve_forever() failed_imports = False try: import pygments except ImportError: print """\ ERROR: Failed to import 'pygments'; code will not be syntax highlighted. HINT: On Debian/Ubuntu, install the 'python-pygments' package to eliminate this problem. """ failed_imports = True try: import passlib except ImportError: print """\ ERROR: Failed to import 'passlib'; passwords will be encrypted insecurely. HINT: On Debian/Ubuntu, install the 'python-passlib' package to eliminate this problem. """ failed_imports = True if failed_imports: if arguments.testing: print "FATAL: Won't run test suite with missing imports." sys.exit(1) else: print """\ Some functionality will be missing due to missing Python packages. 
Press ENTER to go ahead and quick-start Critic anyway, or CTRL-c to abort. """ try: sys.stdin.readline() except KeyboardInterrupt: sys.exit(1) if arguments.testing: os.setsid() import installation import installation.qs installation.quiet = True if arguments.state_dir: state_dir = arguments.state_dir if not os.path.isdir(state_dir): os.makedirs(state_dir) else: state_dir = tempfile.mkdtemp() if arguments.testing: print "STATE=%s" % state_dir database_path = os.path.join(state_dir, "critic.db") initialize_database = not os.path.exists(database_path) add_repository = None class CompilationFailed(Exception): pass def compile_all_sources(): success = True for dirname, _, filenames in os.walk("src"): for filename in filenames: if filename[0] == ".": continue if not filename.endswith(".py"): continue path = os.path.join(dirname, filename) try: py_compile.compile(path, doraise=True) except py_compile.PyCompileError as error: if success: # First error. Create some space. print "\n" print "ERROR: Failed to compile %s:\n%s" % (path, error) success = False if not success: raise CompilationFailed() @contextlib.contextmanager def activity(what): if quiet: yield else: sys.stdout.write(what + " ...") sys.stdout.flush() yield sys.stdout.write(" done.\n") try: try: with activity("Compiling all sources"): compile_all_sources() except CompilationFailed: sys.exit(1) installation.is_quick_start = True if initialize_database: with activity("Initializing database"): installation.qs.sqlite.import_schema( database_path, filenames=installation.database.SCHEMA_FILES, quiet=quiet) installation.system.uid = os.getuid() installation.system.gid = os.getgid() installation.paths.etc_dir = os.path.join(state_dir, "etc") installation.paths.bin_dir = os.path.join(state_dir, "bin") installation.paths.install_dir = os.path.join(os.getcwd(), "src") installation.paths.data_dir = os.path.join(state_dir, "data") installation.paths.cache_dir = os.path.join(state_dir, "cache") installation.paths.log_dir = 
os.path.join(state_dir, "log") installation.paths.run_dir = os.path.join(state_dir, "run") installation.paths.git_dir = os.path.join(state_dir, "git") data = installation.qs.data.generate(arguments, database_path) with activity("Installing the system"): installation.paths.install(data) if not os.path.isfile(os.path.join(installation.paths.bin_dir, "criticctl")): installation.criticctl.install(data) if not os.path.isfile(os.path.join(installation.paths.etc_dir, "main", "configuration", "__init__.py")): installation.config.install(data) if initialize_database: installation.prefs.install(data) config_dir = os.path.join(installation.paths.etc_dir, "main") install_dir = installation.paths.install_dir root_dir = installation.root_dir sys.path.insert(0, config_dir) sys.path.insert(1, install_dir) if initialize_database: import dbutils import auth db = dbutils.Database() cursor = db.cursor() cursor.execute("""INSERT INTO systemidentities (key, name, anonymous_scheme, authenticated_scheme, hostname, description, installed_sha1) VALUES ('main', 'main', 'http', 'http', 'localhost', 'Main', ?)""", (subprocess.check_output("git rev-parse HEAD", shell=True).strip(),)) db.commit() admin = dbutils.User.create( db, name=arguments.admin_username, fullname=arguments.admin_fullname, email=arguments.admin_email, email_verified=None, password=auth.hashPassword(arguments.admin_password)) if not arguments.testing: if not quiet: print print ("Created administrator user %r with password '1234'" % data["installation.admin.username"]) cursor.execute("""INSERT INTO userroles (uid, role) SELECT %s, name FROM roles""", (admin.id,)) db.commit() db.close() the_system = None server_address = None server_name = None server_port = str(arguments.http_port) def startTheSystem(): global the_system, server_name, server_port, server_address server_address_path = os.path.join(state_dir, "server_address") if os.path.isfile(server_address_path): os.unlink(server_address_path) the_system = subprocess.Popen( 
[sys.executable] + sys.argv + ["--run", "--run-state-dir", state_dir, "--run-http-port", server_port], env={ "PYTHONPATH": ":".join([config_dir, install_dir, root_dir]) }) while not os.path.isfile(server_address_path): time.sleep(0.1) if the_system.poll() is not None: the_system = None return False with open(server_address_path) as server_address_file: server_address = server_address_file.read() server_name, _, server_port = server_address.partition(":") return True def stopTheSystem(): global the_system if the_system: the_system.send_signal(signal.SIGINT) the_system.wait() the_system = None def restartTheSystem(): compile_all_sources() stopTheSystem() startTheSystem() def getNewestModificationTime(): newest = 0 for dirpath, dirnames, filenames in os.walk("."): for filename in filenames: if filename[0] != "." and filename.endswith(".py"): path = os.path.join(dirpath, filename) newest = max(os.stat(path).st_mtime, newest) return newest running_mtime = getNewestModificationTime() startTheSystem() if not arguments.testing: print "Listening at: http://%s:%s/" % (server_name, server_port) import dbutils db = dbutils.Database() db.cursor().execute("""UPDATE systemidentities SET hostname=?""", ("%s:%s" % (server_name, server_port),)) db.commit() db.close() else: print "HTTP=%s:%s" % (server_name, server_port) if not os.listdir(installation.paths.git_dir) and not arguments.testing: if not quiet: print print "Creating critic.git repository ..." 
pid_filename = os.path.join( state_dir, "run", "main", "branchtracker.pid") while not os.path.isfile(pid_filename): time.sleep(0.1) current_ref = subprocess.check_output( ["git", "rev-parse", "--symbolic-full-name", "HEAD"]).strip() if current_ref.startswith("refs/heads/"): remote_branch = local_branch = current_ref[len("refs/heads/"):] if local_branch.startswith("r/"): local_branch = local_branch[2:] else: remote_branch = local_branch = "master" kwargs = {} session = requests.Session() response = session.post( "http://%s/validatelogin" % server_address, data=json.dumps({ "fields": { "username": data["installation.admin.username"], "password": "1234" }})) if response.status_code in (401, 404): kwargs["auth"] = (data["installation.admin.username"], "1234") response = session.post( "http://%s/addrepository" % server_address, data=json.dumps({ "name": "critic", "path": "critic", "mirror": { "remote_url": "file://" + installation.root_dir, "remote_branch": remote_branch, "local_branch": local_branch }}), **kwargs) if not quiet: print if arguments.testing: print "STARTED" restart_requested = False def handle_sigusr1(signum, frame): global restart_requested restart_requested = True signal.signal(signal.SIGUSR1, handle_sigusr1) while True: time.sleep(3600) if restart_requested: restart_requested = False restartTheSystem() print "RESTARTED" else: while True: current_mtime = getNewestModificationTime() if current_mtime > running_mtime: print try: with activity("Sources changed, restarting the system"): restartTheSystem() except CompilationFailed: pass else: print running_mtime = current_mtime else: time.sleep(1) except KeyboardInterrupt: pass finally: if not arguments.quiet and not arguments.testing: print "Shutting down ..." try: stopTheSystem() except NameError: # Failure happened before stopTheSystem() was declared. pass if not arguments.state_dir: if not arguments.quiet and not arguments.testing: print "Cleaing up ..." 
shutil.rmtree(state_dir) ================================================ FILE: src/api/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. """Critic API""" from apiobject import APIObject from apierror import (APIError, PermissionDenied, TransactionError, ResultDelayedError) import config import critic import user import review import reviewsummary import repository import filters import branch import commit import commitset import changeset import filechange import filediff import filecontent import log import preference import accesstoken import accesscontrolprofile import labeledaccesscontrolprofile import extension import file import comment import reply import batch import reviewablefilechange import transaction ================================================ FILE: src/api/accesscontrolprofile.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import api

class AccessControlProfileError(api.APIError):
    """Base exception for all errors related to the AccessControlProfile
       class"""
    pass

class InvalidAccessControlProfileId(AccessControlProfileError):
    """Raised when an invalid access control profile id is used"""

    def __init__(self, value):
        """Constructor"""
        super(InvalidAccessControlProfileId, self).__init__(
            "Invalid access control profile id: %d" % value)
        # Keep the offending id around so callers can inspect it.
        self.value = value

class AccessControlProfile(api.APIObject):
    """Representation of an access control profile"""

    # Valid values for a Category's rule.
    RULE_VALUES = frozenset(["allow", "deny"])

    @property
    def id(self):
        """The profile's unique id"""
        return self._impl.id

    @property
    def title(self):
        """The profile's title, or None"""
        return self._impl.title

    @property
    def access_token(self):
        """The access token that owns this profile, or None"""
        return self._impl.getAccessToken(self.critic)

    class Category(object):
        """Representation of an access control category

           Each category is controlled by a rule ("allow" or "deny") and a
           list of exceptions (possibly empty).  The effective policy is the
           rule, unless an exception applies, in which case it's the opposite
           of the rule."""

        def __init__(self, rule, exceptions):
            self.rule = rule
            self.exceptions = exceptions

    class HTTPException(object):
        """Representation of an exception for the "http" category

           The exception consists of the HTTP request method and a regular
           expression that must match the entire request path."""

        # Request methods an exception can name.
        REQUEST_METHODS = frozenset(["GET", "HEAD", "OPTIONS", "POST", "PUT",
                                     "DELETE"])

        def __init__(self, exception_id, request_method, path_pattern):
            self.id = exception_id
            self.request_method = request_method
            self.path_pattern = path_pattern

    @property
    def http(self):
        """Access control category "http"

           This category controls web frontend requests.

           Exceptions are of the type HTTPException."""
        return self._impl.getHTTP(self.critic)

    class RepositoryException(object):
        """Representation of an exception for the "repositories" category

           The exception consists of the access type ("read" or "modify") and
           the repository."""

        ACCESS_TYPES = frozenset(["read", "modify"])

        def __init__(self, exception_id, access_type, repository):
            self.id = exception_id
            self.access_type = access_type
            self.repository = repository

    @property
    def repositories(self):
        """Access control category "repositories"

           This category controls access to Git repositories, both via the web
           frontend and the Git hook.  Note that read-only Git access over SSH
           is not controlled by access control.

           Exceptions are of the type RepositoryException."""
        return self._impl.getRepositories(self.critic)

    class ExtensionException(object):
        """Representation of an exception for the "extensions" category

           The exception consists of the access type ("install" or "execute")
           and the extension."""

        ACCESS_TYPES = frozenset(["install", "execute"])

        def __init__(self, exception_id, access_type, extension):
            self.id = exception_id
            self.access_type = access_type
            self.extension = extension

    @property
    def extensions(self):
        """Access control category "extensions"

           This category controls access to any functionality provided by an
           extension.

           Exceptions are of the type ExtensionException."""
        return self._impl.getExtensions(self.critic)

def fetch(critic, profile_id):
    """Fetch an AccessControlProfile object with the given profile id"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    return api.impl.accesscontrolprofile.fetch(critic, int(profile_id))

def fetchAll(critic, title=None):
    """Fetch AccessControlProfile objects for all primary profiles in the
       system

       A profile is primary if it is not the additional restrictions imposed
       for accesses authenticated with an access token.

       If |title| is not None, fetch only profiles with a matching title."""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    if title is not None:
        title = str(title)
    return api.impl.accesscontrolprofile.fetchAll(critic, title)

================================================
FILE: src/api/accesstoken.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2015 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import api

class AccessTokenError(api.APIError):
    """Base exception for all errors related to the AccessToken class"""
    pass

class InvalidAccessTokenId(AccessTokenError):
    """Raised when an invalid access token id is used"""

    def __init__(self, value):
        """Constructor"""
        super(InvalidAccessTokenId, self).__init__(
            "Invalid access token id: %d" % value)
        # Keep the offending id around so callers can inspect it.
        self.value = value

class AccessToken(api.APIObject):
    """Representation of an access token"""

    @property
    def access_type(self):
        """The type of access granted by this access token"""
        return self._impl.access_type

    @property
    def id(self):
        """The access token's unique id"""
        return self._impl.id

    @property
    def user(self):
        """The user authenticated by the access token, or None"""
        return self._impl.getUser(self.critic)

    @property
    def part1(self):
        """The first part of the access token"""
        return self._impl.part1

    @property
    def part2(self):
        """The second part of the access token"""
        return self._impl.part2

    @property
    def title(self):
        """The access token's title, or None"""
        return self._impl.title

    @property
    def profile(self):
        """The access token's access control profile"""
        return self._impl.getProfile(self.critic)

def fetch(critic, token_id):
    """Fetch an AccessToken object with the given token id"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    return api.impl.accesstoken.fetch(critic, int(token_id))

def fetchAll(critic, user=None):
    """Fetch AccessToken objects for all access tokens in the system

       If |user| is not None, return only access tokens belonging to the
       specified user.

       NOTE(review): the original docstring here was copy-pasted from
       accesscontrolprofile.fetchAll() and described "primary profiles";
       rewritten to describe what this function actually returns."""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    assert user is None or isinstance(user, api.user.User)
    return api.impl.accesstoken.fetchAll(critic, user)

================================================
FILE: src/api/apierror.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
class APIError(Exception):
    """Base exception for all errors caused by incorrect API usage (including
       invalid input.)"""
    pass

class PermissionDenied(Exception):
    """Exception raised on correct API usage that the current user is not
       allowed to perform."""

    @staticmethod
    def raiseUnlessAdministrator(critic):
        """Raise PermissionDenied unless the session's actual user is an
           administrator."""
        # |critic.actual_user| can be None (anonymous session), hence the
        # truthiness check before calling hasRole().
        if not (critic.actual_user
                and critic.actual_user.hasRole("administrator")):
            raise PermissionDenied("Must be an administrator")

    @staticmethod
    def raiseUnlessUser(critic, required_user):
        """Raise PermissionDenied unless the session's actual user is
           |required_user| or an administrator."""
        # Acting as oneself is always allowed; otherwise fall back to the
        # administrator check.
        if critic.actual_user != required_user:
            PermissionDenied.raiseUnlessAdministrator(critic)

class TransactionError(APIError):
    """Base exception for transaction errors."""
    pass

class ResultDelayedError(Exception):
    """Base exception for all errors caused by the result being temporarily
       unavailable"""
    pass

# ================================================
# FILE: src/api/apiobject.py
# ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

class APIObject(object):
    """Base class of all significant API classes

       Exposes the Critic session object as the read-only 'critic' attribute.
       Also holds the reference to the internal implementation object, which
       should only be used in the implementation of the API.

       Identity: every API object is expected to expose an integer |id|
       attribute (supplied by subclasses); equality and hashing are defined in
       terms of that id."""

    def __init__(self, critic, impl):
        self.__critic = critic
        self.__impl = impl

    def __int__(self):
        # Subclasses provide |id|; it is the object's canonical identity.
        return self.id

    def __hash__(self):
        return hash(int(self))

    def __eq__(self, other):
        # FIX: the original did |int(self) == int(other)| unconditionally,
        # which raised TypeError when comparing against operands without an
        # integer conversion (e.g. |obj == None|).  Returning NotImplemented
        # lets Python fall back to the default (identity-based) comparison.
        try:
            other_id = int(other)
        except (TypeError, ValueError):
            return NotImplemented
        return int(self) == other_id

    def __ne__(self, other):
        # FIX: under Python 2 (which this code base targets), defining __eq__
        # without __ne__ leaves "!=" using default identity semantics, so two
        # distinct objects with equal ids compared unequal.  Derive __ne__
        # from __eq__.
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    @property
    def critic(self):
        """The Critic session object used to create the API object"""
        return self.__critic

    @property
    def _impl(self):
        """Underlying object implementation

           This value should not be used outside the implementation of the
           API."""
        return self.__impl

    def _set_impl(self, impl):
        """Set the underlying object implementation

           This method should not be called outside the implementation of the
           API."""
        self.__impl = impl

# ================================================
# FILE: src/api/batch.py
# ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2017 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import api

class BatchError(api.APIError):
    """Base exception for all errors related to the Batch class"""
    pass

class InvalidBatchId(BatchError):
    """Raised when an invalid batch id is used."""

    def __init__(self, batch_id):
        """Constructor"""
        super(InvalidBatchId, self).__init__("Invalid batch id: %d" % batch_id)

class Batch(api.APIObject):
    """Representation of a batch of changes submitted to a review

       A batch bundles the comments, replies, issue state changes and
       reviewed/unreviewed file markings that a user publishes together."""

    @property
    def id(self):
        """The batch's unique id, or None for unsubmitted changes"""
        return self._impl.id

    @property
    def is_empty(self):
        """True if the batch contains no changes"""
        return self._impl.isEmpty(self.critic)

    @property
    def review(self):
        """The review to which changes were submitted"""
        return self._impl.getReview(self.critic)

    @property
    def author(self):
        """The author of the changes in the batch

           The author is returned as an api.user.User object."""
        return self._impl.getAuthor(self.critic)

    @property
    def timestamp(self):
        """The time of submission, or None for unsubmitted changes

           The timestamp is returned as a datetime.datetime object."""
        return self._impl.timestamp

    @property
    def comment(self):
        """The author's overall comment

           The comment is returned as an api.comment.Note object, or None if
           the author did not provide a comment."""
        return self._impl.getComment(self.critic)

    @property
    def created_comments(self):
        """Created comments

           The comments are returned as a set of api.comment.Comment
           objects."""
        return self._impl.getCreatedComments(self.critic)

    @property
    def written_replies(self):
        """Written replies

           The replies are returned as a set of api.reply.Reply objects."""
        return self._impl.getWrittenReplies(self.critic)

    @property
    def resolved_issues(self):
        """Resolved issues

           The issues are returned as a set of api.comment.Comment objects.

           Note that the comment objects represent the current state, and that
           they may be api.comment.Note objects, and if they are
           api.comment.Issue objects, that their `state` attribute will not
           necessarily be "resolved"."""
        return self._impl.getResolvedIssues(self.critic)

    @property
    def reopened_issues(self):
        """Reopened issues

           The issues are returned as a set of api.comment.Comment objects.

           Note that the comment objects represent the current state, and that
           they may be api.comment.Note objects, and if they are
           api.comment.Issue objects, that their `state` attribute will not
           necessarily be "open"."""
        return self._impl.getReopenedIssues(self.critic)

    @property
    def morphed_comments(self):
        """Morphed comments (comments whose types was changed)

           The comments are returned as a dictionary mapping
           api.comment.Comment objects to their new type as a string ("issue"
           or "note".)

           Note that the comment object itself represents the current state,
           and its type will not necessarily match the new type it's mapped
           to."""
        return self._impl.getMorphedComments(self.critic)

    @property
    def reviewed_file_changes(self):
        """Reviewed file changes

           The reviewed changes are returned as a set of
           api.reviewablefilechanges.ReviewableFileChanges objects.

           Note that the file changes objects represent the current state, and
           their `reviewed_by` attribute will not necessarily be the author of
           this batch."""
        return self._impl.getReviewedFileChanges(self.critic)

    @property
    def unreviewed_file_changes(self):
        """Unreviewed file changes

           The unreviewed changes are returned as a set of
           api.reviewablefilechanges.ReviewableFileChanges objects.

           Note that the file changes objects represent the current state, and
           their `reviewed_by` attribute will not necessarily be None."""
        return self._impl.getUnreviewedFileChanges(self.critic)

def fetch(critic, batch_id):
    """Fetch the Batch object with the given id"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    assert isinstance(batch_id, int)
    return api.impl.batch.fetch(critic, batch_id)

def fetchAll(critic, review=None, author=None):
    """Fetch all Batch objects

       If |review| is not None, only batches in the specified review are
       returned.

       If |author| is not None, only batches authored by the specified user
       are returned."""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    assert review is None or isinstance(review, api.review.Review)
    assert author is None or isinstance(author, api.user.User)
    return api.impl.batch.fetchAll(critic, review, author)

def fetchUnpublished(critic, review):
    """Fetch a Batch object representing current unpublished changes

       The Batch object's |id| and |timestamp| objects will be None to signal
       that the object does not represent a real object.  If the current user
       has no unpublished changes, the object's |is_empty| attribute will be
       true.

       Only the currently authenticated user's unpublished changes are
       returned."""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    assert isinstance(review, api.review.Review)
    return api.impl.batch.fetchUnpublished(critic, review)

================================================
FILE: src/api/branch.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import api

class BranchError(api.APIError):
    """Base exception for all errors related to the Branch class."""
    pass

class InvalidBranchId(BranchError):
    """Raised when an invalid branch id is used."""

    def __init__(self, branch_id):
        """Constructor"""
        super(InvalidBranchId, self).__init__(
            "Invalid branch id: %d" % branch_id)

class InvalidBranchName(BranchError):
    """Raised when an invalid branch name is used."""

    def __init__(self, name):
        """Constructor"""
        super(InvalidBranchName, self).__init__(
            "Invalid branch name: %r" % name)

class Branch(api.APIObject):
    """Representation of a Git branch, according to Critic

       Critic extends Git's branch concept by adding a heuristically
       determined base branch, and a derived restricted set of commits that
       belong to the branch by (initially) excluding those reachable from the
       base branch."""

    @property
    def id(self):
        """The branch's unique id"""
        return self._impl.id

    @property
    def name(self):
        """The branch's name excluding the 'refs/heads/' prefix"""
        return self._impl.name

    @property
    def repository(self):
        """The repository that contains the branch

           The repository is returned as an api.repository.Repository
           object."""
        return self._impl.getRepository(self.critic)

    @property
    def head(self):
        """The branch's head commit"""
        return self._impl.getHead(self.critic)

    @property
    def commits(self):
        """The commits belonging to the branch

           The return value is an api.commitset.CommitSet object.

           Note: This set of commits is the commits that are actually
           reachable from the head of the branch.  If the branch is a review
           branch that has been rebased, this is not the same as the commits
           that are considered part of the review."""
        return self._impl.getCommits(self.critic)

def fetch(critic, branch_id=None, repository=None, name=None):
    """Fetch a Branch object with the given id or name

       When a name is provided, a repository must also be provided."""
    import api.impl
    # Exactly one of |branch_id| and |name| must be given.
    assert (branch_id is None) != (name is None)
    assert name is None or repository is not None
    return api.impl.branch.fetch(critic, branch_id, repository, name)

def fetchAll(critic, repository=None):
    """Fetch Branch objects for all branches

       If a repository is provided, restrict the return value to branches in
       the specified repository."""
    import api.impl
    assert (repository is None or
            isinstance(repository, api.repository.Repository))
    return api.impl.branch.fetchAll(critic, repository)

================================================
FILE: src/api/changeset.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2017 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import api

class ChangesetError(api.APIError):
    """Base exception for all errors related to the Changeset class."""
    pass

class ChangesetBackgroundServiceError(ChangesetError):
    # NOTE(review): presumably raised when the changeset background service
    # cannot be used; the raising code is not in this module — confirm in
    # api.impl.changeset.
    pass

class InvalidChangesetId(ChangesetError):
    # Raised when an invalid changeset id is used.
    pass

class NotImplementedError(ChangesetError):
    # NOTE(review): this shadows the builtin NotImplementedError within this
    # module. Renaming would break external callers, so it is only flagged.
    pass

class ChangesetDelayed(api.ResultDelayedError):
    # Raised when the requested changeset is not yet available (being
    # computed in the background).
    pass

class AutomaticChangesetEmpty(ChangesetError):
    """Raised when fetching an automatic changeset, and no changes were found"""
    pass

class Changeset(api.APIObject):
    """Representation of a diff"""

    # Valid values for the |automatic| argument of fetch().
    AUTOMATIC_MODES = frozenset([
        # All changes in the review.
        "everything",
        # All changes in the review that are either assigned to the current
        # user or that matches one of the current user's watcher filters.
        "relevant",
        # All changes in the review that are assigned to the current user.
        "reviewable",
        # All pending changes in the review that are assigned to the current
        # user.
        "pending",
    ])

    def __str__(self):
        # E.g. "42 (direct)"; id and type come from the implementation layer.
        return str(self._impl.id) + " (" + str(self._impl.type) + ")"

    @property
    def id(self):
        # The changeset's unique id.
        return self._impl.id

    @property
    def repository(self):
        """The repository containing the compared commits"""
        return self._impl.repository

    @property
    def type(self):
        # The changeset's type.
        return self._impl.type

    @property
    def from_commit(self):
        # The commit on the old side of the diff.
        return self._impl.getFromCommit()

    @property
    def to_commit(self):
        # The commit on the new side of the diff.
        return self._impl.getToCommit()

    @property
    def files(self):
        # The files touched by this changeset, via api.filechange.
        return api.filechange.fetchAll(self.critic, self)

    @property
    def contributing_commits(self):
        # The commits that contribute changes to this changeset.
        return self._impl.getContributingCommits(self.critic)

def fetch(critic, repository, changeset_id=None, from_commit=None,
          to_commit=None, single_commit=None, review=None, automatic=None):
    """Fetch a single changeset from the given repository"""
    import api.impl
    if changeset_id is not None:
        # Lookup by id: no commit arguments may be combined with it.
        assert (from_commit is None and
                to_commit is None and
                single_commit is None)
    else:
        if automatic is not None:
            # "Automatic" changesets are computed relative to a review and
            # the current user; no explicit commits may be given.
            assert isinstance(review, api.review.Review)
            assert automatic in Changeset.AUTOMATIC_MODES
            assert (from_commit is None and
                    to_commit is None and
                    single_commit is None)
        else:
            # Either a (from_commit, to_commit) pair, or a single_commit.
            assert (from_commit is None) == (to_commit is None)
            assert (single_commit is None) != (from_commit is None)
    assert (from_commit is None
            or isinstance(from_commit, api.commit.Commit))
    assert (to_commit is None
            or isinstance(to_commit, api.commit.Commit))
    if single_commit is not None:
        assert isinstance(single_commit, api.commit.Commit)
        # Single-commit changesets are only defined for non-merge commits.
        assert len(single_commit.parents) <= 1
    if from_commit is not None and to_commit is not None:
        # A diff of a commit against itself would be empty by construction.
        assert from_commit.id != to_commit.id
    return api.impl.changeset.fetch(
        critic, repository, changeset_id, from_commit, to_commit,
        single_commit, review, automatic)

================================================
FILE: src/api/comment.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2017 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import api class CommentError(api.APIError): pass class InvalidCommentId(CommentError): """Raised when an invalid comment id is used.""" def __init__(self, comment_id): """Constructor""" super(InvalidCommentId, self).__init__( "Invalid comment id: %d" % comment_id) self.comment_id = comment_id class InvalidCommentIds(CommentError): """Raised by fetchMany() when invalid comment ids are used.""" def __init__(self, comment_ids): """Constructor""" super(InvalidCommentIds, self).__init__( "Invalid comment ids: %s" % ", ".join(map(str, comment_ids))) self.comment_ids = comment_ids class InvalidLocation(CommentError): """Raised when attempting to specify an invalid comment location""" pass class Comment(api.APIObject): TYPE_VALUES = frozenset(["issue", "note"]) @property def id(self): """The comment's unique id""" return self._impl.id @property def type(self): """The comment's type The type is one of "issue" and "note".""" pass @property def is_draft(self): """True if the comment is not yet published Unpublished comments are not displayed to other users.""" return self._impl.is_draft @property def review(self): """The review to which the comment belongs The review is returned as an api.review.Review object.""" return self._impl.getReview(self.critic) @property def author(self): """The comment's author The author is returned as an api.user.User object.""" return self._impl.getAuthor(self.critic) @property def timestamp(self): """The comment's timestamp The return value is a datetime.datetime object.""" return self._impl.timestamp @property def location(self): """The location of the comment, or None If the comment was made against lines in a commit message, the return value is a api.comment.CommitMessageLocation object. If the comment was made against lines in a file version, the return value is api.comment.FileVersionLocation object. 
Otherwise, the return value is None.""" return self._impl.getLocation(self.critic) @property def text(self): """The comment's text""" return self._impl.text @property def replies(self): """The replies to the comment The replies are returned as a list of api.reply.Reply objects.""" return self._impl.getReplies(self.critic) class DraftChanges(object): """Draft changes to the comment""" def __init__(self, author, is_draft, reply, new_type): self.__author = author self.__is_draft = is_draft self.__reply = reply self.__new_type = new_type @property def author(self): """The author of these draft changes The author is returned as an api.user.User object.""" return self.__author @property def is_draft(self): """True if the comment itself is a draft (not published)""" return self.__is_draft @property def reply(self): """The current unpublished reply The reply is returned as an api.reply.Reply object, or None if there is no current unpublished reply.""" return self.__reply @property def new_type(self): """The new type of an unpublished type change The type is returned as a string. Comment.TYPE_VALUES defines the set of possible return values.""" return self.__new_type @property def draft_changes(self): """The comment's current draft changes The draft changes are returned as a Comment.DraftChanges object, or None if the current user has no unpublished changes to this comment. 
If the comment is currently an issue, or the current user has an unpublished change of the comment's type to issue, the returned object will be an Issue.DraftChanges instead.""" return self._impl.getDraftChanges(self.critic) class Issue(Comment): STATE_VALUES = frozenset(["open", "addressed", "resolved"]) @property def type(self): return "issue" @property def state(self): """The issue's state The state is one of the strings "open", "addressed" or "resolved".""" return self._impl.state @property def addressed_by(self): """The commit that addressed the issue, or None The value is an api.commit.Commit object, or None if the issue's state is not "addressed".""" return self._impl.getAddressedBy(self.critic) @property def resolved_by(self): """The user that resolved the issue, or None The value is an api.user.User object, or None if the issue's state is not "resolved".""" return self._impl.getResolvedBy(self.critic) class DraftChanges(Comment.DraftChanges): """Draft changes to the issue""" def __init__(self, author, is_draft, reply, new_type, new_state, new_location): super(Issue.DraftChanges, self).__init__( author, is_draft, reply, new_type) self.__new_state = new_state self.__new_location = new_location @property def new_state(self): """The issue's new state The new state is returned as a string, or None if the current user has not resolved or reopened the issue. Issue.STATE_VALUES defines the set of possible return values.""" return self.__new_state @property def new_location(self): """The issue's new location The new location is returned as a FileVersionLocation objects, or None if the issue has not been reopened, or if it was manually resolved rather than addressed and did not need to be relocated when being reopened. 
Since only issues in file version locations can be addressed, that is the only possible type of new location.""" return self.__new_location class Note(Comment): @property def type(self): return "note" class Location(api.APIObject): TYPE_VALUES = frozenset(["general", "commit-message", "file-version"]) def __len__(self): """Return the the length of the location, in lines""" return (self.last_line - self.first_line) + 1 @property def type(self): """The location's type The type is one of "commit-message" and "file-version".""" pass @property def first_line(self): """The line number of the first commented line Note that line numbers are one-based.""" return self._impl.first_line @property def last_line(self): """The line number of the last commented line Note that line numbers are one-based.""" return self._impl.last_line class CommitMessageLocation(Location): @property def type(self): return "commit-message" @property def commit(self): """The commit whose message was commented""" return self._impl.getCommit(self.critic) @staticmethod def make(critic, first_line, last_line, commit): return api.impl.comment.makeCommitMessageLocation( critic, first_line, last_line, commit) class FileVersionLocation(Location): @property def type(self): return "file-version" @property def changeset(self): """The changeset containing the comment The changeset is returned as an api.changeset.Changeset object. If the comment was created while looking at a diff, this will initially be that changeset. As additional commits are added to the review, this changeset may be "extended" to contain those added commits. This is the ideal changeset to use to display the comment, unless it is an issue that has been addressed, in which case a better changeset would be the diff of the commit returned by Issue.addressed_by. If the user did not make the comment while looking at a diff but rather while looking at a single version of the file, then this attribute returns None. 
If this is an object returned by translateTo() called with a changeset argument, then this will be that changeset.""" return self._impl.getChangeset(self.critic) @property def side(self): """The commented side ("old" or "new") of the changeset If the user did not make the comment while looking at a changeset (i.e. a diff) but rather while looking at a single version of the file, then this attribute returns None.""" return self._impl.side @property def commit(self): """The commit whose version of the file this location references The commit is returned as an api.commit.Commit object. If this is an object returned by translateTo() called with a commit argument, then this is the commit that was given as an argument to it. If this is the primary location of the comment (returned from Comment.location) then this is the commit whose version of the file the comment was originally made against, or None if the comment was made while looking at a diff.""" return self._impl.getCommit(self.critic) @property def file(self): """The commented file""" return self._impl.getFile(self.critic) @property def is_translated(self): """True if this is a location returned by |translateTo()|""" return self._impl.is_translated def translateTo(self, changeset=None, commit=None): """Return a translated file version location, or None The location is translated to the version of the file in a certain commit. If |changeset| is not None, that commit is the changeset's |to_commit|, unless the comment is not present there, and otherwise the changeset's |from_commit|. If |commit| is not None, that's the commit. If the comment is not present in the commit, None is returned. The returned object's |is_translated| will be True. If the |changeset| argument is not None, then the returned object's |changeset| will be that changeset, and its |side| will reflect which of its |from_commit| and |to_commit| ended up being used. The returned object's |commit| will be None. 
If the |commit| argument is not None, the returned object's |commit| will be that commit, and its |changeset| and |side| will be None.""" assert changeset is None \ or isinstance(changeset, api.changeset.Changeset) assert commit is None or isinstance(commit, api.commit.Commit) assert (changeset is None) != (commit is None) return self._impl.translateTo(self.critic, changeset, commit) @staticmethod def make(critic, first_line, last_line, file, changeset=None, side=None, commit=None): # File is required. assert isinstance(file, api.file.File) # Changeset and side go together. assert (changeset is None) == (side is None) assert (changeset is None) \ or isinstance(changeset, api.changeset.Changeset) # Commit conflicts with changeset, but one is required. assert (commit is None) != (changeset is None) assert (commit is None) or isinstance(commit, api.commit.Commit) return api.impl.comment.makeFileVersionLocation( critic, first_line, last_line, file, changeset, side, commit) def fetch(critic, comment_id): """Fetch the Comment object with the given id""" import api.impl assert isinstance(critic, api.critic.Critic) assert isinstance(comment_id, int) return api.impl.comment.fetch(critic, comment_id) def fetchMany(critic, comment_ids): """Fetch multiple Comment objects with the given ids""" import api.impl assert isinstance(critic, api.critic.Critic) comment_ids = list(comment_ids) assert all(isinstance(comment_id, int) for comment_id in comment_ids) return api.impl.comment.fetchMany(critic, comment_ids) def fetchAll(critic, review=None, author=None, comment_type=None, state=None, location_type=None, changeset=None, commit=None): """Fetch all Comment objects If |review| is not None, only comments in the specified review are returned. If |author| is not None, only comments created by the specified user are returned. If |comment_type| is not None, only comments of the specified type are returned. If |state| is not None, only issues in the specified state are returned. 
This implies type="issue". If |location_type| is not None, only issues in the specified type of location are returned. If |changeset| is not None, only comments against file versions that are referenced by the specified changeset are returned. Must be combined with |review|, and can not be combined with |commit|. If |commit| is not None, only comments against the commit's message or file versions referenced by the commit are returned. Must be combined with |review|, and can not be combined with |changeset|.""" import api.impl assert isinstance(critic, api.critic.Critic) assert review is None or isinstance(review, api.review.Review) assert author is None or isinstance(author, api.user.User) assert comment_type is None or comment_type in Comment.TYPE_VALUES assert state is None or state in Issue.STATE_VALUES assert state is None or comment_type in (None, "issue") assert location_type is None or location_type in Location.TYPE_VALUES assert changeset is None or isinstance(changeset, api.changeset.Changeset) assert changeset is None or review is not None assert commit is None or isinstance(commit, api.commit.Commit) assert commit is None or review is not None assert changeset is None or commit is None return api.impl.comment.fetchAll(critic, review, author, comment_type, state, location_type, changeset, commit) ================================================ FILE: src/api/commit.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations under # the License. import api class CommitError(api.APIError): pass class InvalidCommitId(CommitError): """Raised when an invalid commit id is used""" def __init__(self, commit_id): super(InvalidCommitId, self).__init__( "Invalid commit id: %r" % commit_id) class InvalidSHA1(CommitError): """Raised when a given SHA-1 is invalid as a commit reference""" def __init__(self, sha1): super(InvalidSHA1, self).__init__("Invalid commit SHA-1: %r" % sha1) self.sha1 = sha1 class NotAFile(CommitError): """Raised when attempting to access a non-file as a file""" def __init__(self, path): super(NotAFile, self).__init__("Path is not a file: %s" % path) self.path = path class Commit(api.APIObject): """Representation of a Git commit""" def __str__(self): return self.sha1 def __repr__(self): return "api.commit.Commit(sha1=%r)" % self.sha1 def __hash__(self): return hash(str(self)) def __eq__(self, other): return str(self) == str(other) @property def id(self): """The commit's unique database id""" return self._impl.getId(self.critic) @property def repository(self): """The repository containing the commit""" return self._impl.repository @property def sha1(self): """The commit's full 40 character SHA-1""" return self._impl.sha1 @property def tree(self): """The SHA-1 of the tree object referenced by the commit""" return self._impl.tree @property def summary(self): """The commit's single-line summary This is the first line of the commit message, unless that line starts with 'fixup!' or 'squash!', in which case the returned summary is the first non-empty line after that, with '[fixup] ' or '[squash] ' prepended. 
If there is no such non-empty line, the returned summary is just '[fixup]' or '[squash]'.""" return self._impl.getSummary() @property def message(self): """The commit's full commit message""" return self._impl.message @property def parents(self): """The commit's parents The return value is a list of api.Commit objects.""" return self._impl.getParents(self.critic) @property def description(self): """A string describing the commit in "friendly" way, or None This will typically be a tag name or a branch name; in the case of a branch name with "tip of" prepended if this commit is referenced directly by that branch.""" return self._impl.getDescription(self.critic) class UserAndTimestamp(object): """Representation of the author or committer meta-data of a commit""" def __init__(self, name, email, timestamp): self.name = name self.email = email self.timestamp = timestamp @property def author(self): """The commit's "author" meta-data""" return self._impl.getAuthor(self.critic) @property def committer(self): """The commit's "committer" meta-data""" return self._impl.getCommitter(self.critic) def isAncestorOf(self, commit): """Return True if |self| is an ancestor of |commit| Also return True if |self| is |commit|, meaning a commit is considered an ancestor of itself.""" assert isinstance(commit, Commit) return self._impl.isAncestorOf(commit) class FileInformation(object): """Basic information about a file in a commit""" def __init__(self, file, mode, sha1, size): self.__file = file self.__mode = mode self.__sha1 = sha1 self.__size = size @property def file(self): """The represented file""" return self.__file @property def mode(self): """The file's "UNIX" mode as an integer""" # FIXME: Should we use an integer sub-class, like # gitutils.Tree.Entry.Mode? 
return self.__mode @property def sha1(self): """The file content's SHA-1""" return self.__sha1 @property def size(self): """The file's size in bytes""" return self.__size def getFileInformation(self, file): """Look up information about a file in the commit The entry is returned as an Commit.FileInformation object, or None if the path was not found in the commit's tree. If the path is found but is not a blob (e.g. because it's a directory), NotAFile is raised.""" assert isinstance(file, api.file.File) return self._impl.getFileInformation(file) def getFileContents(self, file): """Fetch the blob (contents) of a file in the commit The return value is a string, or None if the path was not found in the commit's tree. If the path is found but is not a blob (e.g. because it is a directory), NotAFile is raised.""" assert isinstance(file, api.file.File) return self._impl.getFileContents(file) def getFileLines(self, file): """Fetch the lines of a file in the commit Much like getFileContents(), but splits the returned string into a list of strings in a consistent way that matches how other parts of Critic treats line breaks, and thus compatible with stored line numbers. 
Note: commit.getFileContents(...).splitlines() is *not* correct!""" assert isinstance(file, api.file.File) return self._impl.getFileLines(file) def fetch(repository, commit_id=None, sha1=None, ref=None): """Fetch a Git commit from the given repository The commit can be identified by its unique (internal) database id, its SHA-1 (full 40 character string) or by an arbitrary ref that resolves to a commit object (possibly via tag objects) when given to the 'git rev-parse' command.""" import api.impl assert isinstance(repository, api.repository.Repository) assert (ref is None) != ((commit_id is None) and (sha1 is None)) return api.impl.commit.fetch(repository, commit_id, sha1, ref) def fetchMany(repository, commit_ids=None, sha1s=None): """Fetch multiple Git commits from the given repository The commits can be identified by their unique (internal) database ids, or by their SHA-1s (full 40 character strings.)""" import re import api.impl assert isinstance(repository, api.repository.Repository) assert (commit_ids is None) != (sha1s is None) if commit_ids: commit_ids = list(commit_ids) assert all(isinstance(commit_id, int) for commit_id in commit_ids) else: re_sha1 = re.compile("^[0-9A-Fa-f]{40}$") sha1s = list(sha1s) assert all(isinstance(sha1, str) and re_sha1.match(sha1) for sha1 in sha1s) return api.impl.commit.fetchMany(repository, commit_ids, sha1s) ================================================ FILE: src/api/commitset.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import api class InvalidCommitRange(Exception): """Raised by calculateFromRange() when the range is not simple Simple in this context means that the commit that defines the start of the range is an ancestor of the commit that defines the end of the range, and all commits in-between.""" pass class CommitSet(api.APIObject): """Representation of a set of Commit objects""" def __iter__(self): return iter(self._impl) def __len__(self): return len(self._impl) def __contains__(self, item): return item in self._impl def __hash__(self): return hash(self._impl) def __eq__(self, other): assert isinstance(other, CommitSet) return self._impl == other._impl def __nonzero__(self): return len(self._impl) != 0 def __repr__(self): return "api.commitset.CommitSet(%r)" % list(self.topo_ordered) @property def date_ordered(self): """The commits in the set in (commit) timestamp order The return value is a generator producing api.commit.Commit objects. Commits are guaranteed to precede their parents, even if the actual commit timestamp order is the opposite.""" return self._impl.getDateOrdered() @property def topo_ordered(self): """The commits in the set in "topological" order The return value is a generator producing api.commit.Commit objects. Commits are guaranteed to precede their parents, and as far as possible immediately precede their parent. 
It is only valid to call this function on commit sets with a single head (those whose 'heads' attribute returns a set of length 1.)""" assert not self or len(self.heads) == 1 return self._impl.getTopoOrdered() @property def heads(self): """The head commits of the set The return value is a frozenset of Commit objects. A "head commit" is defined as any commit in the set that is not an immediate parent of another commit in the set.""" return self._impl.heads @property def tails(self): """The tail commits of the set The return value is a frozenset of Commit objects. A "tail commit" is defined as any commit that is a parent of a commit in the set but isn't itself in the set.""" return self._impl.tails @property def filtered_tails(self): """The filtered tail commits of the set The return value is a frozenset of Commit objects. The returned set will contain each tail commit that isn't an ancestor of another tail commit of the set. If the tail commits of the set are all different commits on an upstream branch, then this will only return the latest one.""" return self._impl.getFilteredTails(self.critic) def getChildrenOf(self, commit): """Return the commits in the set that are children of the commit The return value is a set of Commit objects.""" assert isinstance(commit, api.commit.Commit) return self._impl.getChildrenOf(commit) def getParentsOf(self, commit): """Return the intersection of the commit's parents and the set The return value is a list of Commit objects, in the same order as in "commit.parents".""" assert isinstance(commit, api.commit.Commit) return self._impl.getParentsOf(commit) def getDescendantsOf(self, commit, include_self=False): """Return the intersection of the commit's descendants and the set The return value is another CommitSet object. If 'include_self' is True, the commit itself is included in the returned set. 
The argument can also be a iterable, in which case the returned set is the union of the sets that would be returned for each commit in the iterable.""" if isinstance(commit, api.commit.Commit): commits = [commit] else: commits = list(commit) assert all(isinstance(commit, api.commit.Commit) for commit in commits) assert all(commit in self or commit in self.tails for commit in commits) return self._impl.getDescendantsOf(commits, include_self) def getAncestorsOf(self, commit, include_self=False): """Return the intersection of the commit's ancestors and the set The return value is another CommitSet object. If 'include_self' is True, the commit itself is included in the returned set. The argument can also be a iterable, in which case the returned set is the union of the sets that would be returned for each commit in the iterable.""" if isinstance(commit, api.commit.Commit): commits = [commit] else: commits = list(commit) assert all(isinstance(commit, api.commit.Commit) for commit in commits) return self._impl.getAncestorsOf(commits, include_self) def union(self, commits): commits = set(commits) assert all(isinstance(commit, api.commit.Commit) for commit in commits) return self._impl.union(self.critic, commits) def __or__(self, commits): return self.union(commits) def intersection(self, commits): commits = set(commits) assert all(isinstance(commit, api.commit.Commit) for commit in commits) return self._impl.intersection(self.critic, commits) def __and__(self, commits): return self.intersection(commits) def difference(self, commits): commits = set(commits) assert all(isinstance(commit, api.commit.Commit) for commit in commits) return self._impl.difference(self.critic, commits) def __sub__(self, commits): return self.difference(commits) def symmetric_difference(self, commits): commits = set(commits) assert all(isinstance(commit, api.commit.Commit) for commit in commits) return self._impl.symmetric_difference(self.critic, commits) def __xor__(self, commits): return 
self.symmetric_difference(commits) def create(critic, commits): """Create a CommitSet object from an iterable of Commit objects""" import api.impl assert isinstance(critic, api.critic.Critic) if not isinstance(commits, CommitSet): commits = list(commits) assert all(isinstance(commit, api.commit.Commit) for commit in commits) return api.impl.commitset.create(critic, commits) def calculateFromRange(critic, from_commit, to_commit): """Calculate a set of commits from a commit range""" import api.impl assert isinstance(critic, api.critic.Critic) assert isinstance(from_commit, api.commit.Commit) assert isinstance(to_commit, api.commit.Commit) assert from_commit.repository == to_commit.repository return api.impl.commitset.calculateFromRange(critic, from_commit, to_commit) ================================================ FILE: src/api/config.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api class ConfigurationError(api.APIError): pass class InvalidGroup(ConfigurationError): def __init__(self, name): super(ConfigurationError, self).__init__( "Invalid configuration group: %s" % name) class InvalidKey(ConfigurationError): def __init__(self, group, name): super(ConfigurationError, self).__init__( "Invalid configuration key: %s::%s" % (group, name)) class WrongType(ConfigurationError): def __init__(self, group, name, read_as): super(ConfigurationError, self).__init__( "Wrong type: %s::%s (read as %s)" % (group, name, read_as)) def getValue(group, key): import configuration if not hasattr(configuration, group): raise InvalidGroup(group) module = getattr(configuration, group) if not hasattr(module, key): raise InvalidKey(group, key) return getattr(module, key) def getBoolean(group, key): value = getValue(group, key) if not isinstance(value, bool): raise WrongType(group, key, "boolean") return value def getInteger(group, key): value = getValue(group, key) if not isinstance(value, int): raise WrongType(group, key, "integer") return value def getString(group, key): value = getValue(group, key) if not isinstance(value, basestring): raise WrongType(group, key, "string") return value ================================================ FILE: src/api/critic.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api

class Critic(object):
    """Session object; the entry point to the Critic API.

       Wraps an implementation object (api.impl.critic) and the database
       connection it owns."""

    def __init__(self, impl):
        self._impl = impl

    @property
    def effective_user(self):
        # The user operations are currently performed as.
        return self._impl.getEffectiveUser(self)

    @property
    def actual_user(self):
        # The authenticated user, or None (see setActualUser()).
        return self._impl.actual_user

    @property
    def access_token(self):
        """Access token used to authenticate"""
        return self._impl.access_token

    @property
    def database(self):
        # The underlying database connection object.
        return self._impl.database

    def getDatabaseCursor(self):
        """Return a read-only database cursor object

           This cursor object can only be used to execute SELECT queries."""
        return self._impl.database.readonly_cursor()

    def getUpdatingDatabaseCursor(self, *tables):
        """Return a database cursor for updating

           The return value is a "context manager", which returns the actual
           cursor object when entered and either commits or rolls back the
           current transaction when exited.

           The actual cursor object can only be used to update the tables
           specified as arguments, using INSERT, UPDATE or DELETE queries.
           The cursor object can also be used to execute SELECT queries
           (against any tables.)"""
        return self._impl.database.updating_cursor(*tables)

    def setActualUser(self, user):
        # May only be called once per session, with a real User object.
        assert isinstance(user, api.user.User)
        assert self._impl.actual_user is None
        self._impl.actual_user = user

    def setAccessToken(self, access_token):
        """Set the access token used to authenticate"""
        # May only be called once per session.
        assert self._impl.access_token is None
        self._impl.access_token = access_token

def startSession(for_user=False, for_system=False, for_testing=False):
    # Exactly one of the three session kinds must be requested.
    import api.impl
    assert sum((for_user, for_system, for_testing)) == 1
    return api.impl.critic.startSession(for_user, for_system, for_testing)

================================================
FILE: src/api/extension.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2015 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
# You may obtain a copy of
# the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import api

class ExtensionError(api.APIError):
    """Base exception for all errors related to the Extension class"""
    pass

class InvalidExtensionId(ExtensionError):
    """Raised when an invalid extension id is used"""

    def __init__(self, value):
        """Constructor"""
        super(InvalidExtensionId, self).__init__(
            "Invalid extension id: %r" % value)
        self.value = value

class InvalidExtensionKey(ExtensionError):
    """Raised when an invalid extension key is used"""

    def __init__(self, value):
        """Constructor"""
        super(InvalidExtensionKey, self).__init__(
            "Invalid extension key: %r" % value)
        self.value = value

class Extension(api.APIObject):
    """Representation of a Critic extension"""

    @property
    def id(self):
        """The extension's unique id"""
        return self._impl.id

    @property
    def name(self):
        """The extension's name"""
        return self._impl.name

    @property
    def key(self):
        """The extension's unique key

           For a system extension, the key is the extension's name.  For
           other extensions, the key is the publisher's username followed by
           a slash followed by the extension's name."""
        return self._impl.getKey(self.critic)

    @property
    def publisher(self):
        """The extension's publisher

           The user that published the extension.  This may not be the author
           (who may not be a user of this Critic system.)  None if this is a
           system extension."""
        return self._impl.getPublisher(self.critic)

    @property
    def default_version(self):
        """The default extension version

           This is typically the version whose extension description and
           other metadata should be presented as the extension's true
           metadata."""
        return self._impl.getDefaultVersion()

def fetch(critic, extension_id=None, key=None):
    """Fetch an Extension object with the given extension id or key

       Exactly one of the 'extension_id' and 'key' arguments can be used.

       Exceptions:

         InvalidExtensionId: if 'extension_id' is used and is not a valid
                             extension id.
         InvalidExtensionKey: if 'key' is used and is not a valid extensions
                              key."""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    assert (extension_id is None) != (key is None)
    return api.impl.extension.fetch(critic, extension_id, key)

def fetchAll(critic, publisher=None, installed_by=None):
    """Fetch Extension objects for all extensions in the system

       If 'publisher' is not None, it must be an api.user.User object, and
       only extensions published by this user are returned.

       If 'installed_by' is not None, it must be an api.user.User object, and
       only extensions that this user has installed are returned.  This may
       include extensions that are universally installed (i.e. installed for
       all users, and not by this user directly.)"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    assert publisher is None or isinstance(publisher, api.user.User)
    assert installed_by is None or isinstance(installed_by, api.user.User)
    return api.impl.extension.fetchAll(critic, publisher, installed_by)

================================================
FILE: src/api/file.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2017 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import api class FileError(api.APIError): pass class InvalidFileId(FileError): """Raised when an invalid file id is used.""" def __init__(self, file_id): """Constructor""" super(InvalidFileId, self).__init__( "Invalid file id: %d" % file_id) class InvalidPath(FileError): """Raised when an invalid path is used.""" def __init__(self, path): """Constructor""" super(InvalidPath, self).__init__( "Invalid path: %s" % path) class File(api.APIObject): def __str__(self): return self.path @property def id(self): """The path's unique id""" return self._impl.id @property def path(self): """The path""" return self._impl.path def fetch(critic, file_id=None, path=None, create=False): """Fetch a "file" (file id / path mapping) If a path is used, and |create| is True, a mapping is created if one didn't already exist.""" import api.impl assert isinstance(critic, api.critic.Critic) assert (file_id is None) != (path is None) if file_id is not None: file_id = int(file_id) if path is not None: path = str(path) assert isinstance(create, bool) return api.impl.file.fetch(critic, file_id, path, create) def fetchMany(critic, file_ids=None, paths=None, create=False): """Fetch multiple "files" (file id / path mappings) If paths are used, and |create| is True, a mapping is created if one didn't already exist.""" import api.impl assert isinstance(critic, api.critic.Critic) assert (file_ids is None) != (paths is None) if file_ids is not None: file_ids = [int(file_id) for file_id in file_ids] if paths is not None: paths = [str(path) for path in paths] assert isinstance(create, bool) return 
api.impl.file.fetchMany(critic, file_ids, paths, create) ================================================ FILE: src/api/filechange.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import api class FileChangeError(api.APIError): pass class InvalidFileChangeId(FileChangeError): def __init__(self, changeset_id, file_id): super(InvalidFileChangeId, self).__init__( "Invalid file change id: %d:%d" % (changeset_id, file_id)) class FileChange(api.APIObject): """Representation of the changes to a file introduced by a changeset""" def __hash__(self): return hash((self.changeset, self.file)) def __eq__(self, other): return self.changeset == other.changeset and self.file == other.file @property def file(self): return self._impl.getFile(self.critic) @property def changeset(self): return self._impl.changeset @property def old_sha1(self): return self._impl.old_sha1 @property def old_mode(self): return self._impl.old_mode @property def new_sha1(self): return self._impl.new_sha1 @property def new_mode(self): return self._impl.new_mode def fetch(critic, changeset, file): assert isinstance(critic, api.critic.Critic) assert isinstance(changeset, api.changeset.Changeset) assert isinstance(file, api.file.File) return api.impl.filechange.fetch(critic, changeset, file) def fetchAll(critic, changeset): assert isinstance(critic, api.critic.Critic) assert 
isinstance(changeset, api.changeset.Changeset) return api.impl.filechange.fetchAll(critic, changeset) ================================================ FILE: src/api/filecontent.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import api class FilecontentError(api.APIError): pass class Filecontent(api.APIObject): """Representation of some context""" def getLines(self, first_row=None, last_row=None): assert first_row is None or isinstance(first_row, int) assert last_row is None or isinstance(last_row, int) return self._impl.getLines(first_row, last_row) class Line: """Representation of a line from some version of a file""" def __init__(self, parts, offset): self.__parts = parts self.__offset = offset @property def parts(self): return self.__parts @property def offset(self): return self.__offset def fetch(critic, repository, blob_sha1, file_obj): assert isinstance(critic, api.critic.Critic) assert isinstance(repository, api.repository.Repository) assert isinstance(blob_sha1, str) assert isinstance(file_obj, api.file.File) return api.impl.filecontent.fetch(critic, repository, blob_sha1, file_obj) ================================================ FILE: src/api/filediff.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed 
under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import api class FilediffError(api.APIError): pass class FilediffParserError(api.APIError): pass class FilediffDelayed(api.ResultDelayedError): pass class Filediff(api.APIObject): """Representation of the source code for a file in a changeset A filediff has a list of macro chunks, where each macro chunk represents a partition of a file.""" def __hash__(self): return hash(("filediff", self.filechange)) def __eq__(self, other): return self.filechange == other.filechange @property def filechange(self): return self._impl.filechange @property def old_count(self): return self._impl.new_count @property def new_count(self): return self._impl.new_count def getMacroChunks(self, context_lines, comments=None, ignore_chunks=False): assert isinstance(context_lines, int) if comments is not None: comments = list(comments) assert all(isinstance(comment, api.comment.Comment) for comment in comments) assert isinstance(ignore_chunks, bool) return self._impl.getMacroChunks( self.critic, context_lines, comments, ignore_chunks) class MacroChunk(object): """Representation of a partition of a file A macro chunk contains all lines in the range from the first to the last. In other words, if a line is between the first and last line of this macro chunk, it will be included in this macro chunk. A macro chunk also contains old and new offsets and counts, which describe where in the file the lines are from, as well as how many are on each side. 
The two sides represents the old and new version of the file, where the old version is what the file looked like just before the first (earliest) commit of the changeset, and the new version is what the file looked like just after the last (latest) commit of the changeset.""" def __init__(self, impl_macro_chunk): self.__impl = impl_macro_chunk @property def chunks(self): return self.__impl.legacy_macro_chunk.chunks @property def old_offset(self): return self.__impl.legacy_macro_chunk.old_offset @property def new_offset(self): return self.__impl.legacy_macro_chunk.new_offset @property def old_count(self): return self.__impl.legacy_macro_chunk.old_count @property def new_count(self): return self.__impl.legacy_macro_chunk.new_count @property def lines(self): return self.__impl.getLines() class Line(object): """Representation of a line of a file A line represents a change from the old version of a file, to the new version of a file. A line has a type, which is one of the following: CONTEXT DELETED MODIFIED REPLACED INSERTED WHITESPACE CONFLICT The type of the line describes how the line changed. """ def __init__(self, impl_line): self.__impl = impl_line @property def type(self): return self.__impl.legacy_line.type @property def old_offset(self): return self.__impl.legacy_line.old_offset @property def new_offset(self): return self.__impl.legacy_line.new_offset @property def content(self): return self.__impl.getContent() @property def is_whitespace(self): return self.__impl.is_whitespace @property def analysis(self): return self.__impl.analysis @property def type_string(self): return self.__impl.type_string() class Part(object): """Representation of a part of a line of code A part has a type, which describes what kind of content it contains. It can also have a state, meaning the part is either something that was removed (in the old version of a file), or added (in the new version of a file). A part also has some content, which is typically a word (ex. 
for, in, if) or an operator (ex. =, !=, [, ]).""" def __init__(self, impl_part): self.__impl = impl_part @property def type(self): return self.__impl.type @property def content(self): return self.__impl.content @property def state(self): return self.__impl.state def fetch(critic, filechange): assert isinstance(critic, api.critic.Critic) assert isinstance(filechange, api.filechange.FileChange), filechange return api.impl.filediff.fetch(critic, filechange) def fetchAll(critic, changeset): assert isinstance(critic, api.critic.Critic) assert isinstance(changeset, api.changeset.Changeset) assert comments is None or isinstance(comments, list) assert isinstance(context_lines, int) return api.impl.filediff.fetchAll(critic, changeset) ================================================ FILE: src/api/filters.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api class FilterError(api.APIError): """Base exception for all errors related to the User class.""" pass class Filter(api.APIObject): """Base class of RepositoryFilter and ReviewFilter""" @property def subject(self): """The filter's subject The subject is the user that the filter applies to.""" return self._impl.getSubject(self.critic) @property def type(self): """The filter's type The type is always one of "reviewer", "watcher" and "ignore".""" return self._impl.type @property def path(self): """The filter's path""" return self._impl.path class InvalidRepositoryFilterId(FilterError): """Raised when an invalid repository filter id is used""" def __init__(self, value): """Constructor""" super(InvalidRepositoryFilterId, self).__init__( "Invalid repository filter id: %r" % value) self.value = value class RepositoryFilter(Filter): """Representation of a repository filter A repository filter is a filter that applies to all reviews in a repository.""" @property def id(self): """The repository filter's unique id""" return self._impl.id @property def repository(self): """The repository filter's repository""" return self._impl.getRepository(self.critic) @property def delegates(self): """The repository filter's delegates, or None The delegates are returned as a frozenset of api.user.User objects. 
If the filter's type is not "reviewer", this attribute's value is None.""" return self._impl.getDelegates(self.critic) def fetchRepositoryFilter(critic, filter_id): """Fetch a RepositoryFilter object with the given filter id""" assert isinstance(critic, api.critic.Critic) return api.impl.filters.fetchRepositoryFilter(critic, int(filter_id)) class ReviewFilter(Filter): """Representation of a review filter A review filter is a filter that applies to a single review only.""" @property def id(self): """The review filter's unique id""" return self._impl.id @property def review(self): """The review filter's review""" return self._impl.getReview(self.critic) @property def creator(self): """The review filter's creator This is the user that created the review filter, which can be different from the filter's subject.""" return self._impl.getCreator(self.critic) ================================================ FILE: src/api/impl/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
"""Critic API implementation""" import critic import user import repository import filters import branch import commit import commitset import changeset import filechange import filediff import filecontent import review import reviewsummary import log import accesstoken import accesscontrolprofile import labeledaccesscontrolprofile import extension import file import comment import reply import batch import reviewablefilechange ================================================ FILE: src/api/impl/accesscontrolprofile.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api
import apiobject
import dbutils

# Shorthands for the public (wrapper) class and its nested exception types.
public_class = api.accesscontrolprofile.AccessControlProfile
HTTPException = public_class.HTTPException
RepositoryException = public_class.RepositoryException
ExtensionException = public_class.ExtensionException

class AccessControlProfile(apiobject.APIObject):
    # Implementation backing api.accesscontrolprofile.AccessControlProfile.
    wrapper_class = api.accesscontrolprofile.AccessControlProfile

    def __init__(self, profile_id, title, token_id, *rules):
        self.id = profile_id
        self.title = title
        self.__token_id = token_id
        # |rules| are the http, repositories and extensions rule columns,
        # in that order.
        (self.http_rule,
         self.repositories_rule,
         self.extensions_rule) = rules

    def getAccessToken(self, critic):
        # The access token this profile belongs to, or None.
        if self.__token_id is None:
            return None
        return api.accesstoken.fetch(critic, self.__token_id)

    def getHTTP(self, critic):
        # Build the HTTP request category: the profile's rule plus its
        # per-request-method/path exceptions.
        cursor = critic.getDatabaseCursor()
        cursor.execute("""SELECT id, request_method, path_pattern
                            FROM accesscontrol_http
                           WHERE profile=%s
                        ORDER BY id ASC""",
                       (self.id,))
        return public_class.Category(
            self.http_rule,
            [HTTPException(exception_id, request_method, path_pattern)
             for exception_id, request_method, path_pattern, in cursor])

    def getRepositories(self, critic):
        # Build the repositories access category; a NULL repository in an
        # exception row means "any repository".
        cursor = critic.getDatabaseCursor()
        cursor.execute("""SELECT id, access_type, repository
                            FROM accesscontrol_repositories
                           WHERE profile=%s
                        ORDER BY id ASC""",
                       (self.id,))
        return public_class.Category(
            self.repositories_rule,
            [RepositoryException(
                exception_id, access_type,
                api.repository.fetch(critic, repository_id)
                if repository_id is not None else None)
             for exception_id, access_type, repository_id in cursor])

    def getExtensions(self, critic):
        # Build the extensions access category.  When the extensions
        # sub-system is disabled the category has no exceptions.
        import configuration
        if not configuration.extensions.ENABLED:
            return public_class.Category(self.extensions_rule, [])
        cursor = critic.getDatabaseCursor()
        cursor.execute("""SELECT id, access_type, extension_key
                            FROM accesscontrol_extensions
                           WHERE profile=%s
                        ORDER BY id ASC""",
                       (self.id,))
        return public_class.Category(
            self.extensions_rule,
            [ExtensionException(
                exception_id, access_type,
                api.extension.fetch(critic, key=extension_key)
                if extension_key is not None else None)
             for exception_id, access_type, extension_key in cursor])

    @staticmethod
    def refresh(critic, tables, cached_profiles):
        # Re-read all cached profiles after a committed transaction that
        # touched the accesscontrolprofiles table.
        if "accesscontrolprofiles" not in tables:
            return
        AccessControlProfile.updateAll(
            critic,
            """SELECT id, title, access_token, http, repositories, extensions
                 FROM accesscontrolprofiles
                WHERE id=ANY (%s)""",
            cached_profiles)

@AccessControlProfile.cached()
def fetch(critic, profile_id):
    # Fetch a single profile by id; results are cached per session.
    cursor = critic.getDatabaseCursor()
    cursor.execute("""SELECT id, title, access_token, http, repositories,
                             extensions
                        FROM accesscontrolprofiles
                       WHERE id=%s""",
                   (profile_id,))
    try:
        return next(AccessControlProfile.make(critic, cursor))
    except StopIteration:
        raise api.accesscontrolprofile.InvalidAccessControlProfileId(profile_id)

def fetchAll(critic, title):
    # Fetch all profiles that are not tied to an access token, optionally
    # restricted to those with the given title.
    cursor = critic.getDatabaseCursor()
    if title is None:
        cursor.execute("""SELECT id, title, NULL, http, repositories,
                                 extensions
                            FROM accesscontrolprofiles
                           WHERE access_token IS NULL
                        ORDER BY id ASC""")
    else:
        cursor.execute("""SELECT id, title, NULL, http, repositories,
                                 extensions
                            FROM accesscontrolprofiles
                           WHERE access_token IS NULL
                             AND title=%s
                        ORDER BY id ASC""",
                       (title,))
    return list(AccessControlProfile.make(critic, cursor))

================================================
FILE: src/api/impl/accesstoken.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2015 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import api
import apiobject

class AccessToken(apiobject.APIObject):
    # Implementation backing api.accesstoken.AccessToken.
    wrapper_class = api.accesstoken.AccessToken

    def __init__(self, token_id, access_type, user_id, part1, part2, title):
        self.id = token_id
        self.access_type = access_type
        self.__user_id = user_id
        self.part1 = part1
        self.part2 = part2
        self.title = title
        # Lazily resolved id of the token's access control profile; see
        # getProfile().
        self.__profile_id = None

    def getUser(self, critic):
        # The user the token belongs to, or None.
        if self.__user_id is None:
            return None
        return api.user.fetch(critic, self.__user_id)

    def getProfile(self, critic):
        # The access control profile tied to this token, or None if the
        # token has no profile.  The profile id is looked up once and then
        # cached on the instance.
        if self.__profile_id is None:
            cursor = critic.getDatabaseCursor()
            cursor.execute("""SELECT id
                                FROM accesscontrolprofiles
                               WHERE access_token=%s""",
                           (self.id,))
            row = cursor.fetchone()
            if not row:
                return None
            self.__profile_id, = row
        return api.accesscontrolprofile.fetch(critic, self.__profile_id)

    @staticmethod
    def refresh(critic, tables, cached_tokens):
        # Re-read cached tokens after a committed transaction touched a
        # relevant table.
        if not tables.intersection(("accesstokens",
                                    "accesscontrolprofiles")):
            return
        AccessToken.updateAll(
            critic,
            """SELECT id, access_type, uid, part1, part2, title
                 FROM accesstokens
                WHERE id=ANY (%s)""",
            cached_tokens)

@AccessToken.cached()
def fetch(critic, token_id):
    # Fetch a single access token by id; results are cached per session.
    cursor = critic.getDatabaseCursor()
    cursor.execute(
        """SELECT id, access_type, uid, part1, part2, title
             FROM accesstokens
            WHERE id=%s""",
        (token_id,))
    try:
        return next(AccessToken.make(critic, cursor))
    except StopIteration:
        raise api.accesstoken.InvalidAccessTokenId(token_id)

def fetchAll(critic, user):
    # Fetch all access tokens, or only those belonging to |user| when one
    # is given.
    cursor = critic.getDatabaseCursor()
    if user is None:
        cursor.execute(
            """SELECT id, access_type, uid, part1, part2, title
                 FROM accesstokens""")
    else:
        cursor.execute(
            """SELECT id, access_type, uid, part1, part2, title
                 FROM accesstokens
                WHERE uid=%s""",
            (user.id,))
    return list(AccessToken.make(critic, cursor))

================================================
FILE: src/api/impl/apiobject.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2015 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License,
# Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

class APIObject(object):
    """Base class of all api.impl implementation classes

       Sub-classes set |wrapper_class| to the public api class that wraps
       them.  Caching of wrapped objects is done per session via
       critic._impl.lookup()/assign()."""

    def wrap(self, critic):
        # Wrap this implementation object in its public api class.
        return self.wrapper_class(critic, self)

    @classmethod
    def create(Implementation, critic, *args):
        # Construct an implementation object from |args| (typically a
        # database row) and wrap it immediately.
        return Implementation(*args).wrap(critic)

    @classmethod
    def get_cached(Implementation, critic, item_id):
        # Return the cached wrapped object; raises KeyError if not cached.
        return critic._impl.lookup(Implementation, item_id)

    @classmethod
    def add_cached(Implementation, critic, item_id, item):
        # Insert a wrapped object into the session cache.
        critic._impl.assign(Implementation, item_id, item)

    @classmethod
    def make(Implementation, critic, args_list, ignored_errors=(),
             cache_key=lambda args: args[0]):
        # Generator: yield one wrapped object per argument tuple (typically
        # per database row), reusing cached objects where possible.  Rows
        # whose construction raises one of |ignored_errors| are skipped.
        for args in args_list:
            item_id = cache_key(args)
            try:
                item = critic._impl.lookup(Implementation, item_id)
            except KeyError:
                try:
                    item = Implementation.create(critic, *args)
                except ignored_errors:
                    continue
                Implementation.add_cached(critic, item_id, item)
            yield item

    @classmethod
    def cached(Implementation, InvalidIdError=None,
               cache_key=lambda args: args[0]):
        # Decorator factory for fetch() functions: consult the session
        # cache before calling the wrapped fetch.  When |InvalidIdError| is
        # given, the wrapped fetch is expected to return a generator (from
        # make()) and an empty result raises that error.
        def wrap(fetch):
            def wrapper(critic, *args):
                item_id = cache_key(args)
                if item_id is not None:
                    try:
                        return critic._impl.lookup(Implementation, item_id)
                    except KeyError:
                        pass
                result = fetch(critic, *args)
                if InvalidIdError is None:
                    return result
                try:
                    return next(result)
                except StopIteration:
                    raise InvalidIdError(item_id)
            return wrapper
        return wrap

    @classmethod
    def cachedMany(Implementation, InvalidIdsError,
                   cache_keys=lambda args: args[0]):
        # Decorator factory for fetchMany() functions: serve as many ids as
        # possible from the cache, fetch the rest in one call, and raise
        # |InvalidIdsError| with the sorted missing ids if any id resolved
        # to nothing.  The result preserves the requested id order.
        def wrap(fetchMany):
            def wrapper(critic, *args):
                items = {}
                item_ids = cache_keys(args)
                try:
                    cache = critic._impl.lookup(Implementation)
                except KeyError:
                    cache = {}
                uncached_ids = set(item_ids) - set(cache.keys())
                items = {item_id: cache[item_id]
                         for item_id in item_ids
                         if item_id in cache}
                if uncached_ids:
                    items.update(
                        (item.id, item)
                        for item in fetchMany(critic, list(uncached_ids)))
                if len(items) < len(set(item_ids)):
                    invalid_ids = sorted(set(item_ids) - set(items.keys()))
                    raise InvalidIdsError(invalid_ids)
                return [items[item_id] for item_id in item_ids]
            return wrapper
        return wrap

    @classmethod
    def allCached(Implementation, critic):
        """Return all cached objects of this type

           The cached objects are returned as a dictionary mapping the object
           id to the object. This dictionary should not be modified."""
        # Don't catch KeyError here. Something is probably wrong if this
        # function is called when no objects of the type are cached.
        return critic._impl.lookup(Implementation)

    @staticmethod
    def refresh(critic, tables, cached_objects):
        """Refresh objects after transaction commit

           The |tables| parameter is a set of database tables that were
           modified in the transaction. The |cached_objects| parameter is a
           dictionary mapping object ids to cached objects (wrappers) of this
           type."""
        pass

    @classmethod
    def updateAll(Implementation, critic, query, cached_objects):
        """Execute the query and update all cached objects

           The query must take a single parameter, which is a list of object
           ids. It will be executed with the list of ids of all cached
           objects.

           Each returned row must have the id of the object as the first
           item, and the implementation constructor must take the row as a
           whole as arguments:

             new_impl = Implementation(*row)"""
        cursor = critic.getDatabaseCursor()
        cursor.execute(query, (cached_objects.keys(),))
        for row in cursor:
            # Swap in a freshly constructed implementation behind the
            # existing wrapper, so callers holding the wrapper see the
            # refreshed state.
            cached_objects[row[0]]._set_impl(Implementation(*row))

================================================
FILE: src/api/impl/batch.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2017 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import api import apiobject class ModifiedComment(object): def __init__(self, comment_id, new_type, new_state): self.id = comment_id self.new_type = new_type self.new_state = new_state class Batch(apiobject.APIObject): wrapper_class = api.batch.Batch def __init__(self, batch_id, review_id, author_id, comment_id, timestamp): self.id = batch_id self.__review_id = review_id self.__author_id = author_id self.__comment_id = comment_id self.timestamp = timestamp self.__created_comment_ids = None self.__written_reply_ids = None self.__modified_comments = None self.__reviewed_file_changes = None self.__unreviewed_file_changes = None def isEmpty(self, critic): self.loadCommentChanges(critic) self.loadFileChanges(critic) return not (self.__created_comment_ids or self.__written_reply_ids or self.__modified_comments or self.__reviewed_file_changes or self.__unreviewed_file_changes) def getReview(self, critic): return api.review.fetch(critic, self.__review_id) def getAuthor(self, critic): if self.__author_id is None: return None return api.user.fetch(critic, self.__author_id) def getComment(self, critic): if self.__comment_id is None: return None return api.comment.fetch(critic, self.__comment_id) def getCreatedComments(self, critic): if self.__created_comment_ids is None: self.loadCommentChanges(critic) return set(api.comment.fetchMany(critic, self.__created_comment_ids)) def getWrittenReplies(self, critic): if self.__written_reply_ids is None: self.loadCommentChanges(critic) return set(api.reply.fetchMany(critic, self.__written_reply_ids)) def getResolvedIssues(self, critic): if self.__modified_comments is None: self.loadCommentChanges(critic) return set(api.comment.fetchMany( critic, (modified_comment.id for modified_comment in self.__modified_comments if modified_comment.new_state == "closed"))) def getReopenedIssues(self, critic): if self.__modified_comments is None: self.loadCommentChanges(critic) return set(api.comment.fetchMany( critic, (modified_comment.id for 
modified_comment in self.__modified_comments if modified_comment.new_state == "open"))) def getMorphedComments(self, critic): if self.__modified_comments is None: self.loadCommentChanges(critic) new_type_by_comment_id = { modified_comment.id: modified_comment.new_type for modified_comment in self.__modified_comments if modified_comment.new_type is not None } comments = api.comment.fetchMany(critic, new_type_by_comment_id.keys()) return { comment: new_type_by_comment_id[comment.id] for comment in comments } def getReviewedFileChanges(self, critic): if self.__reviewed_file_changes is None: self.loadFileChanges(critic) return api.reviewablefilechange.fetchMany( critic, self.__reviewed_file_changes) def getUnreviewedFileChanges(self, critic): if self.__reviewed_file_changes is None: self.loadFileChanges(critic) return api.reviewablefilechange.fetchMany( critic, self.__unreviewed_file_changes) def __queryCondition(self): if self.id is None: condition = "state='draft'" batch_id = () else: condition = "batch=%s" batch_id = (self.id,) return condition, batch_id def loadCommentChanges(self, critic): cursor = critic.getDatabaseCursor() condition, batch_id = self.__queryCondition() cursor.execute("""SELECT commentchains.id, comments.id, commentchains.first_comment=comments.id FROM commentchains JOIN comments ON (comments.chain=commentchains.id) WHERE commentchains.review=%s AND commentchains.state!='empty' AND comments.uid=%s AND comments.state!='deleted' AND comments.{}""".format(condition), (self.__review_id, self.__author_id) + batch_id) self.__created_comment_ids = [] self.__written_reply_ids = [] for comment_id, reply_id, is_initial in cursor: # Don't include the note that is the batch's comment. 
if comment_id == self.__comment_id: continue if is_initial: self.__created_comment_ids.append(comment_id) else: self.__written_reply_ids.append(reply_id) cursor.execute( """SELECT commentchains.id, to_type, to_state FROM commentchains JOIN commentchainchanges ON (commentchainchanges.chain=commentchains.id) WHERE commentchains.review=%s AND commentchains.state!='empty' AND commentchainchanges.uid=%s AND (commentchainchanges.state='performed' OR commentchainchanges.from_state=commentchains.state OR commentchainchanges.from_type=commentchains.type) AND commentchainchanges.{}""".format(condition), (self.__review_id, self.__author_id) + batch_id) self.__modified_comments = [] for comment_id, new_type, new_state in cursor: self.__modified_comments.append(ModifiedComment( comment_id, new_type, new_state)) def loadFileChanges(self, critic): cursor = critic.getDatabaseCursor() condition, batch_id = self.__queryCondition() cursor.execute( """SELECT reviewfiles.id, to_state FROM reviewfiles JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id) WHERE reviewfiles.review=%s AND reviewfilechanges.uid=%s AND (reviewfilechanges.state='performed' OR reviewfilechanges.to_state!=reviewfiles.state) AND reviewfilechanges.{}""".format(condition), (self.__review_id, self.__author_id) + batch_id) rows = cursor.fetchall() self.__reviewed_file_changes = set( filechange_id for filechange_id, to_state in rows if to_state == 'reviewed') self.__unreviewed_file_changes = set( filechange_id for filechange_id, to_state in rows if to_state == 'pending') @Batch.cached(api.batch.InvalidBatchId) def fetch(critic, batch_id): cursor = critic.getDatabaseCursor() cursor.execute("""SELECT id, review, uid, comment, time FROM batches WHERE id=%s""", (batch_id,)) return Batch.make(critic, cursor) def fetchAll(critic, review, author): conditions = ["TRUE"] values = [] if review: conditions.append("review=%s") values.append(review.id) if author: conditions.append("uid=%s") values.append(author.id) 
cursor = critic.getDatabaseCursor() cursor.execute("""SELECT id, review, uid, comment, time FROM batches WHERE {}""".format(" AND ".join(conditions)), values) return list(Batch.make(critic, cursor)) def fetchUnpublished(critic, review): author_id = critic.effective_user.id return Batch(None, review.id, author_id, None, None).wrap(critic) ================================================ FILE: src/api/impl/branch.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api
import apiobject

class Branch(apiobject.APIObject):
    """Implementation backing api.branch.Branch."""

    wrapper_class = api.branch.Branch

    def __init__(self, branch_id, name, repository_id, head_id):
        self.id = branch_id
        self.name = name
        self.__repository_id = repository_id
        self.__head_id = head_id
        # Lazily computed by getHead() / getCommits().
        self.__head = None
        self.__commits = None

    def getRepository(self, critic):
        """Return the api.repository.Repository containing this branch."""
        return api.repository.fetch(critic, repository_id=self.__repository_id)

    def getHead(self, critic):
        """Return the branch's head commit (cached after first call)."""
        if self.__head is None:
            self.__head = api.commit.fetch(
                self.getRepository(critic), commit_id=self.__head_id)
        return self.__head

    def getCommits(self, critic):
        """Return the set of commits reachable from (associated with) this
        branch, per the |reachable| table (cached after first call)."""
        if self.__commits is None:
            repository = self.getRepository(critic)
            cursor = critic.getDatabaseCursor()
            cursor.execute("""SELECT commit
                                FROM reachable
                               WHERE branch=%s""",
                           (self.id,))
            self.__commits = api.commitset.create(
                critic, (api.commit.fetch(repository, commit_id)
                         for (commit_id,) in cursor))
        return self.__commits

@Branch.cached()
def fetch(critic, branch_id, repository, name):
    """Fetch a branch by id, or by (repository, name).

    Raises InvalidBranchId / InvalidBranchName when no row matches."""
    cursor = critic.getDatabaseCursor()
    if branch_id is not None:
        cursor.execute("""SELECT id, name, repository, head
                            FROM branches
                           WHERE id=%s""",
                       (branch_id,))
    else:
        cursor.execute("""SELECT id, name, repository, head
                            FROM branches
                           WHERE repository=%s
                             AND name=%s""",
                       (repository.id, name,))
    try:
        return next(Branch.make(critic, cursor))
    except StopIteration:
        # No row matched: report the lookup key that was actually used.
        if branch_id is not None:
            raise api.branch.InvalidBranchId(branch_id)
        else:
            raise api.branch.InvalidBranchName(name)

def fetchAll(critic, repository):
    """Fetch all branches, optionally restricted to one repository, ordered
    by name."""
    cursor = critic.getDatabaseCursor()
    if repository is not None:
        cursor.execute("""SELECT id, name, repository, head
                            FROM branches
                           WHERE repository=%s
                        ORDER BY name""",
                       (repository.id,))
    else:
        cursor.execute("""SELECT id, name, repository, head
                            FROM branches
                        ORDER BY name""")
    return list(Branch.make(critic, cursor))

================================================
FILE: src/api/impl/branch_unittest.py
================================================
# NOTE: Python 2 test script (uses the `print` statement); executed by the
# testing framework with --sha1/--name arguments describing a known branch.
def basic(arguments):
    import api
    assert arguments.sha1 is not None, "missing argument: --sha1"
    assert arguments.name is not None, "missing argument: --name"
    critic = api.critic.startSession(for_testing=True)
    repository = api.repository.fetch(critic, repository_id=1)
    branch = api.branch.fetch(
        critic, repository=repository, name=arguments.name)
    # Basic attribute/type checks against the branch named on the command
    # line; the expected commit count (5) matches the fixture repository.
    assert isinstance(branch, api.branch.Branch)
    assert isinstance(branch.id, int)
    assert isinstance(branch.name, str)
    assert branch.name == arguments.name
    assert branch.repository is repository
    assert isinstance(branch.head, api.commit.Commit)
    assert branch.head.sha1 == arguments.sha1
    assert isinstance(branch.commits, api.commitset.CommitSet)
    assert len(branch.commits) == 5
    assert len(branch.commits.heads) == 1
    assert branch.head in branch.commits.heads
    # Fetching by id must return the same (cached) wrapper object.
    assert api.branch.fetch(critic, branch_id=branch.id) is branch
    branches = api.branch.fetchAll(critic)
    assert isinstance(branches, list)
    assert all(isinstance(branch, api.branch.Branch) for branch in branches)
    assert branch in branches
    branches = api.branch.fetchAll(critic, repository=repository)
    assert isinstance(branches, list)
    assert all(isinstance(branch, api.branch.Branch) for branch in branches)
    assert branch in branches
    # Invalid lookups must raise the specific exception types.
    try:
        api.branch.fetch(critic, branch_id=4711)
    except api.branch.InvalidBranchId:
        pass
    except Exception as error:
        assert False, "wrong exception raised: %s" % error
    else:
        assert False, "no exception raised"
    try:
        api.branch.fetch(
            critic, repository=repository, name=arguments.name + "-wrong")
    except api.branch.InvalidBranchName:
        pass
    except Exception as error:
        assert False, "wrong exception raised: %s" % error
    else:
        assert False, "no exception raised"
    print "basic: ok"

def main(argv):
    """Entry point: parse arguments and run the requested tests."""
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--sha1")
    parser.add_argument("--name")
    parser.add_argument("tests", nargs=argparse.REMAINDER)
    arguments = parser.parse_args(argv)
    for test in arguments.tests:
        if test == "basic":
            basic(arguments)

================================================
FILE:
src/api/impl/changeset.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. from __future__ import absolute_import import api import api.impl from api.impl import apiobject import changeset.client from gitutils import GitReferenceError import diff class Changeset(apiobject.APIObject): wrapper_class = api.changeset.Changeset def __init__(self, id, changeset_type, from_commit_id, to_commit_id, files, repository): self.id = id self.type = changeset_type self.__from_commit_id = from_commit_id self.__to_commit_id = to_commit_id self.__filediffs = None self.repository = repository def getFromCommit(self): if self.__from_commit_id is None: return None return api.commit.fetch( self.repository, commit_id=self.__from_commit_id) def getToCommit(self): if self.__to_commit_id is None: return None return api.commit.fetch( self.repository, commit_id=self.__to_commit_id) def getContributingCommits(self, critic): if self.__from_commit_id is None: return None try: return api.commitset.calculateFromRange( critic, self.getFromCommit(), self.getToCommit()) except api.commitset.InvalidCommitRange: return None def fetch(critic, repository, changeset_id, from_commit, to_commit, single_commit, review, automatic): if changeset_id is not None: return fetch_by_id(critic, repository, changeset_id) if review and automatic: # Handle automatic changesets using 
legacy code, and by setting the # |from_commit|/|to_commit| or |single_commit| arguments. import dbutils import request import page.showcommit legacy_user = dbutils.User.fromAPI(critic.effective_user) legacy_review = dbutils.Review.fromAPI(review) try: from_sha1, to_sha1, all_commits, listed_commits = \ page.showcommit.commitRangeFromReview( critic.database, legacy_user, legacy_review, automatic, []) except request.DisplayMessage: # FIXME: This error message could be better. The legacy code does # report more useful error messages, but does it in a way that's # pretty tied to the old HTML UI. Some refactoring is needed. raise api.changeset.ChangesetError("Automatic mode failed") except page.showcommit.NoChangesFound: assert automatic != "everything" raise api.changeset.AutomaticChangesetEmpty("No %s changes found" % automatic) from_commit = api.commit.fetch(repository, sha1=from_sha1) to_commit = api.commit.fetch(repository, sha1=to_sha1) if from_commit == to_commit: single_commit = to_commit from_commit = to_commit = None if from_commit and to_commit: changeset_id = get_changeset_id( critic, repository, from_commit, to_commit) if changeset_id is not None: return fetch_by_id(critic, repository, changeset_id) request_changeset_creation( critic, repository.name, "custom", from_commit=from_commit, to_commit=to_commit) raise api.changeset.ChangesetDelayed() assert single_commit if len(single_commit.parents) > 0: from_commit = single_commit.parents[0] else: from_commit = None changeset_id = get_changeset_id( critic, repository, from_commit, single_commit) if changeset_id is not None: return fetch_by_id(critic, repository, changeset_id) request_changeset_creation( critic, repository.name, "direct", to_commit=single_commit) raise api.changeset.ChangesetDelayed() def fetch_by_id(critic, repository, changeset_id): try: return critic._impl.lookup(api.changeset.Changeset, (int(repository), changeset_id)) except KeyError: pass cursor = critic.getDatabaseCursor() 
cursor.execute( """SELECT type, parent, child FROM changesets WHERE id=%s""", (changeset_id,)) row = cursor.fetchone() if not row: raise api.changeset.InvalidChangesetId(id) (changeset_type, from_commit_id, to_commit_id) = row cursor.execute( """SELECT file FROM fileversions WHERE changeset=%s""", (changeset_id,)) files = api.file.fetchMany(critic, (file_id for (file_id,) in cursor)) changeset = Changeset( changeset_id, changeset_type, from_commit_id, to_commit_id, sorted(files, key=lambda file: file.path), repository).wrap(critic) critic._impl.assign( api.changeset.Changeset, (int(repository), changeset_id), changeset) return changeset def get_changeset_id(critic, repository, from_commit, to_commit): cursor = critic.getDatabaseCursor() if from_commit: cursor.execute( """SELECT id FROM changesets WHERE parent=%s AND child=%s""", (from_commit.id, to_commit.id)) else: cursor.execute( """SELECT id FROM changesets WHERE parent IS NULL AND child=%s""", (to_commit.id,)) row = cursor.fetchone() if row: return row[0] else: return None def request_changeset_creation(critic, repository_name, changeset_type, from_commit=None, to_commit=None): request = { "changeset_type": changeset_type, "repository_name": repository_name} if changeset_type == "direct": request["child_sha1"] = to_commit.sha1 elif changeset_type == "custom": request["parent_sha1"] = from_commit.sha1 request["child_sha1"] = to_commit.sha1 elif changeset_type == "merge": request["child_sha1"] = to_commit.sha1 elif changeset_type == "conflicts": request["parent_sha1"] = from_commit.sha1 request["child_sha1"] = to_commit.sha1 try: changeset.client.requestChangesets([request], async=True) except changeset.client.ChangesetBackgroundServiceError as error: raise api.changeset.ChangesetBackgroundServiceError(error) ================================================ FILE: src/api/impl/changeset_unittest.py ================================================ FROM_SHA1 = "573c5ff15ad95cfbc3e2f2efb0a638a4a78c17a7" 
# Fixture SHA-1s and expected file lists for the "critic" test repository.
FROM_SINGLE_SHA1 = "aabc2b10c930a9e72fe9587a6e8634087bb3efe1"
TO_SHA1 = "6dc8e9c2d952028286d4b83475947bd0b1410860"
ROOT_SHA1 = "ee37c47f6f6a14afa6912c1cc58a9f49d2a29acd"

# Paths expected in the custom (two-commit range) changeset.
CUSTOM_PATHLIST = frozenset(["src/auth/accesscontrol.py",
                             "src/operation/__init__.py",
                             "src/operation/createreview.py",
                             "src/page/createreview.py",
                             "src/page/utils.py",
                             "testing/__init__.py",
                             "testing/repository.py",
                             "testing/virtualbox.py"])

# Paths expected in the single-commit (direct) changeset.
SINGLE_PATHLIST = frozenset(["testing/__init__.py",
                             "testing/repository.py",
                             "testing/virtualbox.py"])

# Paths expected in the root commit's changeset.
# NOTE(review): a few entries below appear twice; duplicates are harmless in
# a frozenset literal, but this looks like an accidental repetition.
ROOT_PATHLIST = frozenset([
    ".gitignore", "CONTRIBUTORS", "COPYING", "INSTALL", "MIT-LICENSE.txt",
    "README", "auth.py", "background/__init__.py",
    "background/branchtracker.py", "background/branchtrackerhook.py",
    "background/changeset.py", "background/daemon.py", "background/githook.py",
    "background/highlight.py", "background/maildelivery.py",
    "background/servicemanager.py", "background/utils.py",
    "background/watchdog.py", "base.py", "batchprocessor.py",
    "changeset/__init__.py", "changeset/client.py", "changeset/create.py",
    "changeset/detectmoves.py", "changeset/html.py", "changeset/load.py",
    "changeset/process.py", "changeset/text.py", "changeset/utils.py",
    "clexer.py", "cli.py", "comments.pgsql", "changeset/html.py",
    "changeset/load.py", "changeset/process.py", "changeset/text.py",
    "changeset/utils.py", "clexer.py", "cli.py", "comments.pgsql",
    "config.py.empty", "critic.py", "dbaccess.py", "dbclean.sql",
    "dbschema.comments.sql", "dbschema.extensions.sql", "dbschema.sql",
    "dbutils.py", "diff.py", "diff/__init__.py", "diff/analyze.py",
    "diff/context.py", "diff/html.py", "diff/merge.py", "diff/parse.py",
    "diffutils.py", "documentation/concepts.txt", "down.py", "extensions.py",
    "gitutils.py", "hooks/pre-receive", "htmlutils.py", "index.py",
    "install.py", "installation/__init__.py", "installation/admin.py",
    "installation/apache.py", "installation/config.py",
    "installation/criticctl.py", "installation/database.py",
    "installation/files.py", "installation/git.py", "installation/initd.py",
    "installation/input.py", "installation/paths.py", "installation/prefs.py",
    "installation/prereqs.py", "installation/process.py",
    "installation/system.py",
    "installation/templates/configuration/__init__.py",
    "installation/templates/configuration/base.py",
    "installation/templates/configuration/database.py",
    "installation/templates/configuration/executables.py",
    "installation/templates/configuration/extensions.py",
    "installation/templates/configuration/limits.py",
    "installation/templates/configuration/mimetypes.py",
    "installation/templates/configuration/paths.py",
    "installation/templates/configuration/services.py",
    "installation/templates/configuration/smtp.py",
    "installation/templates/criticctl", "installation/templates/initd",
    "installation/templates/site", "linkify.py", "log/__init__.py",
    "log/commitset.py", "log/html.py", "log/tree.py", "mailutils.py",
    "maintenance/check-branches.py", "maintenance/check-commits.py",
    "maintenance/dumppreferences.py", "maintenance/installpreferences.py",
    "maintenance/progress.py", "operation/__init__.py",
    "operation/addrepository.py", "operation/autocompletedata.py",
    "operation/blame.py", "operation/createcomment.py",
    "operation/createreview.py", "operation/draftchanges.py",
    "operation/editresource.py", "operation/extensioninstallation.py",
    "operation/fetchlines.py", "operation/manipulateassignments.py",
    "operation/manipulatecomment.py", "operation/manipulatefilters.py",
    "operation/manipulatereview.py", "operation/manipulateuser.py",
    "operation/markfiles.py", "operation/news.py",
    "operation/rebasereview.py", "operation/recipientfilter.py",
    "operation/servicemanager.py", "operation/trackedbranch.py",
    "page/__init__.py", "page/addrepository.py", "page/basic.py",
    "page/branches.py", "page/checkbranch.py", "page/config.py",
    "page/confirmmerge.py", "page/createreview.py", "page/dashboard.py",
    "page/editresource.py", "page/filterchanges.py", "page/home.py",
    "page/manageextensions.py", "page/managereviewers.py", "page/news.py",
    "page/repositories.py", "page/search.py", "page/services.py",
    "page/showbatch.py", "page/showbranch.py", "page/showcomment.py",
    "page/showcommit.py", "page/showfile.py", "page/showreview.py",
    "page/showreviewlog.py", "page/showtree.py", "page/statistics.py",
    "page/tutorial.py", "page/utils.py", "path.pgsql", "profiling.py",
    "request.py", "resources/.gitattributes", "resources/.gitignore",
    "resources/autocomplete.js", "resources/basic.css", "resources/basic.js",
    "resources/branches.css", "resources/branches.js",
    "resources/changeset.css", "resources/changeset.js",
    "resources/checkbranch.css", "resources/checkbranch.js",
    "resources/comment.css", "resources/comment.js", "resources/config.css",
    "resources/config.js", "resources/confirmmerge.css",
    "resources/confirmmerge.js", "resources/createreview.css",
    "resources/createreview.js", "resources/dashboard.css",
    "resources/dashboard.js", "resources/diff.css",
    "resources/editresource.css", "resources/editresource.js",
    "resources/favicon-dev.png", "resources/favicon.png",
    "resources/filterchanges.css", "resources/filterchanges.js",
    "resources/home.css", "resources/home.js",
    "resources/images/ui-bg_flat_75_aaaaaa_40x100.png",
    "resources/images/ui-bg_glass_100_f5f0e5_1x400.png",
    "resources/images/ui-bg_glass_25_cb842e_1x400.png",
    "resources/images/ui-bg_glass_70_ede4d4_1x400.png",
    "resources/images/ui-bg_highlight-hard_100_f4f0ec_1x100.png",
    "resources/images/ui-bg_highlight-hard_65_fee4bd_1x100.png",
    "resources/images/ui-bg_highlight-hard_75_f5f5b5_1x100.png",
    "resources/images/ui-bg_inset-soft_100_f4f0ec_1x100.png",
    "resources/images/ui-icons_c47a23_256x240.png",
    "resources/images/ui-icons_cb672b_256x240.png",
    "resources/images/ui-icons_f08000_256x240.png",
    "resources/images/ui-icons_f35f07_256x240.png",
    "resources/images/ui-icons_ff7519_256x240.png",
    "resources/images/ui-icons_ffffff_256x240.png",
    "resources/jquery-1.7.1.min.js", "resources/jquery-tooltip.css",
    "resources/jquery-tooltip.js", "resources/jquery-ui-1.8.17.custom.css",
    "resources/jquery-ui-1.8.17.custom.min.js",
    "resources/jquery-ui-autocomplete-html.js", "resources/jquery-ui.css",
    "resources/jquery-ui.js", "resources/jquery.js", "resources/log.css",
    "resources/log.js", "resources/manageextensions.css",
    "resources/manageextensions.js", "resources/managereviewers.css",
    "resources/managereviewers.js", "resources/message.css",
    "resources/newrepository.css", "resources/newrepository.js",
    "resources/news.css", "resources/news.js", "resources/repositories.css",
    "resources/repositories.js", "resources/review.css",
    "resources/review.js", "resources/seal-of-approval-left.png",
    "resources/search.css", "resources/search.js", "resources/services.css",
    "resources/services.js", "resources/showbatch.css",
    "resources/showbranch.css", "resources/showcomment.js",
    "resources/showfile.css", "resources/showfile.js",
    "resources/showreview.css", "resources/showreview.js",
    "resources/showreviewlog.css", "resources/showtree.css",
    "resources/statistics.css", "resources/syntax.css",
    "resources/tabify.css", "resources/tabify.js", "resources/tutorial.css",
    "resources/tutorial.js", "resources/whitespace.css",
    "review/__init__.py", "review/comment/__init__.py", "review/filters.py",
    "review/html.py", "review/mail.py", "review/report.py",
    "review/utils.py", "roles.sql", "syntaxhighlight/__init__.py",
    "syntaxhighlight/clexer.py", "syntaxhighlight/context.py",
    "syntaxhighlight/cpp.py", "syntaxhighlight/generate.py",
    "syntaxhighlight/generic.py", "syntaxhighlight/request.py",
    "textformatting.py", "textutils.py", "tutorials/checkbranch.txt",
    "tutorials/rebasing.txt", "tutorials/reconfiguring.txt",
    "tutorials/repository.txt", "tutorials/requesting.txt",
    "tutorials/reviewing.txt", "utf8utils.py", "wsgi.py", "wsgistartup.py"])

def pre():
    """Phase 1: request creation of the changesets (expected to be delayed)."""
    import api
    critic = api.critic.startSession(for_testing=True)
    repository = api.repository.fetch(critic, name="critic")
    custom_changeset("pre", api, critic, repository)
    direct_changeset("pre", api, critic, repository)
    root_changeset("pre", api, critic, repository)
    bad_changesets("pre", api, critic, repository)
    print("pre: ok")

def post():
    """Phase 2: the changesets should now exist; verify their contents."""
    import api
    critic = api.critic.startSession(for_testing=True)
    repository = api.repository.fetch(critic, name="critic")
    custom_changeset("post", api, critic, repository)
    direct_changeset("post", api, critic, repository)
    root_changeset("post", api, critic, repository)
    print("post: ok")

def is_empty_changeset(changeset, changeset_type):
    # NOTE(review): `== None` would normally be `is None`; kept as-is.
    return (changeset.id == None and
            changeset.type == changeset_type and
            changeset.files == None)

def check_types(changeset):
    # `long` exists in Python 2 only; ids may exceed the `int` range there.
    return (isinstance(changeset.id, (int, long)) and
            isinstance(changeset.type, str) and
            isinstance(changeset.files, list))

def custom_changeset(phase, api, critic, repository):
    """Request ("pre") / verify ("post") the two-commit custom changeset."""
    if phase == "pre":
        from_commit = api.commit.fetch(repository, sha1=FROM_SHA1)
        to_commit = api.commit.fetch(repository, sha1=TO_SHA1)
        try:
            api.changeset.fetch(critic, repository, from_commit=from_commit,
                                to_commit=to_commit)
        except api.changeset.ChangesetDelayed:
            pass
    elif phase == "post":
        from_commit = api.commit.fetch(repository, sha1=FROM_SHA1)
        to_commit = api.commit.fetch(repository, sha1=TO_SHA1)
        custom_changeset = api.changeset.fetch(critic, repository,
                                               from_commit=from_commit,
                                               to_commit=to_commit)
        assert (check_types(custom_changeset) and
                custom_changeset.type == "custom"),\
            "custom_changeset has incorrect types"
        paths = frozenset([filechange.file.path
                           for filechange in custom_changeset.files])
        assert paths == CUSTOM_PATHLIST,\
            "files in changeset deviate from expected files"
    else:
        raise Exception

def direct_changeset(phase, api, critic, repository):
    """Request / verify the single-commit changeset, and check that it is
    identical to the equivalent parent..commit custom changeset."""
    if phase == "pre":
        single_commit = api.commit.fetch(repository, sha1=TO_SHA1)
        from_single_commit = api.commit.fetch(repository,
                                              sha1=FROM_SINGLE_SHA1)
        try:
            api.changeset.fetch(critic, repository,
                                single_commit=single_commit)
        except api.changeset.ChangesetDelayed:
            pass
        try:
            api.changeset.fetch(critic, repository,
                                from_commit=from_single_commit,
                                to_commit=single_commit)
        except api.changeset.ChangesetDelayed:
            pass
        try:
            api.changeset.fetch(critic, repository,
                                from_commit=single_commit,
                                to_commit=from_single_commit)
        except api.changeset.ChangesetDelayed:
            pass
    elif phase == "post":
        single_commit = api.commit.fetch(repository, sha1=TO_SHA1)
        from_single_commit = api.commit.fetch(repository,
                                              sha1=FROM_SINGLE_SHA1)
        changeset = api.changeset.fetch(critic, repository,
                                        single_commit=single_commit)
        equiv_changeset = api.changeset.fetch(critic, repository,
                                              from_commit=from_single_commit,
                                              to_commit=single_commit)
        assert (changeset.id == equiv_changeset.id),\
            "changeset and equiv_changeset have different ids"
        assert (changeset.type == equiv_changeset.type),\
            "changeset and equiv_changeset have different types"
        files = [filechange.file for filechange in changeset.files]
        equiv_files = [filechange.file
                       for filechange in equiv_changeset.files]
        changeset_files = frozenset(
            (file.id, file.path) for file in files)
        changeset_paths = frozenset(file.path for file in files)
        equiv_changeset_files = frozenset(
            (file.id, file.path) for file in equiv_files)
        assert (changeset_files == equiv_changeset_files),\
            "changeset and equiv_changeset have different files"
        assert (changeset_paths == SINGLE_PATHLIST),\
            "changeset has other files than expected"
    else:
        raise Exception

def root_changeset(phase, api, critic, repository):
    """Request / verify the root commit's (parentless) changeset."""
    if phase == "pre":
        root_commit = api.commit.fetch(repository, ref=ROOT_SHA1)
        try:
            api.changeset.fetch(critic, repository,
                                single_commit=root_commit)
        except api.changeset.ChangesetDelayed:
            pass
    elif phase == "post":
        root_commit = api.commit.fetch(repository, ref=ROOT_SHA1)
        root_changeset = api.changeset.fetch(critic, repository,
                                             single_commit=root_commit)
        assert (root_changeset.type == "direct"),\
            "root_changeset should be direct changeset"
        assert (isinstance(root_changeset.id, int)),\
            "root_changeset.id should be integer"
        root_paths = frozenset([filechange.file.path
                                for filechange in root_changeset.files])
        assert (root_paths == ROOT_PATHLIST),\
            "root_changeset has other files than expected"
    else:
        raise Exception

def bad_changesets(phase, api, critic, repository):
    """Verify that invalid argument combinations raise the expected
    exceptions.  Tuples: (changeset_id, from_ref, to_ref, single_ref,
    expected exception type)."""
    if phase == "pre":
        params_list = [(None, None, None, None, AssertionError),
                       (-5, None, None, None,
                        api.changeset.InvalidChangesetId),
                       (None, None, None, "00g0", api.repository.InvalidRef),
                       (None, "00g0", TO_SHA1, None,
                        api.repository.InvalidRef),
                       (None, FROM_SHA1, "00g0", None,
                        api.repository.InvalidRef),
                       (1, FROM_SHA1, TO_SHA1, TO_SHA1, AssertionError),
                       (None, TO_SHA1, TO_SHA1, None, AssertionError)]
        for (changeset_id, from_commit_ref, to_commit_ref,
             single_commit_ref, expected_error) in params_list:
            try:
                if from_commit_ref is not None:
                    from_commit = api.commit.fetch(
                        repository, ref=from_commit_ref)
                else:
                    from_commit = None
                if to_commit_ref is not None:
                    to_commit = api.commit.fetch(repository,
                                                 ref=to_commit_ref)
                else:
                    to_commit = None
                if single_commit_ref is not None:
                    single_commit = api.commit.fetch(
                        repository, ref=single_commit_ref)
                else:
                    single_commit = None
                changeset = api.changeset.fetch(
                    critic, repository, changeset_id=changeset_id,
                    from_commit=from_commit, to_commit=to_commit,
                    single_commit=single_commit)
            except expected_error:
                pass
            else:
                assert False,\
                    "Invalid/missing parameters should raise exception"
    else:
        raise Exception

================================================
FILE: src/api/impl/comment.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2017 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the
# License for the specific language governing permissions and limitations under
# the License.

import api
import apiobject
import dbutils

class Comment(apiobject.APIObject):
    """Implementation backend for api.comment.Comment/Issue/Note wrappers.

    One instance mirrors one row of the commentchains table joined with its
    first comment's text."""

    wrapper_class = api.comment.Comment

    STATE_MAP = {
        # "Is draft" is a separate attribute, so use the state it would have
        # once published instead.
        "draft": "open",
        # "Closed" is only used in the database, really, the UI has always
        # called it "Resolve" (action) / "Resolved" (state).
        "closed": "resolved"
    }

    @staticmethod
    def __translateState(state):
        # Map internal database states onto the states exposed by the API;
        # states not in STATE_MAP pass through unchanged.
        return Comment.STATE_MAP.get(state, state)

    def __init__(self, chain_id, review_id, batch_id, author_id, comment_type,
                 state, side, timestamp, text, file_id, first_commit_id,
                 last_commit_id, addressed_by_id, resolved_by_id):
        self.id = chain_id
        self.is_draft = state == "draft"
        self.state = Comment.__translateState(state)
        self.__review_id = review_id
        self.__batch_id = batch_id
        self.__author_id = author_id
        self.side = side
        self.timestamp = timestamp
        self.text = text
        self.__file_id = file_id
        self.__first_commit_id = first_commit_id
        self.__last_commit_id = last_commit_id
        self.__addressed_by_id = addressed_by_id
        self.__resolved_by_id = resolved_by_id
        self.__type = comment_type
        # Issues and notes are exposed through different wrapper classes.
        if comment_type == "issue":
            self.wrapper_class = api.comment.Issue
        else:
            self.wrapper_class = api.comment.Note

    def getReview(self, critic):
        return api.review.fetch(critic, self.__review_id)

    def getAuthor(self, critic):
        return api.user.fetch(critic, self.__author_id)

    def getLocation(self, critic):
        # Return a wrapped FileVersionLocation or CommitMessageLocation, or
        # None for a general comment that has no location.
        cursor = critic.getDatabaseCursor()
        if self.__file_id is not None:
            # File comment: the line range is stored keyed by the SHA-1 of
            # the commented file version.
            repository = self.getReview(critic).repository
            if self.side == "old":
                commit = api.commit.fetch(repository, self.__first_commit_id)
            else:
                commit = api.commit.fetch(repository, self.__last_commit_id)
            file_sha1 = commit.getFileInformation(
                api.file.fetch(critic, file_id=self.__file_id)).sha1
            cursor.execute("""SELECT first_line, last_line
                                FROM commentchainlines
                               WHERE chain=%s
                                 AND sha1=%s
                                 AND (state!='draft' OR uid=%s)""",
                           (self.id, file_sha1, critic.effective_user.id))
            first_line, last_line = cursor.fetchone()
            location = FileVersionLocation(
                self, first_line, last_line, repository, self.__file_id,
                first_commit_id=self.__first_commit_id,
                last_commit_id=self.__last_commit_id, side=self.side)
        elif self.__first_commit_id is not None:
            # Commit message comment.
            repository = self.getReview(critic).repository
            commit = api.commit.fetch(
                repository, commit_id=self.__first_commit_id)
            cursor.execute("""SELECT first_line, last_line
                                FROM commentchainlines
                               WHERE chain=%s
                                 AND sha1=%s
                                 AND (state!='draft' OR uid=%s)""",
                           (self.id, commit.sha1, critic.effective_user.id))
            first_line, last_line = cursor.fetchone()
            # FIXME: Make commit message comment line numbers one-based too!
            first_line += 1
            last_line += 1
            # FIXME: ... and then delete the above two lines of code.
            location = CommitMessageLocation(
                first_line, last_line, repository, self.__first_commit_id)
        else:
            return None
        return location.wrap(critic)

    def getReplies(self, critic):
        return api.impl.reply.fetchForComment(critic, self.id)

    def getAddressedBy(self, critic):
        # Only meaningful for issues addressed by a later commit.
        if self.state != "addressed":
            return None
        repository = self.getReview(critic).repository
        return api.commit.fetch(repository, commit_id=self.__addressed_by_id)

    def getResolvedBy(self, critic):
        # Only meaningful for resolved ("closed" in the database) issues.
        if self.state != "resolved":
            return None
        return api.user.fetch(critic, user_id=self.__resolved_by_id)

    def getDraftChanges(self, critic):
        # Collect the effective user's unpublished changes (draft reply,
        # type/state change) to this comment, or return None if there are
        # none.  Anonymous users never have draft changes.
        if critic.effective_user.is_anonymous:
            return None
        if self.is_draft:
            return api.comment.Comment.DraftChanges(
                critic.effective_user, True, None, None)
        cursor = critic.getDatabaseCursor()
        cursor.execute("""SELECT id
                            FROM comments
                           WHERE uid=%s
                             AND chain=%s
                             AND state='draft'""",
                       (critic.effective_user.id, self.id))
        row = cursor.fetchone()
        if row:
            reply_id, = row
            reply = api.reply.fetch(critic, reply_id)
        else:
            reply = None
        effective_type = self.__type
        new_type = None
        new_state = None
        new_location = None
        cursor.execute("""SELECT from_state, to_state, from_type, to_type,
                                 from_last_commit, to_last_commit,
                                 from_addressed_by, to_addressed_by
                            FROM commentchainchanges
                           WHERE uid=%s
                             AND chain=%s
                             AND state='draft'""",
                       (critic.effective_user.id, self.id))
        row = cursor.fetchone()
        if not row:
            if reply is None:
                # Neither a draft reply nor a draft change.
                return None
        else:
            (from_state, to_state, from_type, to_type, from_last_commit,
             to_last_commit, from_addressed_by, to_addressed_by) = row
            # A recorded change only applies if its "from" value still
            # matches the comment's current value.
            if to_type is not None and from_type == self.__type:
                effective_type = new_type = to_type
            if to_state is not None:
                if Comment.__translateState(from_state) == self.state:
                    new_state = Comment.__translateState(to_state)
            # FIXME: Handle new location.
        if effective_type == "note":
            return api.comment.Comment.DraftChanges(
                critic.effective_user, False, reply, new_type)
        return api.comment.Issue.DraftChanges(
            critic.effective_user, False, reply, new_type, new_state,
            new_location)

    @staticmethod
    def refresh(critic, tables, cached_comments):
        # Re-read cached comments after a transaction touched the relevant
        # tables.
        if not tables.intersection(("commentchains", "comments")):
            return
        Comment.updateAll(
            critic,
            """SELECT commentchains.id, review, commentchains.batch,
                      commentchains.uid, type, commentchains.state, origin,
                      commentchains.time, comments.comment, file,
                      first_commit, last_commit, addressed_by, closed_by
                 FROM commentchains
                 JOIN comments ON (comments.id=first_comment)
                WHERE commentchains.id=ANY (%s)""",
            cached_comments)

@Comment.cached(api.comment.InvalidCommentId)
def fetch(critic, comment_id):
    """Fetch a single comment by id; 'empty' chains are treated as missing."""
    cursor = critic.getDatabaseCursor()
    cursor.execute("""SELECT commentchains.id, review, commentchains.batch,
                             commentchains.uid, type, commentchains.state,
                             origin, commentchains.time, comments.comment,
                             file, first_commit, last_commit, addressed_by,
                             closed_by
                        FROM commentchains
                        JOIN comments ON (comments.id=first_comment)
                       WHERE commentchains.id=%s
                         AND commentchains.state!='empty'""",
                   (comment_id,))
    return Comment.make(critic, cursor)

@Comment.cachedMany(api.comment.InvalidCommentIds)
def fetchMany(critic, comment_ids):
    """Fetch multiple comments by id in one query."""
    cursor = critic.getDatabaseCursor()
    cursor.execute("""SELECT commentchains.id, review, commentchains.batch,
                             commentchains.uid, type, commentchains.state,
                             origin, commentchains.time, comments.comment,
                             file, first_commit, last_commit, addressed_by,
                             closed_by
                        FROM commentchains
                        JOIN comments ON (comments.id=first_comment)
                       WHERE commentchains.id=ANY (%s)
                         AND commentchains.state!='empty'""",
                   (comment_ids,))
    return Comment.make(critic, cursor)
def fetchAll(critic, review, author, comment_type, state, location_type,
             changeset, commit):
    """Fetch all comments matching the given (optional) filters.

    All filter arguments may be None.  'commit' filtering cannot be done in
    SQL and is applied in Python after the query."""
    joins = ["JOIN comments ON (comments.id=first_comment)"]
    # Draft comments are only visible to their author.
    conditions = ["(commentchains.state!='draft' OR commentchains.uid=%s)",
                  "commentchains.state!='empty'"]
    values = [critic.effective_user.id]
    if review:
        conditions.append("commentchains.review=%s")
        values.append(review.id)
    if author:
        conditions.append("commentchains.uid=%s")
        values.append(author.id)
    if comment_type:
        conditions.append("commentchains.type=%s")
        values.append(comment_type)
    if state:
        # The API state "resolved" is stored as "closed" in the database.
        if state == "resolved":
            state = "closed"
        conditions.append("commentchains.state=%s")
        values.append(state)
    if location_type:
        if location_type == "commit-message":
            conditions.extend(["commentchains.file IS NULL",
                               "commentchains.first_commit IS NOT NULL"])
        else:
            conditions.extend(["commentchains.file IS NOT NULL"])
    if changeset is not None:
        # Restrict to comments whose commented file version occurs on either
        # side of the changeset.
        joins.extend([
            "JOIN commentchainlines"
            " ON (commentchainlines.chain=commentchains.id)",
            "JOIN fileversions"
            " ON (fileversions.file=commentchains.file AND"
            " commentchainlines.sha1 IN (fileversions.old_sha1,"
            " fileversions.new_sha1))"
        ])
        conditions.append("fileversions.changeset=%s")
        values.append(changeset.id)
    cursor = critic.getDatabaseCursor()
    cursor.execute(
        """SELECT DISTINCT commentchains.id, commentchains.review,
                           commentchains.batch, commentchains.uid,
                           commentchains.type, commentchains.state,
                           commentchains.origin, commentchains.time,
                           comments.comment, commentchains.file,
                           commentchains.first_commit,
                           commentchains.last_commit,
                           commentchains.addressed_by,
                           commentchains.closed_by
                      FROM commentchains
           LEFT OUTER JOIN batches ON (batches.comment=commentchains.id)
                           {}
                     WHERE batches.id IS NULL
                       AND {}
                  ORDER BY commentchains.id""".format(
            " ".join(joins), " AND ".join(conditions)),
        values)
    comments = list(Comment.make(critic, cursor))
    if commit is not None:
        # Keep only comments that are visible in |commit|: commit message
        # comments on the commit itself, and file comments whose commented
        # file version (by SHA-1) exists in the commit.
        comments_by_id = { comment.id: comment for comment in comments }
        cursor.execute(
            """SELECT chain, sha1
                 FROM commentchainlines
                WHERE chain=ANY (%s)""",
            (comments_by_id.keys(),))
        comments_by_sha1 = dict()
        for comment_id, sha1 in cursor:
            comments_by_sha1.setdefault(sha1, set()).add(
                comments_by_id[comment_id])
        file_versions_cache = {}
        filtered_comments = []
        for comment in comments:
            if not comment.location:
                continue
            if comment.location.type == "commit-message":
                if comment.location.commit == commit:
                    filtered_comments.append(comment)
                continue
            file_id = comment.location.file.id
            if file_id not in file_versions_cache:
                try:
                    file_information = \
                        commit.getFileInformation(comment.location.file)
                except api.commit.NotAFile:
                    file_information = None
                file_versions_cache[file_id] = file_information
            else:
                file_information = file_versions_cache[file_id]
            if file_information is not None:
                if comment in comments_by_sha1.get(file_information.sha1, ()):
                    filtered_comments.append(comment)
        return filtered_comments
    return comments

class Location(apiobject.APIObject):
    """Base class for comment locations: a (first_line, last_line) range."""

    def __init__(self, first_line, last_line):
        self.first_line = first_line
        self.last_line = last_line

class CommitMessageLocation(Location):
    """A line range within a commit's message."""

    wrapper_class = api.comment.CommitMessageLocation

    def __init__(self, first_line, last_line, repository, commit_id):
        super(CommitMessageLocation, self).__init__(first_line, last_line)
        self.repository = repository
        self.__commit_id = commit_id

    def getCommit(self, critic):
        return api.commit.fetch(self.repository, self.__commit_id)

def makeCommitMessageLocation(critic, first_line, last_line, commit):
    """Validate and create a commit message location.

    Raises api.comment.InvalidLocation if the range is inverted or extends
    past the end of the commit message."""
    max_line = len(commit.message.splitlines())
    if last_line < first_line:
        raise api.comment.InvalidLocation(
            "first_line must be equal to or less than last_line")
    if last_line > max_line:
        raise api.comment.InvalidLocation(
            "last_line must be less than or equal to the number of lines in "
            "the commit message")
    return CommitMessageLocation(first_line, last_line, commit.repository,
                                 commit.id).wrap(critic)
class FileVersionLocation(Location):
    """A line range within one version of a file.

    Either tied to a diff (first/last commit ids plus a side), or to a single
    commit's version of the file (commit/commit_id)."""

    wrapper_class = api.comment.FileVersionLocation

    def __init__(self, comment, first_line, last_line, repository, file_id,
                 changeset=None, first_commit_id=None, last_commit_id=None,
                 side=None, commit=None, commit_id=None, is_translated=False):
        super(FileVersionLocation, self).__init__(first_line, last_line)
        self.comment = comment
        # A degenerate "diff" against itself is really a single-commit
        # location; normalize to that form.
        if first_commit_id is not None and first_commit_id == last_commit_id:
            commit_id = last_commit_id
            first_commit_id = last_commit_id = side = None
        self.repository = repository
        self.__file_id = file_id
        self.__changeset = changeset
        self.__first_commit_id = first_commit_id
        self.__last_commit_id = last_commit_id
        self.side = side
        self.__commit = commit
        self.__commit_id = commit_id
        self.is_translated = is_translated

    def getChangeset(self, critic):
        if self.__changeset:
            return self.__changeset
        if self.side is None:
            # Comment was made while looking at a single version of the file,
            # not while looking at a diff where the file was modified.
            return None
        from_commit = api.commit.fetch(
            self.repository, commit_id=self.__first_commit_id)
        to_commit = api.commit.fetch(
            self.repository, commit_id=self.__last_commit_id)
        return api.changeset.fetch(critic, self.repository,
                                   from_commit=from_commit,
                                   to_commit=to_commit)

    def getCommit(self, critic):
        if self.__commit:
            return self.__commit
        if self.__commit_id is None:
            return None
        return api.commit.fetch(self.repository, commit_id=self.__commit_id)

    def getFile(self, critic):
        return api.file.fetch(critic, file_id=self.__file_id)

    def translateTo(self, critic, changeset, commit):
        """Return this location translated to another changeset or commit, or
        None if the commented file version does not appear there."""
        cursor = critic.getDatabaseCursor()
        def translateToCommit(target_commit, side):
            # Raises KeyError if the file/line range is not present in
            # |target_commit|; the callers below treat that as "try next".
            try:
                file_information = target_commit.getFileInformation(
                    self.getFile(critic))
            except api.commit.NotAFile:
                raise KeyError
            if not file_information:
                raise KeyError
            cursor.execute("""SELECT first_line, last_line
                                FROM commentchainlines
                               WHERE chain=%s
                                 AND sha1=%s""",
                           (self.comment.id, file_information.sha1,))
            row = cursor.fetchone()
            if row is None:
                raise KeyError
            first_line, last_line = row
            return FileVersionLocation(
                self.comment, first_line, last_line, self.repository,
                self.__file_id, changeset=changeset, side=side, commit=commit,
                is_translated=True).wrap(critic)
        if changeset:
            # Prefer the new side of the diff, fall back to the old side.
            try:
                return translateToCommit(changeset.to_commit, "new")
            except KeyError:
                pass
            if changeset.from_commit:
                try:
                    return translateToCommit(changeset.from_commit, "old")
                except KeyError:
                    pass
        else:
            try:
                return translateToCommit(commit, None)
            except KeyError:
                pass
        return None

def makeFileVersionLocation(critic, first_line, last_line, file, changeset,
                            side, commit):
    """Validate and create a file version location.

    Raises api.comment.InvalidLocation if the range is inverted or extends
    past the end of the checked file version."""
    if changeset is not None:
        repository = changeset.repository
        if side == "old":
            check_commit = changeset.from_commit
        else:
            check_commit = changeset.to_commit
    else:
        repository = commit.repository
        check_commit = commit
    max_line = len(check_commit.getFileLines(file))
    if last_line < first_line:
        raise api.comment.InvalidLocation(
            "first_line must be equal to or less than last_line")
    if last_line > max_line:
        raise api.comment.InvalidLocation(
            "last_line must be less than or equal to the number of lines in "
            "the file version")
    return FileVersionLocation(
        None, first_line, last_line, repository, file.id, changeset=changeset,
        side=side, commit=commit).wrap(critic)
================================================
FILE: src/api/impl/comment_unittest.py
================================================
import sys
import datetime

def basic(arguments):
    # Exercise the api.comment module against a fixture review (named by
    # --review) whose comments match the EXPECTED table below.
    import api

    critic = api.critic.startSession(for_testing=True)
    repository = api.repository.fetch(critic, name="critic")
    branch = api.branch.fetch(
        critic, repository=repository, name=arguments.review)
    review = api.review.fetch(critic, branch=branch)

    alice = api.user.fetch(critic, name="alice")
    bob = api.user.fetch(critic, name="bob")
    dave = api.user.fetch(critic, name="dave")
    erin = api.user.fetch(critic, name="erin")

    all_comments = api.comment.fetchAll(critic)
    assert isinstance(all_comments, list)

    # Expected comments, keyed by their index in the review's comment list.
    EXPECTED = {
        0: { "text": "This is a general issue.",
             "location": None,
             "type": "issue",
             "state": "open" },
        1: { "text": "This is a general note.",
             "location": None,
             "type": "issue",
             "state": "open" },
        2: { "text": "This is a commit issue.",
             "location": ("commit-message", 1, 3),
             "type": "issue",
             "state": "resolved",
             "resolved_by": dave },
        3: { "text": "This is a commit note.",
             "location": ("commit-message", 5, 5),
             "type": "note" },
        4: { "text": "This is a file issue.",
             "location": ("file-version", 1, 3),
             "type": "issue",
             "state": "open" },
        5: { "text": "This is a file note.",
             "location": ("file-version", 9, 9),
             "type": "note" }
    }

    def check_comment(comment):
        # Check a single comment against its EXPECTED entry.
        assert isinstance(comment, api.comment.Comment)
        assert isinstance(comment.id, int)
        assert api.comment.fetch(critic, comment_id=comment.id) is comment
        expected = EXPECTED[comment_id_map[comment.id]]
        assert isinstance(comment.type, str)
        assert comment.type == expected["type"]
        assert isinstance(comment.is_draft, bool)
        assert not comment.is_draft
        assert comment.review is review
        assert comment.author is alice
        assert isinstance(comment.timestamp, datetime.datetime)
        assert isinstance(comment.text, str)
        assert comment.text == expected["text"]
        if comment.type == "note":
            assert isinstance(comment, api.comment.Note)
            return
        assert isinstance(comment, api.comment.Issue)
        assert isinstance(comment.state, str)
        assert comment.state == expected["state"]
        if comment.state == "resolved":
            assert comment.resolved_by is expected["resolved_by"]
        else:
            assert comment.resolved_by is None
        if comment.state == "addressed":
            assert comment.addressed_by is expected["addressed_by"]
        else:
            assert comment.addressed_by is None
        if expected["location"] is None:
            assert comment.location is None
        else:
            location_type, first_line, last_line = expected["location"]
            if location_type == "file-version":
                # FileVersionLocation is not yet supported.
                return
            assert comment.location.type == location_type
            assert comment.location.first_line == first_line
            assert comment.location.last_line == last_line, \
                (comment.location.last_line, last_line)
            assert isinstance(comment.location, api.comment.Location)
            if location_type == "commit-message":
                assert isinstance(
                    comment.location, api.comment.CommitMessageLocation)
            else:
                assert isinstance(
                    comment.location, api.comment.FileVersionLocation)

    comments = api.comment.fetchAll(critic, review=review)
    assert isinstance(comments, list)
    assert len(comments) == 6
    comment_id_map = { comment.id: index
                       for index, comment in enumerate(comments) }
    for comment in comments:
        check_comment(comment)

    some_comments = api.comment.fetchMany(critic, [3, 2, 1])
    assert len(some_comments) == 3
    assert some_comments[0].id == 3
    assert some_comments[0] is api.comment.fetch(critic, 3)
    assert some_comments[1].id == 2
    assert some_comments[1] is api.comment.fetch(critic, 2)
    assert some_comments[2].id == 1
    assert some_comments[2] is api.comment.fetch(critic, 1)

    print "basic: ok"

def main(argv):
    # Entry point: run the tests named on the command line.
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--review")
    parser.add_argument("tests", nargs=argparse.REMAINDER)
    arguments = parser.parse_args(argv)
    for test in arguments.tests:
        if test == "basic":
            basic(arguments)
================================================
FILE: src/api/impl/commit.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import calendar
import datetime
import re

import api
import apiobject
import api.impl
import diff.parse
import gitutils

# Matches "fixup!"/"squash!" commit messages; group 1 is the keyword, group 2
# the first line of the referenced summary.
RE_FOLLOWUP = re.compile("(fixup|squash)!.*(?:\n[ \t]*)+(.*)")

class Commit(apiobject.APIObject):
    """Implementation backend for api.commit.Commit wrappers, backed by a
    low-level gitutils.Commit object."""

    wrapper_class = api.commit.Commit

    def __init__(self, repository, internal):
        self.repository = repository
        self.internal = internal
        self.sha1 = internal.sha1
        self.tree = internal.tree
        self.message = internal.message

    def getId(self, critic):
        return self.internal.getId(critic.database)

    def getSummary(self):
        # For fixup!/squash! commits, report "[fixup] <original summary>"
        # instead of the follow-up commit's own first line.
        match = RE_FOLLOWUP.match(self.message)
        if match:
            followup_type, summary = match.groups()
            return "[%s] %s" % (followup_type, summary)
        return self.message.split("\n", 1)[0]

    def getParents(self, critic):
        return [fetch(self.repository, None, sha1, None)
                for sha1 in self.internal.parents]

    def getDescription(self, critic):
        return self.internal.repository.describe(critic.database, self.sha1)

    def getAuthor(self, critic):
        return api.commit.Commit.UserAndTimestamp(
            self.internal.author.name,
            self.internal.author.email,
            datetime.datetime.fromtimestamp(
                calendar.timegm(self.internal.author.time)))

    def getCommitter(self, critic):
        return api.commit.Commit.UserAndTimestamp(
            self.internal.committer.name,
            self.internal.committer.email,
            datetime.datetime.fromtimestamp(
                calendar.timegm(self.internal.committer.time)))

    def isAncestorOf(self, commit):
        return self.internal.isAncestorOf(commit._impl.internal)

    def getFileInformation(self, file):
        # Returns None if the path does not exist in this commit; raises
        # NotAFile if it exists but is not a regular blob (e.g. a directory
        # or symlink).
        import stat
        internal = self.internal.getFileEntry(file.path)
        if internal is None:
            return None
        if internal.type != "blob" or not stat.S_ISREG(internal.mode):
            raise api.commit.NotAFile(file.path)
        return api.commit.Commit.FileInformation(
            file, int(internal.mode), internal.sha1, internal.size)

    def getFileContents(self, file):
        information = self.getFileInformation(file)
        if information is None:
            return None
        return self.internal.repository.fetch(information.sha1).data

    def getFileLines(self, file):
        contents = self.getFileContents(file)
        if contents is None:
            return None
        return diff.parse.splitlines(contents)

    @staticmethod
    def create(critic, repository, commit_id, sha1):
        try:
            internal = gitutils.Commit.fromSHA1(
                db=critic.database,
                repository=repository._impl.getInternal(critic),
                sha1=sha1,
                commit_id=commit_id)
        except gitutils.GitReferenceError:
            raise api.commit.InvalidSHA1(sha1)
        return Commit(repository, internal).wrap(critic)

def fetch(repository, commit_id, sha1, ref):
    """Fetch one commit by id, SHA-1 or ref, consulting a per-session cache
    keyed both by id and by SHA-1."""
    critic = repository.critic

    def commit_id_from_sha1():
        cursor = critic.getDatabaseCursor()
        cursor.execute("""SELECT id
                            FROM commits
                           WHERE sha1=%s""",
                       (sha1,))
        row = cursor.fetchone()
        if not row:
            raise api.commit.InvalidSHA1(sha1)
        (commit_id,) = row
        return commit_id

    def sha1_from_commit_id():
        cursor = critic.getDatabaseCursor()
        cursor.execute("""SELECT sha1
                            FROM commits
                           WHERE id=%s""",
                       (commit_id,))
        row = cursor.fetchone()
        if not row:
            raise api.commit.InvalidCommitId(commit_id)
        (sha1,) = row
        return sha1

    if ref is not None:
        sha1 = repository.resolveRef(ref, expect="commit")
    if commit_id is not None:
        try:
            return Commit.get_cached(critic, (int(repository), commit_id))
        except KeyError:
            pass
        if sha1 is None:
            sha1 = sha1_from_commit_id()
    else:
        try:
            return Commit.get_cached(critic, (int(repository), sha1))
        except KeyError:
            pass
        commit_id = commit_id_from_sha1()
    commit = Commit.create(critic, repository, commit_id, sha1)
    # Cache under both keys so later lookups by either form hit.
    Commit.add_cached(critic, (int(repository), commit_id), commit)
    Commit.add_cached(critic, (int(repository), sha1), commit)
    return commit
def fetchMany(repository, commit_ids, sha1s):
    """Fetch multiple commits by ids or by SHA-1s (exactly one list given),
    preserving the input order and raising for any id/SHA-1 not found."""
    critic = repository.critic
    cursor = critic.getDatabaseCursor()
    if commit_ids:
        cursor.execute(
            """SELECT id, sha1
                 FROM commits
                WHERE id=ANY (%s)""",
            (commit_ids,))
        rows = cursor.fetchall()
        if len(rows) != len(set(commit_ids)):
            # Identify and report the first missing id.
            found = set(commit_id for commit_id, _ in rows)
            for commit_id in commit_ids:
                if commit_id not in found:
                    raise api.commit.InvalidCommitId(commit_id)
        commits = { commit_id: sha1 for commit_id, sha1 in rows }
        return [fetch(repository, commit_id, commits[commit_id], None)
                for commit_id in commit_ids]
    else:
        cursor.execute(
            """SELECT id, sha1
                 FROM commits
                WHERE sha1=ANY (%s)""",
            (sha1s,))
        rows = cursor.fetchall()
        if len(rows) != len(set(sha1s)):
            # Identify and report the first missing SHA-1.
            found = set(sha1 for _, sha1 in rows)
            for sha1 in sha1s:
                if sha1 not in found:
                    raise api.commit.InvalidSHA1(sha1)
        commits = { sha1: commit_id for commit_id, sha1 in rows }
        return [fetch(repository, commits[sha1], sha1, None)
                for sha1 in sha1s]


================================================
FILE: src/api/impl/commit_unittest.py
================================================
import datetime

# This is the commit that added the testing framework:
COMMIT_SHA1 = "78d7849db854f3544d7291cce96a0a4fa6d6843d"
# This is its tree object:
COMMIT_TREE = "c102e63ed1d612e48d3372c223559192fcf500ce"
# This is its commit message:
COMMIT_MESSAGE = """\
High-level testing framework

Framework for automated installation and "black-box" testing of Critic
running in a VirtualBox instance.
"""
# This is its "summary":
COMMIT_SUMMARY = "High-level testing framework"
# This is the SHA-1 of its parent:
COMMIT_PARENT_SHA1 = "cf0ecdeafb682bd03fba9a5bbc94e125101a5a0f"
# This is its author name, email address and timestamp:
COMMIT_AUTHOR_NAME = "Jens Lindstrom"
COMMIT_AUTHOR_EMAIL = "jl@opera.com"
COMMIT_AUTHOR_TS = datetime.datetime.fromtimestamp(1364402400)
# This is its committer name, email address and timestamp:
COMMIT_COMMITTER_NAME = "Jens Lindstrom"
COMMIT_COMMITTER_EMAIL = "jl@opera.com"
COMMIT_COMMITTER_TS = datetime.datetime.fromtimestamp(1365369848)

def basic():
    # Exercise api.commit.fetch() argument validation, attribute types and
    # error reporting against a known commit in the "critic" repository.
    import api

    critic = api.critic.startSession(for_testing=True)
    repository = api.repository.fetch(critic, name="critic")

    # Invalid argument combinations must be rejected.
    try:
        api.commit.fetch(critic, sha1=COMMIT_SHA1)
    except AssertionError:
        pass
    else:
        assert False
    try:
        api.commit.fetch(repository)
    except AssertionError:
        pass
    else:
        assert False
    try:
        api.commit.fetch(repository, sha1=COMMIT_SHA1, ref="something")
    except AssertionError:
        pass
    else:
        assert False
    try:
        api.commit.fetch(repository, commit_id=0, ref="something")
    except AssertionError:
        pass
    else:
        assert False

    commit = api.commit.fetch(repository, sha1=COMMIT_SHA1)

    assert str(commit) == COMMIT_SHA1
    assert repr(commit) == "api.commit.Commit(sha1=%r)" % COMMIT_SHA1
    assert hash(commit) == hash(COMMIT_SHA1)
    assert commit == COMMIT_SHA1
    assert COMMIT_SHA1 == commit

    assert isinstance(commit.id, int), type(commit.id)
    assert isinstance(commit.sha1, str), type(commit.sha1)
    assert commit.sha1 == COMMIT_SHA1, commit.sha1
    assert isinstance(commit.tree, str), type(commit.tree)
    assert commit.tree == COMMIT_TREE, commit.tree
    assert isinstance(commit.summary, str), type(commit.summary)
    assert commit.summary == COMMIT_SUMMARY, commit.summary
    assert isinstance(commit.message, str), type(commit.message)
    assert commit.message == COMMIT_MESSAGE, commit.message
    assert isinstance(commit.parents, list), type(commit.parents)
    assert len(commit.parents) == 1, len(commit.parents)
    assert isinstance(commit.parents[0], api.commit.Commit), \
        type(commit.parents[0])
    assert commit.parents[0].sha1 == COMMIT_PARENT_SHA1, commit.parents[0].sha1
    assert isinstance(commit.description, str), type(commit.description)
    assert commit.description == "master", commit.description

    assert isinstance(commit.author, api.commit.Commit.UserAndTimestamp), \
        type(commit.author)
    assert isinstance(commit.author.name, str), type(commit.author.name)
    assert commit.author.name == COMMIT_AUTHOR_NAME, commit.author.name
    assert isinstance(commit.author.email, str), type(commit.author.email)
    assert commit.author.email == COMMIT_AUTHOR_EMAIL, commit.author.email
    assert isinstance(commit.author.timestamp, datetime.datetime), \
        type(commit.author.timestamp)
    assert commit.author.timestamp == COMMIT_AUTHOR_TS, commit.author.timestamp

    assert isinstance(commit.committer, api.commit.Commit.UserAndTimestamp), \
        type(commit.committer)
    assert isinstance(commit.committer.name, str), \
        type(commit.committer.name)
    assert commit.committer.name == COMMIT_COMMITTER_NAME, \
        commit.committer.name
    assert isinstance(commit.committer.email, str), type(commit.committer.email)
    assert commit.committer.email == COMMIT_COMMITTER_EMAIL, \
        commit.committer.email
    assert isinstance(commit.committer.timestamp, datetime.datetime), \
        type(commit.committer.timestamp)
    assert commit.committer.timestamp == COMMIT_COMMITTER_TS, \
        commit.committer.timestamp

    # Lookups of non-existing commits must raise the documented exceptions.
    try:
        api.commit.fetch(repository, commit_id=47114711)
    except api.commit.InvalidCommitId:
        pass
    except Exception as error:
        assert False, "wrong exception raised: %s" % error
    else:
        assert False, "no exception raised"

    try:
        api.commit.fetch(repository, sha1="".join(reversed(COMMIT_SHA1)))
    except api.commit.InvalidSHA1:
        pass
    except Exception as error:
        assert False, "wrong exception raised: %s" % error
    else:
        assert False, "no exception raised"

    print "basic: ok"


================================================
FILE: src/api/impl/commitset.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import api
import apiobject

class CommitSet(apiobject.APIObject):
    """Implementation backend for api.commitset.CommitSet: an immutable set
    of commits with precomputed child links, heads and tails."""

    wrapper_class = api.commitset.CommitSet

    def __init__(self, commits):
        self.commits = frozenset(commits)
        # Map commit -> set of its children within this set.
        self.__children = {}
        parents = set()
        for commit in self.commits:
            parents.update(commit.parents)
            for parent in commit.parents:
                self.__children.setdefault(parent, set()).add(commit)
        # Heads: commits in the set that no other commit in the set has as a
        # parent.  Tails: parents referenced from the set but not in it.
        self.heads = frozenset(self.commits - parents)
        self.tails = frozenset(parents - self.commits)

    def __iter__(self):
        return iter(self.commits)

    def __len__(self):
        return len(self.commits)

    def __contains__(self, item):
        # Membership also works with a SHA-1 string thanks to the commit
        # wrapper's string-based __eq__/__hash__.
        return str(item) in self.commits

    def __hash__(self):
        return hash(self.commits)

    def __eq__(self, other):
        return self.commits == other.commits

    def getFilteredTails(self, critic):
        # Return the subset of tails that are not ancestors of other tails.
        if not self.commits:
            return frozenset()
        legacy_repository = \
            next(iter(self.commits)).repository._impl.getInternal(critic)
        candidates = set(self.tails)
        result = set()
        while candidates:
            tail = candidates.pop()
            eliminated = set()
            # NOTE(review): reconstructed as a for/else — |tail| is kept only
            # when the loop finishes without break, i.e. when it is not an
            # ancestor of any remaining candidate.
            for other in candidates:
                base = legacy_repository.mergebase([tail, other])
                if base == tail:
                    # Tail is an ancestor of other: tail should not be included
                    # in the returned set.
                    break
                elif base == other:
                    # Other is an ancestor of tail: other should not be included
                    # in the returned set.
                    eliminated.add(other)
            else:
                result.add(tail)
            candidates -= eliminated
        return frozenset(result)

    def getDateOrdered(self):
        # Yield commits in reverse commit-date order, but never a commit
        # before all of its descendants in the set.
        queue = sorted(self.commits,
                       key=lambda commit: commit.committer.timestamp,
                       reverse=True)
        included = set()
        while queue:
            commit = queue.pop(0)
            if commit in included:
                continue
            if commit in self.__children:
                remaining_children = self.__children[commit] - included
                if remaining_children:
                    # Some descendants of this commit have not yet been emitted;
                    # we have to delay this commit.  Insert the commit after the
                    # earliest remaining descendant in the list.  This means
                    # that as soon as we've processed all descendants, we retry
                    # the commit.
                    queue.insert(max(queue.index(child)
                                     for child in remaining_children) + 1,
                                 commit)
                    continue
            yield commit
            included.add(commit)

    def getTopoOrdered(self):
        # Yield commits in a topological order, children before parents.
        if not self:
            return
        head = set(self.heads).pop()
        queue = [head]
        included = set()
        while queue:
            commit = queue.pop(0)
            if commit in included:
                continue
            if commit in self.__children and self.__children[commit] - included:
                # Some descendants of this commit have not yet been emitted; we
                # have to delay this commit.  We can only delay this commit if
                # the queue is non-empty, so assert that it isn't.
                assert queue
                queue.append(commit)
                continue
            yield commit
            included.add(commit)
            parents = sorted((parent for parent in commit.parents
                              if parent in self and parent not in included),
                             key=lambda commit: commit.committer.timestamp,
                             reverse=False)
            queue[:0] = parents

    def getChildrenOf(self, commit):
        return set(self.__children.get(commit, []))

    def getParentsOf(self, commit):
        return [parent for parent in commit.parents
                if parent in self]

    def getDescendantsOf(self, commits, include_self):
        # Breadth-first walk over the child links.
        descendants = set()
        if include_self:
            descendants.update(commits)
        queue = set()
        for commit in commits:
            queue.update(self.getChildrenOf(commit))
        while queue:
            descendant = queue.pop()
            descendants.add(descendant)
            children = self.__children.get(descendant)
            if children:
                queue.update(children - descendants)
        return create(commits[0].critic, descendants)

    def getAncestorsOf(self, commits, include_self):
        # Breadth-first walk over the (in-set) parent links.
        ancestors = set()
        if include_self:
            ancestors.update(commits)
        queue = set()
        for commit in commits:
            queue.update(self.getParentsOf(commit))
        while queue:
            ancestor = queue.pop()
            ancestors.add(ancestor)
            queue.update(set(self.getParentsOf(ancestor)) - ancestors)
        return create(commits[0].critic, ancestors)

    def union(self, critic, commits):
        return create(critic, self.commits.union(commits))

    def intersection(self, critic, commits):
        return create(critic, self.commits.intersection(commits))

    def difference(self, critic, commits):
        return create(critic, self.commits.difference(commits))

    def symmetric_difference(self, critic, commits):
        return create(critic, self.commits.symmetric_difference(commits))

def create(critic, commits):
    """Wrap |commits| (an iterable or an existing wrapped CommitSet) in an
    api.commitset.CommitSet."""
    if isinstance(commits, api.commitset.CommitSet):
        assert isinstance(commits._impl, CommitSet)
        impl = commits._impl
    else:
        impl = CommitSet(commits)
    return api.commitset.CommitSet(critic, impl)

def calculateFromRange(critic, from_commit, to_commit):
    """Create the commit set from_commit..to_commit.

    Raises api.commitset.InvalidCommitRange if from_commit is not an
    ancestor of to_commit, or of all included commits."""
    repository = to_commit.repository
    if from_commit in to_commit.parents:
        # Trivial single-commit range.
        return create(critic, [to_commit])
    if not from_commit.isAncestorOf(to_commit):
        raise api.commitset.InvalidCommitRange(
            "Start-of-range commit is not an ancestor of end-of-range commit")
    commitset = create(critic,
                       repository.listCommits(include=to_commit,
                                              exclude=from_commit))
    if len(commitset.tails) > 1:
        raise api.commitset.InvalidCommitRange(
            "Start-of-range commit is not an ancestor of all included commits")
    return commitset
from_commit.isAncestorOf(to_commit): raise api.commitset.InvalidCommitRange( "Start-of-range commit is not an ancestor of end-of-range commit") commitset = create(critic, repository.listCommits(include=to_commit, exclude=from_commit)) if len(commitset.tails) > 1: raise api.commitset.InvalidCommitRange( "Start-of-range commit is not an ancestor of all included commits") return commitset ================================================ FILE: src/api/impl/commitset_unittest.py ================================================ def basic(arguments): import api assert arguments.prefix is not None, "missing argument: --prefix" critic = api.critic.startSession(for_testing=True) repository = api.repository.fetch(critic, name="critic") # This set of commits should exist in the repository; each referenced by a # branch named prefix + letter. # # (X) # | # A # / \ # C B (Y) # | |\ / # D | G # \ / | # E H # | |\ # F K \ # \ / I # M | # | J # N # | # L ALL_LETTERS = "ABCDEFGHIJKLMN" commits = { letter: api.commit.fetch(repository, ref=arguments.prefix + letter) for letter in ALL_LETTERS + "XY" } def make(letters, fn=lambda commits: api.commitset.create(critic, commits)): return fn(commits[letter] for letter in letters) def tostring(commits): return "".join(commit.summary for commit in commits) commitset = make(ALL_LETTERS) assert isinstance(commitset, api.commitset.CommitSet) assert len(commitset) == 14 assert len(commitset.heads) == 2 assert commits["J"] in commitset.heads assert commits["L"] in commitset.heads assert len(commitset.tails) == 2 assert commits["X"] in commitset.tails assert commits["Y"] in commitset.tails for commit in make(ALL_LETTERS, list): assert commit in commitset from_L = commitset.getAncestorsOf(commits["L"], include_self=True) from_J = commitset.getAncestorsOf(commits["J"], include_self=True) assert len(from_L.heads) == 1 assert commits["L"] in from_L.heads assert len(from_J.heads) == 1 assert commits["J"] in from_J.heads assert 
tostring(from_L.topo_ordered) == "LNMFEDCKHGBA" assert tostring(from_J.topo_ordered) == "JIHGBA" assert tostring(from_L.date_ordered) == "LNMKHGFEDCBA" assert tostring(from_J.date_ordered) == "JIHGBA" assert commitset.getChildrenOf(commits["A"]) == make("BC", set) assert commitset.getChildrenOf(commits["B"]) == make("EG", set) assert commitset.getChildrenOf(commits["C"]) == make("D", set) assert commitset.getChildrenOf(commits["D"]) == make("E", set) assert commitset.getChildrenOf(commits["E"]) == make("F", set) assert commitset.getChildrenOf(commits["F"]) == make("M", set) assert commitset.getChildrenOf(commits["G"]) == make("H", set) assert commitset.getChildrenOf(commits["H"]) == make("KI", set) assert commitset.getChildrenOf(commits["I"]) == make("J", set) assert commitset.getChildrenOf(commits["J"]) == make("", set) assert commitset.getChildrenOf(commits["K"]) == make("M", set) assert commitset.getChildrenOf(commits["L"]) == make("", set) assert commitset.getChildrenOf(commits["M"]) == make("N", set) assert commitset.getChildrenOf(commits["N"]) == make("L", set) assert commitset.getChildrenOf(commits["X"]) == make("A", set) assert commitset.getChildrenOf(commits["Y"]) == make("G", set) assert commitset.getParentsOf(commits["A"]) == make("", list) assert commitset.getParentsOf(commits["B"]) == make("A", list) assert commitset.getParentsOf(commits["C"]) == make("A", list) assert commitset.getParentsOf(commits["D"]) == make("C", list) assert commitset.getParentsOf(commits["E"]) == make("DB", list) assert commitset.getParentsOf(commits["F"]) == make("E", list) assert commitset.getParentsOf(commits["G"]) == make("B", list) assert commitset.getParentsOf(commits["H"]) == make("G", list) assert commitset.getParentsOf(commits["I"]) == make("H", list) assert commitset.getParentsOf(commits["J"]) == make("I", list) assert commitset.getParentsOf(commits["K"]) == make("H", list) assert commitset.getParentsOf(commits["L"]) == make("N", list) assert 
commitset.getParentsOf(commits["M"]) == make("FK", list) assert commitset.getParentsOf(commits["N"]) == make("M", list) assert commitset.getDescendantsOf(commits["A"]) == make("BCDEFGHIJKLMN") assert commitset.getDescendantsOf(commits["B"]) == make("EFGHIJKLMN") assert commitset.getDescendantsOf(commits["C"]) == make("DEFLMN") assert commitset.getDescendantsOf(commits["D"]) == make("EFLMN") assert commitset.getDescendantsOf(commits["E"]) == make("FLMN") assert commitset.getDescendantsOf(commits["F"]) == make("LMN") assert commitset.getDescendantsOf(commits["G"]) == make("HIJKLMN") assert commitset.getDescendantsOf(commits["H"]) == make("IJKLMN") assert commitset.getDescendantsOf(commits["I"]) == make("J") assert commitset.getDescendantsOf(commits["J"]) == make("") assert commitset.getDescendantsOf(commits["K"]) == make("LMN") assert commitset.getDescendantsOf(commits["L"]) == make("") assert commitset.getDescendantsOf(commits["M"]) == make("NL") assert commitset.getDescendantsOf(commits["N"]) == make("L") assert commitset.getDescendantsOf(commits["X"]) == make("ABCDEFGHIJKLMN") assert commitset.getDescendantsOf(commits["Y"]) == make("GHIJKLMN") assert commitset.getDescendantsOf(commits["L"], True) == make("L") assert commitset.getDescendantsOf( commit for commit in [commits["I"], commits["N"]]) == make("JL") assert commitset.getAncestorsOf(commits["A"]) == make("") assert commitset.getAncestorsOf(commits["B"]) == make("A") assert commitset.getAncestorsOf(commits["C"]) == make("A") assert commitset.getAncestorsOf(commits["D"]) == make("AC") assert commitset.getAncestorsOf(commits["E"]) == make("ABCD") assert commitset.getAncestorsOf(commits["F"]) == make("ABCDE") assert commitset.getAncestorsOf(commits["G"]) == make("AB") assert commitset.getAncestorsOf(commits["H"]) == make("ABG") assert commitset.getAncestorsOf(commits["I"]) == make("ABGH") assert commitset.getAncestorsOf(commits["J"]) == make("ABGHI") assert commitset.getAncestorsOf(commits["K"]) == make("ABGH") 
assert commitset.getAncestorsOf(commits["L"]) == make("ABCDEFGHKMN") assert commitset.getAncestorsOf(commits["M"]) == make("ABCDEFGHK") assert commitset.getAncestorsOf(commits["N"]) == make("ABCDEFGHKM") assert commitset.getAncestorsOf(commits["A"], True) == make("A") assert commitset.getAncestorsOf( commit for commit in [commits["D"], commits["G"]]) == make("ABC") assert (make("ABC") | make("BCD")) == make("ABCD") assert (make("ABC") & make("BCD")) == make("BC") assert (make("ABC") - make("BCD")) == make("A") assert (make("ABC") ^ make("BCD")) == make("AD") print "basic: ok" def main(argv): import argparse parser = argparse.ArgumentParser() parser.add_argument("--prefix") parser.add_argument("tests", nargs=argparse.REMAINDER) arguments = parser.parse_args(argv) for test in arguments.tests: if test == "basic": basic(arguments) ================================================ FILE: src/api/impl/config_unittest.py ================================================ def basic(): import api assert api.config.getBoolean("debug", "IS_TESTING") is True assert api.config.getBoolean("smtp", "USE_SSL") is False assert api.config.getInteger("smtp", "MAX_ATTEMPTS") == 10 assert api.config.getString("base", "SYSTEM_IDENTITY") == "main" assert api.config.getValue("base", "REPOSITORY_URL_TYPES") == ["http"] try: api.config.getValue("invalid", "IRRELEVANT") except api.config.InvalidGroup as error: assert error.message == "Invalid configuration group: invalid" else: assert False try: api.config.getValue("base", "INVALID") except api.config.InvalidKey as error: assert error.message == "Invalid configuration key: base::INVALID" else: assert False try: api.config.getBoolean("base", "SYSTEM_USER_NAME") except api.config.WrongType as error: assert error.message == ("Wrong type: base::SYSTEM_USER_NAME " "(read as boolean)") else: assert False try: api.config.getInteger("base", "SYSTEM_USER_NAME") except api.config.WrongType as error: assert error.message == ("Wrong type: 
base::SYSTEM_USER_NAME " "(read as integer)") else: assert False try: api.config.getString("debug", "IS_TESTING") except api.config.WrongType as error: assert error.message == ("Wrong type: debug::IS_TESTING " "(read as string)") else: assert False print "basic: ok" ================================================ FILE: src/api/impl/critic.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api import dbutils class NoKey(object): pass class Critic(object): def __init__(self): self.database = None self.actual_user = None self.access_token = None self.__cache = {} def setDatabase(self, database): self.database = database def getEffectiveUser(self, critic): if self.actual_user: return self.actual_user return api.user.anonymous(critic) def lookup(self, cls, key=NoKey): objects = self.__cache[cls] if key is NoKey: return objects return objects[key] def assign(self, cls, key, value): self.__cache.setdefault(cls, {})[key] = value @staticmethod def transactionEnded(critic, tables): for Implementation, cached_objects in critic._impl.__cache.items(): if hasattr(Implementation, "refresh"): Implementation.refresh(critic, tables, cached_objects) return True def startSession(for_user, for_system, for_testing): critic = api.critic.Critic(Critic()) if for_user: database = dbutils.Database.forUser(critic) elif for_system: database = dbutils.Database.forSystem(critic) else: database = dbutils.Database.forTesting(critic) critic._impl.setDatabase(database) return critic ================================================ FILE: src/api/impl/extension.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api import apiobject import configuration class Extension(apiobject.APIObject): wrapper_class = api.extension.Extension def __init__(self, extension_id, name, publisher_id): self.id = extension_id self.name = name self.__publisher_id = publisher_id def getKey(self, critic): publisher = self.getPublisher(critic) if publisher is None: return self.name return "%s/%s" % (publisher.name, self.name) def getPublisher(self, critic): if self.__publisher_id is None: return None return api.user.fetch(critic, self.__publisher_id) @Extension.cached() def fetch(critic, extension_id, key): if not configuration.extensions.ENABLED: raise api.extension.ExtensionError("Extension support not enabled") cursor = critic.getDatabaseCursor() if extension_id is not None: cursor.execute( """SELECT id, name, author FROM extensions WHERE id=%s""", (extension_id,)) else: publisher_name, _, extension_name = key.partition("/") if extension_name is None: extension_name = publisher_name cursor.execute( """SELECT id, name, author FROM extensions WHERE author IS NULL AND name=%s""", (extension_name,)) else: cursor.execute( """SELECT extensions.id, extensions.name, author FROM extensions JOIN users ON (users.id=author) WHERE extensions.name=%s AND users.name=%s""", (extension_name, publisher_name)) try: return next(Extension.make(critic, cursor)) except StopIteration: if extension_id is not None: raise api.extension.InvalidExtensionId(extension_id) else: raise api.extension.InvalidExtensionKey(key) def fetchAll(critic, publisher, installed_by): if not configuration.extensions.ENABLED: raise api.extension.ExtensionError("Extension support not enabled") cursor = critic.getDatabaseCursor() if installed_by: if publisher: cursor.execute( """SELECT extensions.id, name, author FROM extensions JOIN extensioninstalls ON (extension=extensions.id) WHERE author=%s AND uid=%s""", (publisher.id, installed_by.id)) else: cursor.execute( """SELECT extensions.id, name, author FROM extensions JOIN 
extensioninstalls ON (extension=extensions.id) WHERE uid=%s""", (installed_by.id,)) elif publisher: cursor.execute( """SELECT id, name, author FROM extensions WHERE author=%s""", (publisher.id,)) else: cursor.execute( """SELECT id, name, author FROM extensions""") return list(Extension.make(critic, cursor)) ================================================ FILE: src/api/impl/file.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import api import apiobject import dbutils class File(apiobject.APIObject): wrapper_class = api.file.File def __init__(self, file_id, path): self.id = file_id self.path = path def _fetch_by_ids(critic, file_ids): # FIXME: Optimize this to do a single database query. Currently, there will # be one (fast) query per file. try: for file_id in file_ids: internal = dbutils.File.fromId(critic.database, file_id) yield (int(internal), str(internal)) except dbutils.InvalidFileId: raise api.file.InvalidFileId(file_id) def _fetch_by_paths(critic, paths, create): # FIXME: Optimize this to do a single database query. Currently, there will # be one (fast) query per file. 
try: for path in paths: internal = dbutils.File.fromPath( critic.database, path, insert=create) yield (int(internal), str(internal)) except dbutils.InvalidPath: raise api.file.InvalidPath(path) @File.cached() def fetch(critic, file_id, path, create): if file_id is not None: items = _fetch_by_ids(critic, [file_id]) else: items = _fetch_by_paths(critic, [path], create) return next(File.make(critic, items)) def fetchMany(critic, file_ids, paths, create): if file_ids is not None: items = _fetch_by_ids(critic, file_ids) else: items = _fetch_by_paths(critic, paths, create) return list(File.make(critic, items)) ================================================ FILE: src/api/impl/filechange.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api import api.impl import apiobject class FileChange(apiobject.APIObject): wrapper_class = api.filechange.FileChange def __init__(self, changeset, file_id, old_sha1, old_mode, new_sha1, new_mode): self.changeset = changeset self.__file_id = file_id self.old_sha1 = old_sha1 if old_sha1 != "0" * 40 else None self.old_mode = old_mode self.new_sha1 = new_sha1 if new_sha1 != "0" * 40 else None self.new_mode = new_mode def getFile(self, critic): return api.file.fetch(critic, self.__file_id) @FileChange.cached(api.filechange.InvalidFileChangeId, cache_key=lambda (changeset, file): (changeset.id, file.id)) def fetch(critic, changeset, file): cursor = critic.getDatabaseCursor() cursor.execute( """SELECT file, old_sha1, old_mode, new_sha1, new_mode FROM fileversions WHERE changeset=%s AND file=%s""", (changeset.id, file.id,)) def cache_key(args): return args[0].id, args[1] return FileChange.make(critic, ((changeset,) + row for row in cursor), cache_key=cache_key) def fetchAll(critic, changeset): cursor = critic.getDatabaseCursor() cursor.execute( """SELECT file, old_sha1, old_mode, new_sha1, new_mode FROM fileversions JOIN files ON (files.id=file) WHERE changeset=%s ORDER BY files.path""", (changeset.id,)) def cache_key(args): return args[0].id, args[1] return list(FileChange.make(critic, ((changeset,) + row for row in cursor), cache_key=cache_key)) ================================================ FILE: src/api/impl/filechange_unittest.py ================================================ from api.impl.changeset_unittest import ROOT_PATHLIST, ROOT_SHA1, FROM_SHA1,\ TO_SHA1, CUSTOM_PATHLIST def pre(): import api critic = api.critic.startSession(for_testing=True) repository = api.repository.fetch(critic, name="critic") root_filechange("pre", api, critic, repository) custom_filechange("pre", api, critic, repository) single_filechange("pre", api, critic, repository) print("pre: ok") def post(): import api critic = api.critic.startSession(for_testing=True) repository = 
api.repository.fetch(critic, name="critic") root_filechange("post", api, critic, repository) custom_filechange("post", api, critic, repository) single_filechange("post", api, critic, repository) print("post: ok") # requires list of filechange objects def assert_valid_filechanges(filechanges): for filechange in filechanges: assert (filechange.old_sha1 != filechange.new_sha1),\ "Expected filechange.new_sha1 to be different from filechange.old_sha1" if filechange.old_sha1 is not None: assert (isinstance(filechange.old_sha1, str)),\ "Expected filechange.old_sha1 to be a string" assert (len(filechange.old_sha1) == 40),\ "Expected filechange.old_sha1 to be 40 characters long" if filechange.new_sha1 is not None: assert (filechange.new_sha1 is None or isinstance(filechange.new_sha1, str)),\ "Expected filechange.new_sha1 to be a string" assert (len(filechange.new_sha1) == 40),\ "Expected filechange.new_sha1 to be 40 characters long" def root_filechange(phase, api, critic, repository): if phase == "pre": root_commit = api.commit.fetch(repository, sha1=ROOT_SHA1) try: api.changeset.fetch( critic, repository, single_commit=root_commit) except api.changeset.ChangesetDelayed: pass elif phase == "post": root_commit = api.commit.fetch(repository, sha1=ROOT_SHA1) root_changeset = api.changeset.fetch( critic, repository, single_commit=root_commit) root_filechanges = api.filechange.fetchAll(critic, root_changeset) root_files = frozenset( [filechange.file.path for filechange in root_filechanges]) assert (root_files == ROOT_PATHLIST),\ "files in filechanges differ from the expected" assert_valid_filechanges(root_filechanges) else: raise Exception def custom_filechange(phase, api, critic, repository): if phase == "pre": from_commit = api.commit.fetch(repository, sha1=FROM_SHA1) to_commit = api.commit.fetch(repository, sha1=TO_SHA1) try: api.changeset.fetch( critic, repository, from_commit=from_commit, to_commit=to_commit) except api.changeset.ChangesetDelayed: pass elif phase == "post": 
from_commit = api.commit.fetch(repository, sha1=FROM_SHA1) to_commit = api.commit.fetch(repository, sha1=TO_SHA1) custom_changeset = api.changeset.fetch( critic, repository, from_commit=from_commit, to_commit=to_commit) for filechange in custom_changeset.files: assert_valid_filechanges([filechange]) else: raise Exception def single_filechange(phase, api, critic, repository): if phase == "pre": single_commit = api.commit.fetch(repository, sha1=TO_SHA1) try: api.changeset.fetch( critic, repository, single_commit=single_commit) except api.changeset.ChangesetDelayed: pass elif phase == "post": single_commit = api.commit.fetch(repository, sha1=TO_SHA1) single_changeset = api.changeset.fetch( critic, repository, single_commit=single_commit) all_filechanges = api.filechange.fetchAll(critic, single_changeset) assert_valid_filechanges(all_filechanges) all_filechange_ids = [filechange.file.id for filechange in all_filechanges] equiv_filechange_ids = [ filechange.file.id for filechange in single_changeset.files ] assert True or (frozenset(all_filechange_ids) == frozenset(equiv_filechange_ids)),\ "filechanges from fetchAll should be equal to list of filechanges fetched by file_id" else: raise Exception ================================================ FILE: src/api/impl/filecontent.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
from __future__ import absolute_import import api import api.impl from api.impl import apiobject import diff class Filecontent(apiobject.APIObject): wrapper_class = api.filecontent.Filecontent def __init__(self, critic, repository, blob_sha1, file_obj): diffFile = diff.File( repository=repository._impl.getInternal(critic), path=file_obj.path, new_sha1=blob_sha1) diffFile.loadNewLines( highlighted=True, request_highlight=True, highlight_mode="json") self.__filecontents = diffFile.newLines(highlighted=True) def getLines(self, first_row, last_row): num_lines = len(self.__filecontents) actual_first_row = min(first_row, num_lines) if actual_first_row is None: actual_first_row = 1 actual_last_row = min(max(last_row, actual_first_row), num_lines) if actual_last_row is None: actual_last_row = num_lines lines = [] for offset in range(actual_first_row-1, actual_last_row): parts = api.impl.filediff.parts_from_html(self.__filecontents[offset]) lines.append(api.filecontent.Line(parts, offset+1)) return lines def fetch(critic, repository, blob_sha1, file_obj): return Filecontent(critic, repository, blob_sha1, file_obj).wrap(critic) ================================================ FILE: src/api/impl/filediff.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
from __future__ import absolute_import import api import api.impl from api.impl import apiobject import diff import diff.context import json class Filediff(apiobject.APIObject): wrapper_class = api.filediff.Filediff def __init__(self, filechange): self.filechange = filechange self.old_count = None self.new_count = None self.__chunks = None self.__macro_chunks = None self.__repository = filechange.changeset.repository diff_file = self.__getLegacyFile(filechange.critic) self.__highlight_delayed = not diff_file.ensureHighlight("json") @staticmethod def cache_key(filechange): return (filechange.changeset.id, filechange.file.id) def __getChunks(self, critic): if self.__chunks is None: cached_objects = Filediff.allCached(critic) assert Filediff.cache_key(self.filechange) in cached_objects cached_by_changeset = {} for (changeset_id, file_id), filediff in cached_objects.items(): if filediff._impl.__chunks is None: filediff._impl.__chunks = [] cached_by_changeset.setdefault(changeset_id, []) \ .append(file_id) cursor = critic.getDatabaseCursor() for changeset_id, file_ids in cached_by_changeset.items(): cursor.execute( """SELECT file, deleteOffset, deleteCount, insertOffset, insertCount, analysis, whitespace FROM chunks WHERE changeset=%s AND file=ANY (%s) ORDER BY file, deleteOffset, insertOffset""", (changeset_id, file_ids)) for (file_id, delete_offset, delete_count, insert_offset, insert_count, analysis, is_whitespace) in cursor: cached_objects[(changeset_id, file_id)]._impl.__chunks \ .append(diff.Chunk(delete_offset, delete_count, insert_offset, insert_count, analysis=analysis, is_whitespace=is_whitespace)) return self.__chunks def __getLegacyFile(self, critic): return diff.File( self.filechange.file.id, self.filechange.file.path, self.filechange.old_sha1, self.filechange.new_sha1, self.__repository._impl.getInternal(critic), old_mode=self.filechange.old_mode, new_mode=self.filechange.new_mode) def getMacroChunks(self, critic, context_lines, comments, ignore_chunks): 
def create_line_filter(location, context_lines): def line_filter(line): first_context_line = location.first_line - context_lines last_context_line = location.last_line + context_lines if location.side == "old": line_number = line.old_offset if line_number == first_context_line and line.type == diff.Line.INSERTED: return False else: line_number = line.new_offset if line_number == first_context_line and line.type == diff.Line.DELETED: return False return first_context_line <= line_number <= last_context_line return line_filter if self.__macro_chunks is None: if self.__highlight_delayed: raise api.filediff.FilediffDelayed() diff_file = self.__getLegacyFile(critic) diff_file.loadOldLines(True, highlight_mode="json") diff_file.loadNewLines(True, highlight_mode="json") self.old_count = diff_file.oldCount() self.new_count = diff_file.newCount() diff_chunks = self.__getChunks(critic) if comments is not None: translated_comments = [] skinny_comment_chains = [] for comment in comments: if not isinstance( comment.location, api.comment.FileVersionLocation): continue if comment.location.file != self.filechange.file: continue location = comment.location.translateTo( self.filechange.changeset) if not location: continue translated_comments.append((comment, location)) skinny_comment_chains.append(( SkinnyCommentChain(critic, location), location.side == "old")) else: translated_comments = None skinny_comment_chains = None if ignore_chunks and translated_comments: line_filter = create_line_filter( translated_comments[0][1], context_lines) else: line_filter = None diff_context_lines = diff.context.ContextLines( diff_file, diff_chunks, skinny_comment_chains) legacy_macro_chunks = diff_context_lines.getMacroChunks( context_lines, skip_interline_diff=True, lineFilter=line_filter) self.__macro_chunks = [ api.filediff.MacroChunk(MacroChunk(critic, legacy_macro_chunk)) for legacy_macro_chunk in legacy_macro_chunks ] return self.__macro_chunks class MacroChunk(object): def __init__(self, 
critic, legacy_macro_chunk): self.legacy_macro_chunk = legacy_macro_chunk self.old_offset = legacy_macro_chunk.old_offset self.new_offset = legacy_macro_chunk.new_offset self.old_count = legacy_macro_chunk.old_count self.new_count = legacy_macro_chunk.new_count self.__lines = None def getLines(self): if self.__lines is None: self.__lines = [ api.filediff.Line(Line.from_legacy_line(line)) for line in self.legacy_macro_chunk.lines ] return self.__lines class Line(object): CONTEXT = 1 DELETED = 2 MODIFIED = 3 REPLACED = 4 INSERTED = 5 WHITESPACE = 6 CONFLICT = 7 @classmethod def from_legacy_line(self, legacy_line): line = Line() line.legacy_line = legacy_line line.__content = None line.is_whitespace = legacy_line.is_whitespace line.analysis = legacy_line.analysis return line @classmethod def from_changed_line(self, original_line, old_content_replacement, new_content_replacement): line = Line() line.legacy_line = original_line.legacy_line line.__old_content = old_content_replacement line.__new_content = new_content_replacement line.is_whitespace = original_line.is_whitespace return line def type_string(self): if self.legacy_line.type == Line.CONTEXT: return "CONTEXT" elif self.legacy_line.type == Line.DELETED: return "DELETED" elif self.legacy_line.type == Line.MODIFIED: return "MODIFIED" elif self.legacy_line.type == Line.REPLACED: return "REPLACED" elif self.legacy_line.type == Line.INSERTED: return "INSERTED" elif self.legacy_line.type == Line.WHITESPACE: return "WHITESPACE" elif self.legacy_line.type == Line.CONFLICT: return "CONFLICT" def getContent(self): if self.__content is None: old_value = self.legacy_line.old_value new_value = self.legacy_line.new_value old_content = parts_from_html(self.legacy_line.old_value) if self.legacy_line.type == Line.CONTEXT: content = old_content else: new_content = parts_from_html(self.legacy_line.new_value) if self.legacy_line.analysis: content = perform_detailed_operations( self.legacy_line.analysis, old_content, new_content) 
else: content = perform_basic_operations( self.legacy_line.type, old_content, new_content) self.__content = [api.filediff.Part(part) for part in content] return self.__content class Part(object): def __init__(self, part_type, content, state=None): self.type = part_type self.content = content self.state = state def copy(self): return Part(self.type, self.content, self.state) def with_state(self, state): self.state = state return self class SkinnyCommentChain(object): def __init__(self, critic, location): filechange = api.filechange.fetch( critic, location.changeset, location.file) if location.side == "old": key = filechange.old_sha1 else: key = filechange.new_sha1 self.lines_by_sha1 = {} self.lines_by_sha1[key] = ( location.first_line, location.last_line - location.first_line + 1 ) self.comments = True def fetch(critic, filechange): cache_key = Filediff.cache_key(filechange) try: return Filediff.get_cached(critic, cache_key) except KeyError: pass filediff = Filediff(filechange).wrap(critic) Filediff.add_cached(critic, cache_key, filediff) return filediff def fetchAll(critic, changeset): return [ fetch(critic, filechange) for filechange in changeset.files ] def parts_from_html(content): if content is None: return None return (Part(part_json[0], part_json[1].encode("utf-8")) for part_json in json.loads(content)) class Parts(object): def __init__(self, parts): self.parts = list(parts) self.offset = 0 def extract(self, length): self.offset += length while self.parts and len(self.parts[0].content) <= length: part = self.parts.pop(0) length -= len(part.content) yield part if length: tail_part = self.parts[0] head_part = tail_part.copy() head_part.content = head_part.content[:length] tail_part.content = tail_part.content[length:] yield head_part def skip(self, length): for part in self.extract(length): pass def perform_detailed_operations(operations, old_content, new_content): processed_content = [] old_parts = Parts(old_content) new_parts = Parts(new_content) for 
operation in operations: if operation[0] == "r": old_range, _, new_range = operation[1:].partition("=") elif operation[0] == "d": old_range = operation[1:] new_range = None else: old_range = None new_range = operation[1:] if old_range: old_begin, old_end = map(int, old_range.split("-")) if new_range: new_begin, new_end = map(int, new_range.split("-")) if old_range: context_length = old_begin - old_parts.offset if context_length: processed_content.extend(old_parts.extract(context_length)) new_parts.skip(context_length) deleted_length = old_end - old_begin processed_content.extend( part.with_state("d") for part in old_parts.extract(deleted_length)) if new_range: if not old_range: context_length = new_begin - new_parts.offset if context_length: processed_content.extend(old_parts.extract(context_length)) new_parts.skip(context_length) inserted_length = new_end - new_begin processed_content.extend( part.with_state("i") for part in new_parts.extract(inserted_length)) processed_content.extend(old_parts.parts) return processed_content def perform_basic_operations(line_type, old_content, new_content): if old_content is not None and new_content is not None: return ([part.with_state("d") for part in old_content or []] + [part.with_state("i") for part in new_content or []]) elif old_content is not None: return old_content return new_content ================================================ FILE: src/api/impl/filediff_unittest.py ================================================ from api.impl.changeset_unittest import FROM_SHA1, TO_SHA1 def pre1(): import api critic = api.critic.startSession(for_testing=True) repository = api.repository.fetch(critic, name="critic") from_commit = api.commit.fetch(repository, sha1=FROM_SHA1) to_commit = api.commit.fetch(repository, sha1=TO_SHA1) try: api.changeset.fetch( critic, repository, from_commit=from_commit, to_commit=to_commit) except api.changeset.ChangesetDelayed: pass print "pre1: ok" def pre2(): import api critic = 
api.critic.startSession(for_testing=True) repository = api.repository.fetch(critic, name="critic") from_commit = api.commit.fetch(repository, sha1=FROM_SHA1) to_commit = api.commit.fetch(repository, sha1=TO_SHA1) changeset = api.changeset.fetch( critic, repository, from_commit=from_commit, to_commit=to_commit) file = api.file.fetch(critic, path="src/operation/createreview.py") filechange = api.filechange.fetch(critic, changeset, file) try: api.filediff.fetch(critic, filechange).getMacroChunks(context_lines=3) except api.filediff.FilediffDelayed: pass else: assert False, "filediff not delayed as expected" print "pre2: ok" def post(): import api critic = api.critic.startSession(for_testing=True) repository = api.repository.fetch(critic, name="critic") from_commit = api.commit.fetch(repository, sha1=FROM_SHA1) to_commit = api.commit.fetch(repository, sha1=TO_SHA1) changeset = api.changeset.fetch( critic, repository, from_commit=from_commit, to_commit=to_commit) file = api.file.fetch(critic, path="src/operation/createreview.py") filechange = api.filechange.fetch(critic, changeset, file) filediff = api.filediff.fetch(critic, filechange) chunks = filediff.getMacroChunks(context_lines=3) assert isinstance(chunks, list) assert len(chunks) == 2 assert chunks[0].old_offset == 88 assert chunks[0].old_count == 6 assert chunks[0].new_offset == 88 assert chunks[0].new_count == 9 assert len(chunks[0].lines) == 9 assert chunks[0].lines[0].type_string == "CONTEXT" assert chunks[0].lines[1].type_string == "CONTEXT" assert chunks[0].lines[2].type_string == "CONTEXT" assert chunks[0].lines[3].type_string == "INSERTED" assert chunks[0].lines[4].type_string == "INSERTED" assert chunks[0].lines[5].type_string == "INSERTED" assert chunks[0].lines[6].type_string == "CONTEXT" assert chunks[0].lines[7].type_string == "CONTEXT" assert chunks[0].lines[8].type_string == "CONTEXT" assert chunks[1].old_offset == 199 assert chunks[1].old_count == 14 assert chunks[1].new_offset == 202 assert 
chunks[1].new_count == 15 assert len(chunks[1].lines) == 15 assert chunks[1].lines[0].type_string == "CONTEXT" assert chunks[1].lines[1].type_string == "CONTEXT" assert chunks[1].lines[2].type_string == "CONTEXT" assert chunks[1].lines[3].type_string == "MODIFIED" assert chunks[1].lines[4].type_string == "CONTEXT" assert chunks[1].lines[5].type_string == "CONTEXT" assert chunks[1].lines[6].type_string == "CONTEXT" assert chunks[1].lines[7].type_string == "CONTEXT" assert chunks[1].lines[8].type_string == "CONTEXT" assert chunks[1].lines[9].type_string == "MODIFIED" assert chunks[1].lines[10].type_string == "REPLACED" assert chunks[1].lines[11].type_string == "INSERTED" assert chunks[1].lines[12].type_string == "CONTEXT" assert chunks[1].lines[13].type_string == "CONTEXT" assert chunks[1].lines[14].type_string == "CONTEXT" print "post: ok" ================================================ FILE: src/api/impl/filters.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api import apiobject class RepositoryFilter(apiobject.APIObject): wrapper_class = api.filters.RepositoryFilter def __init__(self, filter_id, subject_id, filter_type, path, repository_id, delegate_string, repository=None): self.id = filter_id self.__subject_id = subject_id self.__subject = None self.type = filter_type self.path = path self.__repository_id = repository_id self.__repository = repository self.__delegate_string = delegate_string self.__delegates = None def getSubject(self, critic): if self.__subject is None: self.__subject = api.user.fetch(critic, user_id=self.__subject_id) return self.__subject def getRepository(self, critic): if self.__repository is None: self.__repository = api.repository.fetch( critic, repository_id=self.__repository_id) return self.__repository def getDelegates(self, critic): if self.__delegates is None: self.__delegates = frozenset( api.user.fetch(critic, name=name.strip()) for name in filter(None, self.__delegate_string.split(","))) return self.__delegates @staticmethod def refresh(critic, tables, cached_filters): if "filters" not in tables: return RepositoryFilter.updateAll( critic, """SELECT id, uid, type, path, repository, delegate FROM filters WHERE id=ANY (%s)""", cached_filters) def fetchRepositoryFilter(critic, filter_id): cursor = critic.getDatabaseCursor() cursor.execute("""SELECT id, uid, type, path, repository, delegate FROM filters WHERE id=%s""", (filter_id,)) try: return next(RepositoryFilter.make(critic, cursor)) except StopIteration: raise api.filters.InvalidRepositoryFilterId(filter_id) class ReviewFilter(object): wrapper_class = api.filters.ReviewFilter def __init__(self, subject_id, filter_type, path, filter_id, review_id, creator_id): self.__subject_id = subject_id self.__subject = None self.type = filter_type self.path = path self.id = filter_id self.__review_id = review_id self.__review = None self.__creator_id = creator_id self.__creator = None def getSubject(self, critic): if self.__subject is None: 
self.__subject = api.user.fetch(critic, user_id=self.__subject_id) return self.__subject def getReview(self, critic): if self.__review is None: self.__review = api.review.fetch(critic, review_id=self.__review_id) return self.__review def getCreator(self, critic): if self.__creator is None: self.__creator = api.user.fetch(critic, user_id=self.__creator_id) return self.__creator ================================================ FILE: src/api/impl/labeledaccesscontrolprofile.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api import apiobject import dbutils public_module = api.labeledaccesscontrolprofile public_class = public_module.LabeledAccessControlProfile class LabeledAccessControlProfile(apiobject.APIObject): wrapper_class = public_class def __init__(self, labels, profile_id): self.labels = tuple(labels.split("|")) self.__profile_id = profile_id def getAccessControlProfile(self, critic): return api.accesscontrolprofile.fetch(critic, self.__profile_id) @LabeledAccessControlProfile.cached() def fetch(critic, labels): cursor = critic.getDatabaseCursor() cursor.execute("""SELECT labels, profile FROM labeledaccesscontrolprofiles WHERE labels=%s""", ("|".join(labels),)) try: return next(LabeledAccessControlProfile.make(critic, cursor)) except StopIteration: raise public_module.InvalidAccessControlProfileLabels(labels) def fetchAll(critic, profile): cursor = critic.getDatabaseCursor() if profile is None: cursor.execute("""SELECT labels, profile FROM labeledaccesscontrolprofiles ORDER BY labels ASC""") else: cursor.execute("""SELECT labels, profile FROM labeledaccesscontrolprofiles WHERE profile=%s ORDER BY labels ASC""", (profile.id,)) return list(LabeledAccessControlProfile.make(critic, cursor)) ================================================ FILE: src/api/impl/log/__init__.py ================================================ import rebase import partition ================================================ FILE: src/api/impl/log/partition.py ================================================ import api class Partition(object): def __init__(self, commits): assert not commits or len(commits.heads) == 1 self.commits = commits self.preceding = None self.following = None def wrap(self, critic): return api.log.partition.Partition(critic, self) def create(critic, commits, rebases): commits = api.commitset.create(critic, commits) partitions = [] def add(rebase, partition): if partitions: previous_rebase, previous_partition = partitions[-1] previous_partition._impl.preceding = \ 
api.log.partition.Partition.Edge(previous_rebase, partition) partition._impl.following = \ api.log.partition.Partition.Edge(previous_rebase, previous_partition) partitions.append((rebase, partition)) rebase = None for rebase in reversed(rebases): partition_commits = commits.getAncestorsOf( rebase.old_head, rebase.old_head in commits) commits = commits - partition_commits add(rebase, Partition(partition_commits).wrap(critic)) if len(commits.heads) > 1: raise api.log.partition.PartitionError( "Incompatible commits/rebases arguments") add(None, Partition(commits).wrap(critic)) return partitions[-1][1] ================================================ FILE: src/api/impl/log/partition_unittest.py ================================================ def basic(): import api critic = api.critic.startSession(for_testing=True) repository = api.repository.fetch(critic, name="critic") def fetch_review(number): branch = api.branch.fetch(critic, repository=repository, name="r/020-reviewrebase/%d" % number) return api.review.fetch(critic, branch=branch) def check_partition(partition): assert isinstance(partition, api.log.partition.Partition) def check_commits(commits, summaries): assert isinstance(commits, api.commitset.CommitSet) assert len(commits) == len(summaries) for commit, summary in zip(commits.topo_ordered, summaries): assert commit.summary == summary def check_following(partition, rebase_class): edge = partition.following assert isinstance(edge, api.log.partition.Partition.Edge) assert isinstance(edge.rebase, rebase_class) assert isinstance(edge.partition, api.log.partition.Partition) assert edge.partition is not partition mirror = edge.partition.preceding assert mirror.partition is partition assert mirror.rebase is edge.rebase return edge.partition # # 020-reviewrebase, test 1 # partition = fetch_review(1).first_partition assert partition.preceding is None check_partition(partition) check_commits(partition.commits, ["Test #1, commit 6", "Test #1, commit 5", "Test #1, commit 
4"]) partition = check_following(partition, api.log.rebase.HistoryRewrite) check_partition(partition) check_commits(partition.commits, ["Test #1, commit 3", "Test #1, commit 2", "Test #1, commit 1"]) assert partition.following is None # # 020-reviewrebase, test 2 # partition = fetch_review(2).first_partition assert partition.preceding is None check_partition(partition) check_commits(partition.commits, []) partition = check_following(partition, api.log.rebase.MoveRebase) check_partition(partition) check_commits(partition.commits, []) partition = check_following(partition, api.log.rebase.HistoryRewrite) check_partition(partition) check_commits(partition.commits, []) partition = check_following(partition, api.log.rebase.MoveRebase) check_partition(partition) check_commits(partition.commits, ["Test #2, commit 3", "Test #2, commit 2", "Test #2, commit 1"]) assert partition.following is None # # 020-reviewrebase, test 3 # partition = fetch_review(3).first_partition assert partition.preceding is None check_partition(partition) check_commits(partition.commits, []) partition = check_following(partition, api.log.rebase.MoveRebase) check_partition(partition) check_commits(partition.commits, []) partition = check_following(partition, api.log.rebase.HistoryRewrite) check_partition(partition) check_commits(partition.commits, []) partition = check_following(partition, api.log.rebase.MoveRebase) check_partition(partition) check_commits(partition.commits, ["Test #3, commit 3", "Test #3, commit 2", "Test #3, commit 1"]) assert partition.following is None # # 020-reviewrebase, test 4 # partition = fetch_review(4).first_partition assert partition.preceding is None check_partition(partition) check_commits(partition.commits, []) partition = check_following(partition, api.log.rebase.MoveRebase) check_partition(partition) check_commits(partition.commits, [("Merge branch '020-reviewrebase-4-1' " "into 020-reviewrebase-4-2"), "Test #4, commit 3", "Test #4, commit 2", "Test #4, commit 1"]) 
assert partition.following is None # # 020-reviewrebase, test 5 # partition = fetch_review(5).first_partition assert partition.preceding is None check_partition(partition) check_commits(partition.commits, []) partition = check_following(partition, api.log.rebase.MoveRebase) check_partition(partition) check_commits(partition.commits, []) partition = check_following(partition, api.log.rebase.HistoryRewrite) check_partition(partition) check_commits(partition.commits, ["Test #5, commit 3", "Test #5, commit 2", "Test #5, commit 1"]) assert partition.following is None print "basic: ok" ================================================ FILE: src/api/impl/log/rebase.py ================================================ import api from .. import apiobject class Rebase(apiobject.APIObject): wrapper_class = api.log.rebase.Rebase def __init__(self, rebase_id, review_id, creator_id, old_head_id, new_head_id, old_upstream_id, new_upstream_id, equivalent_merge_id, replayed_rebase_id): self.id = rebase_id self.review_id = review_id self.old_head_id = old_head_id self.new_head_id = new_head_id self.old_upstream_id = old_upstream_id self.new_upstream_id = new_upstream_id self.equivalent_merge_id = equivalent_merge_id self.replayed_rebase_id = replayed_rebase_id self.creator_id = creator_id if self.new_upstream_id is None: self.wrapper_class = api.log.rebase.HistoryRewrite else: self.wrapper_class = api.log.rebase.MoveRebase def getReview(self, critic): return api.review.fetch(critic, review_id=self.review_id) def getRepository(self, critic): return self.getReview(critic).branch.repository def getOldHead(self, critic): return api.commit.fetch(self.getRepository(critic), commit_id=self.old_head_id) def getNewHead(self, critic): if self.new_head_id is not None: return api.commit.fetch(self.getRepository(critic), commit_id=self.new_head_id) else: return None def getOldUpstream(self, critic): return api.commit.fetch(self.getRepository(critic), commit_id=self.old_upstream_id) def 
getNewUpstream(self, critic): return api.commit.fetch(self.getRepository(critic), commit_id=self.new_upstream_id) def getEquivalentMerge(self, critic): assert self.new_upstream_id is not None if self.equivalent_merge_id is None: return None return api.commit.fetch(self.getRepository(critic), commit_id=self.equivalent_merge_id) def getReplayedRebase(self, critic): assert self.new_upstream_id is not None if self.replayed_rebase_id is None: return None return api.commit.fetch(self.getRepository(critic), commit_id=self.replayed_rebase_id) def getCreator(self, critic): return api.user.fetch(critic, user_id=self.creator_id) @Rebase.cached() def fetch(critic, rebase_id): cursor = critic.getDatabaseCursor() cursor.execute( """SELECT id, review, uid, old_head, new_head, old_upstream, new_upstream, equivalent_merge, replayed_rebase FROM reviewrebases WHERE id=%s""", (rebase_id,)) try: return next(Rebase.make(critic, cursor)) except StopIteration: raise api.log.rebase.InvalidRebaseId(rebase_id) def fetchAll(critic, review, pending): cursor = critic.getDatabaseCursor() new_head = "new_head IS NULL" if pending else "new_head IS NOT NULL" if review is not None: cursor.execute( """SELECT id, review, uid, old_head, new_head, old_upstream, new_upstream, equivalent_merge, replayed_rebase FROM reviewrebases WHERE review=%s AND """ + new_head + """ ORDER BY id DESC""", (review.id,)) else: cursor.execute( """SELECT id, review, uid, old_head, new_head, old_upstream, new_upstream, equivalent_merge, replayed_rebase FROM reviewrebases WHERE """ + new_head + """ ORDER BY id DESC""") return list(Rebase.make(critic, cursor)) ================================================ FILE: src/api/impl/log/rebase_unittest.py ================================================ def basic(): import api critic = api.critic.startSession(for_testing=True) repository = api.repository.fetch(critic, name="critic") alice = api.user.fetch(critic, name="alice") def fetch_review(number=None): if number is None: name = 
"r/012-replayrebase" else: name = "r/020-reviewrebase/%d" % number branch = api.branch.fetch(critic, repository=repository, name=name) return api.review.fetch(critic, branch=branch) def check_history_rewrite(rebase, old_head_summary, new_head_summary): assert isinstance(rebase, api.log.rebase.HistoryRewrite) assert api.log.rebase.fetch(critic, rebase_id=rebase.id) is rebase assert rebase.old_head.summary == old_head_summary, (rebase.old_head.summary, old_head_summary) assert rebase.new_head.summary == new_head_summary, (rebase.new_head.summary, new_head_summary) def check_move_rebase(rebase, old_head_summary, new_head_summary, old_upstream_summary, new_upstream_summary, expect=None): assert isinstance(rebase, api.log.rebase.MoveRebase) assert api.log.rebase.fetch(critic, rebase_id=rebase.id) is rebase assert all(isinstance(commit, api.commit.Commit) for commit in (rebase.old_head, rebase.new_head, rebase.old_upstream, rebase.new_upstream)) assert rebase.old_head.summary == old_head_summary, (rebase.old_head.summary, old_head_summary) assert rebase.new_head.summary == new_head_summary, (rebase.new_head.summary, new_head_summary) if old_upstream_summary is not None: assert rebase.old_upstream.summary == old_upstream_summary if new_upstream_summary is not None: assert rebase.new_upstream.summary == new_upstream_summary if expect == "equivalent_merge": assert isinstance(rebase.equivalent_merge, api.commit.Commit) else: assert rebase.equivalent_merge is None if expect == "replayed_rebase": assert isinstance(rebase.replayed_rebase, api.commit.Commit) else: assert rebase.replayed_rebase is None assert rebase.creator is alice # # 012-replayrebase # rebases = fetch_review().rebases assert len(rebases) == 1 check_move_rebase(rebases[0], old_head_summary="Use temporary clones for relaying instead of temporary remotes", new_head_summary="Use temporary clones for relaying instead of temporary remotes", old_upstream_summary="Add test for diffs including the initial commit", 
new_upstream_summary="Add utility function for creating a user", expect="replayed_rebase") # # 020-reviewrebase, test 1 # rebases = fetch_review(1).rebases assert len(rebases) == 1 check_history_rewrite(rebases[0], old_head_summary="Test #1, commit 3", new_head_summary="Test #1, commit 1") # # 020-reviewrebase, test 2 # rebases = fetch_review(2).rebases assert len(rebases) == 3 check_move_rebase(rebases[0], old_head_summary="Test #2, commit 7", new_head_summary="Test #2, commit 8", old_upstream_summary="Test #2 base, commit 2", new_upstream_summary="Test #2 base, commit 1", expect="replayed_rebase") check_history_rewrite(rebases[1], old_head_summary="Test #2, commit 6", new_head_summary="Test #2, commit 7") check_move_rebase(rebases[2], old_head_summary="Test #2, commit 3", new_head_summary="Test #2, commit 6", old_upstream_summary="Test #2 base, commit 1", new_upstream_summary="Test #2 base, commit 2", expect="equivalent_merge") # # 020-reviewrebase, test 3 # rebases = fetch_review(3).rebases assert len(rebases) == 3 check_move_rebase(rebases[0], old_head_summary="Test #3, commit 7", new_head_summary="Test #3, commit 8", old_upstream_summary="Test #3 base, commit 2", new_upstream_summary="Test #3 base, commit 1", expect="replayed_rebase") check_history_rewrite(rebases[1], old_head_summary="Test #3, commit 6", new_head_summary="Test #3, commit 7") check_move_rebase(rebases[2], old_head_summary="Test #3, commit 3", new_head_summary="Test #3, commit 6", old_upstream_summary="Test #3 base, commit 1", new_upstream_summary="Test #3 base, commit 2", expect="equivalent_merge") # # 020-reviewrebase, test 4 # rebases = fetch_review(4).rebases assert len(rebases) == 1 check_move_rebase(rebases[0], old_head_summary=("Merge branch '020-reviewrebase-4-1' " "into 020-reviewrebase-4-2"), new_head_summary="Test #4, commit 6", old_upstream_summary=None, new_upstream_summary=None, expect="equivalent_merge") # # 020-reviewrebase, test 5 # rebases = fetch_review(5).rebases assert 
len(rebases) == 2 check_move_rebase(rebases[0], old_head_summary="Test #5, commit 1", new_head_summary="Test #5, commit 4", old_upstream_summary="Test #5 base, commit 2", new_upstream_summary="Test #5 base, commit 1", expect="replayed_rebase") check_history_rewrite(rebases[1], old_head_summary="Test #5, commit 3", new_head_summary="Test #5, commit 1") try: api.log.rebase.fetch(critic, rebase_id=10000) except api.log.rebase.InvalidRebaseId: pass except Exception as error: assert False, "wrong exception raised: %s" % error else: assert False, "no exception raised" print "basic: ok" ================================================ FILE: src/api/impl/reply.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api import apiobject class Reply(apiobject.APIObject): wrapper_class = api.reply.Reply def __init__(self, reply_id, state, comment_id, batch_id, author_id, timestamp, text): self.id = reply_id self.is_draft = state == "draft" self.__comment_id = comment_id self.__batch_id = batch_id self.__author_id = author_id self.timestamp = timestamp self.text = text def __cmp__(self, other): return cmp(self.__batch_id, other.__batch_id) def getComment(self, critic): return api.comment.fetch(critic, self.__comment_id) def getAuthor(self, critic): return api.user.fetch(critic, self.__author_id) @staticmethod def refresh(critic, tables, cached_replies): if "comments" not in tables: return Reply.updateAll( critic, """SELECT id, state, chain, batch, uid, time, comment FROM comments WHERE id=ANY (%s)""", cached_replies) @Reply.cached(api.reply.InvalidReplyId) def fetch(critic, reply_id): cursor = critic.getDatabaseCursor() cursor.execute("""SELECT comments.id, comments.state, chain, comments.batch, comments.uid, comments.time, comment FROM comments JOIN commentchains ON (commentchains.id=comments.chain) WHERE comments.id=%s AND comments.state!='deleted' AND commentchains.first_comment!=comments.id""", (reply_id,)) return Reply.make(critic, cursor) @Reply.cachedMany(api.reply.InvalidReplyIds) def fetchMany(critic, reply_ids): cursor = critic.getDatabaseCursor() cursor.execute("""SELECT comments.id, comments.state, chain, comments.batch, comments.uid, comments.time, comment FROM comments JOIN commentchains ON (commentchains.id=comments.chain) WHERE comments.id=ANY (%s) AND comments.state!='deleted' AND commentchains.first_comment!=comments.id""", (reply_ids,)) return Reply.make(critic, cursor) def fetchForComment(critic, chain_id): cursor = critic.getDatabaseCursor() cursor.execute("""SELECT comments.id, comments.state, chain, comments.batch, comments.uid, comments.time, comment FROM comments JOIN commentchains ON (commentchains.id=comments.chain) WHERE comments.state='current' 
AND commentchains.id=%s AND commentchains.first_comment!=comments.id ORDER BY comments.batch ASC""", (chain_id,)) return list(Reply.make(critic, cursor)) ================================================ FILE: src/api/impl/reply_unittest.py ================================================ import sys import datetime def basic(arguments): import api critic = api.critic.startSession(for_testing=True) repository = api.repository.fetch(critic, name="critic") branch = api.branch.fetch( critic, repository=repository, name=arguments.review) review = api.review.fetch(critic, branch=branch) alice = api.user.fetch(critic, name="alice") bob = api.user.fetch(critic, name="bob") dave = api.user.fetch(critic, name="dave") erin = api.user.fetch(critic, name="erin") EXPECTED = { 0: [dave, bob, erin, bob, alice], 1: [alice, bob], 2: [bob, erin, alice], 3: [], 4: [bob], 5: [] } def check_replies(comment): assert isinstance(comment, api.comment.Comment) assert isinstance(comment.replies, list) expected = EXPECTED[comment_id_map[comment.id]] assert len(comment.replies) == len(expected) for index, (reply, author) in enumerate(zip(comment.replies, expected)): assert isinstance(reply, api.reply.Reply) assert isinstance(reply.id, int) assert isinstance(reply.is_draft, bool) assert not reply.is_draft assert reply.comment is comment assert reply.author is author, (comment.id, index, reply.author.name) assert isinstance(reply.timestamp, datetime.datetime) assert isinstance(reply.text, str) assert reply.text == ("This is a reply from %s." 
% author.name.capitalize()) assert api.reply.fetch(critic, reply_id=reply.id) is reply comments = api.comment.fetchAll(critic, review=review) assert isinstance(comments, list) assert len(comments) == 6 comment_id_map = { comment.id: index for index, comment in enumerate(comments) } for comment in comments: check_replies(comment) reply_ids = [ reply.id for reply in reversed(comments[0].replies[:3]) ] some_replies = api.reply.fetchMany(critic, reply_ids) assert len(some_replies) == 3 assert some_replies[0].id == reply_ids[0] assert some_replies[0] is api.reply.fetch(critic, reply_ids[0]) assert some_replies[1].id == reply_ids[1] assert some_replies[1] is api.reply.fetch(critic, reply_ids[1]) assert some_replies[2].id == reply_ids[2] assert some_replies[2] is api.reply.fetch(critic, reply_ids[2]) print "basic: ok" def main(argv): import argparse parser = argparse.ArgumentParser() parser.add_argument("--review") parser.add_argument("tests", nargs=argparse.REMAINDER) arguments = parser.parse_args(argv) for test in arguments.tests: if test == "basic": basic(arguments) ================================================ FILE: src/api/impl/repository.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import os import subprocess import api import apiobject import auth import configuration import dbutils import gitutils class Repository(apiobject.APIObject): wrapper_class = api.repository.Repository def __init__(self, repository_id, name, path): self.id = repository_id self.name = name self.path = path self.relative_path = os.path.relpath(path, configuration.paths.GIT_DIR) self.__internal = None def getInternal(self, critic): if not self.__internal: self.__internal = gitutils.Repository.fromId( db=critic.database, repository_id=self.id) return self.__internal def getURL(self, critic): return gitutils.Repository.constructURL( critic.database, critic.effective_user.internal, self.path) def run(self, *args): argv = [configuration.executables.GIT] + list(args) process = subprocess.Popen( argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.path) stdout, stderr = process.communicate() if process.returncode != 0: raise api.repository.GitCommandError( argv, process.returncode, stdout, stderr) return stdout def resolveRef(self, ref, expect, short): command_line = ["rev-parse", "--verify", "--quiet"] if short: if isinstance(short, int): command_line.append("--short=%d" % short) else: command_line.append("--short") if expect is not None: ref += "^{%s}" % expect command_line.append(ref) try: return self.run(*command_line).strip() except api.repository.GitCommandError: raise api.repository.InvalidRef(ref) def listCommits(self, repository, include, exclude, args, paths): args = ['rev-list'] + args args.extend(commit.sha1 for commit in include) args.extend("^" + commit.sha1 for commit in exclude) if paths: args.append("--") args.extend(paths) return api.commit.fetchMany(repository, sha1s=self.run(*args).split()) @classmethod def create(Repository, critic, repository_id, name, path): import auth repository = Repository(repository_id, name, path).wrap(critic) auth.AccessControl.accessRepository(critic.database, "read", repository) return repository @Repository.cached() 
def fetch(critic, repository_id, name, path): cursor = critic.getDatabaseCursor() if repository_id is not None: cursor.execute("""SELECT id, name, path FROM repositories WHERE id=%s""", (repository_id,)) elif name is not None: cursor.execute("""SELECT id, name, path FROM repositories WHERE name=%s""", (name,)) else: cursor.execute("""SELECT id, name, path FROM repositories WHERE path=%s""", (path,)) try: return next(Repository.make(critic, cursor)) except StopIteration: if repository_id is not None: raise api.repository.InvalidRepositoryId(repository_id) elif name is not None: raise api.repository.InvalidRepositoryName(name) else: raise api.repository.InvalidRepositoryPath(path) def fetchAll(critic): cursor = critic.getDatabaseCursor() cursor.execute("""SELECT id, name, path FROM repositories ORDER BY name""") return list(Repository.make( critic, cursor, ignored_errors=(auth.AccessDenied,))) def fetchHighlighted(critic, user): highlighted = set() cursor = critic.getDatabaseCursor() cursor.execute("""SELECT DISTINCT repository FROM filters WHERE uid=%s""", (user.id,)) highlighted.update(repository_id for (repository_id,) in cursor) cursor.execute("""SELECT DISTINCT repository FROM branches JOIN reviews ON (reviews.branch=branches.id) JOIN reviewusers ON (reviewusers.review=reviews.id) WHERE reviewusers.uid=%s AND reviewusers.owner""", (user.id,)) highlighted.update(repository_id for (repository_id,) in cursor) cursor.execute("""SELECT id, name, path FROM repositories WHERE id=ANY (%s) ORDER BY name""", (list(highlighted),)) return list(Repository.make( critic, cursor, ignored_errors=(auth.AccessDenied,))) ================================================ FILE: src/api/impl/repository_unittest.py ================================================ def basic(arguments): import api assert arguments.sha1 is not None assert len(arguments.sha1) == 40 assert arguments.head is not None assert len(arguments.head) == 40 assert arguments.path is not None critic = 
api.critic.startSession(for_testing=True) repository = api.repository.fetch(critic, repository_id=1) alice = api.user.fetch(critic, name="alice") assert isinstance(repository, api.repository.Repository) assert isinstance(repository.id, int) assert repository.id == 1 assert isinstance(repository.name, str) assert repository.name == "critic" assert isinstance(repository.path, str) assert repository.path == arguments.path assert isinstance(repository.relative_path, str) assert arguments.path.endswith(repository.relative_path) # FIXME: repository.url is currently broken. assert api.repository.fetch(critic, name="critic") is repository assert api.repository.fetch(critic, path=arguments.path) is repository all_repositories = api.repository.fetchAll(critic) assert len(all_repositories) == 2 assert all_repositories[0] is repository highlighted_repositories = api.repository.fetchHighlighted(critic, alice) assert len(highlighted_repositories) == 1 assert highlighted_repositories[0] is repository head = api.commit.fetch(repository, sha1=arguments.head) assert arguments.head == repository.resolveRef("HEAD") assert arguments.head == repository.resolveRef("HEAD", expect="commit") assert arguments.head.startswith(repository.resolveRef("HEAD", short=True)) assert arguments.head.startswith(repository.resolveRef("HEAD", short=8)) assert len(repository.resolveRef("HEAD", short=8)) == 8 assert head.tree == repository.resolveRef("HEAD", expect="tree") simple_tag = repository.resolveRef("007-repository/simple-tag") assert simple_tag == head.sha1 annotated_tag = repository.resolveRef("007-repository/annotated-tag") assert annotated_tag != head.sha1 annotated_tag = repository.resolveRef("007-repository/annotated-tag", expect="commit") assert annotated_tag == head.sha1 commit0 = api.commit.fetch(repository, sha1=arguments.sha1) commit1 = commit0.parents[0] commit2 = commit1.parents[0] commit3 = commit2.parents[0] commit4 = commit3.parents[0] commit5 = commit4.parents[0] commits = 
repository.listCommits(commit0, commit5) assert len(commits) == 5 assert all(isinstance(commit, api.commit.Commit) for commit in commits) assert commits[0] == commit0 assert commits[1] == commit1 assert commits[2] == commit2 assert commits[3] == commit3 assert commits[4] == commit4 commits = repository.listCommits((commit for commit in [commit0]), (commit for commit in [commit5]), args=["--merges"]) assert len(commits) == 0 commits = repository.listCommits(args=["--reverse", "%s..%s" % (commit5, commit0)]) assert len(commits) == 5 assert commits[0] == commit4 assert commits[1] == commit3 assert commits[2] == commit2 assert commits[3] == commit1 assert commits[4] == commit0 try: api.repository.fetch(critic, repository_id=4711) except api.repository.InvalidRepositoryId: pass except Exception as error: assert False, "wrong exception raised: %s" % error else: assert False, "no exception raised" try: api.repository.fetch(critic, name="wrong") except api.repository.InvalidRepositoryName: pass except Exception as error: assert False, "wrong exception raised: %s" % error else: assert False, "no exception raised" try: api.repository.fetch(critic, path="/var/git/wrong.git") except api.repository.InvalidRepositoryPath: pass except Exception as error: assert False, "wrong exception raised: %s" % error else: assert False, "no exception raised" print "basic: ok" def main(argv): import argparse parser = argparse.ArgumentParser() parser.add_argument("--sha1") parser.add_argument("--head") parser.add_argument("--path") parser.add_argument("tests", nargs=argparse.REMAINDER) arguments = parser.parse_args(argv) for test in arguments.tests: if test == "basic": basic(arguments) ================================================ FILE: src/api/impl/review.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # 
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import api
import apiobject
import api.impl.filters
import auth

class Review(apiobject.APIObject):
    """Implementation backing api.review.Review.

       Most accessors fetch from the database lazily on first use and cache
       the result on the instance (the attributes initialized to None in
       __init__ below)."""

    wrapper_class = api.review.Review

    def __init__(self, review_id, repository_id, branch_id, state, summary,
                 description):
        self.id = review_id
        self.__repository_id = repository_id
        self.__branch_id = branch_id
        self.state = state
        self.summary = summary
        self.description = description
        # Lazily populated caches; None means "not fetched yet".
        self.__owners_ids = None
        self.__assigned_reviewers_ids = None
        self.__active_reviewers_ids = None
        self.__watchers_ids = None
        self.__filters = None
        self.__commits = None
        # NOTE(review): __rebases is initialized here but getRebases() below
        # never reads or writes it; appears to be an unused cache slot.
        self.__rebases = None
        self.__issues = None
        self.__notes = None
        self.__open_issues = None
        self.__total_progress = None
        self.__progress_per_commit = None

    def getRepository(self, critic):
        """Return the api.repository.Repository this review belongs to."""
        return api.repository.fetch(critic, repository_id=self.__repository_id)

    def getBranch(self, critic):
        """Return the review's api.branch.Branch."""
        return api.branch.fetch(critic, branch_id=self.__branch_id)

    def __fetchOwners(self, critic):
        # Owners are the 'reviewusers' rows with the 'owner' flag set.
        if self.__owners_ids is None:
            cursor = critic.getDatabaseCursor()
            cursor.execute("""SELECT uid
                                FROM reviewusers
                               WHERE review=%s
                                 AND owner""",
                           (self.id,))
            self.__owners_ids = frozenset(user_id for (user_id,) in cursor)

    def getOwners(self, critic):
        """Return the review's owners as a frozenset of api.user.User."""
        self.__fetchOwners(critic)
        return frozenset(api.user.fetch(critic, user_id=user_id)
                         for user_id in self.__owners_ids)

    def __fetchAssignedReviewers(self, critic):
        # Users assigned, via 'reviewuserfiles', to review at least one file
        # change in the review.
        if self.__assigned_reviewers_ids is None:
            cursor = critic.getDatabaseCursor()
            cursor.execute(
                """SELECT DISTINCT uid
                     FROM reviewuserfiles
                     JOIN reviewfiles ON (reviewfiles.id=reviewuserfiles.file)
                    WHERE reviewfiles.review=%s""",
                (self.id,))
            self.__assigned_reviewers_ids = frozenset(
                user_id for (user_id,) in cursor)

    def getAssignedReviewers(self, critic):
        """Return assigned reviewers as a frozenset of api.user.User."""
        self.__fetchAssignedReviewers(critic)
        return frozenset(api.user.fetchMany(
            critic, user_ids=self.__assigned_reviewers_ids))

    def __fetchActiveReviewers(self, critic):
        # Users that have actually recorded review state changes
        # ('reviewfilechanges' rows), as opposed to merely being assigned.
        if self.__active_reviewers_ids is None:
            cursor = critic.getDatabaseCursor()
            cursor.execute(
                """SELECT DISTINCT uid
                     FROM reviewfilechanges
                     JOIN reviewfiles ON (reviewfiles.id=reviewfilechanges.file)
                    WHERE reviewfiles.review=%s""",
                (self.id,))
            self.__active_reviewers_ids = frozenset(
                user_id for (user_id,) in cursor)

    def getActiveReviewers(self, critic):
        """Return active reviewers as a frozenset of api.user.User."""
        self.__fetchActiveReviewers(critic)
        return frozenset(api.user.fetchMany(
            critic, user_ids=self.__active_reviewers_ids))

    def __fetchWatchers(self, critic):
        # Watchers are all users associated with the review minus owners,
        # assigned reviewers and active reviewers.
        if self.__watchers_ids is None:
            cursor = critic.getDatabaseCursor()
            cursor.execute("""SELECT uid
                                FROM reviewusers
                               WHERE review=%s""",
                           (self.id,))
            associated_users = frozenset(user_id for (user_id,) in cursor)
            self.__fetchOwners(critic)
            self.__fetchAssignedReviewers(critic)
            self.__fetchActiveReviewers(critic)
            non_watchers = self.__owners_ids | self.__assigned_reviewers_ids | \
                           self.__active_reviewers_ids
            self.__watchers_ids = associated_users - non_watchers

    def getWatchers(self, critic):
        """Return the review's watchers as a frozenset of api.user.User."""
        self.__fetchWatchers(critic)
        return frozenset(api.user.fetch(critic, user_id=user_id)
                         for user_id in self.__watchers_ids)

    def getFilters(self, critic):
        """Return the review's filters as a list of api.filters.ReviewFilter,
           in database row order."""
        if self.__filters is None:
            cursor = critic.getDatabaseCursor()
            cursor.execute("""SELECT uid, type, path, id, review, creator
                                FROM reviewfilters
                               WHERE review=%s""",
                           (self.id,))
            impls = [api.impl.filters.ReviewFilter(*row) for row in cursor]
            self.__filters = [api.filters.ReviewFilter(critic, impl)
                              for impl in impls]
        return self.__filters

    def getCommits(self, critic):
        """Return the commits belonging to the review as a CommitSet.

           Includes commits from 'direct' changesets and from 'merge'
           changesets, except merges recorded as a rebase's equivalent
           merge."""
        if self.__commits is None:
            cursor = critic.getDatabaseCursor()
            # Direct changesets: no merges, no rebase changes.
            cursor.execute(
                """SELECT DISTINCT commits.id, commits.sha1
                     FROM commits
                     JOIN changesets ON (changesets.child=commits.id)
                     JOIN reviewchangesets
                              ON (reviewchangesets.changeset=changesets.id)
                    WHERE reviewchangesets.review=%s
                      AND changesets.type='direct'""",
                (self.id,))
            commit_ids_sha1s = set(cursor)
            # Merge changesets, excluding those added by move rebases.
            cursor.execute(
                """SELECT DISTINCT commits.id, commits.sha1
                     FROM commits
                     JOIN changesets ON (changesets.child=commits.id)
                     JOIN reviewchangesets
                              ON (reviewchangesets.changeset=changesets.id)
                     LEFT OUTER JOIN reviewrebases
                              ON (reviewrebases.review=%s
                              AND reviewrebases.equivalent_merge=commits.id)
                    WHERE reviewchangesets.review=%s
                      AND changesets.type='merge'
                      AND reviewrebases.id IS NULL""",
                (self.id, self.id))
            commit_ids_sha1s.update(cursor)
            repository = self.getRepository(critic)
            commits = [api.commit.fetch(repository, commit_id, sha1)
                       for commit_id, sha1 in commit_ids_sha1s]
            self.__commits = api.commitset.create(critic, commits)
        return self.__commits

    def getRebases(self, wrapper):
        """Return all rebases of the review (not cached)."""
        return api.log.rebase.fetchAll(wrapper.critic, wrapper)

    def getPendingRebase(self, wrapper):
        """Return the single pending rebase, or None if there is none."""
        rebases = api.log.rebase.fetchAll(wrapper.critic, wrapper,
                                          pending=True)
        if len(rebases) == 1:
            return rebases[0]
        else:
            return None

    def getIssues(self, wrapper):
        """Return all issue comments in the review (cached)."""
        if self.__issues is None:
            self.__issues = api.comment.fetchAll(
                wrapper.critic, review=wrapper, comment_type="issue")
        return self.__issues

    def getOpenIssues(self, wrapper):
        """Return the subset of getIssues() whose state is "open" (cached)."""
        if self.__open_issues is None:
            self.__open_issues = [issue for issue in self.getIssues(wrapper)
                                  if issue.state == "open"]
        return self.__open_issues

    def getNotes(self, wrapper):
        """Return all note comments in the review (cached)."""
        if self.__notes is None:
            self.__notes = api.comment.fetchAll(
                wrapper.critic, review=wrapper, comment_type="note")
        return self.__notes

    def isReviewableCommit(self, critic, commit):
        """Return True if |commit| is a reviewable commit in this review,
           i.e. has an associated review changeset."""
        cursor = critic.getDatabaseCursor()
        cursor.execute(
            """SELECT 1
                 FROM reviewchangesets
                 JOIN changesets ON (changesets.id=reviewchangesets.changeset)
                WHERE reviewchangesets.review=%s
                  AND changesets.child=%s""",
            (self.id, commit.id))
        return bool(cursor.fetchone())

    def getTotalProgress(self, critic):
        """Return overall review progress as a number in [0, 1] (cached)."""
        if self.__total_progress is None:
            cursor = critic.getDatabaseCursor()
            cursor.execute(
                """SELECT state, sum(inserted+deleted)
                     FROM reviewfiles
                    WHERE review=%s
                 GROUP BY state""",
                (self.id,))
            reviewed = 0
            pending = 0
            for state, modifications in cursor:
                if modifications == 0:
                    # binary file change
                    actual_modifications = 1
                else:
                    actual_modifications = modifications
                if state == "reviewed":
                    reviewed = actual_modifications
                elif state == "pending":
                    pending = actual_modifications
            total = reviewed + pending
            if reviewed == 0:
                self.__total_progress = 0
            elif pending == 0:
                self.__total_progress = 1
            else:
                # float() guards against Python 2 integer division.
                self.__total_progress = reviewed / float(total)
        return self.__total_progress

    def getProgressPerCommit(self, critic):
        """Return per-commit progress as a list of
           api.review.CommitChangeCount (cached)."""
        if self.__progress_per_commit is None:
            cursor = critic.getDatabaseCursor()
            # Total number of changed lines per commit.
            cursor.execute(
                """SELECT changesets.child, SUM(deleted + inserted)
                     FROM reviewfiles
                     JOIN changesets ON changesets.id=reviewfiles.changeset
                    WHERE reviewfiles.review=%s
                 GROUP BY changesets.child""",
                (self.id,))
            total_changes_dict = {}
            for commit_id, changes in cursor:
                total_changes_dict[commit_id] = changes
            # Number of changed lines per commit already marked reviewed.
            cursor.execute(
                """SELECT changesets.child, SUM(deleted + inserted)
                     FROM reviewfiles
                     JOIN changesets ON changesets.id=reviewfiles.changeset
                    WHERE reviewfiles.review=%s
                      AND state='reviewed'
                 GROUP BY changesets.child""",
                (self.id,))
            reviewed_changes_dict = {}
            for commit_id, changes in cursor:
                reviewed_changes_dict[commit_id] = changes
            commit_change_counts = []
            # Python 2 dict iteration.
            for commit_id, total_changes in total_changes_dict.iteritems():
                reviewed_changes = reviewed_changes_dict.get(commit_id, 0)
                commit_change_counts.append(api.review.CommitChangeCount(
                    commit_id, total_changes, reviewed_changes))
            self.__progress_per_commit = commit_change_counts
        return self.__progress_per_commit

    @classmethod
    def create(Review, critic, *args):
        review = Review(*args).wrap(critic)
        # Access the repository object to trigger an access control check.
        review.repository
        return review

@Review.cached()
def fetch(critic, review_id, branch):
    """Fetch a review by id or by branch (exactly one is expected to be
       non-None).  Raises InvalidReviewId / InvalidReviewBranch when no
       matching review exists."""
    cursor = critic.getDatabaseCursor()
    if review_id is not None:
        cursor.execute("""SELECT reviews.id, branches.repository, branches.id,
                                 state, summary, description
                            FROM reviews
                            JOIN branches ON (branches.id=reviews.branch)
                           WHERE reviews.id=%s""",
                       (review_id,))
    else:
        cursor.execute("""SELECT reviews.id, branches.repository, branches.id,
                                 state, summary, description
                            FROM reviews
                            JOIN branches ON (branches.id=reviews.branch)
                           WHERE branches.id=%s""",
                       (int(branch),))
    try:
        return next(Review.make(critic, cursor))
    except StopIteration:
        if review_id is not None:
            raise api.review.InvalidReviewId(review_id)
        else:
            raise api.review.InvalidReviewBranch(branch)

def fetchMany(critic, review_ids):
    """Fetch multiple reviews by id, preserving the order of |review_ids|.

       NOTE(review): an id with no matching row raises KeyError from the
       final lookup — presumably callers pass only valid ids; confirm."""
    cursor = critic.getDatabaseCursor()
    cursor.execute("""SELECT reviews.id, branches.repository, branches.id,
                             state, summary, description
                        FROM reviews
                        JOIN branches ON (branches.id=reviews.branch)
                       WHERE reviews.id=ANY (%s)""",
                   (review_ids,))
    reviews_by_id = {review.id: review
                     for review in Review.make(critic, cursor)}
    # Re-order the result to match the order of the input ids.
    return [reviews_by_id[review_id] for review_id in review_ids]

def fetchAll(critic, repository, state):
    """Fetch all reviews, optionally restricted to a repository and/or a
       set of states.  Reviews the user may not access are silently
       skipped (auth.AccessDenied is ignored)."""
    cursor = critic.getDatabaseCursor()
    # "TRUE" keeps the WHERE clause valid when no filter is requested.
    conditions = ["TRUE"]
    values = []
    if repository is not None:
        conditions.append("branches.repository=%s")
        values.append(repository.id)
    if state is not None:
        conditions.append("reviews.state IN (%s)"
                          % ", ".join(["%s"] * len(state)))
        values.extend(state)
    cursor.execute("""SELECT reviews.id, branches.repository, branches.id,
                             state, summary, description
                        FROM reviews
                        JOIN branches ON (branches.id=reviews.branch)
                       WHERE """ + " AND ".join(conditions) + """
                    ORDER BY reviews.id""",
                   values)
    return list(Review.make(
        critic, cursor, ignored_errors=(auth.AccessDenied,)))

================================================
FILE: src/api/impl/review_unittest.py
================================================
def basic():
    """Sanity-check the api.review read API against fixture data created by
       the surrounding system tests (review id 1 in the "critic"
       repository)."""
    import api

    critic = api.critic.startSession(for_testing=True)
    review = api.review.fetch(critic, review_id=1)

    # Identity and simple attributes.
    assert isinstance(review, api.review.Review)
    assert isinstance(review.id, int)
    assert review.id == 1
    assert int(review) == 1
    # Fetching by branch must return the same (cached) object.
    assert api.review.fetch(critic, branch=review.branch) is review
    assert review.state == "open"
    assert isinstance(review.summary, basestring)
    assert review.summary == "Minor /dashboard query optimizations"
    assert review.description is None
    assert isinstance(review.repository, api.repository.Repository)
    assert review.repository.name == "critic"
    assert isinstance(review.branch, api.branch.Branch)
    assert review.branch.name == "r/004-createreview"
    assert review.branch.repository is review.repository

    # Owners.
    assert isinstance(review.owners, frozenset)
    assert all(isinstance(owner, api.user.User) for owner in review.owners)
    assert list(review.owners)[0].name == "alice"

    # Review filters: one reviewer (bob), two watchers (dave, erin).
    assert isinstance(review.filters, list)
    assert all(isinstance(review_filter, api.filters.ReviewFilter)
               for review_filter in review.filters)
    assert len(review.filters) == 3
    assert isinstance(review.filters[0].subject, api.user.User)
    assert review.filters[0].subject.name == "bob"
    assert review.filters[0].type == "reviewer"
    assert review.filters[0].path == "/"
    assert isinstance(review.filters[0].id, int)
    assert review.filters[0].review is review
    assert isinstance(review.filters[0].creator, api.user.User)
    assert review.filters[0].creator.name == "alice"
    assert isinstance(review.filters[1].subject, api.user.User)
    assert review.filters[1].subject.name == "dave"
    assert review.filters[1].type == "watcher"
    assert review.filters[1].path == "/"
    assert isinstance(review.filters[1].id, int)
    assert review.filters[1].id != review.filters[0].id
    assert review.filters[1].review is review
    assert isinstance(review.filters[1].creator, api.user.User)
    assert review.filters[1].creator.name == "alice"
    assert isinstance(review.filters[2].subject, api.user.User)
    assert review.filters[2].subject.name == "erin"
    assert review.filters[2].type == "watcher"
    assert review.filters[2].path == "/"
    assert isinstance(review.filters[2].id, int)
    assert review.filters[2].id not in (review.filters[0].id,
                                        review.filters[1].id)
    assert review.filters[2].review is review
    assert isinstance(review.filters[2].creator, api.user.User)
    assert review.filters[2].creator.name == "alice"

    # Commits.
    assert isinstance(review.commits, api.commitset.CommitSet)
    assert all(isinstance(commit, api.commit.Commit)
               for commit in review.commits)
    assert len(review.commits) == 2
    assert len(review.commits.heads) == 1
    assert len(review.commits.tails) == 1
    topo_ordered = review.commits.topo_ordered
    # Python 2 iterator protocol; newest commit first.
    assert topo_ordered.next().summary == "Add missing import"
    assert topo_ordered.next().summary == \
        "Minor /dashboard query optimizations"
    assert review.commits == review.branch.commits

    # Rebases.
    assert isinstance(review.rebases, list)
    assert len(review.rebases) == 0

    # Error handling: invalid review id.
    try:
        api.review.fetch(critic, review_id=10000)
    except api.review.InvalidReviewId:
        pass
    except Exception as error:
        assert False, "wrong exception raised: %s" % error
    else:
        assert False, "no exception raised"

    # Error handling: existing branch that is not a review branch.
    master = api.branch.fetch(
        critic, repository=review.branch.repository, name="master")
    try:
        api.review.fetch(critic, branch=master)
    except api.review.InvalidReviewBranch:
        pass
    except Exception as error:
        assert False, "wrong exception raised: %s" % error
    else:
        assert False, "no exception raised"

    # fetchAll() with various repository/state filters.
    all_reviews = api.review.fetchAll(critic)
    assert isinstance(all_reviews, list)
    assert len(all_reviews) >= 1
    assert review in all_reviews
    critic_reviews = api.review.fetchAll(critic, repository=review.repository)
    assert isinstance(critic_reviews, list)
    assert review in critic_reviews
    open_reviews = api.review.fetchAll(critic, state="open")
    assert isinstance(open_reviews, list)
    assert review in open_reviews
    assert all(review.state == "open" for review in open_reviews)
    closed_reviews = api.review.fetchAll(critic, state="closed")
    assert isinstance(closed_reviews, list)
    assert review not in closed_reviews
    assert all(review.state == "closed" for review in closed_reviews)
    dropped_reviews = api.review.fetchAll(critic, state="dropped")
    assert isinstance(dropped_reviews, list)
    assert review not in dropped_reviews
    assert all(review.state == "dropped" for review in dropped_reviews)
    # The state argument also accepts an arbitrary iterable.
    any_reviews = api.review.fetchAll(
        critic, state=(state for state in ["open", "closed", "dropped"]))
    assert any_reviews == all_reviews

    print "basic: ok"

================================================
FILE: src/api/impl/reviewablefilechange.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2017 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import api
import apiobject

class ReviewableFileChange(apiobject.APIObject):
    """Implementation backing
       api.reviewablefilechange.ReviewableFileChange.

       Some accessors (assigned reviewers, draft changes) batch-fetch data
       for *all* currently cached instances in one query, writing directly
       into sibling instances via |filechange._impl|.  Note that the
       double-underscore attribute access on siblings works because name
       mangling is applied per enclosing class, not per instance."""

    wrapper_class = api.reviewablefilechange.ReviewableFileChange

    def __init__(self, filechange_id, review_id, changeset_id, file_id,
                 deleted_lines, inserted_lines, reviewed_by_id):
        self.id = filechange_id
        self.__review_id = review_id
        self.__changeset_id = changeset_id
        self.__file_id = file_id
        self.inserted_lines = inserted_lines
        self.deleted_lines = deleted_lines
        # A non-NULL reviewer column means the change has been reviewed.
        self.is_reviewed = reviewed_by_id is not None
        self.__reviewed_by_id = reviewed_by_id
        self.__assigned_reviewers = None
        self.__draft_changes = None
        self.__draft_changes_fetched = False

    def getReview(self, critic):
        """Return the api.review.Review this change belongs to."""
        return api.review.fetch(critic, self.__review_id)

    def getChangeset(self, critic):
        """Return the api.changeset.Changeset containing this change."""
        review = self.getReview(critic)
        return api.changeset.fetch(
            critic, review.repository, self.__changeset_id)

    def getFile(self, critic):
        """Return the changed api.file.File."""
        return api.file.fetch(critic, self.__file_id)

    def getReviewedBy(self, critic):
        """Return the reviewing api.user.User, or None if unreviewed."""
        if self.__reviewed_by_id is None:
            return None
        return api.user.fetch(critic, self.__reviewed_by_id)

    def getAssignedReviewers(self, critic):
        """Return the assigned reviewers as a frozenset of api.user.User.

           Populates the cache for every cached instance that does not have
           it yet, using a single batched query."""
        if self.__assigned_reviewers is None:
            cached_objects = ReviewableFileChange.allCached(critic)
            assert self.id in cached_objects
            # Filter out those cached objects (including this) whose assigned
            # reviewers hasn't been fetched yet.
            need_fetch = set()
            for filechange in cached_objects.values():
                if filechange._impl.__assigned_reviewers is None:
                    filechange._impl.__assigned_reviewers = set()
                    need_fetch.add(filechange.id)
            assert self.id in need_fetch
            cursor = critic.getDatabaseCursor()
            cursor.execute("""SELECT file, uid
                                FROM reviewuserfiles
                               WHERE file=ANY (%s)""",
                           (list(need_fetch),))
            for filechange_id, reviewer_id in cursor:
                filechange = cached_objects[filechange_id]
                filechange._impl.__assigned_reviewers.add(
                    api.user.fetch(critic, reviewer_id))
            # Freeze the per-instance sets once fully populated.
            for filechange_id in need_fetch:
                filechange = cached_objects[filechange_id]
                filechange._impl.__assigned_reviewers = frozenset(
                    filechange._impl.__assigned_reviewers)
        return self.__assigned_reviewers

    def getDraftChanges(self, critic):
        """Return the effective user's unpublished DraftChanges for this
           file change, or None.  Batch-fetched for all cached instances."""
        if not self.__draft_changes_fetched:
            cached_objects = ReviewableFileChange.allCached(critic)
            assert self.id in cached_objects
            # Filter out those cached objects (including this) whose draft
            # changes hasn't been fetched yet.
            need_fetch = set()
            for filechange in cached_objects.values():
                if not filechange._impl.__draft_changes_fetched:
                    need_fetch.add(filechange.id)
            assert self.id in need_fetch
            cursor = critic.getDatabaseCursor()
            cursor.execute(
                """SELECT file, from_state='reviewed', to_state='reviewed'
                     FROM reviewfilechanges
                    WHERE uid=%s
                      AND state='draft'
                      AND file=ANY (%s)""",
                (critic.effective_user.id, list(need_fetch)))
            draft_changes = {
                filechange_id: (from_is_reviewed, to_is_reviewed)
                for filechange_id, from_is_reviewed, to_is_reviewed in cursor
            }
            for filechange_id in need_fetch:
                filechange = cached_objects[filechange_id]
                filechange._impl.__draft_changes_fetched = True
                if filechange_id not in draft_changes:
                    # No unpublished changes in the database.
                    continue
                from_is_reviewed, to_is_reviewed = \
                    draft_changes[filechange_id]
                if filechange.is_reviewed != from_is_reviewed:
                    # The unpublished change has been made redundant, typically
                    # by another user making the same change.
                    continue
                new_reviewed_by = (critic.actual_user
                                   if to_is_reviewed else None)
                filechange._impl.__draft_changes = \
                    api.reviewablefilechange.ReviewableFileChange.DraftChanges(
                        critic.actual_user, new_reviewed_by)
        return self.__draft_changes

    @staticmethod
    def refresh(critic, tables, cached_filechanges):
        # Only refresh when a table we depend on was modified.
        if not tables.intersection(("reviewfiles", "reviewfilechanges",
                                    "reviewuserfiles")):
            return
        ReviewableFileChange.updateAll(
            critic,
            """SELECT id, review, changeset, file, deleted, inserted, reviewer
                 FROM reviewfiles
                WHERE id=ANY (%s)""",
            cached_filechanges)

@ReviewableFileChange.cached(
    api.reviewablefilechange.InvalidReviewableFileChangeId)
def fetch(critic, filechange_id):
    """Fetch a single reviewable file change by id (cached)."""
    cursor = critic.getDatabaseCursor()
    cursor.execute("""SELECT id, review, changeset, file, deleted, inserted,
                             reviewer
                        FROM reviewfiles
                       WHERE id=%s""",
                   (filechange_id,))
    return ReviewableFileChange.make(critic, cursor)

@ReviewableFileChange.cachedMany(
    api.reviewablefilechange.InvalidReviewableFileChangeIds)
def fetchMany(critic, filechange_ids):
    """Fetch multiple reviewable file changes by id (cached)."""
    cursor = critic.getDatabaseCursor()
    cursor.execute("""SELECT id, review, changeset, file, deleted, inserted,
                             reviewer
                        FROM reviewfiles
                       WHERE id=ANY (%s)""",
                   (filechange_ids,))
    return ReviewableFileChange.make(critic, cursor)

def fetchAll(critic, review, changeset, file, assignee, is_reviewed):
    """Fetch all reviewable file changes in |review|, optionally filtered
       by changeset, file, assignee and/or reviewed state.

       Raises api.reviewablefilechange.InvalidChangeset if |changeset| (or
       any of its contributing commits) is not reviewable in |review|."""
    cursor = critic.getDatabaseCursor()
    tables = ["reviewfiles"]
    conditions = ["reviewfiles.review=%s"]
    values = [review.id]
    if changeset:
        # Check if the changeset is a "squash" of the changes in multiple
        # commits. If so, return the reviewable file changes from each of the
        # commits.
        contributing_commits = changeset.contributing_commits
        if contributing_commits is None:
            raise api.reviewablefilechange.InvalidChangeset(changeset)
        if len(contributing_commits) > 1:
            result = []
            try:
                for commit in contributing_commits:
                    # Note: Checking that it is a reviewable commit here is sort
                    # of redundant; we could just call fetchAll() recursively,
                    # and it would raise an InvalidChangeset if it is not. The
                    # problem is that if it is not a reviewable commit, there
                    # may not be a changeset prepared for it, which would make
                    # this asynchronous. As long as we only deal with reviewable
                    # commits, the api.changeset.fetch() call is guaranteed to
                    # succeed synchronously.
                    if not review.isReviewableCommit(commit):
                        raise api.reviewablefilechange.InvalidChangeset(
                            changeset)
                    commit_changeset = api.changeset.fetch(
                        critic, review.repository, single_commit=commit)
                    result.extend(fetchAll(critic, review, commit_changeset,
                                           file, assignee, is_reviewed))
            except api.reviewablefilechange.InvalidChangeset:
                # Re-raise with the original (squash) changeset as the payload.
                raise api.reviewablefilechange.InvalidChangeset(changeset)
            return sorted(result, key=lambda change: change.id)
        elif not review.isReviewableCommit(next(iter(contributing_commits))):
            raise api.reviewablefilechange.InvalidChangeset(changeset)
        conditions.append("reviewfiles.changeset=%s")
        values.append(changeset.id)
    if file:
        conditions.append("reviewfiles.file=%s")
        values.append(file.id)
    if assignee:
        tables.append("JOIN reviewuserfiles "
                      " ON (reviewuserfiles.file=reviewfiles.id)")
        conditions.append("reviewuserfiles.uid=%s")
        values.append(assignee.id)
    if is_reviewed is not None:
        if assignee:
            # If the specified assignee has a draft change to the state, use
            # that changed state instead of the actual state when filtering.
            tables.append("LEFT OUTER JOIN reviewfilechanges "
                          " ON (reviewfilechanges.file=reviewfiles.id"
                          " AND reviewfilechanges.uid=reviewuserfiles.uid"
                          " AND reviewfilechanges.state='draft')")
            conditions.append("COALESCE(reviewfilechanges.to_state,"
                              " reviewfiles.state)=%s")
        else:
            conditions.append("reviewfiles.state=%s")
        values.append("reviewed" if is_reviewed else "pending")
    cursor.execute("""SELECT reviewfiles.id, reviewfiles.review,
                             reviewfiles.changeset, reviewfiles.file,
                             reviewfiles.deleted, reviewfiles.inserted,
                             reviewfiles.reviewer
                        FROM {}
                       WHERE {}
                    ORDER BY id""".format(
                        " ".join(tables), " AND ".join(conditions)),
                   values)
    return list(ReviewableFileChange.make(critic, cursor))

================================================
FILE: src/api/impl/reviewsummary.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2017 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from operator import itemgetter
import calendar
from datetime import datetime

import api
import api.impl
import apiobject

class ReviewSummaryContainer(apiobject.APIObject):
    """One page of review summaries plus a "more results exist" flag."""

    wrapper_class = api.reviewsummary.ReviewSummaryContainer

    def __init__(self, review_summaries, more):
        self.reviews = review_summaries
        self.more = more

class ReviewSummary(apiobject.APIObject):
    """A review paired with the UNIX timestamp of its latest activity."""

    wrapper_class = api.reviewsummary.ReviewSummary

    def __init__(self, review, latest_change):
        self.review = review
        self.latest_change = latest_change

def fetchMany(critic, search_type, user, count, offset):
    """Return a ReviewSummaryContainer of open reviews ordered by latest
       activity (newest first).

       |search_type| selects reviews owned by |user| ("own"), reviews the
       user is otherwise associated with ("other"), or all open reviews.
       |count| (default 10) and |offset| (default 0) page the result."""
    cursor = critic.getDatabaseCursor()
    if count is None:
        count = 10
    if offset is None:
        offset = 0
    if search_type == "own" or search_type == "other":
        cursor.execute(
            """SELECT DISTINCT reviews.id
                 FROM reviews
                 JOIN reviewusers ON (reviewusers.review=reviews.id)
                WHERE reviews.state='open'
                  AND reviewusers.uid=%s
                  AND reviewusers.owner=%s""",
            (user.id, search_type == "own"))
    else:
        cursor.execute(
            """SELECT DISTINCT reviews.id
                 FROM reviews
                WHERE reviews.state='open'""")
    rows = cursor.fetchall()
    review_ids = [row[0] for row in rows]
    # Latest commit time per review.
    cursor.execute(
        """SELECT reviewchangesets.review,
                  MAX(commits.commit_time) AS latest_change
             FROM commits
             JOIN changesets ON (changesets.child=commits.id)
             JOIN reviewchangesets
                      ON (reviewchangesets.changeset=changesets.id)
            WHERE reviewchangesets.review=ANY (%s)
         GROUP BY reviewchangesets.review""",
        (review_ids,))
    latest_commits = {}
    for review_id, latest_timestamp in cursor:
        if isinstance(latest_timestamp, str):
            # sqlite3 returns a string
            latest_commits[review_id] = calendar.timegm(datetime.strptime(
                latest_timestamp, "%Y-%m-%d %H:%M:%S").timetuple())
        else:
            latest_commits[review_id] = calendar.timegm(
                latest_timestamp.timetuple())
    # Latest comment time per review (current or edited comments only).
    cursor.execute(
        """SELECT commentchains.review, MAX(comments.time) AS latest_change
             FROM comments
             JOIN commentchains ON (commentchains.id=comments.chain)
            WHERE commentchains.review=ANY (%s)
              AND (comments.state='current' OR comments.state='edited')
         GROUP BY commentchains.review""",
        (review_ids,))
    latest_comments = {}
    for review_id, latest_timestamp in cursor:
        if isinstance(latest_timestamp, datetime):
            latest_comments[review_id] = calendar.timegm(
                latest_timestamp.timetuple())
        else:
            # NOTE(review): with sqlite this keeps the raw value (presumably
            # a string), unlike the commit branch above which parses it into
            # an integer — mixing the two relies on Python 2's cross-type
            # comparison in max()/sorted() below; confirm intended.
            latest_comments[review_id] = latest_timestamp
    latest_changes = []
    for review_id in review_ids:
        # Python 2: max(None, x) == x, and None only wins if both are None.
        latest_change = max(latest_commits.get(review_id),
                            latest_comments.get(review_id))
        if latest_change is not None:
            latest_changes.append((latest_change, review_id))
    # Newest first, then apply paging.
    latest_sorted_changes = sorted(
        latest_changes, reverse=True)[offset:offset+count]
    sorted_reviews = [
        review_id for _, review_id in latest_sorted_changes
    ]
    review_objects = api.review.fetchMany(critic, sorted_reviews)
    review_summaries = [
        ReviewSummary(review, latest_change[0]).wrap(critic)
        for review, latest_change in zip(review_objects,
                                         latest_sorted_changes)
    ]
    has_more = len(latest_changes) > len(review_summaries) + offset
    return ReviewSummaryContainer(review_summaries, has_more).wrap(critic)

================================================
FILE: src/api/impl/user.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import api
import apiobject
import dbutils

class User(apiobject.APIObject):
    # Implementation backing api.user.User.  Row shape is
    # (id, name, fullname, status, email); the anonymous user has id None.
    wrapper_class = api.user.User

    def __init__(self, user_id, name, fullname, status, email):
        self.id = user_id
        self.name = name
        self.fullname = fullname
        self.status = status
        self.email = email

        # Things that are fetched on demand.
        self.__internal = None

    def isAnonymous(self):
        # Only the anonymous user has no id.
        return self.id is None

    def getInternal(self, critic):
        # Lazily create (and cache) the legacy dbutils.User object.
        if not self.__internal:
            if self.isAnonymous():
                self.__internal = dbutils.User.makeAnonymous()
            else:
                self.__internal = dbutils.User.fromId(critic.database, self.id)
        return self.__internal

    def getPrimaryEmails(self, critic):
        # Return all of the user's registered email addresses;
        # |selected| flags the one recorded in users.email.
        cursor = critic.getDatabaseCursor()
        cursor.execute("""SELECT useremails.email, useremails.id=users.email,
                                 useremails.verified
                            FROM useremails
                            JOIN users ON (users.id=useremails.uid)
                           WHERE useremails.uid=%s
                        ORDER BY useremails.id ASC""",
                       (self.id,))
        return [api.user.User.PrimaryEmail(address, bool(selected),
                                           dbutils.boolean(verified))
                for address, selected, verified in cursor]

    def getGitEmails(self, critic):
        # Addresses used to match this user against Git commit meta-data.
        cursor = critic.getDatabaseCursor()
        cursor.execute("""SELECT email
                            FROM usergitemails
                           WHERE uid=%s""",
                       (self.id,))
        return set(email for (email,) in cursor)

    def getRepositoryFilters(self, critic):
        # Return { api.repository.Repository: [api.filters.RepositoryFilter] }
        # for all of the user's filters, ordered by filter id.
        from api.impl.filters import RepositoryFilter

        all_repositories = {}
        filters = {}

        # NOTE(review): this helper (and all_repositories) is never called in
        # the visible code; RepositoryFilter.make() apparently resolves
        # repositories itself.  Looks like dead code — confirm before removal.
        def processRepository(repository_id):
            if repository_id not in all_repositories:
                all_repositories[repository_id] = api.repository.fetch(
                    critic, repository_id=repository_id)
            return all_repositories[repository_id]

        cursor = critic.getDatabaseCursor()
        cursor.execute("""SELECT id, uid, type, path, repository, delegate
                            FROM filters
                           WHERE uid=%s
                        ORDER BY id ASC""",
                       (self.id,))

        for repository_filter in RepositoryFilter.make(critic, cursor):
            filters.setdefault(repository_filter.repository, []).append(
                repository_filter)

        return filters

    def hasRole(self, critic, role):
        # LEFT OUTER JOIN: the roles row always matches if the role exists;
        # uid is non-NULL only if this user actually has it.
        cursor = critic.getDatabaseCursor()
        cursor.execute("""SELECT uid
                            FROM roles
                 LEFT OUTER JOIN userroles ON (userroles.role=roles.name
                                           AND userroles.uid=%s)
                           WHERE name=%s""",
                       (self.id, role))
        row = cursor.fetchone()
        if row:
            return row[0] is not None
        # No row at all => the role itself does not exist.
        raise api.user.InvalidRole(role)

    def getPreference(self, critic, item, user, repository):
        # Resolve |item| for this user (and optionally |repository|), falling
        # back to the system default (uid/repository NULL) rows.
        cursor = critic.getDatabaseCursor()
        cursor.execute("SELECT type FROM preferences WHERE item=%s", (item,))
        row = cursor.fetchone()
        if not row:
            raise api.preference.InvalidPreferenceItem(item)
        preference_type = row[0]
        arguments = [item]
        where = ["item=%s"]
        # Boolean preferences are stored in the integer column.
        if preference_type in ("boolean", "integer"):
            column = "integer"
        else:
            column = "string"
        if not self.isAnonymous():
            arguments.append(user.id)
            where.append("uid=%s OR uid IS NULL")
        else:
            where.append("uid IS NULL")
        if repository is not None:
            arguments.append(repository.id)
            where.append("repository=%s OR repository IS NULL")
        else:
            where.append("repository IS NULL")
        where = " AND ".join("(%s)" % condition for condition in where)
        # Only |column| and |where| are interpolated; values go via arguments.
        query = ("""SELECT %(column)s, uid, repository
                      FROM userpreferences
                     WHERE %(where)s"""
                 % { "column": column, "where": where })
        cursor.execute(query, arguments)
        # Most specific row wins: NULLs sort first, so a row with non-NULL
        # uid/repository sorts last.  Assumes at least one (default) row
        # exists per item — otherwise this raises IndexError.
        row = sorted(cursor, key=lambda row: row[1:])[-1]
        value, user_id, repository_id = row
        if preference_type == "boolean":
            value = bool(value)
        # Report None for user/repository when the default row was used.
        if user_id is None:
            user = None
        if repository_id is None:
            repository = None
        return api.preference.Preference(item, value, user, repository)

    @staticmethod
    def refresh(critic, tables, cached_users):
        # Re-read cached User objects after the named tables changed.
        if not tables.intersection(("users", "useremails")):
            return
        User.updateAll(
            critic,
            """SELECT users.id, name, fullname, status, useremails.email
                 FROM users
      LEFT OUTER JOIN useremails ON (useremails.id=users.email
                                 AND (useremails.verified IS NULL
                                   OR useremails.verified))
                WHERE users.id=ANY (%s)""",
            cached_users)

@User.cached()
def fetch(critic, user_id, name):
    # Fetch one user by id or by name; translate the plural "invalid"
    # exceptions from fetchMany() into their singular forms.
    try:
        return fetchMany(critic,
                         user_ids=None if user_id is None else [user_id],
                         names=None if name is None else [name])[0]
    except api.user.InvalidUserIds as error:
        raise api.user.InvalidUserId(error.values[0])
    except api.user.InvalidUserNames as error:
        raise api.user.InvalidUserName(error.values[0])

def fetchMany(critic, user_ids, names):
    # Fetch users by ids or by names (exactly one of the two is given).
    # The result preserves input order; passing a set returns a set.
    # Raises InvalidUserIds/InvalidUserNames listing all unknown values.
    return_type = list
    if user_ids is not None:
        if isinstance(user_ids, set):
            return_type = set
        where_column = "users.id"
        values = [int(user_id) for user_id in user_ids]
        column_index = 0
    else:
        if isinstance(names, set):
            return_type = set
        where_column = "name"
        values = [str(name) for name in names]
        column_index = 1
    cursor = critic.getDatabaseCursor()
    # |where_column| is one of two hard-coded identifiers, never user input.
    cursor.execute("""SELECT users.id, name, fullname, status, useremails.email
                        FROM users
             LEFT OUTER JOIN useremails ON (useremails.id=users.email
                                        AND (useremails.verified IS NULL
                                          OR useremails.verified))
                       WHERE """ + where_column + """=ANY (%s)""",
                   (values,))
    rows = cursor.fetchall()
    if len(rows) < len(values):
        found = set(row[column_index] for row in rows)
        if user_ids is not None:
            exception_type = api.user.InvalidUserIds
        else:
            exception_type = api.user.InvalidUserNames
        values = [value for value in values if value not in found]
        raise exception_type(values)
    # Re-emit rows in the order the values were requested.
    rows = dict((row[column_index], row) for row in rows)
    return return_type(User.make(critic, (rows[key] for key in values)))

def fetchAll(critic, status):
    # Fetch all users, optionally restricted to the given status values,
    # ordered by user id.
    cursor = critic.getDatabaseCursor()
    if status is None:
        condition = ""
        values = ()
    else:
        condition = " WHERE status IN (%s)" % ", ".join(["%s"] * len(status))
        values = tuple(status)
    cursor.execute("""SELECT users.id, name, fullname, status, useremails.email
                        FROM users
             LEFT OUTER JOIN useremails ON (useremails.id=users.email
                                        AND (useremails.verified IS NULL
                                          OR useremails.verified))
                   """ + condition + """
                    ORDER BY users.id""",
                   values)
    return list(User.make(critic, cursor))

def anonymous(critic):
    # The anonymous user: all fields None except status.
    return next(User.make(critic, [(None, None, None, "anonymous", None)]))


================================================
FILE: src/api/impl/user_unittest.py
================================================
def basic(arguments):
    # Integration test for the api.user module; runs against a live test
    # instance (Python 2: note the print statement at the end).
    import api

    critic = api.critic.startSession(for_testing=True)

    alice = api.user.fetch(critic, name="alice")
    bob = api.user.fetch(critic, name="bob")
    carol = api.user.fetch(critic, name="carol")
    dave = api.user.fetch(critic, name="dave")
    erin = api.user.fetch(critic, name="erin")
    felix = api.user.fetch(critic, name="felix")
    howard = api.user.fetch(critic, name="howard")
    gina = api.user.fetch(critic, name="gina")
    iris = api.user.fetch(critic, name="iris")
    admin = api.user.fetch(critic, name="admin")
    extra = api.user.fetch(critic, name="extra")

    all_users = [admin, alice, bob, dave, erin, howard, carol, felix, gina,
                 iris, extra]

    # Basic attribute / identity semantics.
    assert isinstance(alice, api.user.User)
    assert isinstance(alice.id, int)
    assert int(alice) == alice.id
    assert hash(alice) == hash(alice.id)
    assert alice == alice.id
    assert alice.id == alice
    assert alice.name == "alice"
    assert alice.fullname == "Alice von Testing"
    assert alice.status == "current"
    assert alice.email == "alice@example.org"
    assert alice.is_anonymous is False

    assert isinstance(alice.primary_emails, list)
    assert len(alice.primary_emails) == 1
    assert isinstance(alice.primary_emails[0], api.user.User.PrimaryEmail)
    assert alice.primary_emails[0].address == "alice@example.org"
    assert alice.primary_emails[0].selected is True
    assert alice.primary_emails[0].verified is None

    assert isinstance(alice.git_emails, set)
    if len(alice.git_emails) == 0:
        # Git emails depend on earlier tests having run; tolerated via flag.
        assert arguments.unreliable_git_emails
    else:
        assert len(alice.git_emails) == 2
        assert "alice@example.org" in alice.git_emails
        assert "common@example.org" in alice.git_emails

    assert isinstance(alice.repository_filters, dict)
    assert len(alice.repository_filters) == 1
    # Python 2: dict.items() returns a list, so [0] is valid here.
    repository, filters = alice.repository_filters.items()[0]
    assert isinstance(repository, api.repository.Repository)
    assert repository.name == "critic"
    assert len(filters) == 1
    assert isinstance(filters[0], api.filters.RepositoryFilter)
    assert filters[0].subject is alice
    assert filters[0].type == "reviewer"
    assert filters[0].path == "028-gitemails/"
    assert isinstance(filters[0].id, int)
    assert filters[0].repository is repository
    assert isinstance(filters[0].delegates, frozenset)
    assert all(isinstance(delegate, api.user.User)
               for delegate in filters[0].delegates)
    assert erin in filters[0].delegates

    assert not (alice == bob)
    assert alice != bob

    # Argument validation in fetch(): wrong session object, no/both
    # selectors, wrong selector type.
    try:
        api.user.fetch(alice, user_id=alice.id)
    except AssertionError:
        pass
    else:
        assert False

    try:
        api.user.fetch(critic)
    except AssertionError:
        pass
    else:
        assert False

    try:
        api.user.fetch(critic, user_id=alice.id, name=alice.name)
    except AssertionError:
        pass
    else:
        assert False

    try:
        api.user.fetch(critic, user_id="foo")
    except ValueError:
        pass
    else:
        assert False

    try:
        api.user.fetch(critic, user_id=4711)
    except api.user.InvalidUserId as error:
        assert error.message == "Invalid user id: %r" % 4711
        assert error.value == 4711
    else:
        assert False

    try:
        api.user.fetch(critic, name="nobody")
    except api.user.InvalidUserName as error:
        assert error.message == "Invalid user name: %r" % "nobody"
        assert error.value == "nobody"
    else:
        assert False

    # Same validation for fetchMany().
    try:
        api.user.fetchMany(alice, user_ids=[alice.id])
    except AssertionError:
        pass
    else:
        assert False

    try:
        api.user.fetchMany(critic, user_ids=[alice.id], names=[alice.name])
    except AssertionError:
        pass
    else:
        assert False

    try:
        api.user.fetchMany(critic, user_ids=[4711, 4712])
    except api.user.InvalidUserIds as error:
        assert error.message == "Invalid user ids: %r" % [4711, 4712], error.message
        assert error.values == [4711, 4712], repr(error.values)
    else:
        assert False

    try:
        api.user.fetchMany(critic, names=["nobody", "anybody"])
    except api.user.InvalidUserNames as error:
        assert error.message == "Invalid user names: %r" % ["nobody", "anybody"], error.message
        assert error.values == ["nobody", "anybody"], repr(error.values)
    else:
        assert False

    # fetchMany() preserves input order for lists/generators, and returns a
    # set when given a set.
    alice_bob_and_dave = api.user.fetchMany(
        critic, user_ids=[alice.id, bob.id, dave.id])
    assert isinstance(alice_bob_and_dave, list), type(alice_bob_and_dave)
    assert alice_bob_and_dave == [alice, bob, dave], repr(alice_bob_and_dave)

    alice_bob_and_dave = api.user.fetchMany(
        critic, names=[alice.name, bob.name, dave.name])
    assert isinstance(alice_bob_and_dave, list), type(alice_bob_and_dave)
    assert alice_bob_and_dave == [alice, bob, dave], repr(alice_bob_and_dave)

    alice_bob_and_dave = api.user.fetchMany(
        critic, user_ids=set([alice.id, bob.id, dave.id]))
    assert isinstance(alice_bob_and_dave, set), type(alice_bob_and_dave)
    assert alice_bob_and_dave == set([alice, bob, dave]), repr(alice_bob_and_dave)

    alice_bob_and_dave = api.user.fetchMany(
        critic, names=set([alice.name, bob.name, dave.name]))
    assert isinstance(alice_bob_and_dave, set), type(alice_bob_and_dave)
    assert alice_bob_and_dave == set([alice, bob, dave]), repr(alice_bob_and_dave)

    alice_bob_and_dave = api.user.fetchMany(
        critic, user_ids=(user.id for user in [alice, bob, dave]))
    assert isinstance(alice_bob_and_dave, list), type(alice_bob_and_dave)
    assert alice_bob_and_dave == [alice, bob, dave], repr(alice_bob_and_dave)

    alice_bob_and_dave = api.user.fetchMany(
        critic, names=(user.name for user in [alice, bob, dave]))
    assert isinstance(alice_bob_and_dave, list), type(alice_bob_and_dave)
    assert alice_bob_and_dave == [alice, bob, dave], repr(alice_bob_and_dave)

    # fetchAll() with and without status filtering.
    users = api.user.fetchAll(critic)
    assert isinstance(users, list)
    assert users == sorted(all_users, key=lambda user: user.id)

    users = api.user.fetchAll(critic, status="current")
    assert isinstance(users, list)
    assert users == sorted([user for user in all_users
                            if user.status == "current"],
                           key=lambda user: user.id)

    users = api.user.fetchAll(critic, status=["current", "absent"])
    assert isinstance(users, list)
    assert users == sorted([user for user in all_users
                            if user.status in ("current", "absent")],
                           key=lambda user: user.id)

    users = api.user.fetchAll(critic, status=["retired", "absent"])
    assert isinstance(users, list)
    assert users == sorted([user for user in all_users
                            if user.status in ("retired", "absent")],
                           key=lambda user: user.id)

    users = api.user.fetchAll(critic, status=["absent"])
    assert isinstance(users, list)
    assert users == []

    users = api.user.fetchAll(
        critic, status=(status for status in ["current", "absent"]))
    assert isinstance(users, list)
    assert users == sorted([user for user in all_users
                            if user.status in ("current", "absent")],
                           key=lambda user: user.id)

    # Role checks.
    assert alice.hasRole("administrator") is False
    assert alice.hasRole("repositories") is False
    assert alice.hasRole("newswriter") is False
    assert alice.hasRole("developer") is False

    assert admin.hasRole("administrator") is True
    assert admin.hasRole("repositories") is True
    if arguments.unreliable_admin_newswriter:
        assert isinstance(admin.hasRole("newswriter"), bool)
    else:
        assert admin.hasRole("newswriter") is True
    assert admin.hasRole("developer") is True

    try:
        alice.hasRole("crazy-cat-lady")
    except api.user.InvalidRole as error:
        assert error.message == "Invalid role: %r" % "crazy-cat-lady", error.message
        assert error.role == "crazy-cat-lady", error.role
    else:
        assert False

    # The anonymous user.
    anonymous = api.user.anonymous(critic)
    assert isinstance(anonymous, api.user.User)
    assert anonymous.id is None
    assert anonymous.name is None
    assert anonymous.fullname is None
    assert anonymous.is_anonymous is True
    assert anonymous.email is None
    assert anonymous.primary_emails == []
    assert anonymous.git_emails == set([])
    assert anonymous.repository_filters == {}

    print "basic: ok"

def preferences():
    # Integration test for User.getPreference(): system defaults vs per-user
    # and per-repository overrides set up by the test data.
    import api

    critic = api.critic.startSession(for_testing=True)
    alice = api.user.fetch(critic, name="alice")
    repository = api.repository.fetch(critic, name="critic")

    compactMode = alice.getPreference("commit.diff.compactMode")
    assert isinstance(compactMode.value, bool)
    assert compactMode.item == "commit.diff.compactMode"
    assert compactMode.value is True
    assert compactMode.user is None
    assert compactMode.repository is None

    rulerColumn = alice.getPreference("commit.diff.rulerColumn")
    assert isinstance(rulerColumn.value, int)
    assert rulerColumn.item == "commit.diff.rulerColumn"
    assert rulerColumn.value == 0
    assert rulerColumn.user is None
    assert rulerColumn.repository is None

    defaultGroups = alice.getPreference("dashboard.defaultGroups")
    assert isinstance(defaultGroups.value, str)
    assert defaultGroups.item == "dashboard.defaultGroups"
    assert defaultGroups.value == "owned,draft,active,watched"
    assert defaultGroups.user is None
    assert defaultGroups.repository is None

    # Read per-repository, not overridden.
    compactMode = alice.getPreference("commit.diff.compactMode",
                                      repository=repository)
    assert compactMode.item == "commit.diff.compactMode"
    assert compactMode.value is True
    assert compactMode.user is None
    assert compactMode.repository is None

    # Read per-user, overridden per user.
    visualTabs = alice.getPreference("commit.diff.visualTabs")
    assert visualTabs.value is True
    assert visualTabs.user is alice
    assert visualTabs.repository is None

    # Read per-repository, overridden per user.
    visualTabs = alice.getPreference("commit.diff.visualTabs",
                                     repository=repository)
    assert visualTabs.value is True
    assert visualTabs.user is alice
    assert visualTabs.repository is None

    # Read per-user, overridden per repository.
    expandAllFiles = alice.getPreference("commit.expandAllFiles")
    assert expandAllFiles.value is False
    assert expandAllFiles.user is None
    assert expandAllFiles.repository is None

    # Read per-repository, overridden per repository.
expandAllFiles = alice.getPreference("commit.expandAllFiles", repository=repository) assert expandAllFiles.value is True assert expandAllFiles.user is alice assert expandAllFiles.repository is repository print "preferences: ok" def main(argv): import argparse parser = argparse.ArgumentParser() parser.add_argument("--unreliable-git-emails", action="store_true") parser.add_argument("--unreliable-admin-newswriter", action="store_true") parser.add_argument("tests", nargs=argparse.REMAINDER) arguments = parser.parse_args(argv) for test in arguments.tests: if test == "basic": basic(arguments) elif test == "preferences": preferences() ================================================ FILE: src/api/labeledaccesscontrolprofile.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api

class LabeledAccessControlProfileError(api.APIError):
    """Base exception for all errors related to the LabeledAccessControlProfile
       class"""
    pass

class InvalidAccessControlProfileLabels(LabeledAccessControlProfileError):
    """Raised when an invalid label set is used"""

    def __init__(self, value):
        """Constructor"""
        super(InvalidAccessControlProfileLabels, self).__init__(
            "Invalid labels: %s" % "|".join(value))
        self.value = value

class LabeledAccessControlProfile(api.APIObject):
    """Representation of a labeled access control profile selector"""

    RULE_VALUES = frozenset(["allow", "deny"])

    # Identity is defined by the (sorted) label set, rendered "a|b|c".
    def __str__(self):
        return "|".join(self.labels)
    def __hash__(self):
        return hash(str(self))
    def __eq__(self, other):
        return str(self) == str(other)

    @property
    def labels(self):
        """The labels for which the access control profile is selected"""
        return self._impl.labels

    @property
    def profile(self):
        """The access control profile that is selected"""
        return self._impl.getAccessControlProfile(self.critic)

def fetch(critic, labels):
    """Fetch an LabeledAccessControlProfile object for the given labels"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    # Labels are normalized to a sorted tuple of strings.
    labels = tuple(sorted(str(label) for label in labels))
    return api.impl.labeledaccesscontrolprofile.fetch(critic, labels)

def fetchAll(critic, profile=None):
    """Fetch LabeledAccessControlProfile objects for all labeled profiles
       selectors in the system"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    assert profile is None \
        or isinstance(profile, api.accesscontrolprofile.AccessControlProfile)
    return api.impl.labeledaccesscontrolprofile.fetchAll(critic, profile)


================================================
FILE: src/api/log/__init__.py
================================================
# Python 2 implicit relative imports of the package's submodules.
import rebase
import partition


================================================
FILE: src/api/log/partition.py
================================================
import api

class PartitionError(api.APIError):
    """Raised for incompatible commits/rebases arguments to create()"""
    pass

class Partition(api.APIObject):
    """Representation of a part of a disjoint commit log

       The history of a branch (such as a review branch) that has potentially
       been rebased one or more times during its existence, is represented as
       a linked list of "partitions" where each partition represents a
       connected set of commits and each "link" or "edge" between represents
       the branch being rebased."""

    class Edge(object):
        """The edge (in one direction) between two partitions"""

        def __init__(self, rebase, partition):
            self.__rebase = rebase
            self.__partition = partition

        @property
        def rebase(self):
            """The rebase between the partitions"""
            return self.__rebase

        @property
        def partition(self):
            """The other partition"""
            return self.__partition

    @property
    def preceding(self):
        """The edge leading to the preceding (newer) partition"""
        return self._impl.preceding

    @property
    def following(self):
        """The edge leading to the following (older) partition"""
        return self._impl.following

    @property
    def commits(self):
        """The set of commits in the partition

           The return value is an api.commitset.CommitSet object."""
        return self._impl.commits

def create(critic, commits, rebases=[]):
    """Divide a set of commits into partitions and return the first"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    # |rebases| default is never mutated (list() below copies), so the
    # mutable default argument is safe here.
    if not isinstance(commits, api.commitset.CommitSet):
        commits = list(commits)
        assert all(isinstance(commit, api.commit.Commit)
                   for commit in commits)
    rebases = list(rebases)
    assert all(isinstance(rebase, api.log.rebase.Rebase)
               for rebase in rebases)
    return api.impl.log.partition.create(critic, commits, rebases)


================================================
FILE: src/api/log/rebase.py
================================================
import api

class RebaseError(api.APIError):
    """Base exception for all errors related to the Rebase class"""
    pass

class InvalidRebaseId(RebaseError):
    """Raised when an invalid rebase id is used"""

    def __init__(self, value):
        """Constructor"""
        super(InvalidRebaseId, self).__init__("Invalid rebase id: %r" % value)
        self.value = value

class Rebase(api.APIObject):
    """Representation of a rebase of a review branch"""

    @property
    def id(self):
        # The rebase's unique id.
        return self._impl.id

    @property
    def review(self):
        # The rebased review, as an api.review.Review object.
        return self._impl.getReview(self.critic)

    @property
    def old_head(self):
        # The review branch's head commit before the rebase.
        return self._impl.getOldHead(self.critic)

    @property
    def new_head(self):
        # The review branch's head commit after the rebase.
        return self._impl.getNewHead(self.critic)

    @property
    def creator(self):
        # The user that performed the rebase.
        return self._impl.getCreator(self.critic)

class HistoryRewrite(Rebase):
    """Representation of a history rewrite rebase

       The review branch after a history rewrite rebase is always based on the
       same upstream commit as before it and makes the exact same changes
       relative it, but contains a different set of actual commits."""
    pass

class MoveRebase(Rebase):
    """Representation of a "move" rebase

       A move rebase moves the changes in the review onto a different upstream
       commit."""

    @property
    def old_upstream(self):
        # The upstream commit before the rebase.
        return self._impl.getOldUpstream(self.critic)

    @property
    def new_upstream(self):
        # The upstream commit after the rebase.
        return self._impl.getNewUpstream(self.critic)

    @property
    def equivalent_merge(self):
        return self._impl.getEquivalentMerge(self.critic)

    @property
    def replayed_rebase(self):
        return self._impl.getReplayedRebase(self.critic)

def fetch(critic, rebase_id):
    """Fetch a Rebase object with the given id"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    return api.impl.log.rebase.fetch(critic, rebase_id)

def fetchAll(critic, review=None, pending=False):
    """Fetch Rebase objects for all rebases

       If a review is provided, restrict the return value to rebases of the
       specified review.

       If pending is True, fetch only pending rebases, otherwise fetch only
       performed (completed) rebases."""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    assert review is None or isinstance(review, api.review.Review)
    assert isinstance(pending, bool)
    return api.impl.log.rebase.fetchAll(critic, review, pending)


================================================
FILE: src/api/preference.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import api class InvalidPreferenceItem(api.APIError): """Raised if an invalid preference item is used.""" def __init__(self, item): """Constructor""" super(InvalidPreferenceItem, self).__init__( "Invalid preference item: %r" % item) class Preference(object): def __init__(self, item, value, user, repository): self.__item = item self.__value = value self.__user = user self.__repository = repository def __bool__(self): return bool(self.__value) def __int__(self): return self.__value def __str__(self): return self.__value @property def item(self): return self.__item @property def value(self): return self.__value @property def user(self): return self.__user @property def repository(self): return self.__repository ================================================ FILE: src/api/reply.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api class ReplyError(api.APIError): pass class InvalidReplyId(ReplyError): """Raised when an invalid reply id is used.""" def __init__(self, reply_id): """Constructor""" super(InvalidReplyId, self).__init__( "Invalid reply id: %d" % reply_id) self.reply_id = reply_id class InvalidReplyIds(ReplyError): """Raised by fetchMany() when invalid reply ids are used.""" def __init__(self, reply_ids): """Constructor""" super(InvalidReplyIds, self).__init__( "Invalid reply ids: %s" % ", ".join(map(str, reply_ids))) self.reply_ids = reply_ids class Reply(api.APIObject): @property def id(self): """The reply's unique id""" return self._impl.id @property def is_draft(self): """True if the reply is not yet published Unpublished replies are not displayed to other users.""" return self._impl.is_draft @property def comment(self): """The comment this reply is a reply to The comment is returned as an api.comment.Comment object.""" return self._impl.getComment(self.critic) @property def author(self): """The reply's author The author is returned as an api.user.User object.""" return self._impl.getAuthor(self.critic) @property def timestamp(self): """The reply's timestamp The return value is a datetime.datetime object.""" return self._impl.timestamp @property def text(self): """The reply's text""" return self._impl.text def fetch(critic, reply_id): """Fetch the Reply object with the given id""" import api.impl assert isinstance(critic, api.critic.Critic) assert isinstance(reply_id, int) return api.impl.reply.fetch(critic, reply_id) def fetchMany(critic, reply_ids): """Fetch multiple Reply objects with the given ids""" import api.impl assert isinstance(critic, api.critic.Critic) reply_ids = list(reply_ids) assert all(isinstance(reply_id, int) for reply_id in reply_ids) return api.impl.reply.fetchMany(critic, reply_ids) ================================================ FILE: src/api/repository.py ================================================ # -*- mode: python; encoding: utf-8 -*- 
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import api

class RepositoryError(api.APIError):
    """Base exception for all errors related to the Repository class"""
    pass

class InvalidRepositoryId(RepositoryError):
    """Raised when an invalid repository id is used"""

    def __init__(self, repository_id):
        """Constructor"""
        super(InvalidRepositoryId, self).__init__(
            "Invalid repository id: %r" % repository_id)

class InvalidRepositoryName(RepositoryError):
    """Raised when an invalid repository name is used"""

    def __init__(self, name):
        """Constructor"""
        super(InvalidRepositoryName, self).__init__(
            "Invalid repository name: %r" % name)

class InvalidRepositoryPath(RepositoryError):
    """Raised when an invalid repository path is used"""

    def __init__(self, path):
        """Constructor"""
        super(InvalidRepositoryPath, self).__init__(
            "Invalid repository path: %r" % path)

class InvalidRef(RepositoryError):
    """Raised by Repository.resolveRef() for invalid refs"""

    def __init__(self, ref):
        """Constructor"""
        super(InvalidRef, self).__init__("Invalid ref: %r" % ref)
        self.ref = ref

class GitCommandError(RepositoryError):
    """Raised by Repository methods when 'git' fails unexpectedly"""

    def __init__(self, argv, returncode, stdout, stderr):
        # The failed 'git' invocation and its captured outcome.
        self.argv = argv
        self.returncode = returncode
        self.stdout = stdout
        self.stderr = stderr

class Repository(api.APIObject):
    """Representation of one of Critic's repositories"""

    @property
    def id(self):
        """The repository's unique id"""
        return self._impl.id

    @property
    def name(self):
        """The repository's short name"""
        return self._impl.name

    @property
    def path(self):
        """The repository's (absolute) file-system path"""
        return self._impl.path

    @property
    def relative_path(self):
        """The repository's (relative) file-system path

           The path is relative the directory in which all repositories are
           stored on the system (`configuration.paths.GIT_DIR`)."""
        return self._impl.relative_path

    @property
    def url(self):
        """The repository's URL

           The URL type depends on the effective user's 'repository.urlType'
           setting."""
        return self._impl.getURL(self.critic)

    def resolveRef(self, ref, expect=None, short=False):
        """Resolve the given ref to a SHA-1 using 'git rev-parse'

           If 'expect' is not None, it should be a string containing a Git
           object type, such as "commit", "tag", "tree" or "blob".  When
           given, it is passed on to 'git rev-parse' using the "^{}" syntax.

           If 'short' is True, 'git rev-parse' is given the '--short'
           argument, which causes it to return a shortened SHA-1.  If 'short'
           is an int, it is given as the argument value: '--short=N'.

           If the ref can't be resolved, an InvalidRef exception is raised."""
        assert expect is None or expect in ("blob", "commit", "tag", "tree")
        return self._impl.resolveRef(str(ref), expect, short)

    def listCommits(self, include=None, exclude=None, args=None, paths=None):
        """List commits using 'git rev-list'

           Call 'git rev-list' to list commits reachable from the commits in
           'include' but not reachable from the commits in 'exclude'.  Extra
           arguments to 'git rev-list' can be added through 'args' or 'paths'.

           The return value is a list of api.commit.Commit objects."""
        # Normalize each argument to a list; a single Commit is accepted for
        # 'include'/'exclude' as a convenience.
        if include is None:
            include = []
        elif isinstance(include, api.commit.Commit):
            include = [include]
        else:
            include = list(include)
        if exclude is None:
            exclude = []
        elif isinstance(exclude, api.commit.Commit):
            exclude = [exclude]
        else:
            exclude = list(exclude)
        args = [] if args is None else list(args)
        paths = [] if paths is None else list(paths)
        assert all(isinstance(commit, api.commit.Commit)
                   for commit in include)
        assert all(isinstance(commit, api.commit.Commit)
                   for commit in exclude)
        # basestring: this file targets Python 2.
        assert all(isinstance(arg, basestring) for arg in args)
        assert all(isinstance(path, basestring) for path in paths)
        return self._impl.listCommits(self, include, exclude, args, paths)

def fetch(critic, repository_id=None, name=None, path=None):
    """Fetch a Repository object with the given id, name or path"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    # Exactly one of the three selectors must be provided.
    assert sum((repository_id is None, name is None, path is None)) == 2
    return api.impl.repository.fetch(critic, repository_id, name, path)

def fetchAll(critic):
    """Fetch Repository objects for all repositories

       The return value is a list ordered by the repositories' names."""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    return api.impl.repository.fetchAll(critic)

def fetchHighlighted(critic, user):
    """Fetch Repository objects for repositories that are extra relevant

       The return value is a list ordered by the repositories' names."""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    assert isinstance(user, api.user.User)
    return api.impl.repository.fetchHighlighted(critic, user)


================================================
FILE: src/api/review.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import api

class ReviewError(api.APIError):
    """Base exception for all errors related to the Review class."""
    pass

class InvalidReviewId(ReviewError):
    """Raised when an invalid review id is used."""

    def __init__(self, review_id):
        """Constructor"""
        super(InvalidReviewId, self).__init__(
            "Invalid review id: %d" % review_id)

class InvalidReviewBranch(ReviewError):
    """Raised when an invalid review branch is used."""

    def __init__(self, branch):
        """Constructor"""
        super(InvalidReviewBranch, self).__init__(
            "Invalid review branch: %r" % str(branch))

class Review(api.APIObject):
    """Representation of a Critic review"""

    # Valid values for the 'state' argument of fetchAll().
    STATE_VALUES = frozenset(["open", "closed", "dropped"])

    @property
    def id(self):
        """The review's unique id"""
        return self._impl.id

    @property
    def state(self):
        """The review's state"""
        return self._impl.state

    @property
    def summary(self):
        """The review's summary"""
        return self._impl.summary

    @property
    def description(self):
        """The review's description, or None"""
        return self._impl.description

    @property
    def repository(self):
        """The review's repository

           The repository is returned as an api.repository.Repository
           object."""
        return self._impl.getRepository(self.critic)

    @property
    def branch(self):
        """The review's branch

           The branch is returned as an api.branch.Branch object."""
        return self._impl.getBranch(self.critic)

    @property
    def owners(self):
        """The review's owners

           The owners are returned as a set of api.user.User objects."""
        return self._impl.getOwners(self.critic)

    @property
    def assigned_reviewers(self):
        """The review's assigned reviewers

           The reviewers are returned as a set of api.user.User objects.

           Assigned reviewers are users that have been (manually or
           automatically) assigned as such.  An assigned reviewer may or may
           not also be an active reviewer (a reviewer that has reviewed
           changes)."""
        return self._impl.getAssignedReviewers(self.critic)

    @property
    def active_reviewers(self):
        """The review's active reviewers

           The reviewers are returned as a set of api.user.User objects.

           Active reviewers are users that have reviewed changes.  An active
           reviewer may or may not also be an assigned reviewer (see
           above)."""
        return self._impl.getActiveReviewers(self.critic)

    @property
    def watchers(self):
        """The review's watchers

           The watchers are returned as a set of api.user.User objects.

           A user is a watcher if he/she is on the list of users that receive
           emails about the review, and is neither an owner nor a reviewer."""
        return self._impl.getWatchers(self.critic)

    @property
    def filters(self):
        """The review's local filters

           The filters are returned as a list of api.filters.ReviewFilter
           objects."""
        return self._impl.getFilters(self.critic)

    @property
    def commits(self):
        """The set of commits that are part of the review

           Note: This set never changes when the review branch is rebased,
           and commits are never removed from it.  For the set of commits
           that are actually reachable from the review branch, consult the
           'commits' attribute on the api.branch.Branch object that is
           returned by the 'branch' attribute."""
        return self._impl.getCommits(self.critic)

    @property
    def rebases(self):
        """The rebases of the review branch

           The rebases are returned as a list of api.log.rebase.Rebase
           objects, ordered chronologically with the most recent rebase
           first."""
        # NOTE(review): unlike most accessors here this passes the Review
        # object itself (not self.critic) to the implementation.
        return self._impl.getRebases(self)

    @property
    def pending_rebase(self):
        """The pending rebase of the review branch

           The rebase, if it exists, is returned as an api.log.rebase.Rebase
           object.  If there isn't a pending rebase, this will be None."""
        return self._impl.getPendingRebase(self)

    @property
    def issues(self):
        """The issues in the review

           The issues are returned as a list of api.comment.Issue objects."""
        return self._impl.getIssues(self)

    @property
    def open_issues(self):
        """The open issues in the review

           The issues are returned as a list of api.comment.Issue objects."""
        return self._impl.getOpenIssues(self)

    @property
    def notes(self):
        """The notes in the review

           The notes are returned as a list of api.comment.Note objects."""
        return self._impl.getNotes(self)

    @property
    def first_partition(self):
        # Build the first log partition from the review's full commit set and
        # its rebases.
        return api.log.partition.create(
            self.critic, self.commits, self.rebases)

    def isReviewableCommit(self, commit):
        """Return true if the commit is a primary commit in this review

           A primary commit is one that is included in one of the log
           partitions, and not just part of the "actual log" after a rebase
           of the review branch."""
        assert isinstance(commit, api.commit.Commit)
        return self._impl.isReviewableCommit(self.critic, commit)

    @property
    def total_progress(self):
        """Total progress made on a review

           Total progress is expressed as a number between 0 and 1, 1 being
           fully reviewed and 0 being fully pending."""
        return self._impl.getTotalProgress(self.critic)

    @property
    def progress_per_commit(self):
        """Progress made on a review, grouped by commit

           Returned as a list of CommitChangeCount, where each has the number
           of total changed lines, and the number of reviewed changed
           lines"""
        return self._impl.getProgressPerCommit(self.critic)

class CommitChangeCount:
    # Simple value object pairing a commit with its total/reviewed changed
    # line counts, as returned by Review.progress_per_commit.
    def __init__(self, commit_id, total_changes, reviewed_changes):
        self.commit_id = commit_id
        self.total_changes = total_changes
        self.reviewed_changes = reviewed_changes

def fetch(critic, review_id=None, branch=None):
    """Fetch a Review object with the given id or branch"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    # Exactly one of review_id/branch must be provided.
    assert (review_id is None) != (branch is None)
    assert branch is None or isinstance(branch, api.branch.Branch)
    return api.impl.review.fetch(critic, review_id, branch)

def fetchMany(critic, review_ids):
    """Fetch many Review objects with the given ids, and return them in the
       same order"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    review_ids = list(review_ids)
    return api.impl.review.fetchMany(critic, review_ids)

def fetchAll(critic, repository=None, state=None):
    """Fetch all Review objects in repository with the given state"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    assert (repository is None
            or isinstance(repository, api.repository.Repository))
    if state is not None:
        # Accept a single state string or any iterable of state strings.
        if isinstance(state, basestring):
            state = set([state])
        else:
            state = set(state)
        assert not (state - Review.STATE_VALUES)
    return api.impl.review.fetchAll(critic, repository, state)



================================================
FILE: src/api/reviewablefilechange.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2017 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import api class ReviewableFileChangeError(api.APIError): pass class InvalidReviewableFileChangeId(ReviewableFileChangeError): """Raised when an invalid reviewable file change id is used""" def __init__(self, filechange_id): super(InvalidReviewableFileChangeId, self).__init__( "Invalid reviewable file change id: %d" % filechange_id) self.filechange_id = filechange_id class InvalidReviewableFileChangeIds(ReviewableFileChangeError): """Raised when invalid reviewable file change ids are used""" def __init__(self, filechange_ids): super(InvalidReviewableFileChangeIds, self).__init__( "Invalid reviewable file change ids: %s" % ", ".join(map(str, filechange_ids))) self.filechange_ids = filechange_ids class InvalidChangeset(ReviewableFileChangeError): """Raised when fetchAll() is called with an invalid changeset""" def __init__(self, changeset): super(InvalidChangeset, self).__init__( "Changeset has no reviewable changes: %d" % changeset.id) self.changeset = changeset class ReviewableFileChange(api.APIObject): """Representation of changes to a file, to be reviewed""" @property def id(self): return self._impl.id @property def review(self): return self._impl.getReview(self.critic) @property def changeset(self): """The changeset that the change is part of The changeset is returned as an api.changeset.Changeset object. 
Note that this changeset is always of a single commit, and that this commit will be included in a partition in the review (meaning it will not be part of a rebased version of the review branch.)""" return self._impl.getChangeset(self.critic) @property def file(self): """The file that was changed The file is returned as an api.file.File object.""" return self._impl.getFile(self.critic) @property def deleted_lines(self): """Number of deleted or modified lines In other words, number of lines in the old version of the file that are not present in the new version of the file.""" return self._impl.deleted_lines @property def inserted_lines(self): """Number of modified or inserted lines In other words, number of lines in the new version of the file that were not present in the old version of the file.""" return self._impl.inserted_lines @property def is_reviewed(self): """True if the file change has been marked as reviewed.""" return self._impl.is_reviewed @property def reviewed_by(self): """The user that reviewed the changes The user is returned as a api.user.User object, or None if the change has not been reviewed yet.""" return self._impl.getReviewedBy(self.critic) @property def assigned_reviewers(self): """The users that are assigned to review the changes The reviewers are returned as a set of api.user.User objects.""" return self._impl.getAssignedReviewers(self.critic) class DraftChanges(object): """Draft changes to file change state""" def __init__(self, author, new_reviewed_by): self.__author = author self.__new_is_reviewed = new_reviewed_by is not None self.__new_reviewed_by = new_reviewed_by @property def author(self): """The author of these draft changes The author is returned as an api.user.User object.""" return self.__author @property def new_is_reviewed(self): """New value for the |is_reviewed| attribute""" return self.__new_is_reviewed @property def new_reviewed_by(self): """New value for the |reviewed_by| attribute""" return self.__new_reviewed_by 
@property def draft_changes(self): """The file change's current draft changes The draft changes are returned as a ReviewableFileChange.DraftChanges object, or None if the current user has no unpublished changes to this file change.""" return self._impl.getDraftChanges(self.critic) def fetch(critic, filechange_id): """Fetch a single reviewable file change by its unique id""" assert isinstance(critic, api.critic.Critic) assert isinstance(filechange_id, int) return api.impl.reviewablefilechange.fetch(critic, filechange_id) def fetchMany(critic, filechange_ids): """Fetch multiple reviewable file change by their unique ids""" assert isinstance(critic, api.critic.Critic) filechange_ids = list(filechange_ids) assert all(isinstance(filechange_id, int) for filechange_id in filechange_ids) return api.impl.reviewablefilechange.fetchMany(critic, filechange_ids) def fetchAll(critic, review, changeset=None, file=None, assignee=None, is_reviewed=None): """Fetch all reviewable file changes in a review If a |changeset| is specified, fetch only file changes that are part of that changeset. If a |file| is specified, fetch only file changes in that file. If a |assignee| is specified, fetch only file changes that the specified user is assigned to review. 
If |is_reviewed| is specified (not |None|), fetch only file changes that are marked as reviewed (when |is_reviewed==True|) or not.""" import api.impl assert isinstance(critic, api.critic.Critic) assert isinstance(review, api.review.Review) assert changeset is None or isinstance(changeset, api.changeset.Changeset) assert file is None or isinstance(file, api.file.File) assert assignee is None or isinstance(assignee, api.user.User) assert is_reviewed is None or isinstance(is_reviewed, bool) return api.impl.reviewablefilechange.fetchAll( critic, review, changeset, file, assignee, is_reviewed) ================================================ FILE: src/api/reviewsummary.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
from types import NoneType import api class ReviewSummaryError(api.APIError): pass class ReviewSummaryContainer(api.APIObject): """Container object for review summaries""" @property def reviews(self): return self._impl.reviews @property def more(self): return self._impl.more class ReviewSummary(api.APIObject): """Representation of a review summary""" TYPE_VALUES = frozenset(["all", "own", "other"]) @property def review(self): return self._impl.review @property def latest_change(self): return self._impl.latest_change def fetchMany(critic, search_type, user, count, offset): """Fetch the dashboard for user""" import api.impl assert search_type is not None if user is None: assert search_type == "all" assert isinstance(search_type, str) assert isinstance(user, api.user.User) or user is None assert search_type in ReviewSummary.TYPE_VALUES assert isinstance(count, int) or isinstance(count, NoneType) assert isinstance(offset, int) or isinstance(count, NoneType) if count is not None: assert count > 0 if offset is not None: assert offset >= 0 return api.impl.reviewsummary.fetchMany(critic, search_type, user, count, offset) ================================================ FILE: src/api/transaction/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api

class Transaction(object):
    """Collects database updates and applies them atomically.

       Used as a context manager: modification helper objects returned by the
       modify*/create* methods append Query objects to |items|; the queries
       are executed when the 'with' block exits without an exception."""

    def __init__(self, critic):
        self.critic = critic
        # Names of tables touched, used to acquire the updating cursor.
        self.tables = set()
        # Queued Query objects (Queries merges compatible ones).
        self.items = Queries()
        # Callables invoked after a successful commit.
        self.callbacks = []

    def modifyUser(self, subject):
        from user import ModifyUser
        assert isinstance(subject, api.user.User)
        # Users may (generally) only modify themselves.
        api.PermissionDenied.raiseUnlessUser(self.critic, subject)
        return ModifyUser(self, subject)

    def modifyAccessToken(self, access_token):
        from accesstoken import ModifyAccessToken, CreatedAccessToken
        # Accepts both persisted tokens and tokens created earlier in this
        # same transaction.
        assert isinstance(access_token, (api.accesstoken.AccessToken,
                                         CreatedAccessToken))
        api.PermissionDenied.raiseUnlessAdministrator(self.critic)
        return ModifyAccessToken(self, access_token)

    def createAccessControlProfile(self, callback=None):
        from accesscontrolprofile import ModifyAccessControlProfile
        api.PermissionDenied.raiseUnlessAdministrator(self.critic)
        return ModifyAccessControlProfile.create(self, callback)

    def modifyAccessControlProfile(self, profile):
        from accesscontrolprofile import ModifyAccessControlProfile
        assert isinstance(
            profile, api.accesscontrolprofile.AccessControlProfile)
        api.PermissionDenied.raiseUnlessAdministrator(self.critic)
        return ModifyAccessControlProfile(self, profile)

    def createLabeledAccessControlProfile(self, labels, profile,
                                          callback=None):
        from labeledaccesscontrolprofile \
            import ModifyLabeledAccessControlProfile
        api.PermissionDenied.raiseUnlessAdministrator(self.critic)
        assert isinstance(
            profile, api.accesscontrolprofile.AccessControlProfile)
        return ModifyLabeledAccessControlProfile.create(
            self, labels, profile, callback)

    def modifyLabeledAccessControlProfile(self, labeled_profile):
        from labeledaccesscontrolprofile \
            import ModifyLabeledAccessControlProfile
        api.PermissionDenied.raiseUnlessAdministrator(self.critic)
        assert isinstance(
            labeled_profile,
            api.labeledaccesscontrolprofile.LabeledAccessControlProfile)
        return ModifyLabeledAccessControlProfile(self, labeled_profile)

    def modifyReview(self, review):
        from review import ModifyReview
        assert isinstance(review, api.review.Review)
        return ModifyReview(self, review)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Commit only on clean exit; on exception the queued items are simply
        # dropped.  Returning False propagates any exception.
        if exc_type is None and exc_val is None and exc_tb is None:
            self.__commit()
        return False

    def __commit(self):
        if not self.items:
            return
        try:
            with self.critic.getUpdatingDatabaseCursor(*self.tables) \
                    as cursor:
                for item in self.items:
                    item(self.critic, cursor)
            # NOTE(review): callbacks run after the cursor context has
            # committed -- confirm placement against upstream source.
            for callback in self.callbacks:
                callback()
        finally:
            # Always notify the session, even if executing the queries threw.
            self.critic._impl.transactionEnded(self.critic, self.tables)

class Query(object):
    """A single SQL statement plus one or more parameter tuples.

       If a 'collector' keyword argument is given, the statement is executed
       once per parameter tuple and each result row is fed to the collector
       (used e.g. for INSERT ... RETURNING id)."""

    def __init__(self, statement, *values, **kwargs):
        self.statement = statement
        self.__values = list(values)
        self.collector = kwargs.get("collector")

    def merge(self, query):
        # Fold an identical statement into this one (executemany batching);
        # collector queries are never merged since their rows must be routed
        # to the right collector.
        if self.statement == query.statement \
                and not self.collector \
                and not query.collector:
            self.__values.extend(query.__values)
            return True
        return False

    @property
    def values(self):
        # Resolve LazyValue placeholders (ids of objects created earlier in
        # the same transaction) at execution time, recursing into containers.
        def evaluate(value):
            if isinstance(value, LazyValue):
                return value.evaluate()
            elif isinstance(value, (set, list, tuple)):
                return [evaluate(element) for element in value]
            else:
                return value
        for values in self.__values:
            yield evaluate(values)

    def __call__(self, critic, cursor):
        if self.collector:
            for values in self.values:
                cursor.execute(self.statement, values)
                for row in cursor:
                    self.collector(*row)
        else:
            cursor.executemany(self.statement, self.values)

class Queries(list):
    """List of Query objects that merges consecutive identical statements."""

    def append(self, query):
        if self and self[-1].merge(query):
            return
        super(Queries, self).append(query)

    def extend(self, queries):
        # Extending would bypass the merge logic in append().
        raise Exception("Append queries one at a time!")

class LazyValue(object):
    """Placeholder for a value not known until query execution time."""

    def evaluate(self):
        raise Exception("LazyValue.evaluate() must be implemented!")

class LazyInt(LazyValue):
    def __init__(self, source):
        self.source = source
    def evaluate(self):
        return self.source()

class LazyStr(LazyValue):
    def __init__(self, source):
        self.source = source
    def evaluate(self):
        return self.source()

class LazyObject(object):
    """Receives the id of a row created earlier in the transaction.

       Instances are used as 'collector' for INSERT ... RETURNING queries:
       calling the object stores the returned id."""

    def __init__(self, callback=None):
        self.object_id = None
        self.callback = callback

    def __call__(self, object_id):
        self.object_id = object_id
        if self.callback:
            self.callback(self)

    @property
    def id(self):
        # A lazy handle usable as a query parameter before the id is known.
        return LazyInt(self.evaluate)

    def evaluate(self):
        assert self.object_id is not None
        return self.object_id

class LazyAPIObject(LazyObject):
    """LazyObject that can materialize the created row as an API object."""

    def __init__(self, critic, fetch, callback=None):
        super(LazyAPIObject, self).__init__(
            callback=self.callback_wrapper if callback else None)
        self.critic = critic
        self.__fetch = fetch
        self.__callback = callback

    def fetch(self):
        return self.__fetch(self.critic, self.evaluate())

    @staticmethod
    def callback_wrapper(self):
        # Static method taking the instance explicitly; the name-mangled
        # attribute access still resolves since the attribute was set inside
        # this class's __init__.
        self.__callback(self.fetch())



================================================
FILE: src/api/transaction/accesscontrolprofile.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2015 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import api

class InvalidRuleValue(api.TransactionError):
    pass

class InvalidRequestMethod(api.TransactionError):
    pass

class InvalidPathPattern(api.TransactionError):
    pass

class InvalidRepositoryAccessType(api.TransactionError):
    pass

class InvalidExtensionAccessType(api.TransactionError):
    pass

class ModifyExceptions(object):
    """Base helper for editing a profile's exception lists.

       Subclasses provide |table_name| and |column_names|; rows live in the
       'accesscontrol_<table_name>' table."""

    def __init__(self, transaction, profile):
        self.transaction = transaction
        self.profile = profile

    def __addTable(self):
        # Register the affected table with the transaction before queueing
        # any statement against it.
        self.transaction.tables.add("accesscontrol_" + self.table_name)

    def delete(self, exception_id):
        """Queue deletion of a single exception row by id."""
        self.__addTable()
        self.transaction.items.append(
            api.transaction.Query(
                """DELETE FROM accesscontrol_{} WHERE id=%s AND profile=%s""".format(self.table_name),
                (exception_id, self.profile.id)))

    def deleteAll(self):
        """Queue deletion of all of the profile's exception rows."""
        self.__addTable()
        self.transaction.items.append(
            api.transaction.Query(
                """DELETE FROM accesscontrol_{} WHERE profile=%s""".format(self.table_name),
                (self.profile.id,)))

    def add(self, *values):
        """Queue insertion of an exception row; |values| must match
           |column_names| in order."""
        self.__addTable()
        self.transaction.items.append(
            api.transaction.Query(
                """INSERT INTO accesscontrol_{} (profile, {}) VALUES (%s, {})""".format(
                    self.table_name,
                    ", ".join(self.column_names),
                    ", ".join(["%s"] * len(self.column_names))),
                (self.profile.id,) + values))

class ModifyHTTPExceptions(ModifyExceptions):
    table_name = "http"
    column_names = ("request_method", "path_pattern")

    def add(self, request_method, path_pattern):
        # Validate inputs before queueing; None means "any".
        REQUEST_METHODS = api.accesscontrolprofile \
            .AccessControlProfile.HTTPException.REQUEST_METHODS
        if request_method is not None:
            if request_method not in REQUEST_METHODS:
                raise InvalidRequestMethod(request_method)
        if path_pattern is not None:
            import re
            # The pattern is stored as a regular expression; reject it now
            # rather than failing when it is later applied.
            try:
                re.compile(path_pattern)
            except re.error as error:
                raise InvalidPathPattern(
                    "%r: %s" % (path_pattern, error.message))
        super(ModifyHTTPExceptions, self).add(request_method, path_pattern)

class ModifyRepositoriesExceptions(ModifyExceptions):
    table_name = "repositories"
    column_names = ("access_type", "repository")

    def add(self, access_type, repository):
        assert (repository is None
                or isinstance(repository, api.repository.Repository))
        if access_type not in (None, "read", "modify"):
            raise InvalidRepositoryAccessType(access_type)
        # Store the repository's id; None means "any repository".
        repository_id = repository.id if repository else None
        super(ModifyRepositoriesExceptions, self).add(
            access_type, repository_id)

class ModifyExtensionsExceptions(ModifyExceptions):
    table_name = "extensions"
    column_names = ("access_type", "extension_key")

    def add(self, access_type, extension):
        assert (extension is None
                or isinstance(extension, api.extension.Extension))
        if access_type not in (None, "install", "execute"):
            raise InvalidExtensionAccessType(access_type)
        # Store the extension's key; None means "any extension".
        extension_key = extension.key if extension else None
        super(ModifyExtensionsExceptions, self).add(
            access_type, extension_key)

class ModifyAccessControlProfile(object):
    """Queues modifications of an access control profile."""

    def __init__(self, transaction, profile):
        self.transaction = transaction
        self.profile = profile

    def setTitle(self, value):
        self.transaction.tables.add("accesscontrolprofiles")
        self.transaction.items.append(
            api.transaction.Query(
                """UPDATE accesscontrolprofiles SET title=%s WHERE id=%s""",
                (value, self.profile.id)))

    def setRule(self, category, value):
        """Set the profile's base rule ("allow"/"deny") for a category."""
        assert category in ("http", "repositories", "extensions")
        if value not in ("allow", "deny"):
            raise InvalidRuleValue(value)
        self.transaction.tables.add("accesscontrolprofiles")
        self.transaction.items.append(
            api.transaction.Query(
                """UPDATE accesscontrolprofiles SET {}=%s WHERE id=%s""".format(category),
                (value, self.profile.id)))

    def modifyExceptions(self, category):
        """Return the exception editor for the given category."""
        assert category in ("http", "repositories", "extensions")
        if category == "http":
            return ModifyHTTPExceptions(self.transaction, self.profile)
        if category == "repositories":
            return ModifyRepositoriesExceptions(self.transaction,
                                                self.profile)
        # category == "extensions"
        return ModifyExtensionsExceptions(self.transaction, self.profile)

    def delete(self):
        self.transaction.tables.add("accesscontrolprofiles")
        self.transaction.items.append(
            api.transaction.Query(
                """DELETE FROM accesscontrolprofiles WHERE id=%s""",
                (self.profile.id,)))

    @staticmethod
    def create(transaction, callback=None):
        critic = transaction.critic
        # The profile's id is not known until the INSERT runs; the lazy
        # object collects it via RETURNING.
        profile = CreatedAccessControlProfile(critic, None, callback)
        transaction.tables.add("accesscontrolprofiles")
        transaction.items.append(
            api.transaction.Query(
                """INSERT INTO accesscontrolprofiles DEFAULT VALUES RETURNING id""",
                (),
                collector=profile))
        return ModifyAccessControlProfile(transaction, profile)

class CreatedAccessControlProfile(api.transaction.LazyAPIObject):
    """Lazy handle for a profile created in the current transaction."""

    def __init__(self, critic, access_token, callback=None):
        super(CreatedAccessControlProfile, self).__init__(
            critic, api.accesscontrolprofile.fetch, callback)
        # The access token the profile belongs to, or None.
        self.access_token = access_token



================================================
FILE: src/api/transaction/accesstoken.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2015 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import api class ModifyAccessToken(object): def __init__(self, transaction, access_token): self.transaction = transaction self.access_token = access_token def setTitle(self, value): self.transaction.tables.add("accesstokens") self.transaction.items.append( api.transaction.Query( """UPDATE accesstokens SET title=%s WHERE id=%s""", (value, self.access_token.id))) def delete(self): self.transaction.tables.add("accesstokens") self.transaction.items.append( api.transaction.Query( """DELETE FROM accesstokens WHERE id=%s""", (self.access_token.id,))) def modifyProfile(self): from accesscontrolprofile import ModifyAccessControlProfile assert self.access_token.profile return ModifyAccessControlProfile( self.transaction, self.access_token.profile) class CreatedAccessToken(api.transaction.LazyAPIObject): def __init__(self, critic, user, callback=None): from accesscontrolprofile import CreatedAccessControlProfile super(CreatedAccessToken, self).__init__( critic, api.accesstoken.fetch, callback) self.user = user self.profile = CreatedAccessControlProfile(critic, self) ================================================ FILE: src/api/transaction/comment.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api

class ModifyComment(object):
    """Queues modifications of a comment (chain) within a transaction."""

    def __init__(self, transaction, comment):
        self.transaction = transaction
        self.comment = comment

    def __raiseUnlessDraft(self, action):
        # Editing/deleting is only allowed while the comment is still a
        # draft.
        if not self.comment.is_draft:
            raise api.comment.CommentError(
                "Published comments cannot be " + action)

    def setText(self, text):
        """Queue replacement of the draft comment's text."""
        self.__raiseUnlessDraft("edited")
        self.transaction.tables.add("comments")
        self.transaction.items.append(
            api.transaction.Query(
                """UPDATE comments SET comment=%s WHERE id in (SELECT first_comment FROM commentchains WHERE id=%s)""",
                (text, self.comment.id)))

    def addReply(self, author, text, callback=None):
        """Queue creation of a draft reply; returns the lazy reply object."""
        assert isinstance(author, api.user.User)
        assert isinstance(text, str)
        if self.comment.is_draft:
            raise api.comment.CommentError(
                "Draft comments cannot be replied to")
        # Only one unpublished reply per comment and user is allowed.
        if self.comment.draft_changes and self.comment.draft_changes.reply:
            raise api.comment.CommentError(
                "Comment already has a draft reply")
        critic = self.transaction.critic
        # Users are not (generally) allowed to create comments as other
        # users.
        api.PermissionDenied.raiseUnlessUser(critic, author)
        reply = CreatedReply(critic, self.comment, callback)
        self.transaction.tables.add("comments")
        self.transaction.items.append(
            api.transaction.Query(
                """INSERT INTO comments (chain, uid, state, comment) VALUES (%s, %s, 'draft', %s) RETURNING id""",
                (self.comment.id, author.id, text),
                collector=reply))
        return reply

    def modifyReply(self, reply):
        from reply import ModifyReply
        assert isinstance(reply, api.reply.Reply)
        assert reply.comment == self.comment
        # Only the reply's author may modify it.
        api.PermissionDenied.raiseUnlessUser(self.transaction.critic,
                                             reply.author)
        return ModifyReply(self.transaction, reply)

    def resolveIssue(self):
        """Queue resolving of an open issue.

           If the current user has an unpublished reopening of the issue,
           that draft change is cancelled instead of queueing a new one."""
        critic = self.transaction.critic
        if isinstance(self.comment, api.comment.Note):
            raise api.comment.CommentError(
                "Only issues can be resolved")
        if self.comment.is_draft:
            raise api.comment.CommentError(
                "Unpublished issues cannot be resolved")
        self.transaction.tables.add("commentchainchanges")
        if self.comment.draft_changes:
            # Conflicting unpublished state/type changes block resolving.
            if ((self.comment.draft_changes.new_state
                 and self.comment.draft_changes.new_state != "open")
                    or self.comment.draft_changes.new_type):
                raise api.comment.CommentError(
                    "Issue has unpublished conflicting modifications")
            if self.comment.draft_changes.new_state == "open":
                # Cancel the user's unpublished reopening instead.
                self.transaction.items.append(
                    api.transaction.Query(
                        """DELETE FROM commentchainchanges WHERE uid=%s AND chain=%s AND to_state='open'""",
                        (critic.actual_user.id, self.comment.id)))
                return
        if self.comment.state != "open":
            raise api.comment.CommentError(
                "Only open issues can be resolved")
        # NOTE: the database uses 'closed' for what the API calls 'resolved'.
        self.transaction.items.append(
            api.transaction.Query(
                """INSERT INTO commentchainchanges (uid, chain, from_state, to_state) VALUES (%s, %s, %s, %s)""",
                (critic.actual_user.id, self.comment.id, "open", "closed")))

    def reopenIssue(self):
        """Queue reopening of a resolved issue.

           If the current user has an unpublished resolving of the issue,
           that draft change is cancelled instead of queueing a new one."""
        critic = self.transaction.critic
        if isinstance(self.comment, api.comment.Note):
            raise api.comment.CommentError(
                "Only issues can be reopened")
        self.transaction.tables.add("commentchainchanges")
        if self.comment.draft_changes:
            if ((self.comment.draft_changes.new_state
                 and self.comment.draft_changes.new_state != "resolved")
                    or self.comment.draft_changes.new_type):
                raise api.comment.CommentError(
                    "Issue has unpublished conflicting modifications")
            if self.comment.draft_changes.new_state == "resolved":
                # Cancel the user's unpublished resolving instead.
                self.transaction.items.append(
                    api.transaction.Query(
                        """DELETE FROM commentchainchanges WHERE uid=%s AND chain=%s AND to_state='closed'""",
                        (critic.actual_user.id, self.comment.id)))
                return
        if self.comment.state != "resolved":
            raise api.comment.CommentError(
                "Only resolved issues can be reopened")
        self.transaction.items.append(
            api.transaction.Query(
                """INSERT INTO commentchainchanges (uid, chain, from_state, to_state) VALUES (%s, %s, %s, %s)""",
                (critic.actual_user.id, self.comment.id, "closed", "open")))

    def delete(self):
        """Queue deletion of the (draft) comment chain."""
        critic = self.transaction.critic
        # Only the comment's author may delete it.
        api.PermissionDenied.raiseUnlessUser(critic, self.comment.author)
        self.__raiseUnlessDraft("deleted")
        self.transaction.tables.add("commentchains")
        self.transaction.items.append(
            api.transaction.Query(
                """DELETE FROM commentchains WHERE id=%s""",
                (self.comment.id,)))

class CreatedReply(api.transaction.LazyAPIObject):
    """Lazy handle for a reply created in the current transaction."""

    def __init__(self, critic, comment, callback=None):
        super(CreatedReply, self).__init__(
            critic, api.reply.fetch, callback)
        self.comment = comment



================================================
FILE: src/api/transaction/filters.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2015 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the # License for the specific language governing permissions and limitations under # the License. import api.transaction class ModifyRepositoryFilter(object): def __init__(self, transaction, repository_filter): self.transaction = transaction self.repository_filter = repository_filter def setDelegates(self, value): assert all(isinstance(delegate, api.user.User) for delegate in value) self.transaction.tables.add("filters") self.transaction.items.append( api.transaction.Query( """UPDATE filters SET delegate=%s WHERE id=%s""", (",".join(delegate.name for delegate in value), self.repository_filter.id))) def delete(self): self.transaction.tables.add("filters") self.transaction.items.append( api.transaction.Query( """DELETE FROM filters WHERE id=%s""", (self.repository_filter.id,))) ================================================ FILE: src/api/transaction/labeledaccesscontrolprofile.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api class ModifyLabeledAccessControlProfile(object): def __init__(self, transaction, labeled_profile): self.transaction = transaction self.labeled_profile = labeled_profile def delete(self): self.transaction.tables.add("labeledaccesscontrolprofiles") self.transaction.items.append( api.transaction.Query( """DELETE FROM labeledaccesscontrolprofiles WHERE labels=%s""", (str(self.labeled_profile),))) @staticmethod def create(transaction, labels, profile, callback=None): critic = transaction.critic labeled_profile = CreatedLabeledAccessControlProfile(critic, callback) transaction.tables.add("labeledaccesscontrolprofiles") transaction.items.append( api.transaction.Query( """INSERT INTO labeledaccesscontrolprofiles (labels, profile) VALUES (%s, %s) RETURNING labels""", ("|".join(sorted(labels)), profile.id), collector=labeled_profile)) return ModifyLabeledAccessControlProfile(transaction, labeled_profile) class CreatedLabeledAccessControlProfile(api.transaction.LazyAPIObject): def __init__(self, critic, callback=None): def fetch(critic, labels): return api.labeledaccesscontrolprofile.fetch( critic, labels.split("|")) super(CreatedLabeledAccessControlProfile, self).__init__( critic, fetch, callback) ================================================ FILE: src/api/transaction/reply.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api class ModifyReply(object): def __init__(self, transaction, reply): self.transaction = transaction self.reply = reply def __raiseUnlessDraft(self, action): if not self.reply.is_draft: raise api.reply.ReplyError( "Published replies cannot be " + action) def setText(self, text): self.__raiseUnlessDraft("edited") if not text.strip(): raise api.reply.ReplyError("Empty reply") self.transaction.tables.add("comments") self.transaction.items.append( api.transaction.Query( """UPDATE comments SET comment=%s WHERE id=%s""", (text, self.reply.id))) def delete(self): self.__raiseUnlessDraft("deleted") self.transaction.tables.add("comments") self.transaction.items.append( api.transaction.Query( """DELETE FROM comments WHERE id=%s""", (self.reply.id,))) ================================================ FILE: src/api/transaction/review.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api import dbutils import gitutils from reviewing.comment.propagate import Propagation class ModifyReview(object): def __init__(self, transaction, review): self.transaction = transaction self.review = review def createComment(self, comment_type, author, text, location=None, callback=None): assert comment_type in api.comment.Comment.TYPE_VALUES assert isinstance(author, api.user.User) assert isinstance(text, str) critic = self.transaction.critic # Users are not (generally) allowed to create comments as other users. api.PermissionDenied.raiseUnlessUser(critic, author) side = file_id = first_commit_id = last_commit_id = lines = None if isinstance(location, api.comment.CommitMessageLocation): first_commit_id = last_commit_id = location.commit.id # FIXME: Make commit message comment line numbers one-based too! lines = [(location.commit.sha1, (location.first_line - 1, location.last_line - 1))] # FIXME: ... and then delete the " - 1" from the above two lines. elif isinstance(location, api.comment.FileVersionLocation): # Propagate the comment using "legacy" comment propagation helper. 
if location.changeset: if location.side == "old": commit = location.changeset.from_commit else: commit = location.changeset.to_commit side = location.side first_commit_id = location.changeset.from_commit.id last_commit_id = location.changeset.to_commit.id else: commit = location.commit first_commit_id = last_commit_id = location.commit.id legacy_review = dbutils.Review.fromAPI(self.review) legacy_commit = gitutils.Commit.fromAPI(commit) propagation = Propagation(critic.database) propagation.setCustom( legacy_review, legacy_commit, location.file.id, location.first_line, location.last_line) propagation.calculateInitialLines() file_id = location.file.id lines = propagation.all_lines.items() comment = CreatedComment(critic, self.review) initial_comment = api.transaction.LazyObject() def collectInitialComment(comment_id): initial_comment_id.append(comment_id) self.transaction.tables.update(("commentchains", "comments")) self.transaction.items.append( api.transaction.Query( """INSERT INTO commentchains (review, uid, type, origin, file, first_commit, last_commit) VALUES (%s, %s, %s, %s, %s, %s, %s) RETURNING id""", (self.review.id, author.id, comment_type, side, file_id, first_commit_id, last_commit_id), collector=comment)) self.transaction.items.append( api.transaction.Query( """INSERT INTO comments (chain, uid, state, comment) VALUES (%s, %s, 'draft', %s) RETURNING id""", (comment.id, author.id, text), collector=initial_comment)) self.transaction.items.append( api.transaction.Query( """UPDATE commentchains SET first_comment=%s WHERE id=%s""", (initial_comment.id, comment.id))) if lines: self.transaction.tables.add("commentchainlines") self.transaction.items.append( api.transaction.Query( """INSERT INTO commentchainlines (chain, uid, sha1, first_line, last_line) VALUES (%s, %s, %s, %s, %s)""", *((comment.id, author.id, sha1, first_line, last_line) for sha1, (first_line, last_line) in lines))) if callback: self.transaction.callbacks.append( lambda: 
callback(comment.fetch())) return comment def modifyComment(self, comment): from comment import ModifyComment assert comment.review == self.review # Users are not (generally) allowed to modify other users' draft # comments. if comment.is_draft: api.PermissionDenied.raiseUnlessUser(self.transaction.critic, comment.author) return ModifyComment(self.transaction, comment) def prepareRebase(self, user, new_upstream=None, history_rewrite=None, branch=None, callback=None): assert isinstance(user, api.user.User) assert new_upstream is None or isinstance(new_upstream, str) assert history_rewrite is None or isinstance(history_rewrite, bool) assert (new_upstream is None) != (history_rewrite is None) assert callback is None or callable(callback) pending = self.review.pending_rebase if pending is not None: creator = pending.creator raise api.log.rebase.RebaseError( "The review is already being rebased by %s <%s>." % (creator.fullname, creator.email if creator.email is not None else "email missing")) commitset = self.review.branch.commits tails = commitset.filtered_tails heads = commitset.heads assert len(heads) == 1 head = next(iter(heads)) old_upstream_id = None new_upstream_id = None if new_upstream is not None: if len(tails) > 1: raise api.log.rebase.RebaseError( "Rebase of branch with multiple tails, to new upstream " "commit, is not supported.") tail = next(iter(tails)) old_upstream_id = tail.id if new_upstream == "0" * 40: new_upstream_id = None else: if not gitutils.re_sha1.match(new_upstream): cursor = self.transaction.critic.getDatabaseCursor() cursor.execute("SELECT sha1 FROM tags WHERE repository=%s AND name=%s", (self.review.repository.id, new_upstream)) row = cursor.fetchone() if row: new_upstream_arg = row[0] else: raise api.log.rebase.RebaseError( "Specified new_upstream is invalid.") try: new_upstream_commit = api.commit.fetch( self.review.repository, ref=new_upstream) except: raise api.log.rebase.RebaseError( "The specified new upstream commit does not exist " 
"in Critic's repository") new_upstream_id = new_upstream_commit.id rebase = CreatedRebase(self.transaction.critic, self.review) self.transaction.tables.add("reviewrebases") self.transaction.items.append( api.transaction.Query( """INSERT INTO reviewrebases (review, old_head, new_head, old_upstream, new_upstream, uid, branch) VALUES (%s, %s, NULL, %s, %s, %s, %s) RETURNING id""", (self.review.id, head.id, old_upstream_id, new_upstream_id, user.id, branch), collector=rebase)) if callback: self.transaction.callbacks.append( lambda: callback(rebase.fetch())) def cancelRebase(self, rebase): self.transaction.tables.add("reviewrebases") self.transaction.items.append( api.transaction.Query( """DELETE FROM reviewrebases WHERE review=%s AND new_head IS NULL AND id=%s""", (self.review.id, rebase.id))) def submitChanges(self, batch_comment, callback): critic = self.transaction.critic unpublished_changes = api.batch.fetchUnpublished(critic, self.review) if unpublished_changes.is_empty: raise api.batch.BatchError("No unpublished changes to submit") created_comments = [] empty_comments = [] if batch_comment: created_comments.append(batch_comment) for comment in unpublished_changes.created_comments: if comment.text.strip(): created_comments.append(comment) else: empty_comments.append(comment) batch = CreatedBatch(critic, self.review) self.transaction.tables.add("batches") self.transaction.items.append( api.transaction.Query( """INSERT INTO batches (review, uid, comment) VALUES (%s, %s, %s) RETURNING id""", (self.review.id, critic.actual_user.id, batch_comment.id if batch_comment else None), collector=batch)) def ids(api_objects): return [api_object.id for api_object in api_objects] self.transaction.tables.add("commentchains") self.transaction.items.append( api.transaction.Query( """UPDATE commentchains SET state='open', batch=%s WHERE id=ANY (%s)""", (batch.id, ids(created_comments)))) self.transaction.items.append( api.transaction.Query( """DELETE FROM commentchains WHERE id=ANY 
(%s)""", (ids(empty_comments),))) self.transaction.tables.add("comments") self.transaction.items.append( api.transaction.Query( """UPDATE comments SET state='current', batch=%s WHERE id IN (SELECT first_comment FROM commentchains WHERE id=ANY (%s))""", (batch.id, ids(created_comments)))) self.transaction.items.append( api.transaction.Query( """UPDATE comments SET state='current', batch=%s WHERE id=ANY (%s)""", (batch.id, ids(unpublished_changes.written_replies)))) self.transaction.tables.add("commentchainlines") self.transaction.items.append( api.transaction.Query( """UPDATE commentchainlines SET state='current' WHERE chain=ANY (%s)""", (ids(created_comments),))) # Lock all rows in |commentchains| that we may want to update. self.transaction.items.append( api.transaction.Query( """SELECT 1 FROM commentchains WHERE id=ANY (%s) FOR UPDATE""", (ids(unpublished_changes.resolved_issues) + ids(unpublished_changes.reopened_issues) + ids(unpublished_changes.morphed_comments.keys()),))) # Mark valid comment state changes as performed. self.transaction.tables.add("commentchainchanges") self.transaction.items.append( api.transaction.Query( """UPDATE commentchainchanges SET batch=%s, state='performed' WHERE uid=%s AND state='draft' AND chain IN (SELECT id FROM commentchains WHERE id=ANY (%s) AND type='issue' AND state=%s)""", (batch.id, critic.actual_user.id, ids(unpublished_changes.resolved_issues), "open"), (batch.id, critic.actual_user.id, ids(unpublished_changes.reopened_issues), "closed"))) # FIXME: handle |state='addressed'| # Mark valid comment type changes as performed. 
morphed_to_issue = [] morphed_to_note = [] for comment, new_type in unpublished_changes.morphed_comments.items(): if new_type == "issue": morphed_to_issue.append(comment) else: morphed_to_note.append(comment) self.transaction.items.append( api.transaction.Query( """UPDATE commentchainchanges SET batch=%s, state='performed' WHERE uid=%s AND state='draft' AND chain IN (SELECT id FROM commentchains WHERE id=ANY (%s) AND type=%s)""", (batch.id, critic.actual_user.id, ids(morphed_to_issue), "note"), (batch.id, critic.actual_user.id, ids(morphed_to_note), "issue"))) # Actually perform state changes marked as valid above. self.transaction.items.append( api.transaction.Query( """UPDATE commentchains SET state=%s, closed_by=%s WHERE id IN (SELECT chain FROM commentchainchanges WHERE batch=%s AND state='performed' AND to_state=%s)""", ("closed", critic.actual_user.id, batch.id, "closed"), ("open", None, batch.id, "open"))) # Actually perform type changes marked as valid above. self.transaction.items.append( api.transaction.Query( """UPDATE commentchains SET type=%s WHERE id IN (SELECT chain FROM commentchainchanges WHERE batch=%s AND state='performed' AND to_type=%s)""", ('issue', batch.id, 'issue'), ('note', batch.id, 'note'))) # Lock all rows in |reviewfiles| that we may want to update. self.transaction.tables.add("reviewfilechanges") self.transaction.items.append( api.transaction.Query( """SELECT 1 FROM reviewfiles WHERE id=ANY (%s) FOR UPDATE""", (ids(unpublished_changes.reviewed_file_changes) + ids(unpublished_changes.unreviewed_file_changes),))) # Mark valid draft changes as "performed". 
self.transaction.items.append( api.transaction.Query( """UPDATE reviewfilechanges SET batch=%s, state='performed' WHERE uid=%s AND state='draft' AND file IN (SELECT id FROM reviewfiles WHERE id=ANY (%s) AND state=%s)""", (batch.id, critic.actual_user.id, ids(unpublished_changes.reviewed_file_changes), "pending"), (batch.id, critic.actual_user.id, ids(unpublished_changes.unreviewed_file_changes), "reviewed"))) # Actually perform all the changes we previously marked as performed. self.transaction.tables.add("reviewfiles") self.transaction.items.append( api.transaction.Query( """UPDATE reviewfiles SET state=%s, reviewer=%s WHERE id IN (SELECT file FROM reviewfilechanges WHERE batch=%s AND state='performed' AND to_state=%s)""", ('reviewed', critic.actual_user.id, batch.id, 'reviewed'), ('pending', None, batch.id, 'pending'))) if callback: self.transaction.callbacks.append( lambda: callback(batch.fetch())) return batch def markChangeAsReviewed(self, filechange): assert isinstance(filechange, api.reviewablefilechange.ReviewableFileChange) critic = self.transaction.critic if filechange.draft_changes: current_state = filechange.draft_changes.new_is_reviewed else: current_state = filechange.is_reviewed if current_state: raise api.reviewablefilechange.ReviewableFileChangeError( "Specified file change is already marked as reviewed") if critic.actual_user not in filechange.assigned_reviewers: raise api.reviewablefilechange.ReviewableFileChangeError( "Specified file change is not assigned to current user") self.transaction.tables.add("reviewfilechanges") if filechange.draft_changes: self.transaction.items.append( api.transaction.Query( """DELETE FROM reviewfilechanges WHERE file=%s AND uid=%s AND to_state='pending'""", (filechange.id, critic.actual_user.id))) if not filechange.is_reviewed: self.transaction.items.append( api.transaction.Query( """INSERT INTO reviewfilechanges (file, uid, from_state, to_state) VALUES (%s, %s, 'pending', 'reviewed')""", (filechange.id, 
critic.actual_user.id))) def markChangeAsPending(self, filechange): assert isinstance(filechange, api.reviewablefilechange.ReviewableFileChange) critic = self.transaction.critic if filechange.draft_changes: current_state = filechange.draft_changes.new_is_reviewed else: current_state = filechange.is_reviewed if not current_state: raise api.reviewablefilechange.ReviewableFileChangeError( "Specified file change is already marked as pending") if critic.actual_user not in filechange.assigned_reviewers: raise api.reviewablefilechange.ReviewableFileChangeError( "Specified file change is not assigned to current user") self.transaction.tables.add("reviewfilechanges") if filechange.draft_changes: self.transaction.items.append( api.transaction.Query( """DELETE FROM reviewfilechanges WHERE file=%s AND uid=%s AND to_state='reviewed'""", (filechange.id, critic.actual_user.id))) if filechange.is_reviewed: self.transaction.items.append( api.transaction.Query( """INSERT INTO reviewfilechanges (file, uid, from_state, to_state) VALUES (%s, %s, 'reviewed', 'pending')""", (filechange.id, critic.actual_user.id))) class CreatedComment(api.transaction.LazyAPIObject): def __init__(self, critic, review, callback=None): super(CreatedComment, self).__init__( critic, api.comment.fetch, callback) self.review = review class CreatedRebase(api.transaction.LazyAPIObject): def __init__(self, critic, review, callback=None): super(CreatedRebase, self).__init__( critic, api.log.rebase.fetch, callback) self.review = review class CreatedBatch(api.transaction.LazyAPIObject): def __init__(self, critic, review): super(CreatedBatch, self).__init__(critic, api.batch.fetch) self.review = review ================================================ FILE: src/api/transaction/user.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use 
# this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import api

class ModifyUser(object):
    """Modification handle for a user.

    All operations queue SQL statements on the enclosing transaction;
    nothing is executed until the transaction commits."""

    def __init__(self, transaction, user):
        self.transaction = transaction
        self.user = user

    def setFullname(self, value):
        """Queue an update of the user's full name."""
        self.transaction.tables.add("users")
        self.transaction.items.append(
            api.transaction.Query(
                """UPDATE users
                      SET fullname=%s
                    WHERE id=%s""",
                (value, self.user.id)))

    # Repository filters
    # ==================

    def createFilter(self, filter_type, repository, path, delegates,
                     callback=None):
        """Queue creation of a repository filter for this user.

        |callback|, if given, is invoked with the fetched
        api.filters.RepositoryFilter after the row has been inserted."""
        assert filter_type in ("reviewer", "watcher", "ignore")
        assert isinstance(repository, api.repository.Repository)
        assert all(isinstance(delegate, api.user.User)
                   for delegate in delegates)

        def collectCreatedFilter(filter_id):
            # Invoked with the id produced by the RETURNING clause below.
            if callback:
                callback(api.filters.fetchRepositoryFilter(
                    self.transaction.critic, filter_id))

        self.transaction.tables.add("filters")
        self.transaction.items.append(
            api.transaction.Query(
                """INSERT INTO filters (uid, repository, path, type, delegate)
                   VALUES (%s, %s, %s, %s, %s)
                RETURNING id""",
                # Delegates are stored as a comma-separated list of names.
                (self.user.id, repository.id, path, filter_type,
                 ",".join(delegate.name for delegate in delegates)),
                collector=collectCreatedFilter))

    def modifyFilter(self, repository_filter):
        """Return a ModifyRepositoryFilter handle for one of this user's
        filters."""
        from filters import ModifyRepositoryFilter
        assert repository_filter.subject == self.user
        return ModifyRepositoryFilter(self.transaction, repository_filter)

    # Access tokens
    # =============

    def createAccessToken(self, access_type, title, callback=None):
        """Queue creation of an access token (and its access control
        profile) for this user.

        Only administrators may create tokens whose |access_type| is not
        "user".  Returns a CreatedAccessToken lazy object."""
        import auth
        import base64

        from accesstoken import CreatedAccessToken

        critic = self.transaction.critic

        if access_type != "user":
            api.PermissionDenied.raiseUnlessAdministrator(critic)

        user_id = self.user.id if access_type == "user" else None

        # The token value is split into a short "part1" and a longer,
        # secret "part2".
        part1 = auth.getToken(encode=base64.b64encode, length=12)
        part2 = auth.getToken(encode=base64.b64encode, length=21)

        access_token = CreatedAccessToken(
            critic, self.user if access_type == "user" else None, callback)

        self.transaction.tables.update(("accesstokens",
                                        "accesscontrolprofiles"))
        self.transaction.items.append(
            api.transaction.Query(
                """INSERT INTO accesstokens (access_type, uid, part1, part2,
                                             title)
                   VALUES (%s, %s, %s, %s, %s)
                RETURNING id""",
                (access_type, user_id, part1, part2, title),
                collector=access_token))
        self.transaction.items.append(
            api.transaction.Query(
                """INSERT INTO accesscontrolprofiles (access_token)
                   VALUES (%s)
                RETURNING id""",
                # NOTE(review): the lazy |access_token| object itself is
                # passed as the parameter here — presumably the transaction
                # machinery resolves it to the token's id at execution time;
                # verify against api.transaction.Query.
                (access_token,),
                collector=access_token.profile))

        return access_token

    def modifyAccessToken(self, access_token):
        """Return a ModifyAccessToken handle for one of this user's
        tokens.

        Raises api.PermissionDenied for the token currently used to
        authenticate."""
        from accesstoken import ModifyAccessToken
        assert access_token.user == self.user

        critic = self.transaction.critic

        if critic.access_token and critic.access_token == access_token:
            # Don't allow any modifications of the access token used to
            # authenticate.  This could for instance be used to remove the
            # access restrictions of the token, which would obviously be bad.
            raise api.PermissionDenied("Access token used to authenticate")

        return ModifyAccessToken(self.transaction, access_token)

================================================
FILE: src/api/user.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import api

class UserError(api.APIError):
    """Base exception for all errors related to the User class"""
    pass

class InvalidUserIds(UserError):
    """Raised when one or more invalid user ids is used"""

    def __init__(self, values):
        """Constructor"""
        super(InvalidUserIds, self).__init__("Invalid user ids: %r" % values)
        self.values = values

class InvalidUserId(InvalidUserIds):
    """Raised when a single invalid user id is used"""

    def __init__(self, value):
        """Constructor"""
        super(InvalidUserId, self).__init__([value])
        # Override the plural message from the base class with a singular
        # one, and expose the offending value directly.
        self.message = "Invalid user id: %r" % value
        self.value = value

class InvalidUserNames(UserError):
    """Raised when one or more invalid user names is used"""

    def __init__(self, values):
        """Constructor"""
        super(InvalidUserNames, self).__init__(
            "Invalid user names: %r" % values)
        self.values = values

class InvalidUserName(InvalidUserNames):
    """Raised when a single invalid user name is used"""

    def __init__(self, value):
        """Constructor"""
        super(InvalidUserName, self).__init__([value])
        self.message = "Invalid user name: %r" % value
        self.value = value

class InvalidRole(UserError):
    """Raised when an invalid role is used"""

    def __init__(self, role):
        """Constructor"""
        super(InvalidRole, self).__init__("Invalid role: %r" % role)
        self.role = role

class User(api.APIObject):
    """Representation of a Critic user

    All state is delegated to the implementation object |self._impl|."""

    STATUS_VALUES = frozenset(["current", "absent", "retired"])

    @property
    def id(self):
        """The user's unique id"""
        return self._impl.id

    @property
    def name(self):
        """The user's unique username"""
        return self._impl.name

    @property
    def fullname(self):
        """The user's full name"""
        return self._impl.fullname

    @property
    def status(self):
        """The user's status

        For regular users, the value is one of the strings in the
        User.STATUS_VALUES set.  For the anonymous user, the value is
        "anonymous".  For the Critic system user, the value is "system"."""
        return self._impl.status

    @property
    def is_anonymous(self):
        """True if this object represents an anonymous user"""
        # Anonymous users are represented by a None id.
        return self.id is None

    @property
    def email(self):
        """The user's selected primary email address

        If the user has no primary email address or if the selected primary
        email address is unverified, this attribute's value is None."""
        return self._impl.email

    class PrimaryEmail(object):
        """Primary email address

        The 'address' attribute is the email address as a string.

        The 'selected' attribute is True if this is the user's currently
        selected primary email address.

        The 'verified' attribute is False if the address is unverified and
        shouldn't be used until it has been verified, True if the address
        has been verified by us, and None if it hasn't been verified but
        can be used anyway."""

        def __init__(self, address, selected, verified):
            self.address = address
            self.selected = selected
            self.verified = verified

    @property
    def primary_emails(self):
        """The user's primary email addresses

        The value is a list of PrimaryEmail objects."""
        return self._impl.getPrimaryEmails(self.critic)

    @property
    def git_emails(self):
        """The user's "git" email addresses

        The value is a set of strings.  These addresses are used to identify
        the user as author or committer of Git commits by matching the email
        address in the commit's meta data."""
        return self._impl.getGitEmails(self.critic)

    @property
    def repository_filters(self):
        """The user's repository filters

        The value is a dictionary mapping api.repository.Repository objects
        to lists of api.filters.RepositoryFilter objects."""
        return self._impl.getRepositoryFilters(self.critic)

    @property
    def internal(self):
        """The corresponding internal dbutils.User object

        Should only be used when interfacing with legacy code."""
        return self._impl.getInternal(self.critic)

    def hasRole(self, role):
        """Return True if the user has the named role

        If the argument is not a valid role name, an InvalidRole exception
        is raised."""
        return self._impl.hasRole(self.critic, role)

    def getPreference(self, item, repository=None):
        """Fetch the user's preference setting for 'item'

        The setting is returned as an api.preference.Preference object,
        whose 'user' and 'repository' attributes can be used to determine
        whether there was a per-user and/or per-repository override, or if
        a system default value was used.

        If 'repository' is not None, fetch a per-repository override if
        there is one."""
        assert (repository is None
                or isinstance(repository, api.repository.Repository))
        # Note: |self| is passed as the user whose preference is fetched.
        return self._impl.getPreference(self.critic, item, self, repository)

def fetch(critic, user_id=None, name=None):
    """Fetch a User object with the given user id or name

    Exactly one of the 'user_id' and 'name' arguments can be used.

    Exceptions:

      InvalidUserIds: if 'user_id' is used and is not a valid user id.
      InvalidUserNames: if 'name' is used and is not a valid user name."""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    assert (user_id is None) != (name is None)
    return api.impl.user.fetch(critic, user_id, name)

def fetchMany(critic, user_ids=None, names=None):
    """Fetch many User objects with given user ids or names

    Exactly one of the 'user_ids' and 'names' arguments can be used.

    If the value of the provided 'user_ids' or 'names' argument is a set,
    the return value is a also set of User objects, otherwise it is a list
    of User objects, in the same order as the argument sequence.

    Exceptions:

      InvalidUserIds: if 'user_ids' is used and any element in it is not a
                      valid user id.
      InvalidUserNames: if 'names' is used and any element in it is not a
                        valid user name."""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    assert (user_ids is None) != (names is None)
    return api.impl.user.fetchMany(critic, user_ids, names)

def fetchAll(critic, status=None):
    """Fetch User objects for all users of the system

    If |status| is not None, it must be one of the user statuses "current",
    "absent" or "retired", or an iterable containing one or more of those
    strings."""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    if status is not None:
        # |basestring| — this module targets Python 2.  A single status
        # string is normalized into a one-element set.
        if isinstance(status, basestring):
            status = set([status])
        else:
            status = set(status)
        assert not (status - User.STATUS_VALUES)
    return api.impl.user.fetchAll(critic, status)

def anonymous(critic):
    """Fetch a User object representing an anonymous user"""
    import api.impl
    assert isinstance(critic, api.critic.Critic)
    return api.impl.user.anonymous(critic)

================================================
FILE: src/auth/__init__.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import os import base64 import re import configuration import dbutils class CheckFailed(Exception): pass class NoSuchUser(CheckFailed): pass class WrongPassword(CheckFailed): pass def createCryptContext(): try: from passlib.context import CryptContext except ImportError: if not configuration.debug.IS_QUICKSTART: raise # Support quick-starting without 'passlib' installed by falling back to # completely bogus unsalted SHA-256-based hashing. import hashlib class CryptContext: def __init__(self, **kwargs): pass def encrypt(self, password): return hashlib.sha256(password).hexdigest() def verify_and_update(self, password, hashed): return self.encrypt(password) == hashed, None kwargs = {} for scheme, min_rounds in configuration.auth.MINIMUM_ROUNDS.items(): kwargs["%s__min_rounds" % scheme] = min_rounds all_schemes = configuration.auth.PASSWORD_HASH_SCHEMES default_scheme = configuration.auth.DEFAULT_PASSWORD_HASH_SCHEME return CryptContext( schemes=all_schemes, default=default_scheme, deprecated=filter(lambda scheme: scheme != default_scheme, all_schemes), **kwargs) def checkPassword(db, username, password): cursor = db.cursor() cursor.execute("SELECT id, password FROM users WHERE name=%s", (username,)) row = cursor.fetchone() if not row: raise NoSuchUser user_id, hashed = row if hashed is None: # No password set => there is no "right" password. 
raise WrongPassword ok, new_hashed = createCryptContext().verify_and_update(password, hashed) if not ok: raise WrongPassword if new_hashed: with db.updating_cursor("users") as cursor: cursor.execute("UPDATE users SET password=%s WHERE id=%s", (new_hashed, user_id)) return dbutils.User.fromId(db, user_id) def hashPassword(password): return createCryptContext().encrypt(password) def getToken(encode=base64.b64encode, length=20): return encode(os.urandom(length)) class InvalidUserName(Exception): pass def validateUserName(name): if not name: raise InvalidUserName("Empty user name is not allowed.") elif not re.sub(r"\s", "", name, re.UNICODE): raise InvalidUserName( "A user name containing only white-space is not allowed.") elif configuration.base.USER_NAME_PATTERN is not None: if not re.match(configuration.base.USER_NAME_PATTERN, name): raise InvalidUserName( configuration.base.USER_NAME_PATTERN_DESCRIPTION) def isValidUserName(name): try: validateUserName(name) except InvalidUserName: return False return True class InvalidRequest(Exception): pass class Failure(Exception): pass from session import createSessionId, deleteSessionId, checkSession from accesscontrol import (AccessDenied, AccessControlError, AccessControlProfile, AccessControl) from database import AuthenticationError, AuthenticationFailed, Database DATABASE = None import databases from provider import Provider from oauth import OAuthProvider PROVIDERS = {} import providers ================================================ FILE: src/auth/accesscontrol.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import re import base import auth import configuration class AccessDenied(Exception): """Raised by AccessControl checks on failure""" pass class AccessControlError(Exception): """Raised in case of system configuration errors""" pass class HTTPException(object): def __init__(self, request_method, path_pattern): self.request_method = request_method self.path_regexp = (re.compile("^" + path_pattern + "$") if path_pattern is not None else None) def applies(self, req): if self.request_method is not None \ and self.request_method != req.method: return False if self.path_regexp is not None \ and not self.path_regexp.match(req.path): return False return True class RepositoryException(object): def __init__(self, access_type, repository_id): self.access_type = access_type self.repository_id = repository_id def applies(self, access_type, repository_id): if self.access_type is not None \ and self.access_type != access_type: return False if self.repository_id is not None \ and self.repository_id != repository_id: return False return True class ExtensionException(object): def __init__(self, access_type, extension_key): self.access_type = access_type self.extension_key = extension_key def applies(self, access_type, extension): if self.access_type is not None \ and self.access_type != access_type: return False if self.extension_key is not None \ and self.extension_key != extension.getKey(): return False return True def _isAllowed(rule, exceptions, *args): # If an exception applies, then allow if the rule is "deny" ... 
if any(exception.applies(*args) for exception in exceptions): return rule == "deny" # ... otherwise allow if the rule is "allow". return rule == "allow" class AccessControlProfile(object): def __init__(self, *rules): if len(rules) == 1: rules = rules * 3 (self.http_rule, self.repositories_rule, self.extensions_rule) = rules self.http_exceptions = [] self.repositories_exceptions = [] self.extensions_exceptions = [] @staticmethod def isAllowedHTTP(profiles, req): return all(_isAllowed(profile.http_rule, profile.http_exceptions, req) for profile in profiles) @staticmethod def isAllowedRepository(profiles, access_type, repository_id): return all(_isAllowed(profile.repositories_rule, profile.repositories_exceptions, access_type, repository_id) for profile in profiles) @staticmethod def isAllowedExtension(profiles, access_type, extension): return all(_isAllowed(profile.extensions_rule, profile.extensions_exceptions, access_type, extension) for profile in profiles) @staticmethod def forUser(db, user, authentication_labels=()): cursor = db.readonly_cursor() if user.isSystem(): # The system user can always do everything. return AccessControlProfile("allow") if user.isAnonymous(): if not configuration.base.ALLOW_ANONYMOUS_USER: profile = AccessControlProfile("deny") if configuration.base.SESSION_TYPE == "cookie": # Hard-coded exceptions to allow access to things that must # be accessed in order for the user to load the login page # and successfully sign in. 
profile.http_exceptions.extend([ HTTPException("GET", "login"), HTTPException("POST", "validatelogin") ]) return profile cursor.execute("""SELECT profile FROM useraccesscontrolprofiles WHERE access_type='anonymous'""") row = cursor.fetchone() else: cursor.execute("""SELECT profile FROM useraccesscontrolprofiles WHERE access_type='user' AND uid=%s""", (user.id,)) row = cursor.fetchone() if not row and authentication_labels: cursor.execute("""SELECT profile FROM labeledaccesscontrolprofiles WHERE labels=%s""", ("|".join(sorted(authentication_labels)),)) row = cursor.fetchone() if not row: cursor.execute("""SELECT profile FROM useraccesscontrolprofiles WHERE access_type='user' AND uid IS NULL""") row = cursor.fetchone() if not row: # By default, allow everything. return AccessControlProfile("allow") profile_id, = row return AccessControlProfile.fromId(db, profile_id) @staticmethod def fromId(db, profile_id): cursor = db.readonly_cursor() cursor.execute( """SELECT http, repositories, extensions FROM accesscontrolprofiles WHERE id=%s""", (profile_id,)) profile = AccessControlProfile(*cursor.fetchone()) cursor.execute("""SELECT request_method, path_pattern FROM accesscontrol_http WHERE profile=%s""", (profile_id,)) profile.http_exceptions.extend( HTTPException(request_method, path_pattern) for request_method, path_pattern in cursor) cursor.execute("""SELECT access_type, repository FROM accesscontrol_repositories WHERE profile=%s""", (profile_id,)) profile.repositories_exceptions.extend( RepositoryException(access_type, repository_id) for access_type, repository_id in cursor) if configuration.extensions.ENABLED: cursor.execute("""SELECT access_type, extension_key FROM accesscontrol_extensions WHERE profile=%s""", (profile_id,)) profile.extensions_exceptions.extend( ExtensionException(access_type, extension_key) for access_type, extension_key in cursor) return profile class AccessControl(object): @staticmethod def forRequest(db, req): # Check the session status of the 
request. This raises exceptions in # various situations. If no exception is raised, req.user will have # been set, possibly to the anonymous user (or the system user.) auth.checkSession(db, req) assert db.user assert db.profiles @staticmethod def accessHTTP(db, req): if not AccessControlProfile.isAllowedHTTP(db.profiles, req): raise AccessDenied("Access denied: %s /%s" % (req.method, req.path)) class Repository(object): def __init__(self, repository_id, path): self.id = repository_id self.path = path @staticmethod def accessRepository(db, access_type, repository): if not AccessControlProfile.isAllowedRepository( db.profiles, access_type, repository.id): raise AccessDenied("Repository access denied: %s %s" % (access_type, repository.path)) @staticmethod def accessExtension(db, access_type, extension): if not AccessControlProfile.isAllowedExtension( db.profiles, access_type, extension): raise AccessDenied("Access denied to extension: %s %s" % (access_type, extension.getKey())) ================================================ FILE: src/auth/database.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import configuration class AuthenticationError(Exception): """Raised by Database.authenticate() on error "Error" here means something the system administrator should be informed about, rather than the user. 
The user trying to sign in will not see this exception's message, but will instead be shown a generic error message saying that something went wrong.""" pass class AuthenticationFailed(Exception): """Raised by Database.authenticate() on failure "Failure" here means the identification and/or password provided by the user was incorrect. The user trying to sign in will be presented with this exception's message as the reason for the failure. Note: this value is taken as HTML source text, meaning it can contain tags, but also that '<' and '&' characters must be replaced with < and & entity references.""" pass class Database(object): def __init__(self, name): self.name = name self.configuration = configuration.auth.DATABASES.get(name, {}) def getFields(self): """The fields in the sign-in form The return value should be a sequence of tuples, with elements as follows: The first element should be a boolean. True means the value should be hidden, e.g. that it's a password or similar. The second element should be an (in this context unique) identifer, for internal use. The third element should be the field's label in the login form, e.g. "Username" or "Email (@example.com)". The fourth element is optional; if present it should be a longer description of the field to be used in a help popup or similar. Note: this value is taken as HTML source text, meaning it can contain tags, but also that '<' and '&' characters must be replaced with < and & entity references.""" raise base.NotReached() def authenticate(self, db, fields): """Authenticate user based on values input The |fields| argument is a dictionary mapping identifiers (the second element in the tuples returned by getFields()) to the values the user entered. On success, a dbutils.User object must be returned. No other type of return value is acceptable. On failure/error, either AuthenticationError or AuthenticationFailed should be raised. 
Any other exception raised will be treated similar to an AuthenticationError exception.""" raise base.NotReached() def getAuthenticationLabels(self, user): return () def supportsHTTPAuthentication(self): """Returns true if HTTP authentication is supported By default it is if the database declares two fields, where the first is not hidden (not a password) and the second is hidden. The first field will receive the HTTP username and the second field the HTTP password.""" fields = self.getFields() return (len(fields) == 2 and not fields[0][0] and fields[1][0]) def performHTTPAuthentication(self, db, username, password): if not self.supportsHTTPAuthentication(): raise AuthenticationFailed("HTTP authentication not supported") fields = self.getFields() self.authenticate(db, { fields[0][1]: username, fields[1][1]: password }) def supportsPasswordChange(self): """Returns true if password changing is supported""" return False def changePassword(self, db, user, current_pw, new_pw): """Change the user's password Raises auth.WrongPassword if |current_pw| is incorrect.""" pass ================================================ FILE: src/auth/databases/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import internaldb import ldapdb # This must be last, since it wraps the other enabled database. 
import accesstokensdb ================================================ FILE: src/auth/databases/accesstokensdb.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import auth import configuration import dbutils class AccessTokens(auth.Database): def __init__(self, authdb): super(AccessTokens, self).__init__("accesstokens") self.authdb = authdb def getFields(self): return self.authdb.getFields() def authenticate(self, db, fields): self.authdb.authenticate(db, fields) def getAuthenticationLabels(self, user): return self.authdb.getAuthenticationLabels(user) def supportsHTTPAuthentication(self): # HTTP authentication is the primary use-case. 
return True def performHTTPAuthentication(self, db, username, password): cursor = db.readonly_cursor() cursor.execute("""SELECT id, access_type, uid FROM accesstokens WHERE part1=%s AND part2=%s""", (username, password)) row = cursor.fetchone() if row: token_id, access_type, user_id = row if access_type == "anonymous": db.setUser(dbutils.User.makeAnonymous()) elif access_type == "system": db.setUser(dbutils.User.makeSystem()) else: user = dbutils.User.fromId(db, user_id) authentication_labels = self.getAuthenticationLabels(user) db.setUser(user, authentication_labels) import api db.critic.setAccessToken(api.accesstoken.fetch(db.critic, token_id)) cursor.execute("""SELECT id FROM accesscontrolprofiles WHERE access_token=%s""", (token_id,)) row = cursor.fetchone() if row: profile_id, = row db.addProfile(auth.AccessControlProfile.fromId(db, profile_id)) return return self.authdb.performHTTPAuthentication(db, username, password) def supportsPasswordChange(self): return self.authdb.supportsPasswordChange() def changePassword(self, db, user, current_pw, new_pw): if db.critic.access_token is not None: raise auth.AccessDenied return self.authdb.changePassword(db, user, current_pw, new_pw) if configuration.auth.ENABLE_ACCESS_TOKENS: auth.DATABASE = AccessTokens(auth.DATABASE) ================================================ FILE: src/auth/databases/internaldb.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations under # the License. import auth import configuration class Internal(auth.Database): def __init__(self): super(Internal, self).__init__("internal") def getFields(self): return [(False, "username", "Username:"), (True, "password", "Password:")] def authenticate(self, db, values): username = values["username"].strip() if not username: raise auth.database.AuthenticationFailed("Empty username") password = values["password"] if not password: raise auth.database.AuthenticationFailed("Empty password") try: db.setUser(auth.checkPassword(db, username, password)) except auth.NoSuchUser: raise auth.AuthenticationFailed("Invalid username") except auth.WrongPassword: raise auth.AuthenticationFailed("Wrong password") def supportsPasswordChange(self): return True def changePassword(self, db, user, current_pw, new_pw): # If |current_pw| is True, then this is an administrator changing # another user's password. The usual rules do not apply. if current_pw is not True: cursor = db.readonly_cursor() cursor.execute("SELECT password FROM users WHERE id=%s", (user.id,)) hashed_pw, = cursor.fetchone() if current_pw is not None: auth.checkPassword(db, user.name, current_pw) elif hashed_pw is not None: # This is mostly a sanity check; the only way to trigger this is # if the user has no password when he loads /home, sets a # password in another tab or using another browser, and then # tries to set (rather than change) the password using the old # stale /home. 
raise auth.WrongPassword with db.updating_cursor("users") as cursor: cursor.execute("UPDATE users SET password=%s WHERE id=%s", (auth.hashPassword(new_pw), user.id)) if configuration.auth.DATABASE == "internal": auth.DATABASE = Internal() ================================================ FILE: src/auth/databases/ldapdb.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import hashlib import threading import time import auth import dbutils import configuration def escaped(fields, fn): return { identifier: fn(value) for identifier, value in fields.items() } class LDAPCache(object): def __init__(self, max_age): self.lock = threading.Lock() # sha256(repr(fields)) => (user_id, timestamp) self.cache = {} self.max_age = max_age @staticmethod def __key(fields): return hashlib.sha256(repr(sorted(fields.items()))).hexdigest() def get(self, fields): # A max age of zero (or less) means the cache is disabled. if self.max_age <= 0: return None with self.lock: key = LDAPCache.__key(fields) value, timestamp = self.cache.get(key, (None, None)) if value is None: return None if time.time() - timestamp > self.max_age: del self.cache[key] return None return value def set(self, fields, value): # A max age of zero (or less) means the cache is disabled. 
if self.max_age <= 0: return with self.lock: # Note: We might overwrite an existing (presumably identical) entry # here, due to two threads racing to authenticate the same user. key = LDAPCache.__key(fields) self.cache[key] = (value, time.time()) class LDAP(auth.Database): def __init__(self): super(LDAP, self).__init__("ldap") self.cache = LDAPCache(self.configuration["cache_max_age"]) def __startConnection(self): import ldap connection = ldap.initialize(self.configuration["url"]) if self.configuration["use_tls"]: connection.start_tls_s() return connection def __isMemberOfGroup(self, connection, group, fields): import ldap result = connection.search_s( group["dn"], ldap.SCOPE_BASE, attrlist=[group["members_attribute"]]) if len(result) != 1: raise auth.AuthenticationError( "Required group '%s' not found" % group["dn"]) group_dn, group_attributes = result[0] if group["members_attribute"] not in group_attributes: raise auth.AuthenticationError( "Required group '%s' has no attribute '%s'" % (group["dn"], group["members_attribute"])) members = group_attributes[group["members_attribute"]] member_value = (group["member_value"] % escaped(fields, ldap.dn.escape_dn_chars)) return member_value in members def getFields(self): return self.configuration["fields"] def authenticate(self, db, fields): import ldap import ldap.filter cached_data = self.cache.get(fields) if cached_data is not None: cached_user_id, cached_authentication_labels = cached_data try: user = dbutils.User.fromId(db, cached_user_id) except dbutils.InvalidUserId: pass else: db.setUser(user, cached_authentication_labels) return connection = self.__startConnection() search_base = (self.configuration["search_base"] % escaped(fields, ldap.dn.escape_dn_chars)) search_filter = (self.configuration["search_filter"] % escaped(fields, ldap.filter.escape_filter_chars)) attributes = [self.configuration["username_attribute"]] if self.configuration["create_user"]: attributes.extend([self.configuration["fullname_attribute"], 
self.configuration["email_attribute"]]) result = connection.search_s( search_base, ldap.SCOPE_SUBTREE, search_filter, attributes) if not result: raise auth.AuthenticationFailed("LDAP search found no matches") if len(result) > 1: raise auth.AuthenticationFailed("LDAP search found multiple matches") dn, attributes = result[0] # fields_with_dn = fields.copy() # fields_with_dn["dn"] = dn try: connection.simple_bind_s( dn, fields[self.configuration["credentials"]]) except ldap.INVALID_CREDENTIALS: raise auth.AuthenticationFailed("Invalid credentials") except ldap.UNWILLING_TO_PERFORM: # Might be raised for e.g. empty password. raise auth.AuthenticationFailed("Invalid credentials") authentication_labels = set() if "require_groups" in self.configuration: for group in self.configuration["require_groups"]: if not self.__isMemberOfGroup(connection, group, fields): raise auth.AuthenticationFailed( "Not member of required LDAP groups") if "label" in group: authentication_labels.add(group["label"]) if "additional_groups" in self.configuration: for group in self.configuration["additional_groups"]: if self.__isMemberOfGroup(connection, group, fields): authentication_labels.add(group["label"]) connection.unbind_s() def getAttribute(name, defvalue=None): value_list = attributes.get(name) if value_list is None or not value_list or not value_list[0]: defvalue return value_list[0] username = getAttribute(self.configuration["username_attribute"]) if username is None: raise auth.AuthenticationError( "Configured username LDAP attribute missing") try: user = dbutils.User.fromName(db, username) except dbutils.NoSuchUser: if not self.configuration["create_user"]: raise auth.AuthenticationFailed("No matching Critic user found") fullname = getAttribute(self.configuration["fullname_attribute"], username) email = getAttribute(self.configuration["email_attribute"]) user = dbutils.User.create( db, username, fullname, email, email_verified=None) db.setUser(user, authentication_labels) 
self.cache.set(fields, (user.id, authentication_labels)) def getAuthenticationLabels(self, user): connection = self.__startConnection() fields = self.configuration["fields_from_user"](user) authentication_labels = set() if "require_groups" in self.configuration: for group in self.configuration["require_groups"]: if "label" in group \ and self.__isMemberOfGroup(connection, group, fields): authentication_labels.add(group["label"]) if "additional_groups" in self.configuration: for group in self.configuration["additional_groups"]: if self.__isMemberOfGroup(connection, group, fields): authentication_labels.add(group["label"]) return authentication_labels if configuration.auth.DATABASE == "ldap": auth.DATABASE = LDAP() ================================================ FILE: src/auth/oauth.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import urllib import dbutils import auth import textutils import request class OAuthProvider(auth.Provider): def start(self, db, req, target_url=None): state = auth.getToken() authorize_url = self.getAuthorizeURL(state) if authorize_url is None: return None if target_url is None: target_url = req.getParameter("target", None) with db.updating_cursor("oauthstates") as cursor: cursor.execute("""INSERT INTO oauthstates (state, url) VALUES (%s, %s)""", (state, target_url)) return authorize_url def finish(self, db, req): if req.method != "GET": raise auth.InvalidRequest code = req.getParameter("code", default=None) state = req.getParameter("state", default=None) if code is None or state is None: raise auth.InvalidRequest("Missing parameter(s)") cursor = db.cursor() cursor.execute("""SELECT url FROM oauthstates WHERE state=%s""", (state,)) row = cursor.fetchone() if not row: raise auth.InvalidRequest("Invalid OAuth state: %s" % state) (target_url,) = row access_token = self.getAccessToken(code) if access_token is None: raise auth.Failure("failed to get access token") user_data = self.getUserData(access_token) if user_data is None: raise auth.Failure("failed to get user data") account = textutils.encode(user_data["account"]) username = textutils.encode(user_data["username"]) email = user_data["email"] email = textutils.encode(email) if email else None fullname = textutils.encode(user_data.get("fullname", username)) cursor.execute("""SELECT id, uid FROM externalusers WHERE provider=%s AND account=%s""", (self.name, account)) row = cursor.fetchone() if row: external_user_id, user_id = row else: with db.updating_cursor("externalusers") as updating_cursor: updating_cursor.execute( """INSERT INTO externalusers (provider, account, email) VALUES (%s, %s, %s) RETURNING id""", (self.name, account, email)) external_user_id, = updating_cursor.fetchone() user_id = None user = None if user_id is not None: user = dbutils.User.fromId(db, user_id) else: if auth.isValidUserName(username) \ 
and self.configuration.get("bypass_createuser"): try: dbutils.User.fromName(db, username) except dbutils.NoSuchUser: user = dbutils.User.create( db, username, fullname, email, email_verified=None, external_user_id=external_user_id) user.sendUserCreatedMail("wsgi[oauth/%s]" % self.name, { "provider": self.name, "account": account }) if user is None: token = auth.getToken() with db.updating_cursor("externalusers") as updating_cursor: updating_cursor.execute( """UPDATE externalusers SET token=%s WHERE id=%s""", (token, external_user_id)) data = { "provider": self.name, "account": account, "token": token } if target_url: data["target"] = target_url if username: data["username"] = username if email: data["email"] = email if fullname: data["fullname"] = fullname target_url = "/createuser?%s" % urllib.urlencode(data) if user is not None: auth.createSessionId(db, req, user) raise request.Found(target_url or "/") def validateToken(self, db, account, token): cursor = db.cursor() cursor.execute("""SELECT token FROM externalusers WHERE provider=%s AND account=%s""", (self.name, account)) row = cursor.fetchone() return row and token == row[0] ================================================ FILE: src/auth/provider.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import configuration class Provider(object): def __init__(self, name): self.name = name self.configuration = configuration.auth.PROVIDERS[name] def getTitle(self): """Title, suitable as X in 'Sign in using your X'""" pass def getAccountIdDescription(self): """Description of the value used as the account identifier""" pass def start(self, db, req, target_url=None): pass def finish(self, db, req): pass ================================================ FILE: src/auth/providers/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import github import google import dummy ================================================ FILE: src/auth/providers/dummy.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations under # the License. import hashlib import urllib import configuration import auth class DummyOAuthProvider(auth.OAuthProvider): """Dummy OAuth authentication provider used by automatic tests""" def __init__(self, name): super(DummyOAuthProvider, self).__init__(name) self.access_token = None def getTitle(self): """Title, suitable as X in 'Sign in using your X'""" return self.name.capitalize() + " account" def getAccountIdDescription(self): return self.name.capitalize() + " username" def getAccountURL(self, name): return "https://example.com/user/%s" % name def getAuthorizeURL(self, state): query = urllib.urlencode({ "state": state }) return "https://example.com/authorize?%s" % query def getAccessToken(self, code): if code == "incorrect": raise auth.Failure("Incorrect code") self.access_token = hashlib.sha1(code).hexdigest() return self.access_token def getUserData(self, access_token): if access_token != self.access_token: raise auth.Failure("Invalid access token") return { "account": "account-" + self.name, "username": self.name, "email": self.name + "@example.org", "fullname": self.name.capitalize() + " von Testing" } if configuration.debug.IS_TESTING: def createProvider(name, allow_user_registration, verify_email_addresses, bypass_createuser): configuration.auth.PROVIDERS[name] = { "enabled": True, "allow_user_registration": allow_user_registration, "verify_email_addresses": verify_email_addresses, "client_id": "DummyClientId", "client_secret": "DummyClientSecret", "bypass_createuser": bypass_createuser } auth.PROVIDERS[name] = DummyOAuthProvider(name) createProvider("alice", False, False, False) createProvider("carol", True, False, False) createProvider("felix", True, False, True) createProvider("gina", True, True, False) ================================================ FILE: src/auth/providers/github.py ================================================ # -*- mode: python; encoding: 
utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import urllib

import urlutils
import configuration
import auth

class GitHubAuthentication(auth.OAuthProvider):
    """OAuth2 'sign in using your GitHub account' provider.

       Registered into auth.PROVIDERS at the bottom of this file when the
       "github" provider is present and enabled in configuration.auth."""

    def __init__(self):
        super(GitHubAuthentication, self).__init__("github")
        # |self.configuration| is set up by auth.OAuthProvider.__init__() based
        # on the provider name passed to it.
        self.client_id = self.configuration["client_id"]
        self.client_secret = self.configuration["client_secret"]
        self.redirect_uri = self.configuration["redirect_uri"]

    def getTitle(self):
        """Title, suitable as X in 'Sign in using your X'"""
        return "GitHub account"

    def getAccountIdDescription(self):
        # Shown in UI next to the account id input field.
        return "GitHub username"

    def getAccountURL(self, name):
        # Link to the user's public GitHub profile page.
        return "https://github.com/%s" % name

    def getAuthorizeURL(self, state):
        # The |state| value is included verbatim in the authorization URL; the
        # OAuth flow round-trips it back to us via the redirect URI.
        query = urllib.urlencode({ "client_id": self.client_id,
                                   "redirect_uri": self.redirect_uri,
                                   "state": state })
        return "https://github.com/login/oauth/authorize?%s" % query

    def getAccessToken(self, code):
        """Exchange a temporary OAuth |code| for an access token.

           Returns the access token string, or None on any non-fatal failure
           (HTTP error, unparsable or incomplete response).  Raises
           auth.Failure if GitHub reports an explicit error."""
        response = urlutils.post(
            "https://github.com/login/oauth/access_token",
            data={ "client_id": self.client_id,
                   "client_secret": self.client_secret,
                   "code": code },
            # Without this header GitHub responds with form-encoded data
            # rather than JSON.
            headers={ "Accept": "application/json" })
        if response.status_code != 200:
            return None
        data = response.json()
        if data is None:
            return None
        elif "error" in data:
            raise auth.Failure(data["error"])
        elif "access_token" not in data:
            return None
        return data["access_token"]

    def getUserData(self, access_token):
        """Fetch the signed-in user's GitHub profile.

           Returns a dict with "account"/"username"/"email"/"fullname" keys,
           or None on failure.  "email" and "fullname" may be None if the
           GitHub profile does not expose them."""
        # NOTE(review): passing the access token as a query parameter exposes
        # it in logs/proxies; an Authorization header would be safer — confirm
        # whether urlutils.get supports per-request headers here.
        response = urlutils.get(
            "https://api.github.com/user?access_token=%s" % access_token)
        if response.status_code != 200:
            return None
        data = response.json()
        if data is None or "login" not in data:
            return None
        return { "account": data["login"],
                 "username": data["login"],
                 "email": data.get("email"),
                 "fullname": data.get("name") }

# Register the provider only when configured and enabled.
if "github" in configuration.auth.PROVIDERS:
    if configuration.auth.PROVIDERS["github"]["enabled"]:
        auth.PROVIDERS["github"] = GitHubAuthentication()


================================================
FILE: src/auth/providers/google.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import urllib

import urlutils
import configuration
import auth

class GoogleAuthentication(auth.OAuthProvider):
    """OAuth2/OpenID Connect 'sign in using your Google account' provider.

       Registered into auth.PROVIDERS at the bottom of this file when the
       "google" provider is present and enabled in configuration.auth."""

    def __init__(self):
        super(GoogleAuthentication, self).__init__("google")
        self.client_id = self.configuration["client_id"]
        self.client_secret = self.configuration["client_secret"]
        self.redirect_uri = self.configuration["redirect_uri"]

    def getTitle(self):
        """Title, suitable as X in 'Sign in using your X'"""
        return "Google account"

    def getAccountIdDescription(self):
        return "Google (e.g. GMail) email address"

    def getAccountURL(self, name):
        # Google accounts have no canonical public profile URL to link to.
        return None

    def getAuthorizeURL(self, state):
        # "openid email" scope: we only need an identifier and the email
        # address; |state| is round-tripped through the OAuth flow.
        query = urllib.urlencode({ "client_id": self.client_id,
                                   "response_type": "code",
                                   "scope": "openid email",
                                   "redirect_uri": self.redirect_uri,
                                   "state": state })
        return "https://accounts.google.com/o/oauth2/auth?" + query

    def getAccessToken(self, code):
        """Exchange a temporary OAuth |code| for an access token.

           Returns the access token string, or None on any non-fatal failure.
           Raises auth.Failure if Google reports an explicit error."""
        response = urlutils.post(
            "https://accounts.google.com/o/oauth2/token",
            data={ "code": code,
                   "client_id": self.client_id,
                   "client_secret": self.client_secret,
                   "redirect_uri": self.redirect_uri,
                   "grant_type": "authorization_code" },
            headers={ "Accept": "application/json" },
            # NOTE(review): verify=False disables TLS certificate verification
            # for this token exchange, which makes the client secret and token
            # interceptable by a man-in-the-middle — confirm whether this
            # workaround is still needed and remove it if not.
            verify=False)
        if response.status_code != 200:
            return None
        data = response.json()
        if data is None:
            return None
        elif "error" in data:
            raise auth.Failure(data["error"])
        elif "access_token" not in data:
            return None
        return data["access_token"]

    def getUserData(self, access_token):
        """Fetch the signed-in user's profile from the userinfo endpoint.

           Returns a dict with "account"/"username"/"email"/"fullname" keys,
           or None on failure.  The local-part of the email address is used
           as the suggested Critic username."""
        response = urlutils.get(
            "https://www.googleapis.com/oauth2/v3/userinfo",
            params={ "access_token": access_token },
            # NOTE(review): see getAccessToken() — TLS verification disabled.
            verify=False)
        if response.status_code != 200:
            return None
        data = response.json()
        if data is None or "email" not in data:
            return None
        email = data["email"]
        username = email.partition("@")[0]
        return { "account": email,
                 "username": username,
                 "email": email,
                 "fullname": data.get("name", username) }

# Register the provider only when configured and enabled.
if "google" in configuration.auth.PROVIDERS:
    if configuration.auth.PROVIDERS["google"]["enabled"]:
        auth.PROVIDERS["google"] = GoogleAuthentication()


================================================
FILE: src/auth/session.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2015 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import auth
import dbutils
import configuration
import request

def isInsecurePath(req):
    """Check if the request is for an insecure path

       "Insecure" here means that unauthenticated/anonymous access should be
       allowed even if the system doesn't normally allow anonymous access."""
    if configuration.base.SESSION_TYPE == "cookie":
        # Login machinery must be accessible before being logged in.
        if req.path in ("login", "validatelogin"):
            return True
        if configuration.base.ALLOW_USER_REGISTRATION:
            # User creation machinery also, if user creation is enabled.
            if req.path in ("createuser", "registeruser"):
                return True
    # Allow unauthenticated access to all static resources.
    if req.path.startswith("static-resource/"):
        return True
    return False

# Optional site-local hook for mapping a username to an email address; fall
# back to a no-op when no customization is installed.
try:
    from customization.email import getUserEmailAddress
except ImportError:
    def getUserEmailAddress(_username):
        return None

def createSessionId(db, req, user, authentication_labels=None):
    """Create a session record for |user| and set session cookies on |req|.

       The "sid" cookie (secure; not sent over plain HTTP) carries the actual
       session key; the non-secure "has_sid" cookie only signals that a
       session should exist, so mixed HTTP/HTTPS setups can redirect."""
    sid = auth.getToken()
    # Authentication labels are stored as a single sorted, "|"-separated
    # string; see the matching split in checkSession().
    if authentication_labels:
        labels = "|".join(sorted(authentication_labels))
    else:
        labels = ""

    with db.updating_cursor("usersessions") as cursor:
        cursor.execute("""INSERT INTO usersessions (key, uid, labels)
                               VALUES (%s, %s, %s)""",
                       (sid, user.id, labels))

    req.setCookie("sid", sid, secure=True)
    req.setCookie("has_sid", "1")

def deleteSessionId(db, req, user):
    """Delete |user|'s current session (sign out).

       Returns True if a valid session cookie was found and deleted, False
       otherwise.  The "has_sid" cookie is set to "0" (rather than deleted)
       so checkSession() can show a "you have signed out" page."""
    sid = req.cookies.get("sid", None)

    if sid is None:
        return False

    req.deleteCookie("sid")
    req.setCookie("has_sid", "0")

    cursor = db.readonly_cursor()
    cursor.execute("""SELECT 1
                        FROM usersessions
                       WHERE key=%s
                         AND uid=%s""",
                   (sid, user.id))

    if not cursor.fetchone():
        # Not a valid session cookie..?
        return False

    with db.updating_cursor("usersessions") as cursor:
        cursor.execute("""DELETE FROM usersessions
                           WHERE key=%s
                             AND uid=%s""",
                       (sid, user.id))

    return True

def checkSession(db, req):
    """Check if the request is part of a session and if so set req.user

       Raises an request.HTTPResponse exception if immediate action is
       required, otherwise sets req.user to non-None (but possibly to the
       anonymous user) and returns.

       NOTE: the steps below are order-sensitive; cookie sessions are checked
       before the Authorization header, and anonymous fallback comes last."""

    # Step 1: If the host web server is supposed to authenticate users, use
    #         the $REMOTE_USER environment variable.
    if configuration.base.AUTHENTICATION_MODE == "host":
        # Strip white-space, since Apache is known to do this internally when
        # authenticating, but then passing on the original unstripped string
        # to us on success.
        username = req.getEnvironment().get("REMOTE_USER", "").strip()
        if not username:
            # No REMOTE_USER variable.  If we support anonymous users, this
            # is fine, otherwise it indicates a configuration error.
            if configuration.base.ALLOW_ANONYMOUS_USER:
                db.setUser(dbutils.User.makeAnonymous())
                return
            raise request.MissingWSGIRemoteUser()

        # We have a username.  Fetch the (or create a) matching user record.
        try:
            db.setUser(dbutils.User.fromName(db, username))
        except dbutils.NoSuchUser:
            email = getUserEmailAddress(username)
            db.setUser(dbutils.User.create(
                db, username, username, email, email_verified=None))
        return

    # Step 2: If cookie based sessions are used, check if there is a valid
    #         session cookie.
    if configuration.base.SESSION_TYPE == "cookie":
        sid = req.cookies.get("sid")
        if sid:
            cursor = db.cursor()
            cursor.execute(
                """SELECT uid, labels, EXTRACT('epoch' FROM NOW() - atime) AS age
                     FROM usersessions
                    WHERE key=%s""",
                (sid,))

            row = cursor.fetchone()
            if row:
                user_id, labels, session_age = row
                # SESSION_MAX_AGE == 0 means sessions never expire.
                if configuration.base.SESSION_MAX_AGE == 0 \
                        or session_age < configuration.base.SESSION_MAX_AGE:
                    # This is a valid session cookie.
                    user = dbutils.User.fromId(db, user_id)
                    if labels is None:
                        labels = auth.DATABASE.getAuthenticationLabels(user)
                    else:
                        # Stored as "|"-separated; see createSessionId().
                        labels = labels.split("|") if labels else ()
                    db.setUser(user, labels)
                    return

                # The session cookie is too old.  Delete it from the database.
                with db.updating_cursor("usersessions") as cursor:
                    cursor.execute("""DELETE FROM usersessions
                                       WHERE key=%s""",
                                   (sid,))

            # The session cookie is not valid.  Delete it from the browser.
            req.deleteCookie("sid")
            # Also delete the has_sid cookie, if there is one.
            req.deleteCookie("has_sid")

            # Since the session seems to have expired, offer the user to sign
            # in again by redirecting to the login page.  Signing in is
            # optional though, meaning the login page will have a "Continue
            # anonymously" link (if anonymous access is allowed.)
            #
            # Exception: Don't do this if /login is being requested.
            if req.allowRedirect(307) and req.path != "login":
                raise request.NeedLogin(req, optional=True)
        elif req.cookies.get("has_sid") == "1":
            # The request had no session cookie, but had the has_sid cookie
            # that indicates the browser ought to have a sesssion cookie.
            # Typically, this means a signed in user accesses a mixed
            # HTTP/HTTPS system over HTTP.  If so, redirect the user to HTTPS.
            req.ensureSecure()

            # The above call would have raised if a redirect was meaningful.
            # If it didn't, the has_sid cookie is bogus, so delete it.
            req.deleteCookie("has_sid")
        elif req.cookies.get("has_sid") == "0":
            # This indicates that the user just signed out.  If anonymous
            # access is not allowed, we'll redirect the user to the login page
            # again, which is sort of a bit unhelpful.
            #
            # Worse yet; if use of an external authentication provider is
            # enforced, the login page will redirect there, which might sign
            # the user back in, non-interactively.  In that case, signing out
            # would be impossible.
            #
            # So, instead, detect the sign-out and return a simple "you have
            # signed out" page in this case.

            # Delete the cookie.  This means that on reload, the user is
            # redirected to the login page again.  (This is to prevent the
            # user from getting stuck on this "you have signed out" page.)
            req.deleteCookie("has_sid")

            # Do the redirect if anonymous access isn't allowed.  Also don't
            # do it on the actual login page.
            if not configuration.base.ALLOW_ANONYMOUS_USER \
                    and req.path != "login":
                raise request.DisplayMessage(
                    title="You have signed out",
                    body="To use this system, you will need to sign in again.")

    # Step 3(a): Check if there's a valid HTTP Authorization header (even if
    #            cookie based sessions are typically used.)  If there is such
    #            a header, we assume HTTP authentication was meant to be used,
    #            and respond with a 401 Unauthorized response if
    #            authentication using the header fails.
    authorization_header = req.getRequestHeader("Authorization")
    if authorization_header:
        import base64

        try:
            authtype, base64_credentials = authorization_header.split(None, 1)
        except ValueError:
            authtype = "invalid"
        # Only HTTP Basic authentication is supported.
        if authtype.lower() != "basic":
            raise request.RequestHTTPAuthentication()

        try:
            credentials = base64.b64decode(base64_credentials)
        except (ValueError, TypeError) as error:
            raise request.RequestHTTPAuthentication()

        username, _, password = credentials.partition(":")
        username = username.strip()

        if username and password:
            try:
                auth.DATABASE.performHTTPAuthentication(db, username, password)
                req.session_type = "httpauth"
                return
            except auth.AuthenticationFailed:
                pass

        # Credentials missing or wrong: challenge the client (401).
        raise request.RequestHTTPAuthentication()

    # Step 3(b): If the request has a "use_httpauth" cookie, request/require
    #            HTTP authentication.  This is a just a convenience feature
    #            for clients using HTTP stacks that only send credentials in
    #            response to server challenges.  (If cookie sessions are used,
    #            no such challenge would normally be returned, we'd rather
    #            redirect to the login page.)
    if req.cookies.get("use_httpauth"):
        raise request.RequestHTTPAuthentication()
    # Also do this for requests with a "httpauth=yes" query parameter.
    if req.getParameter("httpauth", "no") == "yes":
        raise request.RequestHTTPAuthentication()

    # Step 4: If anonymous access is supported or if it should be allowed as
    #         an exception for the accessed path, leave the session anonymous.
    if configuration.base.ALLOW_ANONYMOUS_USER or isInsecurePath(req):
        db.setUser(dbutils.User.makeAnonymous())
        req.session_type = None
        return

    # Step 5: If HTTP authentication is required (i.e. no session cookies)
    #         then request that now.
    if configuration.base.SESSION_TYPE == "httpauth":
        raise request.RequestHTTPAuthentication()

    # Step 6: Cookie based sessions are enabled, and not anonymous access.
    #         If this is a POST or PUT request, respond with 403 Forbidden,
    #         and otherwise redirect to the login page.
    if not req.allowRedirect(307):
        raise request.Forbidden("Valid user session required")

    raise request.NeedLogin(req, optional=req.cookies.has_key("has_sid"))


================================================
FILE: src/background/__init__.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
================================================
FILE: src/background/branchtracker.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import sys
import os
import time
import traceback

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..")))

import background.utils
import dbutils
import gitutils
import mailutils
import configuration

# Git (git-send-pack) appends a line suffix to its output.  This suffix
# depends on the $TERM value.  When $TERM is "dumb", the suffix is 8 spaces.
# We strip this suffix if it's present.  (If we incorrectly strip 8 spaces not
# actually added by Git, it's not the end of the world.)
#
# See https://github.com/git/git/blob/master/sideband.c for details.
DUMB_SUFFIX = "        "

class BranchTracker(background.utils.BackgroundProcess):
    """Background service that polls remote repositories and updates local
       tracked branches (and tags, for the pseudo branch name "*")."""

    def __init__(self):
        super(BranchTracker, self).__init__(service=configuration.services.BRANCHTRACKER)

    def update(self, trackedbranch_id, repository_id, local_name, remote,
               remote_name, forced):
        """Fetch one tracked branch (or all tags when local_name == "*") from
           its remote and push the result into the local repository.

           Returns True to keep the tracking enabled, False to disable it
           (which happens when the "git push" into the local repository is
           rejected, e.g. by Critic's own pre-receive hook)."""
        repository = gitutils.Repository.fromId(self.db, repository_id)

        try:
            # All fetching happens in a temporary "relay" copy of the
            # repository; results are then pushed to the real one so that
            # Critic's hook machinery runs.
            with repository.relaycopy("branchtracker") as relay:
                relay.run("remote", "add", "source", remote)

                current = None
                new = None
                tags = []

                if local_name == "*":
                    # Tag tracking: fetch all tags and record which ones were
                    # newly created, based on git's human-readable output.
                    output = relay.run("fetch", "source", "refs/tags/*:refs/tags/*",
                                       include_stderr=True)
                    for line in output.splitlines():
                        if "[new tag]" in line:
                            tags.append(line.rsplit(" ", 1)[-1])
                else:
                    relay.run("fetch", "--quiet", "--no-tags", "source",
                              "refs/heads/%s:refs/remotes/source/%s" % (remote_name, remote_name))

                    try:
                        current = repository.revparse("refs/heads/%s" % local_name)
                    except gitutils.GitReferenceError:
                        # It's okay if the local branch doesn't exist (yet).
                        pass

                    new = relay.run("rev-parse", "refs/remotes/source/%s" % remote_name).strip()

                if current != new or tags:
                    if local_name == "*":
                        refspecs = [("refs/tags/%s" % tag) for tag in tags]
                    else:
                        refspecs = ["refs/remotes/source/%s:refs/heads/%s" % (remote_name, local_name)]

                    # TERM=dumb gives predictable (strippable) output
                    # decoration; CRITIC_FLAGS tells the receiving hook which
                    # tracked branch this push is for.
                    returncode, stdout, stderr = relay.run(
                        "push", "--force", "origin", *refspecs,
                        env={ "CRITIC_FLAGS": "trackedbranch_id=%d" % trackedbranch_id,
                              "TERM": "dumb" },
                        check_errors=False)

                    stderr_lines = []
                    remote_lines = []

                    for line in stderr.splitlines():
                        if line.endswith(DUMB_SUFFIX):
                            line = line[:-len(DUMB_SUFFIX)]
                        stderr_lines.append(line)
                        # "remote: " prefixed lines are output from Critic's
                        # git hook on the receiving side.
                        if line.startswith("remote: "):
                            line = line[8:]
                            remote_lines.append(line)

                    if returncode == 0:
                        if local_name == "*":
                            for tag in tags:
                                self.info("  updated tag: %s" % tag)
                        elif current:
                            self.info("  updated branch: %s: %s..%s" % (local_name, current[:8], new[:8]))
                        else:
                            self.info("  created branch: %s: %s" % (local_name, new[:8]))

                        hook_output = ""

                        for line in remote_lines:
                            self.debug("  [hook] " + line)
                            hook_output += line + "\n"

                        # Log successful branch updates (tags are not logged).
                        if local_name != "*":
                            cursor = self.db.cursor()
                            cursor.execute("INSERT INTO trackedbranchlog (branch, from_sha1, to_sha1, hook_output, successful) VALUES (%s, %s, %s, %s, %s)",
                                           (trackedbranch_id, current if current else '0' * 40, new if new else '0' * 40, hook_output, True))
                            self.db.commit()
                    else:
                        # The push was rejected: log it, mail the branch's
                        # users, and disable the tracking.
                        if local_name == "*":
                            error = "update of tags from %s failed" % remote
                        else:
                            error = "update of branch %s from %s in %s failed" % (local_name, remote_name, remote)

                        hook_output = ""

                        for line in stderr_lines:
                            error += "\n    " + line

                        for line in remote_lines:
                            hook_output += line + "\n"

                        self.error(error)

                        cursor = self.db.cursor()

                        if local_name != "*":
                            cursor.execute("""INSERT INTO trackedbranchlog (branch, from_sha1, to_sha1, hook_output, successful)
                                              VALUES (%s, %s, %s, %s, %s)""",
                                           (trackedbranch_id, current, new, hook_output, False))
                            self.db.commit()

                        cursor.execute("SELECT uid FROM trackedbranchusers WHERE branch=%s", (trackedbranch_id,))

                        recipients = [dbutils.User.fromId(self.db, user_id) for (user_id,) in cursor]

                        if local_name == "*":
                            mailutils.sendMessage(recipients, "%s: update of tags from %s stopped!" % (repository.name, remote), """\
The automatic update of tags in %s:%s
from the remote %s
failed, and has been disabled.  Manual intervention is required to resume the
automatic updating.

Output from Critic's git hook
-----------------------------

%s""" % (configuration.base.HOSTNAME, repository.path, remote, hook_output))
                        else:
                            mailutils.sendMessage(recipients, "%s: update from %s in %s stopped!" % (local_name, remote_name, remote), """\
The automatic update of the branch '%s' in %s:%s
from the branch '%s' in %s
failed, and has been disabled.  Manual intervention is required to resume the
automatic updating.

Output from Critic's git hook
-----------------------------

%s""" % (local_name, configuration.base.HOSTNAME, repository.path, remote_name, remote, hook_output))

                        # Disable the tracking.
                        return False
                else:
                    self.debug("  fetched %s in %s; no changes" % (remote_name, remote))

            # Everything went well; keep the tracking enabled.
            return True
        except:
            # NOTE(review): bare except also swallows SystemExit /
            # KeyboardInterrupt — consider narrowing to Exception.
            exception = traceback.format_exc()

            if local_name == "*":
                error = "  update of tags from %s failed" % remote
            else:
                error = "  update of branch %s from %s in %s failed" % (local_name, remote_name, remote)

            for line in exception.splitlines():
                error += "\n    " + line

            self.error(error)

            # The expected failure (in case of diverged branches, or review
            # branch irregularities) is a failed "git push" and is handled
            # above.  This is an unexpected failure, so might be intermittent.
            # Leave the tracking enabled and spam the system administrator(s).
            return True

    def run(self):
        """Main service loop: process all due tracked branches, then sleep
           until the next scheduled update or maintenance window."""
        self.db = dbutils.Database.forSystem()

        while not self.terminated:
            self.interrupted = False

            cursor = self.db.cursor()
            cursor.execute("""SELECT id, repository, local_name, remote, remote_name, forced
                                FROM trackedbranches
                               WHERE NOT disabled
                                 AND (next IS NULL OR next < NOW())
                            ORDER BY next ASC NULLS FIRST""")

            rows = cursor.fetchall()

            for trackedbranch_id, repository_id, local_name, remote, remote_name, forced in rows:
                if local_name == "*":
                    self.info("checking tags in %s" % remote)
                else:
                    self.info("checking %s in %s" % (remote_name, remote))

                # Mark the branch as updating and schedule its next update
                # before starting, so a crash doesn't retry immediately.
                cursor.execute("""UPDATE trackedbranches
                                     SET previous=NOW(),
                                         next=NOW() + delay,
                                         updating=TRUE
                                   WHERE id=%s""",
                               (trackedbranch_id,))
                self.db.commit()

                if self.update(trackedbranch_id, repository_id, local_name, remote, remote_name, forced):
                    cursor.execute("""UPDATE trackedbranches
                                         SET updating=FALSE
                                       WHERE id=%s""",
                                   (trackedbranch_id,))
                    cursor.execute("""SELECT next::text
                                        FROM trackedbranches
                                       WHERE id=%s""",
                                   (trackedbranch_id,))
                    self.info("  next scheduled update at %s" % cursor.fetchone())
                else:
                    cursor.execute("""UPDATE trackedbranches
                                         SET updating=FALSE,
                                             disabled=TRUE
                                       WHERE id=%s""",
                                   (trackedbranch_id,))
                    self.info("  tracking disabled")

                self.db.commit()

                if self.terminated:
                    break

            # "next IS NULL" rows are high-priority (hook-triggered) updates;
            # only go to sleep when none are pending.
            cursor.execute("""SELECT 1
                                FROM trackedbranches
                               WHERE NOT disabled
                                 AND next IS NULL""")

            if not cursor.fetchone():
                maintenance_delay = self.run_maintenance()

                if maintenance_delay is None:
                    maintenance_delay = 3600

                cursor.execute("""SELECT COUNT(*), EXTRACT('epoch' FROM (MIN(next) - NOW()))
                                    FROM trackedbranches
                                   WHERE NOT disabled""")

                enabled_branches, update_delay = cursor.fetchone()

                if not enabled_branches:
                    self.info("nothing to do")
                    update_delay = 3600
                else:
                    update_delay = max(0, int(update_delay))

                delay = min(maintenance_delay, update_delay)

                if delay:
                    self.signal_idle_state()

                    self.debug("sleeping %d seconds" % delay)

                    gitutils.Repository.forEach(self.db, lambda db, repository: repository.stopBatch())
                    self.db.commit()

                    before = time.time()
                    time.sleep(delay)

                    # A SIGHUP (from the branch tracker hook) interrupts the
                    # sleep so pushed updates are handled promptly.
                    if self.interrupted:
                        self.debug("sleep interrupted after %.2f seconds" % (time.time() - before))

            self.db.commit()

def start_service():
    tracker = BranchTracker()
    return tracker.start()

background.utils.call("branchtracker", start_service)


================================================
FILE: src/background/branchtrackerhook.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import sys
import os
import signal
import time

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..")))

import dbutils

from textutils import json_encode, json_decode

# This file runs in two modes: with --wait-for-update it is a short-lived
# child process that polls the database for the outcome of a triggered branch
# update; without it, it is the long-running hook server.
if "--wait-for-update" in sys.argv:
    # Child mode: read parameters as JSON from stdin, poll the database until
    # the update produces hook output (or finishes silently, or times out),
    # then report the result as JSON on stdout.
    data = json_decode(sys.stdin.read())

    branch_id = data["branch_id"]
    timeout = data["timeout"]
    log_offset = data["log_offset"]

    db = dbutils.Database.forSystem()
    cursor = db.cursor()
    cursor.execute("SELECT MAX(time) FROM trackedbranchlog WHERE branch=%s", (branch_id,))

    last_log_entry = cursor.fetchone()[0]

    start = time.time()
    status = None
    output = ""

    while time.time() - start < timeout:
        time.sleep(0.5)

        # Commit to end the current transaction so we see fresh data.
        db.commit()

        cursor = db.cursor()
        cursor.execute("SELECT hook_output FROM trackedbranchlog WHERE branch=%s ORDER BY time ASC OFFSET %s", (branch_id, log_offset))

        rows = cursor.fetchall()
        if rows:
            for (hook_output,) in rows:
                output += hook_output
            status = "output"
            break

        cursor.execute("SELECT updating FROM trackedbranches WHERE id=%s", (branch_id,))

        if not cursor.fetchone()[0]:
            # Update performed, but no log entries added.
            status = "no-output"
            break
    else:
        # while/else: the loop ran to completion without break.
        status = "timeout"

    sys.stdout.write(json_encode({ "status": status, "output": output or None }))
    sys.stdout.flush()

    db.close()
else:
    import configuration

    from background.utils import PeerServer
    from textutils import json_decode
    from subprocess import STDOUT

    class BranchTrackerHook(PeerServer):
        """Server that the git post-receive hook of a remote connects to in
           order to trigger immediate updates of tracked branches/tags."""

        class WaitForUpdate(PeerServer.ChildProcess):
            """Spawns this same script with --wait-for-update and relays the
               resulting hook output back to the connected git hook client."""

            def __init__(self, client, branch_id, timeout, log_offset):
                super(BranchTrackerHook.WaitForUpdate, self).__init__(client.server, [sys.executable, sys.argv[0], "--wait-for-update"], stderr=STDOUT)
                self.client = client
                # "wait\n" tells the git hook client to keep the connection
                # open and wait for output.
                self.client.write("wait\n")
                self.write(json_encode({ "branch_id": branch_id,
                                         "timeout": timeout,
                                         "log_offset": log_offset }))
                self.close()

            def handle_input(self, _file, data):
                try:
                    data = json_decode(data)
                except ValueError:
                    self.server.error("invalid response from wait-for-update child: %r" % data)
                    self.client.close()

                if data["status"] == "output":
                    self.client.write(data["output"])
                    self.server.debug("  hook output written to client")
                elif data["status"] == "no-output":
                    self.server.debug("  update produced no hook output")
                else:
                    self.server.debug("  timeout")

                self.client.close()

        class Client(PeerServer.SocketPeer):
            """One connection from a remote's git hook, carrying a JSON
               description of what was pushed."""

            def __init__(self, server, peersocket, peeraddress):
                super(BranchTrackerHook.Client, self).__init__(server, peersocket)
                self.__peeraddress = peeraddress

            def handle_input(self, _file, data):
                try:
                    data = json_decode(data)
                except ValueError:
                    return

                message = "connection from %s:%d:" % self.__peeraddress
                message += "\n  repository: %s" % data["repository"]
                if data.has_key("timeout"):
                    message += "\n  timeout: %d" % data["timeout"]
                if data["branches"]:
                    message += "\n  branches: %s" % ", ".join(data["branches"])
                if data["tags"]:
                    message += "\n  tags: %s" % ", ".join(data["tags"])
                self.server.info(message)

                db = dbutils.Database.forSystem()

                try:
                    cursor = db.cursor()
                    notify_tracker = False
                    wait_for_reply = False

                    # Make sure the 'knownremotes' table has this remote
                    # listed as "pushing" since it obviously is.
                    cursor.execute("""SELECT pushing
                                        FROM knownremotes
                                       WHERE url=%s""",
                                   (data["repository"],))
                    row = cursor.fetchone()

                    if not row:
                        cursor.execute("""INSERT INTO knownremotes (url, pushing)
                                               VALUES (%s, TRUE)""",
                                       (data["repository"],))
                    elif not row[0]:
                        cursor.execute("""UPDATE knownremotes
                                             SET pushing=TRUE
                                           WHERE url=%s""",
                                       (data["repository"],))

                    # If we just recorded this remote as "pushing," adjust the
                    # configured updating frequency of any existing tracked
                    # branches from it.  (A pushing remote notifies us, so
                    # frequent polling is unnecessary.)
                    if not row or not row[0]:
                        cursor.execute("""UPDATE trackedbranches
                                             SET delay='1 week'
                                           WHERE remote=%s""",
                                       (data["repository"],))

                    for branch in data["branches"]:
                        cursor.execute("""SELECT id, local_name
                                            FROM trackedbranches
                                           WHERE remote=%s
                                             AND remote_name=%s
                                             AND NOT disabled
                                             AND next IS NOT NULL""",
                                       (data["repository"], branch))
                        row = cursor.fetchone()
                        if row:
                            branch_id, local_name = row
                            # next=NULL makes the branch tracker service pick
                            # this branch up immediately.
                            cursor.execute("""UPDATE trackedbranches
                                                 SET next=NULL
                                               WHERE id=%s""",
                                           (branch_id,))
                            notify_tracker = True
                            self.server.debug("tracked branch: %s" % local_name)

                            # For a single pushed review branch ("r/..."),
                            # relay the review hook's output to the pusher.
                            if len(data["branches"]) == 1 and local_name.startswith("r/"):
                                wait_for_reply = (True, branch_id)
                                self.server.debug("  will wait for reply")

                    if data["tags"]:
                        cursor.execute("""SELECT id
                                            FROM trackedbranches
                                           WHERE remote=%s
                                             AND remote_name=%s
                                             AND NOT disabled
                                             AND next IS NOT NULL""",
                                       (data["repository"], "*"))
                        row = cursor.fetchone()
                        if row:
                            branch_id = row[0]
                            cursor.execute("""UPDATE trackedbranches
                                                 SET next=NULL
                                               WHERE id=%s""",
                                           (branch_id,))
                            notify_tracker = True

                    db.commit()

                    if notify_tracker:
                        if wait_for_reply:
                            branch_id = wait_for_reply[1]
                            cursor.execute("SELECT COUNT(*) FROM trackedbranchlog WHERE branch=%s", (branch_id,))
                            log_offset = cursor.fetchone()[0]
                            self.server.add_peer(BranchTrackerHook.WaitForUpdate(self, branch_id, data.get("timeout", 30), log_offset))

                        # Wake the branch tracker service from its sleep via
                        # SIGHUP, using the pid recorded in its pid file.
                        try:
                            branchtracker_pid = int(open(configuration.services.BRANCHTRACKER["pidfile_path"]).read().strip())
                            os.kill(branchtracker_pid, signal.SIGHUP)
                        except:
                            self.server.exception()
                            return

                    if wait_for_reply:
                        # Keep the connection open; WaitForUpdate closes it.
                        return

                    self.close()
                finally:
                    try:
                        db.close()
                    except:
                        pass

        def __init__(self):
            super(BranchTrackerHook, self).__init__(service=configuration.services.BRANCHTRACKERHOOK)

        def handle_peer(self, peersocket, peeraddress):
            return BranchTrackerHook.Client(self, peersocket, peeraddress)

    server = BranchTrackerHook()
    sys.exit(server.start())


================================================
FILE: src/background/changeset.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import sys
import os.path

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..")))

import configuration
import dbutils
import background.utils

from textutils import json_decode, json_encode

# Two modes: with --json-job this is a worker child that computes a single
# changeset described by JSON on stdin; otherwise it is the job server.
if "--json-job" in sys.argv[1:]:
    from resource import getrlimit, setrlimit, RLIMIT_RSS
    from traceback import print_exc

    def perform_job():
        """Run one changeset creation job (child process mode).

           Reads the JSON request from stdin, writes the (updated) request
           back to stdout on success, or a traceback on failure."""
        # Raise our RSS limit to the configured value if it is lower;
        # changeset computation can be memory hungry.
        soft_limit, hard_limit = getrlimit(RLIMIT_RSS)
        rss_limit = configuration.services.CHANGESET["rss_limit"]

        if soft_limit < rss_limit:
            setrlimit(RLIMIT_RSS, (rss_limit, hard_limit))

        # Imported late: only needed (and paid for) in worker children.
        from changeset.create import createChangeset

        request = json_decode(sys.stdin.read())

        try:
            db = dbutils.Database.forSystem()
            createChangeset(db, request)
            db.close()

            sys.stdout.write(json_encode(request))
        except:
            # The parent job server reads stdout; emit the failed request and
            # the traceback for diagnosis.
            print "Request:"
            print json_encode(request, indent=2)
            print
            print_exc(file=sys.stdout)

    background.utils.call("changeset_job", perform_job)
else:
    from background.utils import JSONJobServer

    def describeRequest(request):
        """One-line human-readable description of a changeset request, used
           in log messages."""
        if request["changeset_type"] in ("direct", "merge", "conflicts"):
            return "%s (%s)" % (request["changeset_type"], request["child_sha1"][:8])
        else:
            return "custom (%s..%s)" % (request["parent_sha1"][:8], request["child_sha1"][:8])

    class ChangesetServer(JSONJobServer):
        """Job server that farms changeset computation out to --json-job
           child processes, and periodically purges old custom changesets."""

        def __init__(self):
            service = configuration.services.CHANGESET

            super(ChangesetServer, self).__init__(service)

            # Optional daily maintenance window for purging old custom
            # changesets, configured as (hour, minute).
            if "purge_at" in service:
                hour, minute = service["purge_at"]
                self.register_maintenance(hour=hour, minute=minute, callback=self.__purge)

        def execute_command(self, client, command):
            # Administrative "purge" command, in addition to the inherited
            # JSONJobServer commands.
            if command["command"] == "purge":
                purged_count = self.__purge()
                client.write(json_encode({ "status": "ok", "purged": purged_count }))
                client.close()
            else:
                super(ChangesetServer, self).execute_command(client, command)

        def request_started(self, job, request):
            super(ChangesetServer, self).request_started(job, request)

            self.debug("started: %s in %s [pid=%d]" % (describeRequest(request), request["repository_name"], job.pid))

        def request_finished(self, job, request, result):
            super(ChangesetServer, self).request_finished(job, request, result)

            if "error" not in result:
                for parent_sha1, changeset_id in result["changeset_ids"].items():
                    self.info("finished: %d for %s (%s..%s) in %s [pid=%d]" % (changeset_id, request["changeset_type"], parent_sha1[:8], request["child_sha1"][:8], request["repository_name"], job.pid))

        def __purge(self):
            """Delete custom changesets older than 3 months.  Returns the
               number of purged changesets."""
            db = dbutils.Database.forSystem()
            cursor = db.cursor()
            cursor.execute("""SELECT COUNT(*)
                                FROM changesets
                                JOIN customchangesets ON (customchangesets.changeset=changesets.id)
                               WHERE time < NOW() - INTERVAL '3 months'""")

            npurged = cursor.fetchone()[0]

            if npurged:
                self.info("purging %d old custom changesets" % npurged)
                cursor.execute("""DELETE FROM changesets
                                   WHERE id IN (SELECT changeset
                                                  FROM customchangesets
                                                 WHERE time < NOW() - INTERVAL '3 months')""")
                db.commit()

            db.close()

            return npurged

    def start_service():
        server = ChangesetServer()
        return server.start()

    background.utils.call("changeset", start_service)


================================================
FILE: src/background/daemon.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import os
import sys

def detach(parent_exit_hook=lambda: 0):
    """Detach the current process from the controlling terminal (classic
       double-fork daemonization).

       In the original parent, |parent_exit_hook| is called and its return
       value used as the process exit status.  The surviving grandchild gets
       a new session, umask 0, and stdin/stdout/stderr redirected to
       /dev/null."""
    # First fork: the parent exits, the child continues in the background.
    try:
        if os.fork() != 0:
            # Exit from parent process.
            sys.exit(parent_exit_hook())
    except OSError as error:
        print >>sys.stderr, "fork failed: %s" % error.message
        sys.exit(1)

    # Become session leader, then fork again so the daemon can never
    # reacquire a controlling terminal.
    os.setsid()
    os.umask(0)

    try:
        if os.fork() != 0:
            # Exit from parent process.
            sys.exit(0)
    except OSError as error:
        print >>sys.stderr, "fork failed: %s" % error.message
        sys.exit(1)

    # Flush before redirecting so buffered output isn't lost.
    sys.stdout.flush()
    sys.stderr.flush()

    stdin = open("/dev/null", "r")
    stdout = open("/dev/null", "a+")
    stderr = open("/dev/null", "a+")

    os.dup2(stdin.fileno(), sys.stdin.fileno())
    os.dup2(stdout.fileno(), sys.stdout.fileno())
    os.dup2(stderr.fileno(), sys.stderr.fileno())


================================================
FILE: src/background/extensionrunner.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import sys
import os
import time

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..")))

import configuration
import textutils
import background.utils
import extensions.execute
import communicate

class ExtensionRunner(background.utils.PeerServer):
    """Background service that runs extension processes on demand.

    A client connects over the service socket and sends a JSON request
    ({flavor, timeout, stdin}); the server hands it a (possibly cached)
    extension process, feeds it the stdin data, and replies with a JSON
    result ({status, stdout, stderr, returncode}).  A small pool of
    pre-started processes for the default flavor hides process start-up
    latency."""

    class Extension(background.utils.PeerServer.SpawnedProcess):
        # One running extension process with a hard deadline; the base
        # class kills it when the deadline passes.
        def __init__(self, server, client, process, timeout):
            super(ExtensionRunner.Extension, self).__init__(
                server, process, deadline=time.time() + timeout)
            self.client = client
            # Captured output; None until the corresponding pipe delivers.
            self.stdout = self.stderr = None
            self.did_time_out = False

        def handle_input(self, pipe, data):
            # Record the accumulated output from one of the process's pipes.
            assert pipe in (self.process.stdout, self.process.stderr)
            if pipe == self.process.stdout:
                self.stdout = data
            else:
                self.stderr = data

        def timed_out(self):
            super(ExtensionRunner.Extension, self).timed_out()
            self.server.debug("Timeout, killing process [pid=%d]" % self.pid)
            self.did_time_out = True

        def check_result(self):
            # Process finished (or was killed): report back to the client.
            self.client.finished(self)

    class Client(background.utils.PeerServer.SocketPeer):
        # A connected client; each connection carries a single request.
        def __init__(self, server, peersocket):
            super(ExtensionRunner.Client, self).__init__(server, peersocket)

        def handle_input(self, _file, data):
            # Decode the JSON request and start an extension process for it.
            data = textutils.json_decode(data)
            process = self.server.get_process(data["flavor"])
            extension = ExtensionRunner.Extension(
                self.server, self, process, data["timeout"])
            extension.write(data["stdin"])
            extension.close()
            self.server.add_peer(extension)

        def finished(self, process):
            # Translate the process outcome into the JSON reply and close
            # the connection.
            if process.did_time_out:
                status = status_text = "timeout"
            else:
                status = "ok"
                if process.returncode == 0:
                    status_text = "success"
                else:
                    status_text = "error(%d)" % process.returncode
            self.server.debug("Process finished: %s [pid=%d]"
                              % (status_text, process.pid))
            if process.stdout:
                self.server.debug(" stdout=%r" % process.stdout)
            if process.stderr:
                self.server.debug(" stderr=%r" % process.stderr)
            self.write(textutils.json_encode({
                "status": status,
                "stdout": process.stdout,
                "stderr": process.stderr,
                "returncode": process.returncode }))
            self.close()

    def __init__(self):
        service = configuration.services.EXTENSIONRUNNER
        super(ExtensionRunner, self).__init__(service=service)
        # Target size of the pool of pre-started default-flavor processes.
        self.target_cached_processes = service["cached_processes"]
        self.cached_processes = []

    def run(self):
        if not configuration.extensions.ENABLED:
            self.info("service stopping: extension support not enabled")
            return
        self.__fill_cache()
        super(ExtensionRunner, self).run()

    def handle_peer(self, peersocket, peeraddress):
        return ExtensionRunner.Client(self, peersocket)

    def peer_destroyed(self, peer):
        # A peer (client or extension process) went away; top up the pool
        # by at most one process.
        self.__cache_process()

    def signal_idle_state(self):
        super(ExtensionRunner, self).signal_idle_state()
        # Idle time is a good time to refill the process pool.
        self.__fill_cache()

    def get_process(self, flavor):
        # Prefer a cached process for the default flavor; otherwise start
        # a fresh process of the requested flavor.
        if flavor == configuration.extensions.DEFAULT_FLAVOR \
                and self.cached_processes:
            process = self.cached_processes.pop(0)
            self.debug("Using cached process [pid=%d]" % process.pid)
        else:
            process = extensions.execute.startProcess(flavor)
            self.debug("Started new process [pid=%d]" % process.pid)
        return process

    def __fill_cache(self):
        # Start cached processes until the pool is at its target size.
        while self.__cache_process():
            pass

    def __cache_process(self):
        # Start one cached process if the pool is below target.  Returns
        # True if a process was started, False if the pool is full.
        if len(self.cached_processes) < self.target_cached_processes:
            process = extensions.execute.startProcess(
                configuration.extensions.DEFAULT_FLAVOR)
            self.debug("Started cached process [pid=%d]" % process.pid)
            self.cached_processes.append(process)
            return True
        else:
            return False

def start_service():
    extensionrunner = ExtensionRunner()
    return extensionrunner.start()

background.utils.call("extensionrunner", start_service)

# ================================================
# FILE: src/background/extensiontasks.py
# ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import sys
import os
import time

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..")))

import configuration
import dbutils
import background.utils
import extensions.role.filterhook

class ExtensionTasks(background.utils.BackgroundProcess):
    """Service that drains the queue of extension filter hook events.

    Events that fail to process are remembered and skipped on subsequent
    passes (but kept in the database); successfully processed events are
    deleted."""

    def __init__(self):
        super(ExtensionTasks, self).__init__(
            service=configuration.services.EXTENSIONTASKS)

    def run(self):
        # With extension support disabled there is nothing to do, ever.
        if not configuration.extensions.ENABLED:
            self.info("service stopping: extension support not enabled")
            return

        # Events whose processing raised; skipped on every later pass.
        broken_events = set()

        while not self.terminated:
            self.interrupted = False

            with dbutils.Database.forSystem() as db:
                cursor = db.cursor()
                cursor.execute("""SELECT id FROM extensionfilterhookevents ORDER BY id ASC""")

                handled_events = []

                for (event_id,) in cursor:
                    if event_id in broken_events:
                        continue
                    try:
                        extensions.role.filterhook.processFilterHookEvent(
                            db, event_id, self.debug)
                    except Exception:
                        self.exception()
                        broken_events.add(event_id)
                    else:
                        handled_events.append(event_id)

                # Remove only the events we actually processed this pass.
                cursor.execute("""DELETE FROM extensionfilterhookevents WHERE id=ANY (%s)""",
                               (handled_events,))

                db.commit()

            delay = self.run_maintenance()
            if delay is None:
                delay = 86400

            self.debug("sleeping %d seconds" % delay)
            self.signal_idle_state()

            sleep_started = time.time()
            time.sleep(delay)

            if self.interrupted:
                self.debug("sleep interrupted after %.2f seconds"
                           % (time.time() - sleep_started))

def start_service():
    extensiontasks = ExtensionTasks()
    return extensiontasks.start()

background.utils.call("extensiontasks", start_service)

# ================================================
# FILE:
# src/background/githook.py
# ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import sys
import os
import os.path

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..")))

import configuration
import background.utils
import dbutils
import auth

from textutils import json_decode, json_encode

try:
    from customization.email import getUserEmailAddress
except ImportError:
    # Optional customization hook; fall back to "no email address known".
    def getUserEmailAddress(_username):
        return None

def getUser(db, user_name):
    """Return the dbutils.User that performed the push.

    The system user is synthesized directly.  In "host" authentication
    mode an unknown user is created on the fly (the host is trusted to
    have authenticated it); otherwise NoSuchUser propagates."""
    if user_name == configuration.base.SYSTEM_USER_NAME:
        return dbutils.User.makeSystem()
    try:
        return dbutils.User.fromName(db, user_name)
    except dbutils.NoSuchUser:
        if configuration.base.AUTHENTICATION_MODE == "host":
            email = getUserEmailAddress(user_name)
            return dbutils.User.create(
                db, user_name, user_name, email, email_verified=None)
        raise

# Real stdout, saved before slave() redirects sys.stdout to capture the
# output produced while processing the request.
sys_stdout = sys.stdout

def slave():
    """Process one git hook request read as JSON from stdin.

    Classifies each pushed ref as a branch/tag create/delete/update,
    applies the changes through the 'index' module, and writes a JSON
    result ({status, accept, output, info} or an error/reject object) to
    the real stdout."""
    import StringIO
    import traceback

    import dbutils
    import gitutils
    import index

    def reject(message):
        # Refuse the push; the hook will relay 'message' to the pusher.
        sys_stdout.write(json_encode({ "status": "reject",
                                       "message": message }))
        sys.exit(0)

    def error(message):
        sys_stdout.write(json_encode({ "status": "error",
                                       "error": message }))
        sys.exit(0)

    db = dbutils.Database.forUser()

    try:
        data = sys.stdin.read()
        request = json_decode(data)

        create_branches = []
        delete_branches = []
        update_branches = []

        create_tags = []
        delete_tags = []
        update_tags = []

        user = getUser(db, request["user_name"])
        authentication_labels = auth.DATABASE.getAuthenticationLabels(user)

        db.setUser(user, authentication_labels)

        try:
            repository = gitutils.Repository.fromName(
                db, request["repository_name"], for_modify=True)
        except auth.AccessDenied as error:
            reject(error.message)

        # Only the system user may pass flags (e.g. from the branch
        # tracker service); flags are "key=value" pairs joined by commas.
        if request["flags"] and user.isSystem():
            flags = dict(flag.split("=", 1)
                         for flag in request["flags"].split(","))
        else:
            flags = {}

        # Capture anything printed while processing; it is returned to the
        # pusher in the "output" field.
        sys.stdout = StringIO.StringIO()

        commits_to_process = set()

        for ref in request["refs"]:
            name = ref["name"]
            old_sha1 = ref["old_sha1"]
            new_sha1 = ref["new_sha1"]

            if "//" in name:
                reject("invalid ref name: '%s'" % name)
            if not name.startswith("refs/"):
                reject("unexpected ref name: '%s'" % name)

            # All-zeroes SHA-1 means the ref is being deleted.
            if new_sha1 != '0000000000000000000000000000000000000000':
                commits_to_process.add(new_sha1)

            name = name[len("refs/"):]

            if name.startswith("heads/"):
                name = name[len("heads/"):]
                if new_sha1 == '0000000000000000000000000000000000000000':
                    delete_branches.append((name, old_sha1))
                elif old_sha1 == '0000000000000000000000000000000000000000':
                    create_branches.append((name, new_sha1))
                else:
                    update_branches.append((name, old_sha1, new_sha1))
            elif name.startswith("tags/"):
                name = name[len("tags/"):]
                if old_sha1 == '0000000000000000000000000000000000000000':
                    create_tags.append((name, new_sha1))
                elif new_sha1 == '0000000000000000000000000000000000000000':
                    delete_tags.append(name)
                else:
                    update_tags.append((name, old_sha1, new_sha1))
            elif name.startswith("temporary/") or name.startswith("keepalive/"):
                # len("temporary/") == len("keepalive/")
                name = name[len("temporary/"):]
                # Such refs must be named after the commit they point to.
                if name != new_sha1:
                    reject("invalid update of '%s'; value is not %s"
                           % (ref["name"], name))
            else:
                reject("unexpected ref name: '%s'" % ref["name"])

        multiple = (len(delete_branches) +
                    len(update_branches) +
                    len(create_branches) +
                    len(delete_tags) +
                    len(update_tags) +
                    len(create_tags)) > 1

        info = []

        # Make sure all pushed commits are recorded before any branch/tag
        # bookkeeping references them.
        for sha1 in commits_to_process:
            index.processCommits(db, repository, sha1)

        for name, old in delete_branches:
            index.deleteBranch(db, user, repository, name, old)
            info.append("branch deleted: %s" % name)

        for name, old, new in update_branches:
            index.updateBranch(db, user, repository, name, old, new,
                               multiple, flags)
            info.append("branch updated: %s (%s..%s)"
                        % (name, old[:8], new[:8]))

        index.createBranches(db, user, repository, create_branches, flags)

        for name, new in create_branches:
            info.append("branch created: %s (%s)" % (name, new[:8]))

        for name in delete_tags:
            index.deleteTag(db, user, repository, name)
            info.append("tag deleted: %s" % name)

        for name, old, new in update_tags:
            index.updateTag(db, user, repository, name, old, new)
            info.append("tag updated: %s (%s..%s)" % (name, old[:8], new[:8]))

        for name, new in create_tags:
            index.createTag(db, user, repository, name, new)
            info.append("tag created: %s (%s)" % (name, new[:8]))

        sys_stdout.write(json_encode({ "status": "ok",
                                       "accept": True,
                                       "output": sys.stdout.getvalue(),
                                       "info": info }))

        db.commit()
    except index.IndexException as exception:
        # Expected failure: the push is refused with an explanation.
        sys_stdout.write(json_encode({ "status": "ok",
                                       "accept": False,
                                       "output": exception.message,
                                       "info": info }))
    except SystemExit:
        raise
    except:
        exception = traceback.format_exc()
        message = """\
%s

Request:
%s

%s""" % (exception.splitlines()[-1],
         json_encode(request, indent=2),
         traceback.format_exc())

        sys_stdout.write(json_encode({ "status": "error",
                                       "error": message }))
    finally:
        db.close()

class GitHookServer(background.utils.PeerServer):
    """Socket server invoked by the git pre-receive hook.

    Each connection delivers one push request, which is handed to a slave
    child process (this same script with --slave) for processing."""

    class ChildProcess(background.utils.PeerServer.ChildProcess):
        def __init__(self, server, client):
            super(GitHookServer.ChildProcess, self).__init__(
                server, [sys.executable, sys.argv[0], "--slave"])
            self.__client = client

        def handle_input(self, _file, data):
            # The slave's stdout carries a JSON result object; relay the
            # outcome to the hook (and thus the pusher).
            try:
                result = json_decode(data)
            except ValueError:
                result = { "status": "error",
                           "error": ("invalid response:\n"
                                     + background.utils.indent(data)) }
            if result["status"] == "ok":
                for item in result["info"]:
                    self.server.info(item)
                if result["output"]:
                    self.__client.write(result["output"].strip() + "\n")
                if result["accept"]:
                    # "ok" is the magic token the hook looks for to accept
                    # the push.
                    self.__client.write("ok\n")
            elif result["status"] == "reject":
                self.server.warning(result["message"])
                self.__client.write(result["message"].strip() + "\n")
            else:
                self.server.error(result["error"])
                self.__client.write("""\
An exception was raised while processing the request.  A message has been sent
to the system administrator(s).
""")
                if configuration.debug.IS_DEVELOPMENT:
                    self.__client.write("\n" + result["error"].strip() + "\n")
            self.__client.close()

    class Client(background.utils.PeerServer.SocketPeer):
        def handle_input(self, _file, data):
            # Wire format: user name, REMOTE_USER, repository name, flags,
            # then one "old-sha1 new-sha1 refname" line per pushed ref.
            lines = data.splitlines()

            user_name = lines[0]

            # The second line is the value of the REMOTE_USER environment
            # variable (from the environment with which the git hook ran.)
            #
            # We use it as the actual user only if the actual user was the
            # Critic system user, meaning the push was performed by the
            # branch tracker service, the web front-end (for instance via
            # 'git http-backend') or an extension.
            if user_name == configuration.base.SYSTEM_USER_NAME and lines[1]:
                user_name = lines[1]

            self.__request = { "user_name": user_name,
                               "repository_name": lines[2],
                               "flags": lines[3],
                               "refs": [{ "name": name,
                                          "old_sha1": old_sha1,
                                          "new_sha1": new_sha1 }
                                        for old_sha1, new_sha1, name
                                        in map(str.split, lines[4:])] }

            self.server.info("session started: %s / %s"
                             % (self.__request["user_name"],
                                self.__request["repository_name"]))

            child_process = GitHookServer.ChildProcess(self.server, self)
            child_process.write(json_encode(self.__request))
            child_process.close()

            self.server.add_peer(child_process)

        def destroy(self):
            self.server.info("session ended: %s / %s"
                             % (self.__request["user_name"],
                                self.__request["repository_name"]))

    def __init__(self):
        super(GitHookServer, self).__init__(
            service=configuration.services.GITHOOK)

    def startup(self):
        super(GitHookServer, self).startup()
        # Group read/write so the git hooks (running as repository users)
        # can connect to the socket.
        os.chmod(configuration.services.GITHOOK["address"], 0770)

    def handle_peer(self, peersocket, peeraddress):
        return GitHookServer.Client(self, peersocket)

def \
start_service():
    server = GitHookServer()
    return server.start()

# This script serves both as the service and as the per-request slave
# process, selected by the --slave command-line flag.
if "--slave" in sys.argv[1:]:
    background.utils.call("githook", slave)
else:
    background.utils.call("githook", start_service)

# ================================================
# FILE: src/background/highlight.py
# ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import sys
import os
import time

from subprocess import Popen as process

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..")))

import background.utils

from textutils import json_decode, json_encode

# This script is both the service and the per-request job process,
# selected via the --json-job flag.
if "--json-job" in sys.argv[1:]:
    def perform_job():
        # One syntax-highlighting job: read the JSON request from stdin,
        # generate the highlighted version, and echo the request back with
        # the result attached.
        import syntaxhighlight.generate

        request = json_decode(sys.stdin.read())
        request["highlighted"] = syntaxhighlight.generate.generateHighlight(
            repository_path=request["repository_path"],
            sha1=request["sha1"],
            language=request["language"],
            mode=request["mode"])

        sys.stdout.write(json_encode(request))

    background.utils.call("highlight_job", perform_job)
else:
    import background.utils

    from syntaxhighlight import isHighlighted
    from syntaxhighlight.context import importCodeContexts

    import configuration
    import dbutils

    class HighlightServer(background.utils.JSONJobServer):
        """Job server that syntax-highlights blobs on request and
        maintains the on-disk highlight cache."""

        def __init__(self):
            service = configuration.services.HIGHLIGHT
            super(HighlightServer, self).__init__(service)
            self.db = dbutils.Database.forSystem()
            # Optional nightly cache compaction, scheduled from the
            # service configuration.
            if "compact_at" in service:
                hour, minute = service["compact_at"]
                self.register_maintenance(hour=hour, minute=minute,
                                          callback=self.__compact)

        def request_result(self, request):
            # If the blob is already highlighted in the cache, answer
            # immediately without starting a job.
            if isHighlighted(request["sha1"], request["language"],
                             request["mode"]):
                result = request.copy()
                result["highlighted"] = True
                return result

        def request_started(self, job, request):
            super(HighlightServer, self).request_started(job, request)

            self.debug("started: %s:%s (%s) in %s [pid=%d]"
                       % (request["path"], request["sha1"][:8],
                          request["language"], request["repository_path"],
                          job.pid))

        def request_finished(self, job, request, result):
            super(HighlightServer, self).request_finished(job, request, result)

            failed = "" if "error" not in result else " (failed!)"

            self.info("finished: %s:%s (%s) in %s [pid=%d]%s"
                      % (request["path"], request["sha1"][:8],
                         request["language"], request["repository_path"],
                         job.pid, failed))

            # Import the code contexts the job produced as a side-effect.
            ncontexts = importCodeContexts(self.db, request["sha1"],
                                           request["language"])
            if ncontexts:
                self.debug(" added %d code contexts" % ncontexts)
            else:
                self.debug(" no code contexts added")

        def execute_command(self, client, command):
            # Administrative command channel; "compact" triggers a cache
            # compaction on demand.
            if command["command"] == "compact":
                uncompressed_count, compressed_count, purged_files_count, purged_contexts_count = self.__compact()
                client.write(json_encode({
                    "status": "ok",
                    "uncompressed": uncompressed_count,
                    "compressed": compressed_count,
                    "purged_files": purged_files_count,
                    "purged_contexts": purged_contexts_count }))
                client.close()
            else:
                super(HighlightServer, self).execute_command(client, command)

        def __compact(self):
            """Compact the highlight cache.

            Compresses cache files untouched for a week, purges compressed
            files untouched for 90 days, deletes malformed/ctx files, and
            drops database code contexts for purged blobs.  Returns
            (uncompressed, compressed, purged files, purged contexts)."""
            import syntaxhighlight

            cache_dir = configuration.services.HIGHLIGHT["cache_dir"]

            if not os.path.isdir(cache_dir):
                # Newly installed system that hasn't highlighted anything.
                return 0, 0, 0, 0

            self.info("cache compacting started")

            now = time.time()

            max_age_uncompressed = 7 * 24 * 60 * 60
            max_age_compressed = 90 * 24 * 60 * 60

            uncompressed_count = 0
            compressed_count = 0
            purged_paths = []

            db = dbutils.Database.forSystem()
            cursor = db.cursor()
            # 'purged' starts out holding every context sha1; entries for
            # files found to be still alive are deleted below, leaving the
            # sha1s whose contexts should be dropped.
            cursor.execute("CREATE TEMPORARY TABLE purged (sha1 CHAR(40) PRIMARY KEY)")
            cursor.execute("INSERT INTO purged (sha1) SELECT DISTINCT sha1 FROM codecontexts")

            # Cache layout: <cache_dir>/<2-char sha1 prefix>/<38-char
            # rest>.<language>[.bz2]
            for section in sorted(os.listdir(cache_dir)):
                if len(section) == 2:
                    for filename in os.listdir("%s/%s" % (cache_dir, section)):
                        fullname = "%s/%s/%s" % (cache_dir, section, filename)
                        age = now - os.stat(fullname).st_mtime
                        parts = filename.split(".")
                        if len(parts) < 2 \
                                or len(parts[0]) != 38 \
                                or parts[1] not in syntaxhighlight.LANGUAGES:
                            # Not a valid cache entry: delete it.
                            os.unlink(fullname)
                            continue
                        sha1 = section + parts[0]
                        if parts[-1] == "bz2":
                            if age > max_age_compressed:
                                self.debug("purging: %s/%s" % (section, filename))
                                purged_paths.append(fullname)
                            else:
                                cursor.execute("DELETE FROM purged WHERE sha1=%s", (sha1,))
                                compressed_count += 1
                        elif parts[-1] == "ctx":
                            # Legacy context files; contexts now live in
                            # the database.
                            self.debug("deleting context file: %s/%s" % (section, filename))
                            os.unlink(fullname)
                        else:
                            cursor.execute("DELETE FROM purged WHERE sha1=%s", (sha1,))
                            if age > max_age_uncompressed:
                                self.debug("compressing: %s/%s" % (section, filename))
                                worker = process(["/bin/bzip2", fullname])
                                worker.wait()
                                compressed_count += 1
                            else:
                                uncompressed_count += 1

            self.info("cache compacting finished: uncompressed=%d / compressed=%d / purged=%d"
                      % (uncompressed_count, compressed_count, len(purged_paths)))

            if purged_paths:
                for path in purged_paths:
                    os.unlink(path)

            cursor.execute("SELECT COUNT(*) FROM purged")
            purged_contexts = cursor.fetchone()[0]
            cursor.execute("""DELETE FROM codecontexts WHERE sha1 IN (SELECT sha1 FROM purged)""")
            db.commit()
            db.close()

            return uncompressed_count, compressed_count, len(purged_paths), purged_contexts

    def start_service():
        server = HighlightServer()
        return server.start()

    background.utils.call("highlight",
start_service) ================================================ FILE: src/background/maildelivery.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import os import time import json import smtplib import email.mime.text import email.header import email.utils sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), ".."))) import configuration import background.utils class User: def __init__(self, *args): if len(args) > 1: self.email, self.fullname = args[-2:] else: self.fullname, self.email = email.utils.parseaddr(args[0]) class MailDelivery(background.utils.PeerServer): def __init__(self, credentials): # We disable the automatic administrator mails (using the # 'send_administrator_mails' argument) since # # 1) it's pretty pointless to report mail delivery problems # via mail, and # # 2) it can cause runaway mail generation, since failure to # timely deliver the mail delivery problem report emails # would trigger further automatic problem report emails. # # Instead, we keep track of having encountered any problems, # and send a single administrator mail ("check the logs") # after having successfully delivered an email. 
service = configuration.services.MAILDELIVERY super(MailDelivery, self).__init__(service=service, send_administrator_mails=False) self.__credentials = credentials self.__connection = None self.__connection_timeout = service.get("timeout") self.__has_logged_warning = 0 self.__has_logged_error = 0 self.register_maintenance(hour=3, minute=45, callback=self.__cleanup) def __sendAdministratorMessage(self): from_user = User(configuration.base.SYSTEM_USER_EMAIL, "Critic System") recipients = [] for recipient in configuration.base.SYSTEM_RECIPIENTS: recipients.append(User(recipient)) if self.__has_logged_warning and self.__has_logged_error: what = "%d warning%s and %d error%s" % (self.__has_logged_warning, "s" if self.__has_logged_warning > 1 else "", self.__has_logged_error, "s" if self.__has_logged_error > 1 else "") elif self.__has_logged_warning: what = "%d warning%s" % (self.__has_logged_warning, "s" if self.__has_logged_warning > 1 else "") else: what = "%d error%s" % (self.__has_logged_error, "s" if self.__has_logged_error > 1 else "") for to_user in recipients: self.__send(message_id=None, parent_message_id=None, headers={}, date=time.time(), from_user=from_user, to_user=to_user, recipients=recipients, subject="maildelivery: check the logs!", body="%s have been logged.\n\n-- critic\n" % what, try_once=True) self.__has_logged_warning = 0 self.__has_logged_error = 0 def run(self): try: sleeptime = 0 while not self.terminated: self.interrupted = False filenames = os.listdir(configuration.paths.OUTBOX) pending = [] for filename in filenames: if filename.endswith(".txt"): pending.append("%s/%s" % (configuration.paths.OUTBOX, filename)) if pending: self.__connect() # We may have been terminated while attempting to connect. 
if self.terminated: return sleeptime = 0 now = time.time() def age(filename): return now - os.stat(filename).st_ctime too_old = len(filter(lambda filename: age(filename) > 60, pending)) oldest_age = max(map(age, pending)) if too_old > 0: self.warning(("%d files were created more than 60 seconds ago\n" " The oldest is %s which is %d seconds old.") % (too_old, os.path.basename(filename), oldest_age)) self.__has_logged_warning += 1 for filename in sorted(pending): lines = open(filename).readlines() try: if self.__send(**eval(lines[0])): os.rename(filename, "%s/sent/%s.sent" % (configuration.paths.OUTBOX, os.path.basename(filename))) if self.__has_logged_warning or self.__has_logged_error: try: self.__sendAdministratorMessage() except: self.exception() self.__has_logged_error += 1 # We may have been terminated while attempting to send. if self.terminated: return except: self.exception() self.__has_logged_error += 1 os.rename(filename, "%s/%s.invalid" % (configuration.paths.OUTBOX, os.path.basename(filename))) continue else: self.signal_idle_state() if sleeptime > 25: self.__disconnect() before = time.time() timeout = (30 - sleeptime) if self.__connection else self.run_maintenance() self.debug("sleeping %d seconds" % timeout) time.sleep(timeout) if self.interrupted: self.debug("sleep interrupted after %.2f seconds" % (time.time() - before)) sleeptime += (time.time() - before) finally: self.__disconnect() def __connect(self): if not self.__connection: attempts = 0 while not self.terminated: attempts += 1 try: if configuration.smtp.USE_SSL: self.__connection = smtplib.SMTP_SSL(timeout=self.__connection_timeout) else: self.__connection = smtplib.SMTP(timeout=self.__connection_timeout) self.__connection.connect(configuration.smtp.HOST, configuration.smtp.PORT) if configuration.smtp.USE_STARTTLS: self.__connection.starttls() if self.__credentials: self.__connection.login(self.__credentials["username"], self.__credentials["password"]) self.debug("connected") return except: 
self.debug("failed to connect to SMTP server") if (attempts % 5) == 0: self.error("Failed to connect to SMTP server %d times. " "Will keep retrying." % attempts) self.__has_logged_error += 1 self.__connection = None seconds = min(60, 2 ** attempts) self.debug("sleeping %d seconds" % seconds) time.sleep(seconds) def __disconnect(self): if self.__connection: try: self.__connection.quit() self.debug("disconnected") except: pass self.__connection = None def __send(self, message_id, parent_message_id, headers, from_user, to_user, recipients, subject, body, **kwargs): def isascii(s): return all(ord(c) < 128 for c in s) def usersAsHeader(users, header_name): header = email.header.Header(header_name=header_name) for index, user in enumerate(users): if isascii(user.fullname): header.append(user.fullname, "us-ascii") else: header.append(user.fullname, "utf-8") if index < len(users) - 1: header.append("<%s>," % user.email, "us-ascii") else: header.append("<%s>" % user.email, "us-ascii") return header def stringAsHeader(s, name): if isascii(s): return email.header.Header(s, "us-ascii", header_name=name) else: return email.header.Header(s, "utf-8", header_name=name) message = email.mime.text.MIMEText(body, "plain", "utf-8") recipients = filter(lambda user: bool(user.email), recipients) if not to_user.email: return True if message_id: message_id = "<%s@%s>" % (message_id, configuration.base.HOSTNAME) message["Message-ID"] = message_id else: message_id = "N/A" if parent_message_id: message["In-Reply-To"] = parent_message_id message["References"] = parent_message_id message["From"] = usersAsHeader([from_user], "From") message["To"] = usersAsHeader(recipients, "To") message["Subject"] = stringAsHeader(subject, "Subject") message["Date"] = email.utils.formatdate(kwargs.get("date", time.time())) for name, value in headers.items(): message[name] = value self.debug("%s => %s (%s)" % (from_user.email, to_user.email, message_id)) # Used from __sendAdministratorMessage(); we'll try once 
to send it even # if self.terminated. try_once = kwargs.get("try_once", False) attempts = 0 while try_once or not self.terminated: try_once = False try: self.__connection.sendmail(configuration.base.SYSTEM_USER_EMAIL, [to_user.email], message.as_string()) return True except: self.exception() self.__has_logged_error += 1 if self.terminated: return False attempts += 1 sleeptime = min(60, 2 ** attempts) self.error("delivery failure: sleeping %d seconds" % sleeptime) self.__disconnect() time.sleep(sleeptime) self.__connect() # We were terminated before the mail was sent. Return false to keep the # mail in the outbox for later delivery. return False def __cleanup(self): now = time.time() deleted = 0 for filename in os.listdir(os.path.join(configuration.paths.OUTBOX, "sent")): if filename.endswith(".txt.sent"): filename = os.path.join(configuration.paths.OUTBOX, "sent", filename) age = now - os.stat(filename).st_ctime if age > 7 * 24 * 60 * 60: os.unlink(filename) deleted += 1 if deleted: self.info("deleted %d files from %s" % (deleted, os.path.join(configuration.paths.OUTBOX, "sent"))) def start_service(): stdin_data = sys.stdin.read() if stdin_data: credentials = json.loads(stdin_data)["credentials"] if not credentials.get("username") or not credentials.get("password"): credentials = None else: credentials = None maildelivery = MailDelivery(credentials) return maildelivery.start() background.utils.call("maildelivery", start_service) ================================================ FILE: src/background/maintenance.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import sys
import os
import time
import shutil

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..")))

import configuration
import dbutils
import gitutils
import background.utils

class Maintenance(background.utils.BackgroundProcess):
    """Nightly maintenance service.

    Updates cached timezone offsets, executes scheduled review branch
    archivals, garbage-collects all Git repositories, and prunes stale
    extension work copies."""

    def __init__(self):
        service = configuration.services.MAINTENANCE
        super(Maintenance, self).__init__(service=service)
        # Run the nightly pass at the configured (hour, minute).
        hour, minute = service["maintenance_at"]
        self.register_maintenance(hour=hour, minute=minute,
                                  callback=self.__maintenance)

    def run(self):
        with dbutils.Database.forSystem() as db:
            # Do an initial load/update of timezones.
            #
            # The 'timezones' table initially (post-installation) only contains
            # the Universal/UTC timezone; this call adds all the others that the
            # PostgreSQL database server knows about.
            dbutils.loadTimezones(db)

        super(Maintenance, self).run()

    def __maintenance(self):
        with dbutils.Database.forSystem() as db:
            cursor = db.cursor()

            # Update the UTC offsets of all timezones.
            #
            # The PostgreSQL database server has accurate (DST-adjusted) values,
            # but is very slow to query, so we cache the UTC offsets in our
            # 'timezones' table.  This call updates that cache every night.
            # (This is obviously a no-op most nights, but we don't want to have
            # to care about which nights it isn't.)
            self.debug("updating timezones")
            dbutils.updateTimezones(db)

            if self.terminated:
                return

            # Execute scheduled review branch archivals.
            if configuration.base.ARCHIVE_REVIEW_BRANCHES:
                repository = None

                cursor.execute("""SELECT branches.repository, branches.id, branches.name
                                    FROM scheduledreviewbrancharchivals
                                    JOIN reviews ON (reviews.id=scheduledreviewbrancharchivals.review)
                                    JOIN branches ON (branches.id=reviews.branch)
                                   WHERE scheduledreviewbrancharchivals.deadline <= NOW()
                                     AND reviews.state IN ('closed', 'dropped')
                                     AND NOT branches.archived
                                ORDER BY branches.repository""",
                               for_update=True)

                for repository_id, branch_id, branch_name in cursor:
                    # Rows are ordered by repository; switch the batch-mode
                    # repository object only when the repository changes.
                    if not repository or repository.id != repository_id:
                        if repository:
                            repository.stopBatch()
                        repository = gitutils.Repository.fromId(db, repository_id)
                        self.info("archiving branches in: " + repository.name)

                    self.info(" " + branch_name)

                    branch = dbutils.Branch.fromId(db, branch_id,
                                                   repository=repository)

                    try:
                        branch.archive(db)
                    except Exception:
                        # Log, but don't let one branch stop the rest.
                        self.exception(as_warning=True)

                # Since NOW() returns the same value each time within a single
                # transaction, this is guaranteed to delete only the set of
                # archivals we selected above.
                cursor.execute("""DELETE FROM scheduledreviewbrancharchivals
                                   WHERE deadline <= NOW()""")

                db.commit()

            # Run a garbage collect in all Git repositories, to keep them neat
            # and tidy.  Also pack keepalive refs.
            cursor.execute("SELECT name FROM repositories")
            for (repository_name,) in cursor:
                self.debug("repository GC: %s" % repository_name)
                try:
                    repository = gitutils.Repository.fromName(db, repository_name)
                    repository.packKeepaliveRefs()
                    repository.run("gc", "--prune=1 day", "--quiet")
                    repository.stopBatch()
                except Exception:
                    self.exception("repository GC failed: %s" % repository_name)

                if self.terminated:
                    return

            # Prune extension repository work copies that haven't been used
            # for a week; they are recreated on demand.
            if configuration.extensions.ENABLED:
                now = time.time()
                max_age = 7 * 24 * 60 * 60

                base_path = os.path.join(
                    configuration.paths.DATA_DIR, "temporary", "EXTENSIONS")

                # Layout: <base>/<user>/<extension id>/<repository>.
                for user_name in os.listdir(base_path):
                    user_dir = os.path.join(base_path, user_name)

                    for extension_id in os.listdir(user_dir):
                        extension_dir = os.path.join(user_dir, extension_id)

                        for repository_name in os.listdir(extension_dir):
                            repository_dir = os.path.join(extension_dir,
                                                          repository_name)
                            age = now - os.stat(repository_dir).st_mtime

                            if age > max_age:
                                self.info("Removing repository work copy: %s"
                                          % repository_dir)
                                shutil.rmtree(repository_dir)

def start_service():
    maintenance = Maintenance()
    return maintenance.start()

background.utils.call("maintenance", start_service)

# ================================================
# FILE: src/background/servicemanager.py
# ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import sys
import subprocess
import time
import signal
import os
import json
import glob

import configuration

# Number of seconds to wait for startup synchronization.
STARTUP_SYNC_TIMEOUT = 30

if "--slave" in sys.argv:
    # Slave mode: this process is the actual service manager.  It is spawned
    # (with --slave) by the master/daemon branch below, which only supervises.
    import background.utils

    class ServiceManager(background.utils.PeerServer):
        """Background service that starts, monitors and restarts all other
        Critic background services, and answers status/restart requests
        over its socket."""

        # The master process manages our pid file, so tell our base class to
        # leave it alone.
        manage_pidfile = False

        class Service(object):
            """Book-keeping for one managed service: its child process, start
            time, and callbacks waiting for started/stopped events."""

            class Process(background.utils.PeerServer.ChildProcess):
                # Child process running "python -m <service.module>"; stderr
                # is folded into stdout so all output is captured as one
                # stream.
                def __init__(self, service, input_data):
                    super(ServiceManager.Service.Process, self).__init__(
                        service.manager,
                        [sys.executable, "-m", service.module],
                        stderr=subprocess.STDOUT)
                    self.__service = service
                    self.__output = None
                    if input_data:
                        # Hand the service its start-up data via stdin.
                        self.write(json.dumps(input_data))
                    # Always close stdin so the child sees EOF.
                    self.close()

                def handle_input(self, _file, data):
                    # Collected stdout+stderr, reported to Service.stopped().
                    self.__output = data

                def destroy(self):
                    super(ServiceManager.Service.Process, self).destroy()
                    self.__service.stopped(self.returncode, self.__output)

            def __init__(self, manager, service_data):
                self.manager = manager
                self.name = service_data["name"]
                self.module = service_data["module"]
                self.started = None   # time.time() of last start, or None
                self.process = None   # current Process, or None if stopped
                self.callbacks = []   # event callbacks; kept while truthy

            def signal_callbacks(self, event):
                # A callback that returns true stays registered for further
                # events; others are dropped.  (Python 2: filter() returns a
                # list here.)
                self.callbacks = filter(lambda callback: callback(event),
                                        self.callbacks)

            def start(self, input_data):
                self.process = ServiceManager.Service.Process(self, input_data)
                self.started = time.time()
                self.manager.add_peer(self.process)
                self.manager.info("%s: started (pid=%d)"
                                  % (self.name, self.process.pid))
                # Remembered so restart() can re-use the same input.
                self.input_data = input_data
                self.signal_callbacks("started")

            def restart(self, callback=None):
                if callback:
                    self.callbacks.append(callback)
                self.start(self.input_data)

            def stop(self, callback=None):
                if callback:
                    self.callbacks.append(callback)
                if self.process:
                    self.manager.info("%s: sending process SIGTERM" % self.name)
                    self.process.kill(signal.SIGTERM)

            def stopped(self, returncode, output):
                # Called when the child process has exited.  Decides whether
                # to restart it automatically.
                restart = not self.manager.terminated and not self.manager.restart_requested
                if returncode != 0:
                    message = "%s: exited with returncode %d" % (self.name, returncode)
                    if output:
                        message += "\n" + background.utils.indent(output)
                    if time.time() - self.started < 1:
                        # Rapid crash loop guard: don't respawn a service that
                        # died less than a second after starting.
                        message += "\n Process restarted less than 1 second ago; not restarting."
                        restart = False
                    self.manager.error(message)
                else:
                    self.manager.info("%s: exited normally" % self.name)
                    if not self.callbacks:
                        # Clean exit with nobody waiting on a restart: leave
                        # the service stopped.
                        restart = False
                self.process = None
                if restart:
                    self.restart()
                else:
                    self.signal_callbacks("stopped")

        class Client(background.utils.PeerServer.SocketPeer):
            """One connected control client; speaks a small JSON protocol
            ("query": "status" / "command": "restart")."""

            def __init__(self, manager, peersocket):
                super(ServiceManager.Client, self).__init__(manager, peersocket)
                self.__manager = manager

            def send_response(self, value):
                self.write(background.utils.json_encode(value))
                self.close()

            def handle_input(self, _file, data):
                result = self.send_response
                try:
                    request = background.utils.json_decode(data)
                except:
                    return result({ "status": "error",
                                    "error": "invalid input: JSON decode failed" })
                if type(request) is not dict:
                    return result({ "status": "error",
                                    "error": "invalid input: expected object" })
                if request.get("query") == "status":
                    # Report uptime/pid for the manager itself plus every
                    # managed service (-1 when a service isn't running).
                    services = { "manager": { "module": "background.servicemanager",
                                              "uptime": time.time() - self.__manager.started,
                                              "pid": os.getpid() }}
                    for service in self.__manager.services:
                        uptime = time.time() - service.started if service.started else -1
                        pid = service.process.pid if service.process else -1
                        services[service.name] = { "module": service.module,
                                                   "uptime": uptime,
                                                   "pid": pid }
                    return result({ "status": "ok", "services": services })
                elif request.get("command") == "restart":
                    if "service" not in request:
                        return result({ "status": "error",
                                        "error": "invalid input: no service specified" })
                    if request["service"] == "manager":
                        # Restarting the manager itself is handled by the
                        # master process respawning us after we exit.
                        self.__manager.info("restart requested")
                        self.__manager.requestRestart()
                        return result({ "status": "ok" })
                    for service in self.__manager.services:
                        if service.name == request.get("service"):
                            self.__manager.info("%s: restart requested" % service.name)
                            # Response is deferred until the service actually
                            # signals started/stopped.
                            def callback(event):
                                self.send_response({ "status": "ok",
                                                     "event": event })
                            if service.process:
                                service.stop(callback)
                            else:
                                service.restart(callback)
                            break
                    else:
                        return result({ "status": "error",
                                        "error": "%s: no such service" % request.get("service") })
                else:
                    return result({ "status": "error",
                                    "error": "invalid input: unsupported data" })

        def __init__(self, input_data):
            service = configuration.services.SERVICEMANAGER.copy()
            super(ServiceManager, self).__init__(service=service)
            # Per-service start-up data, keyed by service name.
            self.input_data = input_data
            self.services = []
            self.started = time.time()

        def handle_peer(self, peersocket, peeraddress):
            return ServiceManager.Client(self, peersocket)

        def startup(self):
            super(ServiceManager, self).startup()
            for service_data in configuration.services.SERVICEMANAGER["services"]:
                # The <pidfile>.starting file participates in the startup
                # synchronization protocol; each service deletes its own once
                # it is up (see BackgroundProcess.__signal_started).
                starting_path = service_data["pidfile_path"] + ".starting"
                with open(starting_path, "w") as starting:
                    starting.write("%s\n" % time.ctime())
                service = ServiceManager.Service(self, service_data)
                service.start(self.input_data.get(service.name))
                self.services.append(service)

        def shutdown(self):
            for service in self.services:
                service.stop()
            super(ServiceManager, self).shutdown()

        def requestRestart(self):
            super(ServiceManager, self).requestRestart()
            # Stop all services; the base class run() loop exits once all
            # peers are gone, and the master process respawns us.
            for service in self.services:
                service.stop()

    def start_service():
        # Start-up data (e.g. SMTP credentials) arrives on stdin as JSON.
        stdin_data = sys.stdin.read()
        if stdin_data:
            input_data = json.loads(stdin_data)
        else:
            input_data = {}
        manager = ServiceManager(input_data)
        return manager.start()

    background.utils.call("servicemanager", start_service)
else:
    # Master mode: runs as root initially, prepares the runtime environment,
    # drops privileges, daemonizes, and then supervises a --slave child,
    # respawning it whenever it exits until SIGTERM is received.
    import errno
    import pwd
    import grp
    import stat

    pwentry = pwd.getpwnam(configuration.base.SYSTEM_USER_NAME)
    grentry = grp.getgrnam(configuration.base.SYSTEM_GROUP_NAME)

    uid = pwentry.pw_uid
    gid = grentry.gr_gid

    home = pwentry.pw_dir

    import daemon

    pidfile_path = configuration.services.SERVICEMANAGER["pidfile_path"]

    if os.path.isfile(pidfile_path):
        print >>sys.stderr, "%s: file exists; daemon already running?" % pidfile_path
        sys.exit(1)

    # Our RUN_DIR (/var/run/critic/IDENTITY) is typically on a tmpfs that gets
    # nuked on reboot, so recreate it with the right access if it doesn't exist.
    def mkdir(path, mode):
        # Recursive mkdir that also (re)applies mode and ownership to
        # already-existing directories.
        if not os.path.isdir(path):
            if not os.path.isdir(os.path.dirname(path)):
                mkdir(os.path.dirname(path), mode)
            os.mkdir(path, mode)
        else:
            os.chmod(path, mode)
        os.chown(path, uid, gid)

    mkdir(configuration.paths.RUN_DIR, 0755 | stat.S_ISUID | stat.S_ISGID)
    mkdir(os.path.join(configuration.paths.RUN_DIR, "sockets"), 0755)
    mkdir(os.path.join(configuration.paths.RUN_DIR, "wsgi"), 0750)

    os.environ["HOME"] = home
    os.chdir(home)

    # SMTP credentials are read while still root (the file is not readable by
    # the system user) and passed to the mail delivery service via the slave.
    smtp_credentials_path = os.path.join(configuration.paths.CONFIG_DIR,
                                         "configuration",
                                         "smtp-credentials.json")

    if os.path.isfile(smtp_credentials_path):
        with open(smtp_credentials_path) as smtp_credentials_file:
            smtp_credentials = json.load(smtp_credentials_file)
    else:
        smtp_credentials = None

    input_data = { "maildelivery": { "credentials": smtp_credentials }}

    # Drop privileges: gid first, then uid.
    os.setgid(gid)
    os.setuid(uid)

    starting_pattern = os.path.join(os.path.dirname(pidfile_path), "*.starting")

    # Remove any stale/unexpected *.starting files that would otherwise break
    # our startup synchronization.
    for filename in glob.glob(starting_pattern):
        try:
            os.unlink(filename)
        except OSError as error:
            print >>sys.stderr, error

    with open(pidfile_path + ".starting", "w") as starting:
        starting.write("%s\n" % time.ctime())

    def wait_for_startup_sync():
        # Runs in the parent before it exits: waits (up to
        # STARTUP_SYNC_TIMEOUT seconds) for every *.starting file to be
        # deleted by its service.  Returns the daemon's exit status.
        deadline = time.time() + STARTUP_SYNC_TIMEOUT
        while True:
            filenames = glob.glob(starting_pattern)
            if not filenames:
                return 0
            if time.time() > deadline:
                break
            time.sleep(0.1)
        print >>sys.stderr
        print >>sys.stderr, ("Startup synchronization timeout after %d seconds!"
                             % STARTUP_SYNC_TIMEOUT)
        print >>sys.stderr, "Services still starting:"
        for filename in filenames:
            print >>sys.stderr, " " + os.path.basename(filename)
        return 1

    with open(pidfile_path, "w") as pidfile:
        daemon.detach(parent_exit_hook=wait_for_startup_sync)
        # Written after detaching, so this is the daemon's pid.
        pidfile.write("%s\n" % os.getpid())

    os.umask(022)

    was_terminated = False

    def terminated(signum, frame):
        global was_terminated
        was_terminated = True

    signal.signal(signal.SIGTERM, terminated)

    # Supervision loop: (re)spawn the slave and wait for it to exit.
    while not was_terminated:
        process = subprocess.Popen(
            [sys.executable, "-m", "background.servicemanager", "--slave"],
            stdin=subprocess.PIPE)
        process.stdin.write(json.dumps(input_data))
        process.stdin.close()
        while not was_terminated:
            try:
                pid, status = os.wait()
                if pid == process.pid:
                    process = None
                    break
            except OSError as error:
                if error.errno == errno.EINTR:
                    # os.wait() interrupted by a signal (e.g. SIGTERM);
                    # re-check the termination flag.
                    continue
                else:
                    break

    if process:
        try:
            process.send_signal(signal.SIGTERM)
            process.wait()
        except:
            pass

    try:
        os.unlink(pidfile_path)
    except:
        pass



================================================
FILE: src/background/utils.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import sys
import subprocess
import os
import logging
import logging.handlers
import atexit
import socket
import errno
import select
import traceback
import signal
import fcntl
import time
import datetime

import configuration

from textutils import json_encode, json_decode, indent

def freeze(d):
    # Convert a dict into a hashable value (sorted tuple of items) so it can
    # be used as a dict key; inverse of thaw().
    return tuple(sorted(d.items()))

def thaw(f):
    return dict(f)

class AdministratorMailHandler(logging.Handler):
    """Logging handler that emails each warning/error record to the system
    administrator via mailutils."""

    def __init__(self, logfile_path):
        super(AdministratorMailHandler, self).__init__()
        self.__logfile_name = os.path.basename(logfile_path)

    def emit(self, record):
        # Imported lazily to avoid import cycles at module load time.
        import mailutils
        try:
            import dbutils
            db = dbutils.Database.forSystem()
        except:
            # Best-effort: send the report without a database connection.
            db = None
        mailutils.sendAdministratorErrorReport(
            db, self.__logfile_name, record.message.splitlines()[0],
            self.formatter.format(record))
        if db:
            db.close()

class BackgroundProcess(object):
    """Base class for all Critic background services.

    Sets up rotating-file logging (plus administrator error mails), writes a
    pid file, and installs signal handlers:

      SIGHUP  -> self.interrupted
      SIGTERM -> self.terminated
      SIGUSR1 -> synchronize when idle (testing)
      SIGUSR2 -> force maintenance, then synchronize (testing)

    Sub-classes typically override startup()/run()/shutdown() and may register
    periodic maintenance callbacks via register_maintenance()."""

    # Set to False in sub-class to disable pid file creation and deletion.
    manage_pidfile = True

    def __init__(self, service, send_administrator_mails=True):
        try:
            loglevel = getattr(logging, service["loglevel"].upper())
        except:
            loglevel = logging.INFO
        formatter = logging.Formatter("%(asctime)s - %(levelname)5s - %(message)s")
        file_handler = logging.handlers.RotatingFileHandler(
            service["logfile_path"], maxBytes=1024**2, backupCount=5)
        file_handler.setFormatter(formatter)
        file_handler.setLevel(loglevel)
        logger = logging.getLogger()
        logger.setLevel(loglevel)
        logger.addHandler(file_handler)
        if send_administrator_mails:
            # Warnings and errors are additionally mailed to the
            # administrator.
            mail_handler = AdministratorMailHandler(service["logfile_path"])
            mail_handler.setFormatter(formatter)
            mail_handler.setLevel(logging.WARNING)
            logger.addHandler(mail_handler)
        self.terminated = False
        self.interrupted = False
        self.restart_requested = False
        self.synchronize_when_idle = False
        self.force_maintenance = False
        self.__maintenance_hooks = []
        self.__logger = logger
        self.__pidfile_path = service["pidfile_path"]
        self.__create_pidfile()
        signal.signal(signal.SIGHUP, self.__handle_SIGHUP)
        signal.signal(signal.SIGTERM, self.__handle_SIGTERM)
        signal.signal(signal.SIGUSR1, self.__handle_SIGUSR1)
        signal.signal(signal.SIGUSR2, self.__handle_SIGUSR2)
        self.info("service started")
        atexit.register(self.__stopped)

    def __handle_SIGHUP(self, signum, frame):
        self.interrupted = True

    def __handle_SIGTERM(self, signum, frame):
        self.terminated = True

    def __handle_SIGUSR1(self, signum, frame):
        # Used for synchronization during testing.
        #
        # Someone creates a file named ".busy", then sends SIGUSR1, and
        # expects the file to be deleted as soon as this service reaches an
        # idle point.  (The file deleted is <pidfile>.busy; see
        # signal_idle_state().)
        self.synchronize_when_idle = True

    def __handle_SIGUSR2(self, signum, frame):
        # Used for running maintenance tasks during testing.
        #
        # Works the same way as SIGUSR1, but additionally makes sure to run
        # all scheduled maintenance tasks before reporting an idle state.
        self.force_maintenance = True
        self.synchronize_when_idle = True

    def __create_pidfile(self):
        if self.manage_pidfile:
            try:
                os.makedirs(os.path.dirname(self.__pidfile_path))
            except OSError as error:
                if error.errno == errno.EEXIST:
                    pass
                else:
                    raise
            pidfile = open(self.__pidfile_path, "w")
            pidfile.write(str(os.getpid()) + "\n")
            pidfile.close()

    def __delete_pidfile(self):
        if self.manage_pidfile:
            try:
                os.unlink(self.__pidfile_path)
            except:
                pass

    def __signal_started(self):
        # Delete <pidfile>.starting, created by the service manager, to tell
        # it that this service has finished starting up.
        try:
            os.unlink(self.__pidfile_path + ".starting")
        except OSError as error:
            # A missing file is expected when not started by the manager.
            if error.errno != errno.ENOENT:
                self.exception()
        except Exception:
            self.exception()

    def __stopped(self):
        self.info("service stopped")
        self.__delete_pidfile()

    def start(self):
        """Run startup(), run() and shutdown(), logging any exception.
        Returns the process exit status (0 on success, 1 on failure)."""
        try:
            try:
                self.startup()
            except Exception:
                self.exception()
                self.__signal_started()
                return 1
            # Startup synchronization is signalled even on startup failure
            # (above) so the manager doesn't wait for the full timeout.
            self.__signal_started()
            try:
                return self.run() or 0
            except Exception:
                self.exception()
                return 1
        finally:
            try:
                self.shutdown()
            except Exception:
                self.exception()
                return 1

    def startup(self):
        pass

    def shutdown(self):
        pass

    def signal_idle_state(self):
        # Testing hook: delete the <pidfile>.busy file (created externally)
        # to report that the service is idle.  See __handle_SIGUSR1/2.
        if self.synchronize_when_idle:
            if self.force_maintenance:
                self.run_maintenance()
                self.force_maintenance = False
            os.unlink(self.__pidfile_path + ".busy")
            self.synchronize_when_idle = False

    def error(self, message):
        self.__logger.error(message)

    def warning(self, message):
        self.__logger.warning(message)

    def info(self, message):
        self.__logger.info(message)

    def debug(self, message):
        self.__logger.debug(message)

    def exception(self, message=None, as_warning=False):
        # Log the current exception with its (indented) traceback.
        backtrace = traceback.format_exc()
        if message is None:
            message = "unhandled exception: " + backtrace.splitlines()[-1]
        if as_warning:
            self.__logger.warning(message + "\n" + indent(backtrace))
        else:
            self.__logger.error(message + "\n" + indent(backtrace))

    def register_maintenance(self, hour, minute, callback):
        # hour=None registers an hourly task (run at the given minute past
        # every hour); otherwise a daily task at hour:minute.  The list entry
        # is [hour, minute, callback, last-run-or-registration-time].
        self.__maintenance_hooks.append(
            [hour, minute, callback, datetime.datetime.now()])

    def run_maintenance(self):
        """Run any maintenance callbacks whose scheduled time has passed.
        Returns the number of seconds until the next check is due (>= 60),
        or None if no maintenance hooks are registered."""
        if self.__maintenance_hooks:
            sleep_seconds = 86400
            for hook in self.__maintenance_hooks:
                hour, minute, callback, last = hook
                now = datetime.datetime.now()
                if hour is None:
                    scheduled_at = datetime.time(now.hour, minute)
                    interval = datetime.timedelta(seconds=3600)
                    interval_type = "hourly"
                else:
                    scheduled_at = datetime.time(hour, minute)
                    interval = datetime.timedelta(days=1)
                    interval_type = "daily"
                scheduled_at = datetime.datetime.combine(datetime.date.today(),
                                                         scheduled_at)
                while scheduled_at <= last:
                    # We already ran the callback this hour/day.
                    scheduled_at += interval
                if scheduled_at <= now:
                    self.info("performing %s maintenance task" % interval_type)
                    callback()
                    hook[3] = scheduled_at
                    scheduled_at += interval
                elif self.force_maintenance:
                    # NOTE(review): forced runs don't update hook[3], so the
                    # task will also run at its regular time — presumably
                    # intentional for testing; confirm.
                    self.info("performing %s maintenance task (forced)" % interval_type)
                    callback()
                now = datetime.datetime.now()
                seconds_remaining = (scheduled_at - now).total_seconds()
                # Wait at least 60 seconds, even if that would make us over-
                # shoot the deadline slightly. Maintenance tasks are not really
                # that sensitive.
                seconds_remaining = max(seconds_remaining, 60)
                sleep_seconds = min(sleep_seconds, seconds_remaining)
            return sleep_seconds

    def run(self):
        # Default main loop for services that only do scheduled maintenance.
        while not self.terminated:
            # Aside from scheduled maintenance task, this service is always
            # idle, so...
            self.signal_idle_state()
            timeout = self.run_maintenance()
            if timeout is None:
                # No configured maintenance hooks; nothing to do. Returning
                # will probably cause service to terminate, and we just
                # started, so the service manager will leave the service not
                # running.
                return 0
            self.debug("sleeping %d seconds" % timeout)
            time.sleep(timeout)

    def requestRestart(self):
        self.restart_requested = True

class PeerServer(BackgroundProcess):
    """BackgroundProcess with a poll()-based event loop multiplexing a
    listening socket (UNIX or TCP, per service["address"]) and a set of
    peers (connected clients and spawned child processes)."""

    class Peer(object):
        """Non-blocking I/O wrapper around one writable file and any number
        of readable files.  Buffers writes; accumulates reads until EOF and
        then calls handle_input() once per read file."""

        def __init__(self, server, writing, *reading, **kwargs):
            self.server = server
            # Optional absolute (time.time()) deadline; see timed_out().
            self.deadline = kwargs.get("deadline", None)
            self.__writing = writing
            self.__write_data = ""
            self.__write_closed = False
            self.__write_failed = False
            if writing:
                fcntl.fcntl(writing, fcntl.F_SETFL,
                            fcntl.fcntl(writing, fcntl.F_GETFL) | os.O_NONBLOCK)
            self.__reading = list(reading)
            self.__read_data = [""] * len(reading)
            self.__read_closed = [False] * len(reading)
            for readfile in reading:
                # Skip files sharing the write fd (e.g. a socket used for
                # both directions) — already made non-blocking above.
                if readfile and readfile.fileno() != writing.fileno():
                    fcntl.fcntl(readfile, fcntl.F_SETFL,
                                fcntl.fcntl(readfile, fcntl.F_GETFL) | os.O_NONBLOCK)
            self.__timed_out = False

        def timed_out(self):
            # Deadline passed: drop all I/O so is_finished() becomes true.
            self.__timed_out = True
            self.__writing = None
            self.__reading = []

        def is_finished(self):
            return not self.__writing and not any(self.__reading)

        def writing(self):
            # The file to poll for writability, or None if nothing pending.
            if self.__write_data or self.__write_closed:
                return self.__writing
            else:
                return None

        def write(self, data):
            assert self.__writing
            assert not self.__write_closed
            self.__write_data += data

        def close(self):
            # Mark the write side as finished; actual shutdown happens in
            # do_write() once the buffer has drained.
            assert self.__writing
            assert not self.__write_closed
            self.__write_closed = True

        def do_write(self):
            try:
                while self.__write_data:
                    nwritten = os.write(self.__writing.fileno(), self.__write_data)
                    self.__write_data = self.__write_data[nwritten:]
            except EnvironmentError as error:
                if error.errno in (errno.EAGAIN, errno.EINTR):
                    # Re-raised for the event loop's catch_error() to retry
                    # or back off.
                    raise
                self.server.warning("Failed to write to peer: %s" % error)
                if error.errno == errno.EPIPE:
                    # Peer went away; treat the write side as done.
                    self.__write_failed = True
                else:
                    raise
            if self.__write_closed or self.__write_failed:
                self.writing_done(self.__writing)
                self.__writing = None

        def reading(self):
            # Files still open for reading (None placeholders for closed).
            return [readfile if not closed else None
                    for readfile, closed in zip(self.__reading, self.__read_closed)]

        def read(self):
            # Accumulated data for each fully-read (closed) file.
            return [data if closed else None
                    for data, closed in zip(self.__read_data, self.__read_closed)]

        def do_read(self, index):
            while True:
                readfile = self.__reading[index]
                read = os.read(readfile.fileno(), 4096)
                if not read:
                    # EOF: deliver everything accumulated for this file.
                    self.reading_done(readfile)
                    self.__reading[index] = None
                    self.__read_closed[index] = True
                    self.handle_input(readfile, self.__read_data[index])
                    break
                self.__read_data[index] += read

        def writing_done(self, writing):
            writing.close()

        def reading_done(self, reading):
            reading.close()

        def destroy(self):
            pass

    class SocketPeer(Peer):
        """Peer backed by a single socket used for both directions; uses
        shutdown() per direction instead of close()."""

        def __init__(self, server, clientsocket):
            super(PeerServer.SocketPeer, self).__init__(
                server, clientsocket, clientsocket)

        def reading_done(self, reading):
            reading.shutdown(socket.SHUT_RD)

        def writing_done(self, writing):
            writing.shutdown(socket.SHUT_WR)

        def handle_input(self, _file, data):
            pass

    class SpawnedProcess(Peer):
        """Peer wrapping an already-created subprocess; writes to its stdin,
        reads its stdout/stderr, and reaps it in destroy()."""

        def __init__(self, server, process, **kwargs):
            self.process = process
            self.pid = process.pid
            super(PeerServer.SpawnedProcess, self).__init__(
                server, self.process.stdin, self.process.stdout,
                self.process.stderr, **kwargs)

        def kill(self, signal):
            # Note: parameter shadows the signal module within this method.
            self.process.send_signal(signal)

        def destroy(self):
            self.process.wait()
            self.returncode = self.process.returncode
            self.check_result()

        def timed_out(self):
            super(PeerServer.SpawnedProcess, self).timed_out()
            self.kill(signal.SIGKILL)

        def check_result(self):
            if self.returncode:
                self.server.error("child process exited (pid=%d, returncode=%d)"
                                  % (self.pid, self.returncode))
            else:
                self.server.debug("child process exited (pid=%d, returncode=0)"
                                  % self.pid)

    class ChildProcess(SpawnedProcess):
        """SpawnedProcess that also creates the subprocess (stdin/stdout
        pipes; extra kwargs go to Popen)."""

        def __init__(self, server, args, **kwargs):
            process = subprocess.Popen(
                args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, **kwargs)
            # NOTE(review): kwargs are consumed by Popen and not forwarded to
            # SpawnedProcess, so a "deadline" kwarg would not reach the Peer
            # base class here — confirm whether that is intended.
            super(PeerServer.ChildProcess, self).__init__(server, process)
            self.server.debug("spawned child process (pid=%d)" % self.process.pid)

    def __init__(self, service, **kwargs):
        super(PeerServer, self).__init__(service, **kwargs)
        self.__peers = []
        # str => UNIX socket path, tuple => (host, port), None => no socket.
        self.__address = service.get("address")

    def __create_listening_socket(self):
        if type(self.__address) == str:
            try:
                os.makedirs(os.path.dirname(self.__address))
            except OSError as error:
                if error.errno == errno.EEXIST:
                    pass
                else:
                    raise
            if os.path.exists(self.__address):
                # Probe an existing socket file: if something answers, another
                # instance is running; if connection is refused, it's stale.
                connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
                try:
                    connection.connect(self.__address)
                    connection.close()
                    print >>sys.stderr, "ERROR: Server already started!"
                    sys.exit(1)
                except socket.error as error:
                    if error[0] == errno.ECONNREFUSED:
                        self.debug("removing stale socket")
                        os.unlink(self.__address)
                    else:
                        raise
            self.__listening_socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            self.__listening_socket.setblocking(0)
            self.__listening_socket.bind(self.__address)
            self.__listening_socket.listen(4)
            os.chmod(self.__address, 0700)
            self.debug("listening: %s" % self.__address)
        elif type(self.__address) == tuple:
            host, port = self.__address
            self.__listening_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.__listening_socket.setblocking(0)
            self.__listening_socket.bind((host, port))
            self.__listening_socket.listen(4)
            self.debug("listening: %s:%d" % (host, port))
        elif self.__address is None:
            # No address configured: poll /dev/null instead so the event
            # loop's poll registration still has a file to watch.
            self.__listening_socket = open("/dev/null", "r")
            self.debug("not listening")
        else:
            raise Exception("invalid address: %r" % self.__address)
        atexit.register(self.__destroy_listening_socket)

    def __destroy_listening_socket(self):
        try:
            self.__listening_socket.close()
        except:
            pass
        if type(self.__address) == str:
            try:
                os.unlink(self.__address)
            except:
                pass

    def run(self):
        # Main event loop: each outer iteration rebuilds the poll set from
        # the current peers, polls once, then dispatches I/O and timeouts.
        while not self.terminated:
            self.interrupted = False
            if self.restart_requested:
                if not self.__peers:
                    break
                else:
                    self.debug("restart delayed; have %d peers" % len(self.__peers))
            poll = select.poll()
            poll.register(self.__listening_socket, select.POLLIN)
            writing_map = {}
            reading_map = {}
            def fileno(file):
                if file:
                    return file.fileno()
                else:
                    return None
            nearest_peer_deadline = None
            for peer in self.__peers:
                if peer.writing():
                    poll.register(peer.writing(), select.POLLOUT)
                    writing_map[peer.writing().fileno()] = peer
                for index, readfile in enumerate(peer.reading()):
                    if readfile:
                        poll.register(readfile, select.POLLIN)
                        reading_map[readfile.fileno()] = (peer, index)
                if peer.deadline is not None:
                    if nearest_peer_deadline is None:
                        nearest_peer_deadline = peer.deadline
                    else:
                        nearest_peer_deadline = min(peer.deadline, nearest_peer_deadline)
            # Inner loop: run due maintenance, compute the poll timeout, and
            # poll; retried on EINTR.
            while not self.terminated:
                timeout_seconds = self.run_maintenance()
                if timeout_seconds:
                    if not self.__peers:
                        self.debug("next maintenance task check scheduled in %d seconds"
                                   % timeout_seconds)
                if nearest_peer_deadline is not None:
                    deadline_seconds = nearest_peer_deadline - time.time()
                    if deadline_seconds < 0:
                        deadline_seconds = 0
                    if timeout_seconds is None:
                        timeout_seconds = deadline_seconds
                    else:
                        timeout_seconds = min(timeout_seconds, deadline_seconds)
                if timeout_seconds:
                    # poll() takes milliseconds; None blocks indefinitely.
                    timeout_ms = timeout_seconds * 1000
                else:
                    timeout_ms = None
                if self.synchronize_when_idle and not self.__peers:
                    # We seem to be idle, but poll once, non-blocking,
                    # just to be sure.
                    timeout_ms = 0
                try:
                    events = poll.poll(timeout_ms)
                    break
                except select.error as error:
                    if error[0] == errno.EINTR:
                        continue
                    else:
                        raise
            if self.terminated:
                break
            elif not (self.__peers or events):
                self.signal_idle_state()
            def catch_error(fn, *args):
                # Retry on EINTR; treat EAGAIN as "done for now".
                while True:
                    try:
                        fn(*args)
                    except EnvironmentError as error:
                        if error.errno == errno.EINTR:
                            continue
                        if error.errno == errno.EAGAIN:
                            return
                        raise
                    else:
                        return
            def check_peer(peer):
                if peer.is_finished():
                    peer.destroy()
                    self.peer_destroyed(peer)
                    self.__peers.remove(peer)
            for fd, event in events:
                if fd == self.__listening_socket.fileno():
                    peersocket, peeraddress = self.__listening_socket.accept()
                    peer = self.handle_peer(peersocket, peeraddress)
                    if peer:
                        self.__peers.append(peer)
                    else:
                        # handle_peer() rejected the connection.
                        try:
                            peersocket.close()
                        except Exception:
                            pass
                else:
                    if event != select.POLLIN and fd in writing_map:
                        peer = writing_map[fd]
                        catch_error(peer.do_write)
                        check_peer(peer)
                    if event != select.POLLOUT and fd in reading_map:
                        peer, index = reading_map[fd]
                        catch_error(peer.do_read, index)
                        check_peer(peer)
            if nearest_peer_deadline is not None:
                now = time.time()
                # Iterate over a copy: check_peer() mutates self.__peers.
                for peer in self.__peers[:]:
                    if peer.deadline is not None and peer.deadline < now:
                        peer.timed_out()
                        check_peer(peer)

    def add_peer(self, peer):
        self.__peers.append(peer)

    def handle_peer(self, peersocket, peeraddress):
        # Override: return a Peer for the new connection, or None to reject.
        pass

    def peer_destroyed(self, peer):
        pass

    def startup(self):
        self.__create_listening_socket()

    def shutdown(self):
        for peer in self.__peers:
            try:
                peer.destroy()
            except:
                self.exception()
            try:
                self.peer_destroyed(peer)
            except:
                self.exception()

class SlaveProcessServer(PeerServer):
    """PeerServer that forwards each client's complete request to a freshly
    spawned "--slave" child process and relays the child's output back."""

    class SlaveChildProcess(PeerServer.ChildProcess):
        def __init__(self, server, client):
            super(SlaveProcessServer.SlaveChildProcess, self).__init__(
                server, [sys.executable, sys.argv[0], "--slave"])
            self.__client = client

        def handle_input(self, _file, value):
            # Child output goes straight back to the originating client.
            self.__client.write(value)
            self.__client.close()

    class SlaveClient(PeerServer.SocketPeer):
        def __init__(self, server, peersocket):
            super(SlaveProcessServer.SlaveClient, self).__init__(server, peersocket)

        def handle_input(self, _file, value):
            if value:
                child_process = SlaveProcessServer.SlaveChildProcess(self.server, self)
                child_process.write(value)
                child_process.close()
                self.server.add_peer(child_process)

    def handle_peer(self, peersocket, peeraddress):
        return SlaveProcessServer.SlaveClient(self, peersocket)

class JSONJobServer(PeerServer):
    """PeerServer that distributes JSON-described jobs to "--json-job" child
    processes, de-duplicating identical concurrent requests and limiting the
    number of simultaneous workers."""

    class Job(PeerServer.ChildProcess):
        def __init__(self, server, client, request):
            super(JSONJobServer.Job, self).__init__(
                server, [sys.executable, sys.argv[0], "--json-job"],
                stderr=subprocess.STDOUT)
            # Several clients may share one job (request de-duplication).
            self.clients = [client]
            self.request = request
            self.write(json_encode(request))
            self.close()

        def handle_input(self, _file, value):
            try:
                result = json_decode(value)
            except ValueError:
                # Child produced garbage; report it as the request's error.
                self.server.error("invalid response:\n" + indent(value))
                result = self.request.copy()
                result["error"] = value
            for client in self.clients:
                client.add_result(result)
            self.server.request_finished(self, self.request, result)

    class JobClient(PeerServer.SocketPeer):
        def handle_input(self, _file, value):
            decoded = json_decode(value)
            assert isinstance(decoded, dict)
            if "requests" in decoded:
                self.__requests = decoded["requests"]
                # Frozen (hashable) copies enable de-duplication across
                # clients; Python 2 map() returns a list here.
                self.__pending_requests = map(freeze, self.__requests)
                self.__async = decoded.get("async", False)
                self.__results = []
                self.server.add_requests(self)
            else:
                self.server.execute_command(self, decoded)
            # NOTE(review): on the execute_command() path self.__async was
            # never assigned, so this attribute access looks like it would
            # raise AttributeError — confirm against callers.
            if self.__async:
                self.close()

        def has_requests(self):
            return bool(self.__pending_requests)

        def get_request(self):
            return self.__pending_requests.pop()

        def add_result(self, result):
            if self.__async:
                # Client is already gone, so we don't really care about the
                # results.
                return
            self.__results.append(result)
            if len(self.__results) == len(self.__requests):
                self.write(json_encode(self.__results))
                self.close()

    def __init__(self, service):
        super(JSONJobServer, self).__init__(service)
        self.__clients_with_requests = []
        self.__started_requests = {}
        self.__max_workers = service.get("max_workers", 4)

    def __startJobs(self):
        # Repeat "start a job" while there are jobs to start and we haven't
        # reached the limit on number of concurrent jobs to run.
        while self.__clients_with_requests and len(self.__started_requests) < self.__max_workers:
            # Fetch next request from first client in list of clients with
            # pending requests.
            client = self.__clients_with_requests.pop(0)
            frozen = client.get_request()
            if client.has_requests():
                # Client has more pending requests, so put it back at the end
                # of the list of clients with pending requests.
                self.__clients_with_requests.append(client)
            if frozen in self.__started_requests:
                # Another client has requested the same thing, piggy-back on
                # that job instead of starting another.
                self.__started_requests[frozen].clients.append(client)
                continue
            request = thaw(frozen)
            # Check if this request is already finished. Default
            # implementation of this callback always returns None.
            result = self.request_result(request)
            if result:
                # Request is already finished; don't bother starting a child
                # process, just report result directly to the client.
                client.add_result(result)
            else:
                # Start child process.
                job = JSONJobServer.Job(self, client, request)
                self.add_peer(job)
                self.request_started(job, request)

    def add_requests(self, client):
        assert client.has_requests()
        self.__clients_with_requests.append(client)
        self.__startJobs()

    def execute_command(self, client, command):
        # Default: commands (non-request input) are not supported.
        client.write(json_encode({ "status": "error",
                                   "error": "command not supported" }))
        client.close()

    def handle_peer(self, peersocket, peeraddress):
        return JSONJobServer.JobClient(self, peersocket)

    def peer_destroyed(self, peer):
        if isinstance(peer, JSONJobServer.Job):
            # A worker slot was freed; start queued jobs if any.
            self.__startJobs()

    def request_result(self, request):
        # Override to short-circuit requests whose results are cached.
        pass

    def request_started(self, job, request):
        self.__started_requests[freeze(request)] = job

    def request_finished(self, job, request, result):
        del self.__started_requests[freeze(request)]

def call(context, fn, *args, **kwargs):
    # Entry point wrapper used by all services: optionally runs fn under
    # coverage measurement, and turns its return value into the process
    # exit status.
    if configuration.debug.COVERAGE_DIR:
        import coverage
        result = coverage.call(context, fn, *args, **kwargs)
    else:
        result = fn(*args, **kwargs)
    sys.exit(result)



================================================
FILE: src/background/wait-for-pidfiles.py
================================================
# This script is no longer used, but is kept around in case an old version of
# the SysV init script is used, that still calls this script.


================================================
FILE: src/background/watchdog.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import sys
import os
import time
import signal
import errno
import multiprocessing

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..")))

import configuration
import background.utils
import mailutils

def getRSS(pid):
    # Return the resident set size of |pid| in bytes, parsed from the VmRSS
    # line of /proc/<pid>/status (Linux-specific).  Raises IOError(ENOENT)
    # via open() if the process no longer exists.
    for line in open("/proc/%d/status" % pid):
        words = line.split()
        if words[0] == "VmRSS:":
            if words[2].lower() == "kb":
                unit = 1024
            elif words[2].lower() == "mb":
                unit = 1024 ** 2
            elif words[2].lower() == "gb":
                unit = 1024 ** 3
            else:
                raise Exception("unknown unit: %s" % words[2])
            return int(words[1]) * unit
    else:
        # for/else: no VmRSS line found in the status file.
        raise Exception("invalid pid")

class Watchdog(background.utils.BackgroundProcess):
    """Service that monitors system load average and the memory use of WSGI
    processes, mailing the administrator and killing processes that exceed
    their configured limits."""

    def __init__(self):
        service = configuration.services.WATCHDOG
        super(Watchdog, self).__init__(service=service)
        # Configured limits are per-CPU; scale by the CPU count.
        cpu_count = multiprocessing.cpu_count()
        self.load1_limit = service.get("load1_limit", 0) * cpu_count
        self.load5_limit = service.get("load5_limit", 0) * cpu_count
        self.load15_limit = service.get("load15_limit", 0) * cpu_count

    def run(self):
        # Pids already sent SIGINT for exceeding the soft limit; each pid is
        # only soft-restarted once.
        soft_restart_attempted = set()
        # Last observed RSS per pid, to log only on change.
        previous = {}
        getloadavg_failed = False
        # Per-interval warning state: last load warned about and when, so a
        # warning repeats only if load grows >20% or enough time has passed.
        load1_has_warned = 0
        load1_last_time = 0
        load5_has_warned = 0
        load5_last_time = 0
        load15_has_warned = 0
        load15_last_time = 0
        while not self.terminated:
            self.interrupted = False
            def sendLoadAverageWarning(interval, limit, load):
                cpu_count = multiprocessing.cpu_count()
                mailutils.sendAdministratorMessage(
                    "watchdog", "%d-minute load average" % interval,
                    ("The current %d-minute load average is %.2f!\n"
                     % (interval, load)) +
                    ("The configured limit is %.2f (%.2f x %d CPUs).\n"
                     % (limit, limit / cpu_count, cpu_count)) +
                    "\n-- critic\n")
            try:
                load1, load5, load15 = os.getloadavg()
                self.debug("load average: %r, %r, %r" % (load1, load5, load15))
            except OSError:
                # Treat load as zero and log the failure only once.
                load1, load5, load15 = 0, 0, 0
                if not getloadavg_failed:
                    self.exception("failed to detect system load average")
                    getloadavg_failed = True
            now = time.time()
            if self.load1_limit and load1 > self.load1_limit:
                if load1 > load1_has_warned * 1.2 or now - load1_last_time > 60:
                    sendLoadAverageWarning(1, self.load1_limit, load1)
                    load1_has_warned = load1
                    load1_last_time = now
            else:
                load1_has_warned = 0
                load1_last_time = 0
            if self.load5_limit and load5 > self.load5_limit:
                if load5 > load5_has_warned * 1.2 or now - load5_last_time > 5 * 60:
                    sendLoadAverageWarning(5, self.load5_limit, load5)
                    load5_has_warned = load5
                    load5_last_time = now
            else:
                load5_has_warned = 0
                load5_last_time = 0
            if self.load15_limit and load15 > self.load15_limit:
                if load15 > load15_has_warned * 1.2 or now - load15_last_time > 15 * 60:
                    sendLoadAverageWarning(15, self.load15_limit, load15)
                    load15_has_warned = load15
                    load15_last_time = now
            else:
                load15_has_warned = 0
                load15_last_time = 0
            # WSGI processes advertise themselves via per-pid files in this
            # directory.
            pidfile_dir = configuration.paths.WSGI_PIDFILE_DIR
            if os.path.isdir(pidfile_dir):
                pids = set(map(int, os.listdir(pidfile_dir)))
            else:
                pids = set()
            for pid in pids:
                try:
                    rss = getRSS(pid)
                except IOError as error:
                    if error.errno == errno.ENOENT:
                        # Process is gone; clean up its stale pid file.
                        self.warning("unlinking stale pid-file: %s"
                                     % os.path.join(pidfile_dir, str(pid)))
                        os.unlink(os.path.join(pidfile_dir, str(pid)))
                        continue
                    else:
                        raise
                if previous.get(pid) != rss:
                    self.debug("pid=%d, rss=%d bytes" % (pid, rss))
                    previous[pid] = rss
                if rss > configuration.services.WATCHDOG["rss_hard_limit"]:
                    # Hard limit: SIGKILL immediately.
                    mailutils.sendAdministratorMessage(
                        "watchdog", "pid(%d): hard memory limit exceeded" % pid,
                        ("Current RSS: %d kilobytes\nSending process SIGKILL (%d).\n\n-- critic"
                         % (rss, signal.SIGKILL)))
                    self.info("Killing pid(%d): hard memory limit exceeded, RSS: %d kilobytes"
                              % (pid, rss))
                    os.kill(pid, signal.SIGKILL)
                elif rss > configuration.services.WATCHDOG["rss_soft_limit"] and pid not in soft_restart_attempted:
                    # Soft limit: SIGINT once, giving the process a chance to
                    # restart itself gracefully.
                    mailutils.sendAdministratorMessage(
                        "watchdog", "pid(%d): soft memory limit exceeded" % pid,
                        ("Current RSS: %d kilobytes\nSending process SIGINT (%d).\n\n"
                         % (rss, signal.SIGINT)))
                    self.info("Killing pid(%d): soft memory limit exceeded, RSS: %d kilobytes"
                              % (pid, rss))
                    os.kill(pid, signal.SIGINT)
                    soft_restart_attempted.add(pid)
            # Forget state for pids that have disappeared.  (Python 2:
            # previous.keys() is a list, so deleting while iterating is safe.)
            for pid in previous.keys():
                if pid not in pids:
                    del previous[pid]
            soft_restart_attempted = soft_restart_attempted & pids
            self.signal_idle_state()
            time.sleep(10)

def start_service():
    watchdog = Watchdog()
    return watchdog.start()

background.utils.call("watchdog", start_service)


================================================
FILE: src/base.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

class Error(Exception):
    """Base class for all Critic exceptions."""
    pass

class ImplementationError(Error):
    """Raised for internal implementation errors (i.e. bugs)."""
    pass


================================================
FILE: src/base_unittest.py
================================================
def independence():
    # Simply check that base can be imported.
    import base
    print "independence: ok"


================================================
FILE: src/changeset/__init__.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.


================================================
FILE: src/changeset/client.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import socket import base import configuration from textutils import json_encode, json_decode, indent class ChangesetBackgroundServiceError(base.ImplementationError): def __init__(self, message): super(ChangesetBackgroundServiceError, self).__init__( "Changeset background service failed: %s" % message) def requestChangesets(requests, async=False): try: connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) connection.connect(configuration.services.CHANGESET["address"]) connection.send(json_encode({ "requests": requests, "async": async })) connection.shutdown(socket.SHUT_WR) data = "" while True: received = connection.recv(4096) if not received: break data += received connection.close() except EnvironmentError as error: raise ChangesetBackgroundServiceError(str(error)) if async: return if not data: raise ChangesetBackgroundServiceError( "returned an invalid response: no response") try: results = json_decode(data) except ValueError: raise ChangesetBackgroundServiceError( "returned an invalid response: %r" % data) if type(results) != list: # If not a list, the result is probably an error message. raise ChangesetBackgroundServiceError(str(results)) if len(results) != len(requests): raise ChangesetBackgroundServiceError("didn't process all requests") errors = [] for result in results: if "error" in result: errors.append(result["error"]) if errors: raise ChangesetBackgroundServiceError( "one or more requests failed:\n%s" % "\n".join(map(indent, errors))) ================================================ FILE: src/changeset/create.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import dbutils
import gitutils
import diff.merge
import diff.parse

def createChangeset(db, request):
    """Create and store changeset(s) for a commit, as described by 'request'.

    'request' is a dictionary with the keys "repository_name",
    "changeset_type" and "child_sha1", plus "parent_sha1" for the "custom"
    and "conflicts" changeset types.  On return, request["changeset_ids"]
    maps each parent SHA-1 (or None for a parent-less changeset) to the
    database id of the corresponding changeset row.

    If matching changeset rows already exist in the database they are reused;
    otherwise the diff is parsed and new rows are inserted.
    """
    repository_name = request["repository_name"]
    changeset_type = request["changeset_type"]
    repository = gitutils.Repository.fromName(db, repository_name)

    def insertChangeset(db, parent, child, files):
        # Inserts one changeset row plus its per-file version and chunk rows;
        # returns the new changeset's id.
        while True:
            # Inserting new files will often clash when creating multiple
            # related changesets in parallel. It's a simple operation, so if it
            # fails with an integrity error, just try again until it doesn't
            # fail. (It will typically succeed the second time because then the
            # new files already exist, and it doesn't need to insert anything.)
            try:
                dbutils.find_files(db, files)
                db.commit()
                break
            except dbutils.IntegrityError:
                db.rollback()

        cursor = db.cursor()
        # RETURNING id fetches the generated primary key in the same round-trip.
        cursor.execute("INSERT INTO changesets (type, parent, child) VALUES (%s, %s, %s) RETURNING id",
                       (changeset_type, parent.getId(db) if parent else None, child.getId(db)))
        changeset_id = cursor.fetchone()[0]

        fileversions_values = []
        chunks_values = []
        file_ids = set()

        for file in files:
            # A file appearing twice in one changeset would violate the data
            # model; fail loudly rather than insert inconsistent rows.
            if file.id in file_ids:
                raise Exception("duplicate:%d:%s" % (file.id, file.path))
            file_ids.add(file.id)
            fileversions_values.append((changeset_id, file.id, file.old_sha1, file.new_sha1, file.old_mode, file.new_mode))
            for index, chunk in enumerate(file.chunks):
                # The last chunk of a file is analyzed with a flag telling it so.
                chunk.analyze(file, index == len(file.chunks) - 1)
                chunks_values.append((changeset_id, file.id,
                                      chunk.delete_offset, chunk.delete_count,
                                      chunk.insert_offset, chunk.insert_count,
                                      chunk.analysis,
                                      1 if chunk.is_whitespace else 0))
            # Release per-file data (lines etc.) once its rows are prepared.
            file.clean()

        if fileversions_values:
            cursor.executemany("""INSERT INTO fileversions (changeset, file, old_sha1, new_sha1, old_mode, new_mode)
                                       VALUES (%s, %s, %s, %s, %s, %s)""",
                               fileversions_values)
        if chunks_values:
            cursor.executemany("""INSERT INTO chunks (changeset, file, deleteOffset, deleteCount, insertOffset, insertCount, analysis, whitespace)
                                       VALUES (%s, %s, %s, %s, %s, %s, %s, %s)""",
                               chunks_values)

        return changeset_id

    # Results are communicated back to the caller through the request dict.
    changeset_ids = request["changeset_ids"] = {}

    child = gitutils.Commit.fromSHA1(db, repository, request["child_sha1"])
    cursor = db.cursor()

    if "parent_sha1" in request:
        # Explicit-parent changesets: "custom" and "conflicts".
        assert changeset_type in ("custom", "conflicts")
        parent_sha1 = request["parent_sha1"]
        if parent_sha1 == "0" * 40:
            # All-zeroes SHA-1 denotes "no parent".
            parent = parent_id = None
        else:
            parent = gitutils.Commit.fromSHA1(db, repository, parent_sha1)
            parent_id = parent.getId(db)
        # parent_sha1 is selected as a constant so both branches of this
        # if-statement yield (id, parent_sha1) rows.
        cursor.execute("""SELECT id, %s
                            FROM changesets
                           WHERE type=%s
                             AND parent=%s
                             AND child=%s""",
                       (parent_sha1, changeset_type, parent_id, child.getId(db)))
    else:
        # Implicit-parent changesets: "direct" and "merge"; parents come from
        # the commit itself.  LEFT OUTER JOIN keeps rows with a NULL parent.
        assert changeset_type in ("direct", "merge")
        cursor.execute("""SELECT changesets.id, commits.sha1
                            FROM changesets
                 LEFT OUTER JOIN commits ON (commits.id=changesets.parent)
                           WHERE type=%s
                             AND child=%s""",
                       (changeset_type, child.getId(db)))

    rows = cursor.fetchall()

    if rows:
        # Changeset(s) already exists in database.
        for changeset_id, parent_sha1 in rows:
            changeset_ids[parent_sha1] = changeset_id
    else:
        # Parse diff and insert changeset(s) into the database.
        if changeset_type == "merge":
            changes = diff.merge.parseMergeDifferences(db, repository, child)
        elif changeset_type == "direct":
            changes = diff.parse.parseDifferences(repository, commit=child)
        else:
            changes = diff.parse.parseDifferences(repository, from_commit=parent, to_commit=child)

        # 'changes' maps parent SHA-1 (or None) to the list of changed files.
        for parent_sha1, files in changes.items():
            if parent_sha1 is None:
                parent = None
            else:
                parent = gitutils.Commit.fromSHA1(db, repository, parent_sha1)
            changeset_ids[parent_sha1] = insertChangeset(db, parent, child, files)

        db.commit()
import difflib import re import diff import diff.analyze re_ws = re.compile("\\s+") SMALLEST_INSERT = 5 MAXIMUM_GAP = 10 class Line: def __init__(self, string): self.string = string self.wsnorm = re_ws.sub(" ", string.strip()) def __str__(self): return self.string def __eq__(self, other): return self.wsnorm == other.wsnorm def __ne__(self, other): return self.wsnorm == other.wsnorm def __hash__(self): return hash(self.wsnorm) def compareChunks(source_file, source_chunk, target_file, target_chunk, extra_target_chunks, context_lines=3): source_length = source_file.oldCount() target_length = target_file.newCount() source_lines = map(Line, source_chunk.deleted_lines) target_lines = map(Line, target_chunk.inserted_lines) sm = difflib.SequenceMatcher(None, source_lines, target_lines) blocks = filter(lambda x: x[2], sm.get_matching_blocks()) if blocks: chunks = [] i, j, n = blocks.pop(0) current = [(i, j, n)] matched = n pi = i + n pj = j + n for i, j, n in blocks: if i - pi > MAXIMUM_GAP or j - pj > MAXIMUM_GAP: chunks.append((matched, current)) current = [(i, j, n)] matched = n else: current.append((i, j, n)) matched += n pi = i + n pj = j + n chunks.append((matched, current)) chunks.sort() matched, blocks = chunks[-1] if matched < SMALLEST_INSERT: return None source_begin = max(-(source_chunk.delete_offset - 1), blocks[0][0] - context_lines) source_end = min(source_length + 1 - source_chunk.delete_offset, blocks[-1][0] + blocks[-1][2] + context_lines) target_begin = max(-(target_chunk.insert_offset - 1), blocks[0][1] - context_lines) target_end = min(target_length + 1 - target_chunk.insert_offset, blocks[-1][1] + blocks[-1][2] + context_lines) new_chunk = diff.Chunk(source_chunk.delete_offset + source_begin, source_end - source_begin, target_chunk.insert_offset + target_begin, target_end - target_begin) new_chunk.source_chunk = source_chunk new_chunk.source_begin = source_begin new_chunk.source_end = source_end new_chunk.source_length = source_length if blocks[0][1] >= 
SMALLEST_INSERT and blocks[0][1] < target_chunk.insert_count: extra_before = diff.Chunk(0, 0, target_chunk.insert_offset, blocks[0][1]) else: extra_before = None match_end = blocks[-1][1] + blocks[-1][2] if target_chunk.insert_count - match_end >= SMALLEST_INSERT: extra_after = diff.Chunk(0, 0, target_chunk.insert_offset + match_end, target_chunk.insert_count - match_end) else: extra_after = None new_chunk.deleted_lines = source_file.getOldLines(new_chunk) new_chunk.inserted_lines = target_file.getNewLines(new_chunk) if matched > len(new_chunk.inserted_lines) * 0.25: analysis = diff.analyze.analyzeChunk(new_chunk.deleted_lines, new_chunk.inserted_lines, moved=True) if matched > len(new_chunk.inserted_lines) * 0.5 or (analysis and len(analysis.split(';')) >= len(new_chunk.inserted_lines) * 0.5): new_chunk.analysis = analysis if extra_before: extra_target_chunks.append(extra_before) if extra_after: extra_target_chunks.append(extra_after) return new_chunk return None def findSourceChunk(db, changeset, source_file_ids, target_file, target_chunk, extra_target_chunks): for source_file in changeset.files: if source_file_ids and not source_file.id in source_file_ids: continue if source_file.chunks is None: continue for source_chunk in source_file.chunks: # Shouldn't compare chunk to itself, of course. if target_file == source_file and target_chunk == source_chunk: continue # Much fewer deleted lines than inserted lines in the target chunk; # unlikely to be a relevant source chunk. #if source_chunk.delete_count * 1.5 < target_chunk.insert_count: # continue if source_chunk.analysis: # If more than half the deleted lines are mapped against # inserted lines, most likely edited rather than moved code. 
if source_chunk.delete_count < len(source_chunk.analysis.split(";")) * 2: continue source_file.loadOldLines() source_chunk.deleted_lines = source_file.getOldLines(source_chunk) new_chunk = compareChunks(source_file, source_chunk, target_file, target_chunk, extra_target_chunks) if new_chunk: return source_file, new_chunk return None, None def detectMoves(db, changeset, source_file_ids=None, target_file_ids=None): moves = [] for target_file in changeset.files: if target_file_ids and not target_file.id in target_file_ids: continue current_chunks = target_file.chunks count = 0 while current_chunks: extra_target_chunks = [] count += 1 for target_chunk in current_chunks: # White-space only changes; unlikely target of moved code. if target_chunk.is_whitespace: continue # Too few inserted lines; couldn't possibly be an interesting target # of moved code. if target_chunk.insert_count < 5: continue if target_chunk.analysis: # If more than half the inserted lines are mapped against # deleted lines, most likely edited rather than moved code. 
if target_chunk.insert_count < len(target_chunk.analysis.split(";")) * 2: continue target_file.loadNewLines() target_chunk.inserted_lines = target_file.getNewLines(target_chunk) source_file, chunk = findSourceChunk(db, changeset, source_file_ids, target_file, target_chunk, extra_target_chunks) if source_file and chunk: moves.append((source_file, target_file, chunk)) continue current_chunks = extra_target_chunks if moves: def orderChunks(a, b): a_source_file, a_target_file, a_chunk = a b_source_file, b_target_file, b_chunk = b c = cmp(a_target_file.path, b_target_file.path) if c != 0: return c else: return cmp(a_chunk.insert_offset, b_chunk.insert_offset) moves.sort(orderChunks) move_changeset = diff.Changeset(None, changeset.parent, changeset.child, 'moves', []) for source_file, target_file, chunk in moves: move_file = diff.File(0, "", source_file.old_sha1, target_file.new_sha1, source_file.repository, chunks=[chunk], move_source_file=source_file, move_target_file=target_file) move_changeset.files.append(move_file) return move_changeset else: return None ================================================ FILE: src/changeset/html.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import re import itertools import urllib from time import strftime from bisect import bisect_right import textutils import dbutils import diff import diff.context import changeset.utils as changeset_utils import reviewing.comment as review_comment import htmlutils import configuration from htmlutils import jsify, Generator, Text, HTML, stripStylesheet re_tag = re.compile("<([bi]) class='?([a-z]+)'?>") re_tailws = re.compile("^(.*?)(\s+)((?:<[^>]+>)*)$") class CodeContexts: class Context: def __init__(self, first_line, last_line, description): self.first_line = first_line self.last_line = last_line self.description = description def __cmp__(self, index): return cmp(self.first_line, index) def __init__(self, db, sha1, first_line, last_line): cursor = db.cursor() cursor.execute("""SELECT first_line, last_line, context FROM codecontexts WHERE sha1=%s AND first_line<=%s AND last_line>=%s ORDER BY first_line ASC, last_line DESC""", (sha1, last_line, first_line)) self.contexts = [CodeContexts.Context(first_line, last_line, description) for first_line, last_line, description in cursor] def find(self, linenr): index = bisect_right(self.contexts, linenr) if index: context = self.contexts[index - 1] if context.last_line >= linenr: return context.description return None def expandHTML(db, file, old_offset, new_offset, lines, target): if old_offset == 1: where = 'top' elif old_offset + lines - 1 == file.oldCount(): where = 'bottom' else: where = 'middle' select = target.select(onchange=('expand(this, %d, %r, %r, %r, %d, %d, %d);' % (file.id, file.path, file.new_sha1, where, old_offset, new_offset, lines))) select.option(value='none').text("%s lines not shown" % lines) if where == 'middle': actualLines = lines else: actualLines = lines * 2 if actualLines > 20: select.option(value=10).text("Show 10 more") if actualLines > 50: select.option(value=25).text("Show 25 more") if actualLines > 100: select.option(value=50).text("Show 50 more") select.option(value=lines).text("All") def 
generateDataScript(db, user, changeset, review, file_id_format, compact, parent_index): data = { 'parent_id': changeset.parent.id if changeset.parent else None, 'parent_sha1': htmlutils.jsify(changeset.parent.sha1) if changeset.parent else None, 'child_id': changeset.child.id, 'child_sha1': htmlutils.jsify(changeset.child.sha1), 'changeset_id': jsify(changeset.id), 'commit_ids': ", ".join([str(commit.getId(db)) for commit in reversed(changeset.commits(db))]), 'parent_index': parent_index } if parent_index is None: commits = changeset.commits(db) if review and commits: if len(commits) > 1: cursor = db.cursor() cursor.execute("SELECT id FROM changesets JOIN reviewchangesets ON (changeset=id) WHERE review=%s AND child=ANY (%s)", (review.id, [commit.getId(db) for commit in commits])) changeset_ids = [changeset_id for (changeset_id,) in cursor] else: changeset_ids = [changeset.id] else: changeset_ids = None if changeset_ids is None: changeset_ids = "null" else: changeset_ids = "[%s]" % ", ".join(map(str, changeset_ids)) data["changeset_ids"] = changeset_ids if changeset.parent: data_script = """ var changeset = { parent: { id: %(parent_id)d, sha1: %(parent_sha1)s }, child: { id: %(child_id)d, sha1: %(child_sha1)s }, id: %(changeset_id)s, ids: %(changeset_ids)s, commits: [ %(commit_ids)s ] }; var useFiles = files; """ % data else: data_script = """ var changeset = { parent: null, child: { id: %(child_id)d, sha1: %(child_sha1)s }, id: %(changeset_id)s, ids: %(changeset_ids)s, commits: [ %(commit_ids)s ] }; var useFiles = files; """ % data else: data_script = """ var changeset; var parent_index = %(parent_index)d; if (!changeset) changeset = { commits: [ %(child_id)d ] }; if (!changeset[parent_index]) changeset[parent_index] = {}; if (!changeset.child) changeset.child = { id: %(child_id)d, sha1: %(child_sha1)s }; changeset[parent_index].parent = { id: %(parent_id)d, sha1: %(parent_sha1)s }; changeset[parent_index].child = { id: %(child_id)d, sha1: %(child_sha1)s }; 
changeset[parent_index].id = %(changeset_id)s; var useFiles = files[parent_index] = {}; """ % data parent_index_property = "parent: %d, " % parent_index if parent_index is not None else "" all_files = set() for file in changeset.files: if file.move_source_file and file.move_target_file: all_files.add(file.move_source_file) all_files.add(file.move_target_file) elif file.hasChanges(): all_files.add(file) for file in all_files: data_script += """ useFiles[%d] = { old_sha1: %r, %snew_sha1: %r, %spath: %s }; """ % (file.id, file.old_sha1, " " * len(str(file.id)), file.new_sha1, " " * len(str(file.id)), jsify(file.path)) if file.old_sha1 != "0" * 40 and file.new_sha1 != "0" * 40: data_script += """files[%r] = { %sid: %d, side: 'o' }; """ % (file.old_sha1, parent_index_property, file.id) data_script += """files[%r] = { id: %d, side: 'n' }; """ % (file.new_sha1, file.id) elif file.new_sha1 != "0" * 40: data_script += """files[%r] = { id: %d, side: 'n' }; """ % (file.new_sha1, file.id) else: data_script += """files[%r] = { %sid: %d, side: 'o' }; """ % (file.old_sha1, parent_index_property, file.id) if review: data_script += """ %s var commentChains; """ % review.getJS() reviewable_files = ", ".join([("%d: %r" % (file_id, state)) for file_id, (is_reviewer, state, reviewers) in changeset.getReviewFiles(db, user, review).items() if is_reviewer]) if parent_index is None: data_script += """ changeset.reviewableFiles = { %s }; """ % reviewable_files else: data_script += """ changeset[parent_index].reviewableFiles = { %s }; """ % reviewable_files if compact: return re.sub(r"\B\s+\B|\b\s+\B|\B\s+\b", "", data_script).strip() else: return data_script.strip() def render(db, target, user, repository, changeset, review=None, review_mode=None, context_lines=3, style="horizontal", wrap=True, options={}, parent_index=None): addResources(db, user, repository, review, options.get("compact", False), options.get("tabify", False), target) compact = options.get("compact", False) cursor = 
db.cursor() if options.get("merge"): local_comments_only = True else: local_comments_only = False target.script(type='text/javascript').text(generateDataScript(db, user, changeset, review, options.get("file_id_format", "f%d"), compact, parent_index), cdata=True) target.addInternalStylesheet(""" table.file > tbody.lines > tr > td.line { white-space: pre%s !important }""" % (wrap and "-wrap" or "")) comment_chains_per_file = {} if review: comment_chain_script = "" for file in changeset.files: if file.hasChanges() and not file.wasRemoved(): comment_chains = review_comment.loadCommentChains(db, review, user, file=file, changeset=changeset, local_comments_only=local_comments_only) if comment_chains: comment_chains_per_file[file.path] = comment_chains for chain in comment_chains: if file.new_sha1 in chain.lines_by_sha1: sha1 = file.new_sha1 else: sha1 = file.old_sha1 comment_chain_script += "commentChains.push(%s);\n" % chain.getJSConstructor(sha1) if comment_chain_script: target.script(type='text/javascript').text(comment_chain_script, cdata=True) def join(a, b): if a and b: return itertools.chain(a, b) elif a: return a elif b: return b else: return [] local_options = { "style": style, "count_chunks": True } for name, value in options.items(): local_options[name] = value if local_options.get("expand"): limit = user.getPreference(db, "commit.expandFilesLimit") if limit != 0 and limit < len(changeset.files): del local_options["expand"] for index, file in enumerate(changeset.files): if file.hasChanges(): if not file.wasRemoved() and not file.isBinaryChanges(): file.loadOldLines(True, request_highlight=True) file.loadNewLines(True, request_highlight=True) if not file.isEmptyChanges(): lines = diff.context.ContextLines( file, file.chunks, [(chain, False) for chain in comment_chains_per_file.get(file.path, [])], merge=options.get("merge", False), conflicts=changeset.conflicts) file.macro_chunks = lines.getMacroChunks(context_lines, highlight=True) else: file.macro_chunks = [] 
else: file.macro_chunks = [] renderFile(db, target, user, review, file, first_file=index == 0, options=local_options, conflicts=changeset.conflicts, add_resources=False) file.clean() yield target def renderFile(db, target, user, review, file, first_file=False, options={}, conflicts=False, add_resources=True): if add_resources: addResources(db, user, file.repository, review, options.get("compact", False), options.get("tabify"), target) if options.get("count_chunks"): deleted = 0 inserted = 0 if file.wasRemoved(): file.loadOldLines(False) deleted = file.oldCount() else: for macro_chunk in file.macro_chunks: for chunk in macro_chunk.chunks: deleted += chunk.delete_count inserted += chunk.insert_count chunksText = "-%d/+%d lines" % (deleted, inserted) else: chunksText = "" compact = options.get("compact", False) file_table_class = "file sourcefont" compact = options.get("compact", False) if options.get("show"): file_table_class += " show" if options.get("expand"): file_table_class += " expanded" compact = False if first_file: file_table_class += " first" file_id = "f%d" % file.id customFileId = options.get("file_id") if customFileId: file_id = customFileId(file_id) if options.get("tabify"): target.script(type="text/javascript").text("calculateTabWidth();") table = target.table(file_table_class, width='100%', cellspacing=0, cellpadding=0, id=file_id, critic_file_id=file.id, critic_parent_index=options.get("parent_index")) if not compact: columns = table.colgroup() columns.col('edge') columns.col('linenr') columns.col('line') columns.col('middle') columns.col('middle') columns.col('line') columns.col('linenr') columns.col('edge') row = table.thead().tr() header_left = options.get("header_left") header_right = options.get("header_right") def make_url(url_path, path): params = { "sha1": commit.sha1, "path": path } if review is None: params["repository"] = str(file.repository.id) else: params["review"] = str(review.id) return "/%s?%s" % (url_path, urllib.urlencode(params)) 
if header_left: header_left(db, row.td('left', colspan=4, align='left'), file) else: cell = row.td('left', colspan=4, align='left') commit = options.get("commit") if commit: cell.a("showtree root", href=make_url("showtree", "/")).text("root") cell.span("slash").text('/') components = file.path.split("/") for index, component in enumerate(components[:-1]): cell.a("showtree", href=make_url("showtree", "/".join(components[:index + 1]))).text(component, escape=True) cell.span("slash").text('/') if not file.wasRemoved(): cell.a("showtree", href=make_url("showfile", "/".join(components))).text(components[-1], escape=True) else: cell.text(components[-1], escape=True) else: cell.text(file.path) if not compact: cell.comment("sha1: %s to %s" % (file.old_sha1, file.new_sha1)) if header_right: header_right(db, row.td('right', colspan=4, align='right'), file) else: row.td('right', colspan=4, align='right').text(chunksText) next_old_offset = 1 next_new_offset = 1 display_type = options.get("display_type", "both") deleted_file = False added_file = False if not file.isBinaryChanges() and not file.isEmptyChanges(): if file.old_sha1 == 40 * '0': display_type = "new" if file.getLanguage() is None: limit = configuration.limits.MAXIMUM_ADDED_LINES_UNRECOGNIZED else: limit = configuration.limits.MAXIMUM_ADDED_LINES_RECOGNIZED count = file.newCount() if count > limit and len(file.macro_chunks) == 1 and len(file.macro_chunks[0].lines) == count: added_file = True elif file.new_sha1 == 40 * '0': display_type = "old" deleted_file = not options.get("include_deleted", False) def baseFileId(file): if file.move_source_file and file.move_target_file: return "fm%d_%d" % (file.move_source_file.id, file.move_target_file.id) else: return "f%d" % file.id def baseLineId(file, line, index): file_id = fileId(file) if line.type == diff.Line.DELETED: return "%so%dn0" % (file_id, line.old_offset) elif line.type == diff.Line.INSERTED: return "%so0n%d" % (file_id, line.new_offset) else: return "%so%dn%d" % 
(file_id, line.old_offset, line.new_offset) def baseLineCellId(file, version, line): if file.move_source_file and file.move_target_file: if version == "o": file_id = file.move_source_file.id else: file_id = file.move_target_file.id else: file_id = file.id if line: return "f%d%s%d" % (file_id, version, line) else: return None fileId = baseFileId customLineId = options.get("line_id") if customLineId: lineId = lambda file, line, index: customLineId(baseLineId(file, line, index)) else: lineId = baseLineId customLineCellId = options.get("line_cell_id") if customLineCellId: lineCellId = lambda file, version, line: customLineCellId(baseLineCellId(file, version, line)) else: lineCellId = baseLineCellId def lineType(line, index): type = line.type if type == diff.Line.DELETED: return "deleted" elif type == diff.Line.INSERTED: return "inserted" elif type == diff.Line.MODIFIED: return "modified whitespace" if line.is_whitespace else "modified" elif type == diff.Line.REPLACED: return "replaced" else: return "context" support_expand = options.get("support_expand", True) style = options.get("style", "horizontal") collapse_simple_hunks = user.getPreference(db, 'commit.diff.collapseSimpleHunks') content_before = options.get("content_before") if content_before: content = table.tbody('content') row = content.tr('content') row.td(colspan=2).text() content_before(db, row.td(colspan=4)) row.td(colspan=2).text() if added_file or deleted_file: table.tbody('spacer').tr('spacer').td(colspan=8).text() verb = "added" if added_file else "deleted" side = "new" if added_file else "old" if added_file: count = file.newCount() else: count = file.oldCount() tbody = table.tbody('deleted') row = tbody.tr('deleted') row.td(colspan=2).text() row.td(colspan=4).h2().text("File was %s." 
% verb) row.td(colspan=2).text() if not file.isEmptyChanges(): row = tbody.tr('deleted') row.td(colspan=2).text() parent_index = options.get("parent_index", -1) if parent_index != -1: fileset = "files[%d]" % parent_index else: fileset = "files" row.td(colspan=4).button(onclick="fetchFile(%s, %d, '%s', event.currentTarget.parentNode.parentNode.parentNode);" % (fileset, file.id, side)).text("Fetch %d %s Lines" % (count, verb.capitalize())) row.td(colspan=2).text() table.tbody('spacer').tr('spacer').td(colspan=8).text() elif file.isBinaryChanges() or file.isEmptyChanges(): table.tbody('spacer').tr('spacer').td(colspan=8).text() if file.isBinaryChanges(): title = "Binary" class_name = "binary" else: title = "Empty" class_name = "empty" tbody = table.tbody(class_name) if file.wasAdded(): title += " file added." elif file.wasRemoved(): title += " file removed." else: title += " file modified." row = tbody.tr(class_name) row.td(colspan=2).text() row.td(colspan=4).h2().text(title) row.td(colspan=2).text() if file.isBinaryChanges(): row = tbody.tr('download') row.td(colspan=2).text() cell = row.td(colspan=4) def linkToFile(target, file, sha1): is_image = False base, _, extension = file.path.rpartition(".") if base and configuration.mimetypes.MIMETYPES.get(extension, "").startswith("image/"): is_image = True url = "/download/%s?sha1=%s&repository=%d" % (file.path, sha1, file.repository.id) link = target.a(href=url) if is_image: link.img(src=url) else: link.text(sha1) if file.wasAdded(): linkToFile(cell, file, file.new_sha1) elif file.wasRemoved(): linkToFile(cell, file, file.old_sha1) else: linkToFile(cell, file, file.old_sha1) cell.innerHTML(" → ") linkToFile(cell, file, file.new_sha1) row.td(colspan=2).text() table.tbody('spacer').tr('spacer').td(colspan=8).text() else: if options.get("tabify"): tabwidth = file.getTabWidth() indenttabsmode = file.getIndentTabsMode() tabify = lambda line: htmlutils.tabify(line, tabwidth, indenttabsmode) else: tabify = lambda line: line 
code_contexts = CodeContexts(db, file.new_sha1, file.macro_chunks[0].lines[0].new_offset, file.macro_chunks[-1].lines[-1].new_offset) blocks = [("[%d,%d]" % (macro_chunk.lines[0].new_offset, macro_chunk.lines[-1].new_offset)) for macro_chunk in file.macro_chunks] target.script(type="text/javascript").text("blocks[%d] = [%s];" % (file.id, ",".join(blocks))) for index, macro_chunk in enumerate(file.macro_chunks): first_line = macro_chunk.lines[0] last_line = macro_chunk.lines[-1] spacer = table.tbody('spacer') if support_expand and next_old_offset < first_line.old_offset and next_new_offset < first_line.new_offset: row = spacer.tr('expand').td(colspan='8') expandHTML(db, file, next_old_offset, next_new_offset, first_line.old_offset - next_old_offset, row) code_context = code_contexts.find(first_line.new_offset) if code_context: spacer.tr('context').td(colspan='8').text(code_context) spacer.tr('spacer').td(colspan='8').text() lines = table.tbody('lines') local_display_type = display_type for line in macro_chunk.lines: if line.type != diff.Line.INSERTED: match = re_tailws.match(line.old_value) if match: line.old_value = match.group(1) + "" + match.group(2) + "" + match.group(3) if line.type != diff.Line.DELETED: match = re_tailws.match(line.new_value) if match: line.new_value = match.group(1) + "" + match.group(2) + "" + match.group(3) if line.old_value: line.old_value = line.old_value.replace("\r", "") if line.old_offset == 1: line.old_value = line.old_value.replace( "", "", 1) if line.new_value: line.new_value = line.new_value.replace("\r", "") if line.new_offset == 1: line.new_value = line.new_value.replace( "", "", 1) if collapse_simple_hunks: if local_display_type == "both": deleted = False inserted = False for line in macro_chunk.lines: if line.type == diff.Line.MODIFIED or line.type == diff.Line.REPLACED: break elif line.type == diff.Line.DELETED: if inserted: break deleted = True elif line.type == diff.Line.INSERTED: if deleted: break inserted = True else: if 
deleted: local_display_type = "old" if inserted: local_display_type = "new" if compact: def packSyntaxHighlighting(line): return re_tag.sub(lambda m: "<%s%s>" % (m.group(1), m.group(2)), line) items = [] for line in macro_chunk.lines: if line.type == diff.Line.MODIFIED and line.is_whitespace: line_type = diff.Line.WHITESPACE elif conflicts and line.type == diff.Line.DELETED and line.isConflictMarker(): line_type = diff.Line.CONFLICT else: line_type = line.type data = [str(line_type)] if line.type != diff.Line.INSERTED: data.append(jsify(packSyntaxHighlighting(tabify(line.old_value)), as_json=True)) if line.type != diff.Line.DELETED: data.append(jsify(packSyntaxHighlighting(tabify(line.new_value)), as_json=True)) items.append("[%s]" % ",".join(data)) data = "[%d,%d,%d,%d,%s]" % (file.id, 2 if local_display_type == "both" else 1, macro_chunk.lines[0].old_offset, macro_chunk.lines[0].new_offset, "[%s]" % ",".join(items)) lines.comment(data.replace("--", "-\u002d")) elif style == "vertical" or local_display_type != "both": linesIterator = iter(macro_chunk.lines) line = linesIterator.next() def lineHTML(what, file, line, is_whitespace, target): line_class = what if is_whitespace and line.type == diff.Line.MODIFIED: line_class = "modified" if what == "deleted": linenr = line.old_offset else: linenr = line.new_offset row = target.tr("line single " + line_class, id=lineId(file, line, 0)) row.td("edge").text() row.td("linenr old").text(linenr) if what == "deleted" or local_display_type == "old": code = line.old_value lineClass = "old" else: code = line.new_value lineClass = "new" if not code: code = " " row.td('line single ' + lineClass, colspan=4, id=lineCellId(file, lineClass[0], linenr)).innerHTML(tabify(code)) row.td('linenr new').text(linenr) row.td("edge").text() try: while line: while line.type == diff.Line.CONTEXT: lineHTML("context", file, line, False, lines) line = linesIterator.next() deleted = [] inserted = [] while line.is_whitespace: lineHTML("modified", file, 
line, True, lines) line = linesIterator.next() previous_type = diff.Line.DELETED try: while line.type >= previous_type and not line.is_whitespace: if line.type != diff.Line.INSERTED: deleted.append(line) if line.type != diff.Line.DELETED: inserted.append(line) previous_type = line.type line = None line = linesIterator.next() except StopIteration: line = None for deletedLine in deleted: lineHTML("deleted", file, deletedLine, False, lines) for insertedLine in inserted: lineHTML("inserted", file, insertedLine, False, lines) except StopIteration: pass elif style == "horizontal": for line in macro_chunk.lines: old_offset = None new_offset = None old_line = None new_line = None if line.type != diff.Line.INSERTED: old_offset = line.old_offset old_line = tabify(line.old_value) if line.type != diff.Line.DELETED: new_offset = line.new_offset new_line = tabify(line.new_value) if not old_line: old_line = " " if old_line is None: old_offset = None if not new_line: new_line = " " if new_line is None: new_offset = None line_type = lineType(line, 0) if conflicts and line.isConflictMarker(): line_type += " conflict" row = ("" " " "%s" "%s" " " "%s" "%s" " " "\n") % (line_type, lineId(file, line, 0), str(old_offset) if old_offset else " ", " id='%s'" % lineCellId(file, "o", old_offset) if old_offset else "", old_line, " id='%s'" % lineCellId(file, "n", new_offset) if new_offset else "", new_line, str(new_offset) if new_offset else " ") lines.innerHTML(row) next_old_offset = last_line.old_offset + 1 next_new_offset = last_line.new_offset + 1 spacer = table.tbody('spacer') if support_expand and next_old_offset < file.oldCount() + 1 and next_new_offset < file.newCount() + 1: row = spacer.tr('expand').td(colspan='8') expandHTML(db, file, next_old_offset, next_new_offset, 1 + file.oldCount() - next_old_offset, row) spacer.tr('spacer').td(colspan='8').text() content_after = options.get("content_after") if content_after: content = table.tbody('content') row = content.tr('content') 
row.td(colspan=2).text() content_after(db, row.td(colspan=4), file=file) row.td(colspan=2).text() content.tr('spacer').td(colspan=8).text() row = table.tfoot().tr() cell = row.td('left', colspan=4) commit = options.get("commit") if commit: cell.a("showtree root", href=make_url("showtree", "/")).text("root") cell.span("slash").text('/') components = file.path.split("/") for index, component in enumerate(components[:-1]): cell.a("showtree", href=make_url("showtree", "/".join(components[:index + 1]))).text(component, escape=True) cell.span("slash").text('/') if not file.wasRemoved(): cell.a("showtree", href=make_url("showfile", "/".join(components))).text(components[-1], escape=True) else: cell.text(components[-1], escape=True) else: cell.text(file.path) row.td('right', colspan=4).text(chunksText) def addResources(db, user, repository, review, compact, tabify, target): target.addExternalStylesheet("resource/changeset.css") target.addExternalScript("resource/changeset.js") target.addInternalStylesheet(stripStylesheet(user.getResource(db, "syntax.css")[1], compact)) target.addInternalStylesheet(stripStylesheet(user.getResource(db, "diff.css")[1], compact)) if user.getPreference(db, "commit.diff.highlightIllegalWhitespace"): target.addInternalStylesheet(stripStylesheet(user.getResource(db, "whitespace.css")[1], compact)) ruler_column = user.getPreference(db, "commit.diff.rulerColumn", repository=repository) if ruler_column > 0: target.addExternalScript("resource/ruler.js") # Injected unconditionally (for tests). 
target.addInternalScript("var rulerColumn = %d;" % ruler_column) if review: target.addExternalStylesheet("resource/comment.css") target.addExternalStylesheet("resource/review.css") target.addExternalScript("resource/comment.js") target.addExternalScript("resource/review.js") if tabify: target.addExternalScript("resource/tabify.js") ================================================ FILE: src/changeset/load.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import diff import dbutils import gitutils def loadChangeset(db, repository, changeset_id, filtered_file_ids=None, load_chunks=True): return loadChangesets(db, repository, changesets=[diff.Changeset.fromId(db, repository, changeset_id)], filtered_file_ids=filtered_file_ids, load_chunks=load_chunks)[0] def loadChangesetsForCommits(db, repository, commits, filtered_file_ids=None, load_chunks=True): commit_ids = dict([(commit.getId(db), commit) for commit in commits]) def getCommit(commit_id): return commit_ids.get(commit_id) or gitutils.Commit.fromId(db, repository, commit_id) cursor = db.cursor() cursor.execute("SELECT id, parent, child FROM changesets WHERE child=ANY (%s) AND type='direct'", (commit_ids.keys(),)) changesets = [] for changeset_id, parent_id, child_id in cursor: changesets.append(diff.Changeset(changeset_id, getCommit(parent_id), getCommit(child_id), "direct")) return loadChangesets(db, repository, changesets, filtered_file_ids=filtered_file_ids, load_chunks=load_chunks) def loadChangesets(db, repository, changesets, filtered_file_ids=None, load_chunks=True): cursor = db.cursor() changeset_ids = [changeset.id for changeset in changesets] filtered_file_ids = list(filtered_file_ids) if filtered_file_ids else None if filtered_file_ids is None: cursor.execute("""SELECT changeset, file, path, old_sha1, new_sha1, old_mode, new_mode FROM fileversions JOIN files ON (files.id=fileversions.file) WHERE changeset=ANY (%s)""", (changeset_ids,)) else: cursor.execute("""SELECT changeset, file, path, old_sha1, new_sha1, old_mode, new_mode FROM fileversions JOIN files ON (files.id=fileversions.file) WHERE changeset=ANY (%s) AND file=ANY (%s)""", (changeset_ids, filtered_file_ids)) files = dict([(changeset.id, {}) for changeset in changesets]) for changeset_id, file_id, file_path, file_old_sha1, file_new_sha1, file_old_mode, file_new_mode in cursor.fetchall(): files[changeset_id][file_id] = diff.File(file_id, file_path, file_old_sha1, file_new_sha1, repository, 
old_mode=file_old_mode, new_mode=file_new_mode, chunks=[]) if load_chunks: if filtered_file_ids is None: cursor.execute("""SELECT id, changeset, file, deleteOffset, deleteCount, insertOffset, insertCount, analysis, whitespace FROM chunks WHERE changeset=ANY (%s) ORDER BY file, deleteOffset ASC""", (changeset_ids,)) else: cursor.execute("""SELECT id, changeset, file, deleteOffset, deleteCount, insertOffset, insertCount, analysis, whitespace FROM chunks WHERE changeset=ANY (%s) AND file=ANY (%s) ORDER BY file, deleteOffset ASC""", (changeset_ids, filtered_file_ids)) for chunk_id, changeset_id, file_id, delete_offset, delete_count, insert_offset, insert_count, analysis, is_whitespace in cursor: files[changeset_id][file_id].chunks.append(diff.Chunk(delete_offset, delete_count, insert_offset, insert_count, id=chunk_id, is_whitespace=is_whitespace, analysis=analysis)) for changeset in changesets: changeset.files = diff.File.sorted(files[changeset.id].values()) return changesets ================================================ FILE: src/changeset/process.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import gitutils import stat def joinPaths(dirname, basename): return "%s/%s" % (dirname, basename) if dirname else basename class ChangedPath: def __init__(self, path, oldEntry, newEntry): self.path = path self.oldEntry = oldEntry self.newEntry = newEntry def removedTree(repository, path, sha1): changedPaths = [] for entry in gitutils.Tree.fromSHA1(repository, sha1): changedPaths.extend( removedEntry(repository, path, entry)) return changedPaths def removedEntry(repository, path, entry): path = joinPaths(path, entry.name) changedPaths = [ChangedPath(path, entry, None)] if stat.S_ISDIR(entry.mode): changedPaths.extend( removedTree(repository, path, entry.sha1)) return changedPaths def addedTree(repository, path, sha1): changedPaths = [] for entry in gitutils.Tree.fromSHA1(repository, sha1): changedPaths.extend( addedEntry(repository, path, entry)) return changedPaths def addedEntry(repository, path, entry): path = joinPaths(path, entry.name) changedPaths = [ChangedPath(path, None, entry)] if stat.S_ISDIR(entry.mode): changedPaths.extend( addedTree(repository, path, entry.sha1)) return changedPaths def diffTrees(repository, path, oldTree, newTree): oldNames = set(oldTree.keys()) newNames = set(newTree.keys()) commonNames = oldNames & newNames removedNames = oldNames - commonNames addedNames = newNames - commonNames changedPaths = [] for name in removedNames: changedPaths.extend( removedEntry(repository, joinPaths(path, name), oldTree[name])) for name in addedNames: changedPaths.extend( addedEntry(repository, joinPaths(path, name), newTree[name])) for name in commonNames: oldEntry = oldTree[name] newEntry = newTree[name] if oldEntry.sha1 != newEntry.sha1 or oldEntry.mode != newEntry.mode: changedPath = joinPaths(path, name) changedPaths.append(ChangedPath(changedPath, oldEntry, newEntry)) commonMode = oldEntry.mode & newEntry.mode removedMode = oldEntry.mode - commonMode addedMode = newEntry.mode - commonMode if stat.S_ISDIR(removedMode): changedPaths.extend( 
removedTree(repository, changedPath, oldEntry.sha1)) elif stat.S_ISDIR(addedMode): changedPaths.extend( addedTree(repository, changedPath, newEntry.sha1)) elif stat.S_ISDIR(commonMode) and oldEntry.sha1 != newEntry.sha1: changedPaths.extend( diffTrees(repository, changedPath, gitutils.Tree.fromSHA1(repository, oldEntry.sha1), gitutils.Tree.fromSHA1(repository, newEntry.sha1))) return changedPaths def diffCommits(repository, commitA, commitB): return diffTrees(repository, None, gitutils.Tree.fromSHA1(repository, commitA.tree), gitutils.Tree.fromSHA1(repository, commitB.tree)) ================================================ FILE: src/changeset/text.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import diff
import diff.context

from changeset.utils import getCodeContext

def unified(db, changeset, context_lines=3):
    """Render `changeset` as unified diff text ("--- a/…", "+++ b/…", "@@" hunk
    headers).  `context_lines` is the number of context lines per hunk; `db`
    may be None, in which case no code context is added to hunk headers."""
    result = ""
    for file in changeset.files:
        file.loadOldLines()
        file.loadNewLines()
        try:
            lines = diff.context.ContextLines(file, file.chunks)
            file.macro_chunks = lines.getMacroChunks(context_lines, highlight=False)
            # Added/removed files use "dev/null" as the missing side's path.
            oldPath = file.path if not file.wasAdded() else "dev/null"
            newPath = file.path if not file.wasRemoved() else "dev/null"
            result += "--- a/%s\n+++ b/%s\n" % (oldPath, newPath)
            if file.isBinaryChanges():
                result += " Binary file.\n"
                continue
            for chunk in file.macro_chunks:
                # Hunk extents: count old-side lines (everything except pure
                # inserts) and new-side lines (everything except pure deletes).
                deleteOffset = chunk.lines[0].old_offset
                deleteCount = len(filter(lambda line: line.type != diff.Line.INSERTED, chunk.lines))
                insertOffset = chunk.lines[0].new_offset
                insertCount = len(filter(lambda line: line.type != diff.Line.DELETED, chunk.lines))
                chunkHeader = "@@ -%d,%d +%d,%d @@" % (deleteOffset, deleteCount, insertOffset, insertCount)
                if db:
                    codeContext = getCodeContext(db, file.new_sha1, insertOffset, minimized=True)
                else:
                    codeContext = None
                if codeContext:
                    # Truncate so header plus context stays within 80 columns.
                    chunkHeader += " %s" % codeContext[:80 - len(chunkHeader)]
                result += chunkHeader + "\n"
                lines = iter(chunk.lines)
                line = lines.next()
                try:
                    while line:
                        # Emit runs of context lines verbatim.
                        while line.type == diff.Line.CONTEXT:
                            result += " %s\n" % line.new_value
                            line = lines.next()
                        # Buffer a changed run so all "-" lines precede all
                        # "+" lines, as unified diff format requires.
                        deleted = []
                        inserted = []
                        try:
                            while line.type != diff.Line.CONTEXT:
                                if line.type != diff.Line.INSERTED:
                                    deleted.append(line)
                                if line.type != diff.Line.DELETED:
                                    inserted.append(line)
                                line = lines.next()
                        except StopIteration:
                            # Chunk ended inside a changed run; flush below.
                            line = None
                        for deletedLine in deleted:
                            result += "- %s\n" % deletedLine.old_value
                        for insertedLine in inserted:
                            result += "+ %s\n" % insertedLine.new_value
                except StopIteration:
                    pass
        finally:
            # Always release the loaded line data, even on error.
            file.cleanLines()
    return result



================================================
FILE: src/changeset/utils.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

from subprocess import Popen as process, PIPE
from sys import argv, stderr, exit
import re
from dbutils import find_file, describe_file
import gitutils
import syntaxhighlight
import syntaxhighlight.request
from diffutils import expandWithContext
from htmlutils import htmlify, jsify
from time import strftime
import diff
import diff.analyze
import diff.parse
import diff.merge
import load
import dbutils
import client

def createFullMergeChangeset(db, user, repository, commit, **kwargs):
    """Create all per-parent changesets for a merge commit plus the replayed
    (conflicts) changeset, and return them as a single list."""
    assert len(commit.parents) > 1
    changesets = createChangeset(db, user, repository, commit, **kwargs)
    assert len(changesets) == len(commit.parents)
    replayed = createChangeset(db, user, repository, commit, conflicts=True, **kwargs)
    assert len(replayed) == 1
    changesets.append(replayed[0])
    return changesets

def createChangesets(db, repository, commits):
    """Request background computation of changesets for any of `commits` that
    do not already have one recorded in the database."""
    cursor = db.cursor()
    requests = []
    for commit in commits:
        if len(commit.parents) > 1:
            changeset_type = 'merge'
        else:
            changeset_type = 'direct'
        cursor.execute("SELECT 1 FROM changesets WHERE child=%s AND type=%s",
                       (commit.getId(db), changeset_type))
        if not cursor.fetchone():
            requests.append({ "repository_name": repository.name,
                              "changeset_type": changeset_type,
                              "child_sha1": commit.sha1 })
    if requests:
        client.requestChangesets(requests)

def createChangeset(db, user, repository, commit=None, from_commit=None,
                    to_commit=None, rescan=False, reanalyze=False,
                    conflicts=False, filtered_file_ids=None, review=None,
                    do_highlight=True, load_chunks=True):
    """Return the changeset(s) for `commit` (one per parent for merges) or for
    the range `from_commit`..`to_commit`, computing and caching them in the
    database via the changeset service when missing.

    With conflicts=True a merge is compared against its replayed merge.  For a
    filtered range crossing a merge, an in-memory "thin" diff is produced
    instead of a cached one.  Returns a list of changesets, or None when a
    merge replay fails."""
    cursor = db.cursor()

    if conflicts:
        if commit:
            assert len(commit.parents) > 1
            # Reuse a cached merge replay if one exists, else create and
            # record one.
            cursor.execute("SELECT replay FROM mergereplays WHERE original=%s",
                           (commit.getId(db),))
            row = cursor.fetchone()
            if row:
                replay = gitutils.Commit.fromId(db, repository, row[0])
            else:
                replay = repository.replaymerge(db, user, commit)
                if not replay:
                    return None
                cursor.execute("INSERT INTO mergereplays (original, replay) VALUES (%s, %s)",
                               (commit.getId(db), replay.getId(db)))
            from_commit = replay
            to_commit = commit
            parents = [replay]
        else:
            parents = [from_commit]
            commit = to_commit
        changeset_type = 'conflicts'
    elif commit:
        # Root commits have no parents; represent that as a single None.
        parents = [gitutils.Commit.fromSHA1(db, repository, sha1)
                   for sha1 in commit.parents] or [None]
        changeset_type = 'merge' if len(parents) > 1 else 'direct'
    else:
        parents = [from_commit]
        commit = to_commit
        changeset_type = 'direct' if len(to_commit.parents) == 1 and from_commit == to_commit.parents[0] else 'custom'

    changesets = []
    thin_diff = False

    # Look for already-computed changesets, one per parent.
    changeset_ids = []
    for parent in parents:
        if parent:
            cursor.execute("SELECT id FROM changesets WHERE parent=%s AND child=%s AND type=%s",
                           (parent.getId(db), commit.getId(db), changeset_type))
        else:
            cursor.execute("SELECT id FROM changesets WHERE parent IS NULL AND child=%s AND type=%s",
                           (commit.getId(db), changeset_type))
        row = cursor.fetchone()
        if row:
            changeset_ids.append(row[0])
        else:
            break

    # Either all per-parent changesets exist or none are usable.
    assert len(changeset_ids) in (0, len(parents))

    if changeset_ids:
        if rescan and user.hasRole(db, "developer"):
            # Developer-requested rescan: drop the cached changesets so they
            # are recomputed below.
            cursor.executemany("DELETE FROM changesets WHERE id=%s",
                               [(changeset_id,) for changeset_id in changeset_ids])
            db.commit()
            changeset_ids = []
        else:
            for changeset_id in changeset_ids:
                if changeset_type == 'custom':
                    # Touch custom changesets so cache eviction sees them used.
                    cursor.execute("UPDATE customchangesets SET time=NOW() WHERE changeset=%s",
                                   (changeset_id,))
                changeset = load.loadChangeset(db, repository, changeset_id,
                                               filtered_file_ids=filtered_file_ids,
                                               load_chunks=load_chunks)
                changeset.conflicts = conflicts
                if reanalyze and user.hasRole(db, "developer"):
                    # Re-run chunk analysis; persist only if it changed and
                    # reanalyze == "commit".
                    analysis_values = []
                    for file in changeset.files:
                        if not filtered_file_ids or file.id in filtered_file_ids:
                            for index, chunk in enumerate(file.chunks):
                                old_analysis = chunk.analysis
                                chunk.analyze(file, index == len(file.chunks) - 1, True)
                                if old_analysis != chunk.analysis:
                                    analysis_values.append((chunk.analysis, chunk.id))
                    if reanalyze == "commit" and analysis_values:
                        cursor.executemany("UPDATE chunks SET analysis=%s WHERE id=%s",
                                           analysis_values)
                changesets.append(changeset)

    if not changesets:
        if len(parents) == 1 and from_commit and to_commit and filtered_file_ids:
            # A filtered range that crosses a merge (or is not a fast-forward
            # ancestry) cannot use the cached machinery; fall back to an
            # in-memory "thin" diff.
            if from_commit.isAncestorOf(to_commit):
                iter_commit = to_commit
                while iter_commit != from_commit:
                    if len(iter_commit.parents) > 1:
                        thin_diff = True
                        break
                    iter_commit = gitutils.Commit.fromSHA1(db, repository, iter_commit.parents[0])
            else:
                thin_diff = True

        if not thin_diff:
            # Ask the changeset service to compute and store the changeset,
            # then re-query and load it.
            if changeset_type == "direct":
                request = { "changeset_type": "direct",
                            "child_sha1": commit.sha1 }
            elif changeset_type == "custom":
                request = { "changeset_type": "custom",
                            "parent_sha1": from_commit.sha1 if from_commit else "0" * 40,
                            "child_sha1": to_commit.sha1 }
            elif changeset_type == "merge":
                request = { "changeset_type": "merge",
                            "child_sha1": commit.sha1 }
            else:
                request = { "changeset_type": "conflicts",
                            "parent_sha1": from_commit.sha1,
                            "child_sha1": to_commit.sha1 }

            request["repository_name"] = repository.name

            db.commit()
            client.requestChangesets([request])
            db.commit()

            for parent in parents:
                if parent:
                    cursor.execute("SELECT id FROM changesets WHERE parent=%s AND child=%s AND type=%s",
                                   (parent.getId(db), commit.getId(db), changeset_type))
                else:
                    cursor.execute("SELECT id FROM changesets WHERE parent IS NULL AND child=%s AND type=%s",
                                   (commit.getId(db), changeset_type))
                changeset_id = cursor.fetchone()[0]
                changeset = load.loadChangeset(db, repository, changeset_id,
                                               filtered_file_ids=filtered_file_ids,
                                               load_chunks=load_chunks)
                changeset.conflicts = conflicts
                changesets.append(changeset)
        else:
            # Thin diff: parse differences directly from git for just the
            # filtered paths; nothing is stored in the database (id=None).
            changes = diff.parse.parseDifferences(
                repository, from_commit=from_commit, to_commit=to_commit,
                filter_paths=[describe_file(db, file_id)
                              for file_id in filtered_file_ids])[from_commit.sha1]
            dbutils.find_files(db, changes)
            for file in changes:
                for index, chunk in enumerate(file.chunks):
                    chunk.analyze(file, index == len(file.chunks) - 1)
            changeset = diff.Changeset(None, from_commit, to_commit, changeset_type)
            changeset.conflicts = conflicts
            changeset.files = diff.File.sorted(changes)
            changesets.append(changeset)

    if do_highlight:
        # Queue syntax highlighting for every real (non-null) blob involved.
        highlights = {}
        for changeset in changesets:
            for file in changeset.files:
                if file.canHighlight():
                    if file.old_sha1 and file.old_sha1 != '0' * 40:
                        highlights[file.old_sha1] = (file.path, file.getLanguage())
                    if file.new_sha1 and file.new_sha1 != '0' * 40:
                        highlights[file.new_sha1] = (file.path, file.getLanguage())
        syntaxhighlight.request.requestHighlights(repository, highlights, "legacy")

    return changesets

def getCodeContext(db, sha1, line, minimized=False):
    """Return the innermost recorded code context (e.g. enclosing function)
    covering `line` of blob `sha1`, or None.  With minimized=True, argument
    lists in the context string are collapsed to "(...)"."""
    cursor = db.cursor()
    cursor.execute("SELECT context FROM codecontexts WHERE sha1=%s AND first_line<=%s AND last_line>=%s ORDER BY first_line DESC LIMIT 1",
                   [sha1, line, line])
    row = cursor.fetchone()
    if row:
        context = row[0]
        if minimized:
            context = re.sub("\\(.*(?:\\)|...$)", "(...)", context)
        return context
    else:
        return None



================================================
FILE: src/cli.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import argparse
import sys
import traceback

import dbutils
import gitutils
import mailutils
import reviewing.utils
import reviewing.filters
import reviewing.mail
import reviewing.comment
import reviewing.comment.propagate

from textutils import json_encode, json_decode

# Module-level database connection, managed by init()/finish()/abort().
db = None

def init(user_id, authentication_labels):
    """Open the database connection and bind it to the given user (anonymous
    when `user_id` is None)."""
    global db

    db = dbutils.Database.forUser()

    if user_id is None:
        user = dbutils.User.makeAnonymous()
    else:
        user = dbutils.User.fromId(db, user_id)

    db.setUser(user, authentication_labels)

def finish():
    """Commit and close the database connection (successful shutdown)."""
    global db

    if db:
        db.commit()
        db.close()
        db = None

def abort():
    """Roll back and close the database connection (error shutdown).
    Safe to call after finish(): it does nothing when db is already None."""
    global db

    if db:
        db.rollback()
        db.close()
        db = None

def sendCustomMail(from_user, recipients, subject, headers, body, review):
    """Queue a custom mail to each recipient (defaulting to the review's
    recipients), skipping retired users and users with mail deactivated.
    Returns the list of queued mail filenames."""
    assert recipients is not None or review is not None

    if review:
        if recipients is None:
            recipients = review.getRecipients(db)

    files = []

    for to_user in recipients:
        if not to_user.getPreference(db, "email.activated") \
                or to_user.status == "retired":
            continue

        if review:
            # Thread the mail under the per-user review message.
            parent_message_id = reviewing.mail.getReviewMessageId(
                db, to_user, review, files)

        message_id = mailutils.generateMessageId(len(files) + 1)

        if review:
            filename = reviewing.mail.sendMail(
                db, review, message_id, from_user, to_user, recipients,
                subject, body, parent_message_id, headers)
        else:
            filename = mailutils.queueMail(
                from_user, to_user, recipients, subject, body,
                message_id=message_id, headers=headers)

        files.append(filename)

    return files

def propagateComment(data):
    """Calculate comment chain propagation for the location described by
    `data`.  Returns a result dict on success, or an error string."""
    try:
        review = dbutils.Review.fromId(db, data["review_id"])
        commit = gitutils.Commit.fromId(db, review.repository, data["commit_id"])
        propagation = reviewing.comment.propagate.Propagation(db)

        if "chain_id" in data:
            # Re-propagate an existing (addressed) chain forward.
            chain = reviewing.comment.CommentChain.fromId(
                db, data["chain_id"], user=None, review=review)

            if chain is None:
                return "invalid chain id"
            if commit != chain.addressed_by:
                return "wrong commit: must be current addressed_by"

            propagation.setExisting(
                review, chain.id, commit, data["file_id"], data["first_line"],
                data["last_line"], True)

            commits = review.getCommitSet(db).without(commit.parents)

            propagation.calculateAdditionalLines(
                commits, review.branch.getHead(db))
        else:
            # New comment location.
            if not propagation.setCustom(
                    review, commit, data["file_id"], data["first_line"],
                    data["last_line"]):
                return "invalid location"

            propagation.calculateInitialLines()

        data = { "status": "clean" if propagation.active else "modified",
                 "lines": [[sha1, first_line, last_line]
                           for sha1, (first_line, last_line)
                           in propagation.new_lines.items()] }

        if not propagation.active:
            data["addressed_by"] = propagation.addressed_by[0].child.getId(db)

        return data
    except dbutils.NoSuchReview:
        return "invalid review id"
    except gitutils.GitReferenceError:
        return "invalid commit id"
    except Exception as exception:
        return str(exception)

def checkRepositoryAccess(data):
    """Report whether the current user may read and/or modify the repository
    identified by data["repository_id"]."""
    import auth

    read = modify = False

    # Access is probed by attempting the lookup; AccessDenied means "no".
    try:
        gitutils.Repository.fromId(db, data["repository_id"], for_modify=False)
    except auth.AccessDenied:
        pass
    else:
        read = True

    try:
        gitutils.Repository.fromId(db, data["repository_id"], for_modify=True)
    except auth.AccessDenied:
        pass
    else:
        modify = True

    return { "read": read,
             "modify": modify, }

# Commands handled generically: one JSON line in, one JSON line out.
HANDLERS = { "propagate-comment": propagateComment,
             "check-repository-access": checkRepositoryAccess }

def main():
    """Command-line entry point: executes each named command, reading one
    JSON-encoded request line from stdin and writing one JSON-encoded
    response line to stdout per command."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-u", dest="user_id", type=int)
    parser.add_argument("-l", dest="auth_labels", action="append", default=[])
    parser.add_argument("command", nargs="*")

    arguments = parser.parse_args()

    try:
        init(arguments.user_id, arguments.auth_labels)

        for command in arguments.command:
            pending_mails = None

            if command == "generate-mails-for-batch":
                data = json_decode(sys.stdin.readline())
                batch_id = data["batch_id"]
                was_accepted = data["was_accepted"]
                is_accepted = data["is_accepted"]
                pending_mails = reviewing.utils.generateMailsForBatch(
                    db, batch_id, was_accepted, is_accepted)
            elif command == "generate-mails-for-assignments-transaction":
                data = json_decode(sys.stdin.readline())
                transaction_id = data["transaction_id"]
                pending_mails = reviewing.utils.generateMailsForAssignmentsTransaction(
                    db, transaction_id)
            elif command == "apply-filters":
                data = json_decode(sys.stdin.readline())
                filters = reviewing.filters.Filters()
                user = dbutils.User.fromId(db, data["user_id"]) if "user_id" in data else None
                if "review_id" in data:
                    review = dbutils.Review.fromId(db, data["review_id"])
                    filters.setFiles(db, review=review)
                    filters.load(db, review=review, user=user,
                                 added_review_filters=data.get("added_review_filters", []),
                                 removed_review_filters=data.get("removed_review_filters", []))
                else:
                    repository = gitutils.Repository.fromId(db, data["repository_id"])
                    filters.setFiles(db, file_ids=data["file_ids"])
                    filters.load(db, repository=repository,
                                 recursive=data.get("recursive", False),
                                 user=user)
                sys.stdout.write(json_encode(filters.data) + "\n")
            elif command == "generate-custom-mails":
                pending_mails = []
                for data in json_decode(sys.stdin.readline()):
                    from_user = dbutils.User.fromId(db, data["sender"])
                    if data.get("recipients"):
                        recipients = [dbutils.User.fromId(db, user_id)
                                      for user_id in data["recipients"]]
                    else:
                        recipients = None
                    subject = data["subject"]
                    headers = data.get("headers")
                    body = data["body"]
                    if "review_id" in data:
                        review = dbutils.Review.fromId(db, data["review_id"])
                    else:
                        review = None
                    pending_mails.extend(sendCustomMail(
                        from_user, recipients, subject, headers, body, review))
            elif command == "set-review-state":
                data = json_decode(sys.stdin.readline())
                # An empty error string signals success to the caller.
                error = ""
                try:
                    user = dbutils.User.fromId(db, data["user_id"])
                    review = dbutils.Review.fromId(db, data["review_id"])
                    if review.state != data["old_state"]:
                        error = "invalid old state"
                    elif data["new_state"] == "open":
                        review.reopen(db, user)
                    elif data["new_state"] == "closed":
                        review.close(db, user)
                    elif data["new_state"] == "dropped":
                        review.drop(db, user)
                    else:
                        error = "invalid new state"
                except dbutils.NoSuchUser:
                    error = "invalid user id"
                except dbutils.NoSuchReview:
                    error = "invalid review id"
                except Exception as error:
                    # NOTE: Python 2 only — rebinding the exception variable;
                    # this would raise NameError after the block in Python 3.
                    error = str(error)
                sys.stdout.write(error + "\n")
            elif command in HANDLERS:
                data_in = json_decode(sys.stdin.readline())
                data_out = HANDLERS[command](data_in)
                sys.stdout.write(json_encode(data_out) + "\n")
            else:
                sys.stdout.write(json_encode("unknown command: %s" % command) + "\n")
                sys.exit(0)

            if pending_mails is not None:
                sys.stdout.write(json_encode(pending_mails) + "\n")

        finish()
    except Exception:
        # Report the traceback to the caller as a JSON string.
        sys.stdout.write(json_encode(traceback.format_exc()) + "\n")
    finally:
        abort()

if __name__ == "__main__":
    main()



================================================
FILE: src/communicate.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import os
import fcntl
import select
import cStringIO  # Python 2 only; predates io.BytesIO/io.StringIO.
import time
import errno

class ProcessTimeout(Exception):
    """Raised by Communicate.run() when the deadline set via setTimeout()
    expires before communication with the child process has finished."""

    def __init__(self, timeout):
        super(ProcessTimeout, self).__init__(
            "Process timed out (after %d seconds)" % timeout)
        # The timeout in seconds originally given to setTimeout().
        self.timeout = timeout

class ProcessError(Exception):
    """Raised by Communicate.run() when the child process exits with a
    non-zero status.  Exposes the exit code and the captured stderr."""

    def __init__(self, process, stderr):
        super(ProcessError, self).__init__(
            "Process returned non-zero exit status %d" % process.returncode)
        self.returncode = process.returncode
        self.stderr = stderr

def setnonblocking(fd):
    # Put the descriptor into non-blocking mode so reads/writes in the
    # poll loop below raise EAGAIN instead of blocking.
    fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)

class Communicate(object):
    """Poll-based replacement for subprocess.Popen.communicate().

    Feeds data (or a callable producing data) to the child's stdin while
    collecting stdout/stderr, optionally via per-chunk or per-line
    callbacks, and optionally enforcing an overall timeout.  run() either
    returns (stdout, stderr), raises ProcessError on non-zero exit, or
    raises ProcessTimeout when the deadline expires."""

    def __init__(self, process):
        # |process|: a subprocess.Popen-like object with stdin/stdout/
        # stderr pipe attributes, wait(), kill() and returncode.
        self.process = process
        self.timeout = None
        self.deadline = None
        # Either a string, or a callable returning successive chunks and
        # finally None to signal end of input (see run()).
        self.stdin_data = None
        # Each is a [data_callback, line_callback] pair; at most one of
        # the two is set per stream (enforced in setCallbacks()).
        self.stdout_callbacks = [None, None]
        self.stderr_callbacks = [None, None]
        self.returncode = None

    def setTimeout(self, timeout):
        """Set an overall deadline, |timeout| seconds from now."""
        self.timeout = timeout
        self.deadline = time.time() + timeout

    def setInput(self, data):
        """Set the data to write to the child's stdin.  May be a string
        or a callable returning chunks (None meaning 'no more input')."""
        self.stdin_data = data

    def setCallbacks(self, stdout=None, stdout_line=None, stderr=None, stderr_line=None):
        """Install per-chunk (stdout/stderr) or per-line
        (stdout_line/stderr_line) output callbacks.  For each stream only
        one of the two forms may be given."""
        assert stdout is None or stdout_line is None
        assert stderr is None or stderr_line is None
        self.stdout_callbacks[:] = stdout, stdout_line
        self.stderr_callbacks[:] = stderr, stderr_line

    def __read(self, source, target, callbacks):
        # Drain |source| (a non-blocking pipe) until EOF or EAGAIN.
        # Returns True when the stream is exhausted (EOF seen), False when
        # it would block and should be polled again later.
        while True:
            cb_data, cb_line = callbacks
            try:
                if cb_line:
                    line = source.readline()
                    if not line:
                        return True
                    cb_line(line)
                else:
                    data = source.read()
                    if not data:
                        return True
                    if cb_data:
                        cb_data(data)
                    else:
                        # No callback: accumulate into the StringIO buffer
                        # returned from run().
                        target.write(data)
            except IOError as error:
                if error.errno == errno.EAGAIN:
                    return False
                raise

    def run(self):
        """Drive the child process to completion.

        Returns (stdout_data, stderr_data) on zero exit status (None for
        streams that were not pipes).  Raises ProcessError on non-zero
        exit and ProcessTimeout if the deadline passes first."""
        process = self.process
        poll = select.poll()
        if callable(self.stdin_data):
            # Chunks are pulled lazily from the callable; start with an
            # empty write buffer.
            stdin_data = ""
        else:
            stdin_data = self.stdin_data
            self.stdin_data = None
        stdin_done = False
        stdout = cStringIO.StringIO()
        stdout_done = False
        stderr = cStringIO.StringIO()
        stderr_done = False
        # Register only the pipes that actually exist; others are treated
        # as already finished.
        if process.stdin:
            setnonblocking(process.stdin)
            poll.register(process.stdin, select.POLLOUT)
        else:
            stdin_done = True
        if process.stdout:
            setnonblocking(process.stdout)
            poll.register(process.stdout, select.POLLIN)
        else:
            stdout_done = True
        if process.stderr:
            setnonblocking(process.stderr)
            poll.register(process.stderr, select.POLLIN)
        else:
            stderr_done = True
        while (not stdin_done or not stdout_done or not stderr_done) \
              and (self.deadline is None or time.time() < self.deadline):
            if self.deadline is None:
                timeout = None
            else:
                # poll() takes milliseconds; deadline is absolute seconds.
                timeout = 1000 * (self.deadline - time.time())
            while True:
                try:
                    events = poll.poll(timeout)
                except select.error as (errnum, _):
                    # Retry when interrupted by a signal.
                    if errnum == errno.EINTR:
                        continue
                    raise
                else:
                    break
            for fd, event in events:
                if not stdin_done and fd == process.stdin.fileno():
                    if callable(self.stdin_data):
                        data = self.stdin_data()
                        if data is None:
                            # Producer exhausted; clearing the attribute
                            # is the "no more input" marker below.
                            self.stdin_data = None
                        else:
                            stdin_data += data
                    if stdin_data:
                        # Partial writes are expected on a non-blocking
                        # pipe; keep the unwritten tail for next round.
                        nwritten = os.write(process.stdin.fileno(), stdin_data)
                        stdin_data = stdin_data[nwritten:]
                    if not stdin_data and self.stdin_data is None:
                        process.stdin.close()
                        stdin_done = True
                        poll.unregister(fd)
                if not stdout_done and fd == process.stdout.fileno():
                    stdout_done = self.__read(process.stdout, stdout,
                                              self.stdout_callbacks)
                    if stdout_done:
                        poll.unregister(fd)
                if not stderr_done and fd == process.stderr.fileno():
                    stderr_done = self.__read(process.stderr, stderr,
                                              self.stderr_callbacks)
                    if stderr_done:
                        poll.unregister(fd)
            if stdin_done and stdout_done and stderr_done:
                process.wait()
                stdout_data = stdout.getvalue() if process.stdout else None
                stderr_data = stderr.getvalue() if process.stderr else None
                self.returncode = process.returncode
                if self.returncode == 0:
                    return stdout_data, stderr_data
                else:
                    raise ProcessError(process, stderr_data)
        # Deadline expired before all streams finished: kill the child and
        # report the timeout.
        process.kill()
        process.wait()
        raise ProcessTimeout(self.timeout)

================================================ FILE: src/coverage.py ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import sys
import os
import trace
import errno
import tempfile
import shutil
import re
import json

def call(context, fn, *args, **kwargs):
    """Invoke fn(*args, **kwargs), recording line coverage under
    configuration.debug.COVERAGE_DIR/<context>/ when coverage collection
    is enabled; otherwise just call the function directly.

    The trace counts are written to a "<tempdir>.counts" file next to a
    throw-away temporary directory; the directory itself (holding the
    .cover report files) is deleted again after the run."""
    import configuration
    if not configuration.debug.COVERAGE_DIR:
        return fn(*args, **kwargs)
    context_dir = os.path.join(configuration.debug.COVERAGE_DIR, context)
    try:
        os.makedirs(context_dir)
    except OSError as error:
        # EEXIST is fine: another call already created the directory.
        if error.errno != errno.EEXIST:
            raise
    output_dir = tempfile.mkdtemp(dir=context_dir)
    counts = output_dir + ".counts"
    tracer = trace.Trace(count=1, trace=0, outfile=counts)
    try:
        return tracer.runfunc(fn, *args, **kwargs)
    finally:
        # Flush the counts file even if fn() raised; the .cover files in
        # output_dir are only a side product and are discarded.
        results = tracer.results()
        results.write_results(show_missing=False, coverdir=output_dir)
        shutil.rmtree(output_dir)

if __name__ == "__main__":
    # Aggregation mode: merge all per-context .counts files written by
    # call() above and emit a JSON summary on stdout mapping
    # module filename => context => list of executed line indices.
    import argparse

    parser = argparse.ArgumentParser("Critic Code Coverage Collection")
    parser.add_argument("--coverage-dir")
    parser.add_argument("--critic-dir", action="append")

    arguments = parser.parse_args()

    # NOTE(review): ignore_dirs is computed but never used below.
    ignore_dirs = filter(None, sys.path)
    coverage_dir = arguments.coverage_dir

    # Make the Critic sources importable/resolvable when mapping .cover
    # files back to module paths.
    sys.path[:0] = arguments.critic_dir

    data = { "contexts": [] }

    for context in os.listdir(coverage_dir):
        context_dir = os.path.join(coverage_dir, context)
        if not os.path.isdir(context_dir):
            continue
        # NOTE(review): context_index is never used after this point.
        context_index = len(data["contexts"])
        data["contexts"].append(context)
        tracer = trace.Trace()
        results = tracer.results()
        # Merge (and consume) every counts file recorded for this context.
        for filename in os.listdir(context_dir):
            if filename.endswith(".counts"):
                counts = os.path.join(context_dir, filename)
                results.update(trace.Trace(infile=counts).results())
                os.unlink(counts)
        # Produce per-module .cover annotation files for the merged data.
        results.write_results(show_missing=False, coverdir=context_dir)
        for filename in os.listdir(context_dir):
            if filename.endswith(".cover"):
                # "a.b.c.cover" => "a/b/c.py" relative to the cwd.
                module_filename = filename[:-6].replace(".", "/") + ".py"
                if os.path.isfile(module_filename):
                    # NOTE(review): this dict is never used.
                    counts = {}
                    with open(os.path.join(context_dir, filename)) as coverage:
                        lines = coverage.read().splitlines()
                    executed = []
                    for index, line in enumerate(lines):
                        # Lines executed at least once are prefixed with a
                        # count like "   12:" in trace's .cover output.
                        match = re.match(" *\d+:", line)
                        if match:
                            executed.append(index)
                    # NOTE(review): module entries share the same dict as
                    # the "contexts" key, so the JSON schema is mixed.
                    data.setdefault(module_filename, {})[context] = executed

    json.dump(data, sys.stdout)
    print

================================================ FILE: src/critic.py ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import os import gitutils import time import re import itertools import traceback import cStringIO import wsgiref.util import calendar from htmlutils import htmlify, Document from profiling import formatDBProfiling from textutils import json_encode, reflow import request import dbutils import reviewing.filters as review_filters import log.commitset as log_commitset import diff import mailutils import configuration import auth import htmlutils import api import jsonapi import operation.createcomment import operation.createreview import operation.manipulatecomment import operation.manipulatereview import operation.manipulatefilters import operation.manipulateuser import operation.manipulateassignments import operation.fetchlines import operation.markfiles import operation.draftchanges import operation.blame import operation.trackedbranch import operation.rebasereview import operation.recipientfilter import operation.editresource import operation.autocompletedata import operation.servicemanager import operation.addrepository import operation.news import operation.checkrebase import operation.applyfilters import operation.savesettings import operation.searchreview import operation.registeruser import operation.brancharchiving import operation.miscellaneous import page.utils import page.createreview import page.branches import page.showcomment import page.showcommit import page.showreview import page.showreviewlog import page.showbatch import page.showbranch import page.showtree import page.showfile import page.config import page.dashboard import page.home import page.managereviewers import page.filterchanges import page.tutorial import page.news import page.editresource import page.statistics import page.confirmmerge import page.addrepository import page.checkbranch import page.search import page.repositories import page.services import page.rebasetrackingreview import page.createuser import page.verifyemail import page.manageextensions import page.showfilters try: 
    from customization.email import getUserEmailAddress
except ImportError:
    # No site-specific customization installed: fall back to "unknown".
    def getUserEmailAddress(_username):
        return None

if configuration.extensions.ENABLED:
    # Matches "extension-resource/<path>" where <path> has at least two
    # dot/dash/alnum components separated by slashes.
    RE_EXTENSION_RESOURCE = re.compile("^extension-resource/([a-z0-9][-._a-z0-9]+(?:/[a-z0-9][-._a-z0-9]+)+)$", re.IGNORECASE)

from operation import OperationResult, OperationError, OperationFailureMustLogin

def setContentTypeFromPath(req):
    """Set the response Content-Type from the request path's filename
    extension, defaulting to text/plain for unknown/missing extensions."""
    match = re.search("\\.([a-z]+)$", req.path)
    if match:
        req.setContentType(configuration.mimetypes.MIMETYPES.get(match.group(1), "text/plain"))
    else:
        req.setContentType("text/plain")

def handleStaticResource(req):
    """Serve a file from the installed "resources" directory.

    Handles If-Modified-Since/Last-Modified, and emits far-future cache
    headers when the URL's query string is the base36-encoded mtime
    (i.e. a fingerprinted URL).  Raises request.Forbidden/NotFound/
    NotModified as appropriate."""
    if req.path == "static-resource/":
        raise request.Forbidden("Directory listing disabled!")
    resources_path = os.path.join(
        configuration.paths.INSTALL_DIR, "resources")
    resource_path = os.path.abspath(os.path.join(
        resources_path, req.path.split("/", 1)[1]))
    # Path traversal guard: after normalization the target must still be
    # inside the resources directory.
    if not resource_path.startswith(resources_path + "/"):
        raise request.Forbidden()
    if not os.path.isfile(resource_path):
        raise request.NotFound()
    last_modified = htmlutils.mtime(resource_path)
    HTTP_DATE = "%a, %d %b %Y %H:%M:%S GMT"
    if_modified_since = req.getRequestHeader("If-Modified-Since")
    if if_modified_since:
        try:
            if_modified_since = time.strptime(if_modified_since, HTTP_DATE)
        except ValueError:
            # Malformed date from the client: ignore the header.
            pass
        else:
            if last_modified <= calendar.timegm(if_modified_since):
                raise request.NotModified()
    req.addResponseHeader("Last-Modified", time.strftime(HTTP_DATE, time.gmtime(last_modified)))
    if req.query and req.query == htmlutils.base36(last_modified):
        # Fingerprinted URL: safe to cache for 30 days (2592000 s).
        req.addResponseHeader("Expires", time.strftime(HTTP_DATE, time.gmtime(time.time() + 2592000)))
        req.addResponseHeader("Cache-Control", "max-age=2592000")
    setContentTypeFromPath(req)
    req.start()
    with open(resource_path, "r") as resource_file:
        return [resource_file.read()]

def handleDownload(db, req, user):
    """Serve the raw contents of a blob identified by the "sha1" and
    "repository" request parameters.  Raises page.utils.DisplayMessage
    (404) for unknown repositories, unknown objects, and non-blobs."""
    sha1 = req.getParameter("sha1")
    try:
        repository_arg = req.getParameter("repository")
        repository = gitutils.Repository.fromParameter(db, repository_arg)
    except gitutils.NoSuchRepository as error:
        raise page.utils.DisplayMessage(
            title="No such repository",
            body=error.message,
            status=404)
    try:
        git_object = repository.fetch(sha1)
    except gitutils.GitReferenceError as error:
        raise page.utils.DisplayMessage(
            title="File not found",
            body=error.message,
            status=404)
    if git_object.type != "blob":
        raise page.utils.DisplayMessage(
            title="File not found",
            body=("%s is a %s, not a blob"
                  % (git_object.sha1[:8], git_object.type)),
            status=404)
    setContentTypeFromPath(req)
    req.start()
    return [git_object.data]

def findreview(req, db):
    """Redirect to the review containing the commit named by the "sha1"
    parameter.  Never returns normally: raises request.MovedTemporarily
    on success and request.DisplayMessage on failure."""
    sha1 = req.getParameter("sha1")
    try:
        repository = gitutils.Repository.fromSHA1(db, sha1)
        commit = gitutils.Commit.fromSHA1(db, repository, repository.revparse(sha1))
    except gitutils.GitReferenceError as error:
        raise request.DisplayMessage(error.message)
    cursor = db.readonly_cursor()
    # First try: the commit is reachable from some review branch.
    cursor.execute("""SELECT reviews.id
                        FROM reviews
                        JOIN branches ON (branches.id=reviews.branch)
                        JOIN reachable ON (reachable.branch=branches.id)
                       WHERE reachable.commit=%s""",
                   (commit.getId(db),))
    row = cursor.fetchone()
    if row:
        review_id = row[0]
    else:
        # Second try: the commit is the child of a changeset attached to
        # a review (e.g. added via a rebase/replay).
        cursor.execute("""SELECT reviewchangesets.review
                            FROM reviewchangesets
                            JOIN changesets ON (changesets.id=reviewchangesets.changeset)
                           WHERE changesets.child=%s""",
                       (commit.getId(db),))
        row = cursor.fetchone()
        if row:
            review_id = row[0]
        else:
            raise request.DisplayMessage("No review found!")
    raise request.MovedTemporarily("/r/%d?highlight=%s#%s" % (review_id, sha1, sha1))

# Maps operation name (URL path) to its handler; mostly stateless
# operation.Operation instances that are called per request.
OPERATIONS = {
    "fetchlines": operation.fetchlines.FetchLines(),
    # Review creation.
    "reviewersandwatchers": operation.createreview.ReviewersAndWatchers(),
    "submitreview": operation.createreview.SubmitReview(),
    "fetchremotebranches": operation.createreview.FetchRemoteBranches(),
    "fetchremotebranch": operation.createreview.FetchRemoteBranch(),
    # Comment creation.
    "validatecommentchain": operation.createcomment.ValidateCommentChain(),
    "createcommentchain": operation.createcomment.CreateCommentChain(),
    "createcomment": operation.createcomment.CreateComment(),
"reopenresolvedcommentchain": operation.manipulatecomment.ReopenResolvedCommentChain(), "reopenaddressedcommentchain": operation.manipulatecomment.ReopenAddressedCommentChain(), "resolvecommentchain": operation.manipulatecomment.ResolveCommentChain(), "morphcommentchain": operation.manipulatecomment.MorphCommentChain(), "updatecomment": operation.manipulatecomment.UpdateComment(), "deletecomment": operation.manipulatecomment.DeleteComment(), "markchainsasread": operation.manipulatecomment.MarkChainsAsRead(), "closereview": operation.manipulatereview.CloseReview(), "dropreview": operation.manipulatereview.DropReview(), "reopenreview": operation.manipulatereview.ReopenReview(), "pingreview": operation.manipulatereview.PingReview(), "updatereview": operation.manipulatereview.UpdateReview(), "setfullname": operation.manipulateuser.SetFullname(), "setgitemails": operation.manipulateuser.SetGitEmails(), "changepassword": operation.manipulateuser.ChangePassword(), "requestverificationemail": operation.manipulateuser.RequestVerificationEmail(), "deleteemailaddress": operation.manipulateuser.DeleteEmailAddress(), "selectemailaddress": operation.manipulateuser.SelectEmailAddress(), "addemailaddress": operation.manipulateuser.AddEmailAddress(), "getassignedchanges": operation.manipulateassignments.GetAssignedChanges(), "setassignedchanges": operation.manipulateassignments.SetAssignedChanges(), "watchreview": operation.manipulatereview.WatchReview(), "unwatchreview": operation.manipulatereview.UnwatchReview(), "addreviewfilters": operation.manipulatefilters.AddReviewFilters(), "removereviewfilter": operation.manipulatefilters.RemoveReviewFilter(), "queryglobalfilters": operation.applyfilters.QueryGlobalFilters(), "applyglobalfilters": operation.applyfilters.ApplyGlobalFilters(), "queryparentfilters": operation.applyfilters.QueryParentFilters(), "applyparentfilters": operation.applyfilters.ApplyParentFilters(), "suggestupstreams": operation.rebasereview.SuggestUpstreams(), 
"checkrebase": operation.rebasereview.CheckRebase(), "preparerebase": operation.rebasereview.PrepareRebase(), "cancelrebase": operation.rebasereview.CancelRebase(), "rebasereview": operation.rebasereview.RebaseReview(), "revertrebase": operation.rebasereview.RevertRebase(), "addfilter": operation.manipulatefilters.AddFilter(), "deletefilter": operation.manipulatefilters.DeleteFilter(), "reapplyfilters": operation.manipulatefilters.ReapplyFilters(), "countmatchedpaths": operation.manipulatefilters.CountMatchedPaths(), "getmatchedpaths": operation.manipulatefilters.GetMatchedPaths(), "markfiles": operation.markfiles.MarkFiles(), "submitchanges": operation.draftchanges.SubmitChanges(), "abortchanges": operation.draftchanges.AbortChanges(), "reviewstatechange": operation.draftchanges.ReviewStateChange(), "savesettings": operation.savesettings.SaveSettings(), "rebasebranch": operation.miscellaneous.RebaseBranch(), "checkserial": operation.miscellaneous.CheckSerial(), "suggestreview": operation.miscellaneous.SuggestReview(), "blame": operation.blame.Blame(), "addcheckbranchnote": page.checkbranch.addNote, "deletecheckbranchnote": page.checkbranch.deleteNote, "addrepository": operation.addrepository.AddRepository(), "storeresource": operation.editresource.StoreResource(), "resetresource": operation.editresource.ResetResource(), "restoreresource": operation.editresource.RestoreResource(), "addnewsitem": operation.news.AddNewsItem(), "editnewsitem": operation.news.EditNewsItem(), "getautocompletedata": operation.autocompletedata.GetAutoCompleteData(), "getrepositorypaths": operation.autocompletedata.GetRepositoryPaths(), "addrecipientfilter": operation.recipientfilter.AddRecipientFilter(), "trackedbranchlog": operation.trackedbranch.TrackedBranchLog(), "disabletrackedbranch": operation.trackedbranch.DisableTrackedBranch(), "triggertrackedbranchupdate": operation.trackedbranch.TriggerTrackedBranchUpdate(), "enabletrackedbranch": operation.trackedbranch.EnableTrackedBranch(), 
"deletetrackedbranch": operation.trackedbranch.DeleteTrackedBranch(), "addtrackedbranch": operation.trackedbranch.AddTrackedBranch(), "restartservice": operation.servicemanager.RestartService(), "getservicelog": operation.servicemanager.GetServiceLog(), "checkmergestatus": operation.checkrebase.CheckMergeStatus(), "checkconflictsstatus": operation.checkrebase.CheckConflictsStatus(), "checkhistoryrewritestatus": operation.checkrebase.CheckHistoryRewriteStatus(), "searchreview": operation.searchreview.SearchReview(), "registeruser": operation.registeruser.RegisterUser(), "archivebranch": operation.brancharchiving.ArchiveBranch(), "resurrectbranch": operation.brancharchiving.ResurrectBranch(), "schedulebrancharchival": operation.brancharchiving.ScheduleBranchArchival() } PAGES = { "showreview": page.showreview.renderShowReview, "showcommit": page.showcommit.renderShowCommit, "dashboard": page.dashboard.renderDashboard, "showcomment": page.showcomment.renderShowComment, "showcomments": page.showcomment.renderShowComments, "showfile": page.showfile.renderShowFile, "statistics": page.statistics.renderStatistics, "home": page.home.renderHome, "config": page.config.renderConfig, "branches": page.branches.renderBranches, "tutorial": page.tutorial.renderTutorial, "news": page.news.renderNews, "managereviewers": page.managereviewers.renderManageReviewers, "log": page.showbranch.renderShowBranch, "checkbranch": page.checkbranch.renderCheckBranch, "checkbranchtext": page.checkbranch.renderCheckBranch, "filterchanges": page.filterchanges.renderFilterChanges, "showtree": page.showtree.renderShowTree, "showbatch": page.showbatch.renderShowBatch, "showreviewlog": page.showreviewlog.renderShowReviewLog, "createreview": page.createreview.renderCreateReview, "newrepository": page.addrepository.renderNewRepository, "confirmmerge": page.confirmmerge.renderConfirmMerge, "editresource": page.editresource.renderEditResource, "search": page.search.renderSearch, "repositories": 
page.repositories.renderRepositories, "services": page.services.renderServices, "rebasetrackingreview": page.rebasetrackingreview.RebaseTrackingReview(), "createuser": page.createuser.CreateUser(), "verifyemail": page.verifyemail.renderVerifyEmail, "manageextensions": page.manageextensions.renderManageExtensions, "showfilters": page.showfilters.renderShowFilters } if configuration.extensions.ENABLED: import extensions import extensions.role.page import extensions.role.processcommits import operation.extensioninstallation import page.loadmanifest import page.processcommits OPERATIONS["installextension"] = operation.extensioninstallation.InstallExtension() OPERATIONS["uninstallextension"] = operation.extensioninstallation.UninstallExtension() OPERATIONS["reinstallextension"] = operation.extensioninstallation.ReinstallExtension() OPERATIONS["clearextensionstorage"] = operation.extensioninstallation.ClearExtensionStorage() OPERATIONS["addextensionhookfilter"] = operation.extensioninstallation.AddExtensionHookFilter() OPERATIONS["deleteextensionhookfilter"] = operation.extensioninstallation.DeleteExtensionHookFilter() PAGES["loadmanifest"] = page.loadmanifest.renderLoadManifest PAGES["processcommits"] = page.processcommits.renderProcessCommits if configuration.base.AUTHENTICATION_MODE != "host" and configuration.base.SESSION_TYPE == "cookie": import operation.usersession import page.login if configuration.base.AUTHENTICATION_MODE == "critic": OPERATIONS["validatelogin"] = operation.usersession.ValidateLogin() OPERATIONS["endsession"] = operation.usersession.EndSession() PAGES["login"] = page.login.Login() def handleException(db, req, user, as_html=False): error_message = traceback.format_exc() environ = req.getEnvironment() environ["wsgi.errors"].write(error_message) if not user or not db or not user.hasRole(db, "developer"): url = wsgiref.util.request_uri(environ) x_forwarded_host = req.getRequestHeader("X-Forwarded-Host") if x_forwarded_host: original_host = 
x_forwarded_host.split(",")[0].strip() def replace_host(match): return match.group(1) + original_host url = re.sub("^([a-z]+://)[^/]+", replace_host, url) if user and not user.isAnonymous(): user_string = str(user) else: user_string = "" mailutils.sendExceptionMessage(db, "wsgi", "\n".join(["User: %s" % user_string, "Method: %s" % req.method, "URL: %s" % url, "", error_message])) admin_message_sent = True else: admin_message_sent = False if not user or not db or user.hasRole(db, "developer") \ or configuration.debug.IS_DEVELOPMENT \ or configuration.debug.IS_TESTING: error_title = "Unexpected error!" error_message = error_message.strip() if as_html: error_message = "

%s

" % htmlify(error_message) error_body = [error_message] if admin_message_sent: admin_message_sent = ("A message has been sent to the system " "administrator(s) with details about the " "problem.") if as_html: admin_message_sent = "

%s

" % admin_message_sent error_body.append(admin_message_sent) else: error_title = "Request failed!" error_message = ("An unexpected error occurred while handling the " "request. A message has been sent to the system " "administrator(s) with details about the problem. " "Please contact them for further information and/or " "assistance.") if as_html: error_message = "

%s

" % error_message error_body = [error_message] return error_title, error_body class WrappedResult(object): def __init__(self, db, req, user, result): self.db = db self.req = req self.user = user self.result = iter(result) # Fetch the first block "prematurely," so that errors from it are raised # early, and handled by the normal error handling code in main(). self.first = self.result.next() self.failed = False def __iter__(self): return self def next(self): if self.failed: raise StopIteration try: if self.first: value = self.first self.first = None else: value = self.result.next() self.db.rollback() return value except StopIteration: self.db.close() raise except Exception: error_title, error_body = handleException( self.db, self.req, self.user) self.db.close() if self.req.getContentType().startswith("text/html"): self.failed = True error_body = "".join("

%s

" % htmlify(part) for part in error_body) # Close a bunch of tables, in case we're in any. Not # pretty, but probably makes the end result prettier. return ("" "
" "

%s

%s
" % (error_title, error_body)) else: raise StopIteration def handleRepositoryPath(db, req, user, suffix): if "http" not in configuration.base.REPOSITORY_URL_TYPES: return False components = req.path.split("/") for index in range(1, len(components) + 1): repository_path = "/".join(components[:index]) additional_path = "/".join(components[index:]) if suffix is not None: if not repository_path.endswith(suffix): continue repository_path = os.path.join(configuration.paths.GIT_DIR, repository_path) if not repository_path.endswith(".git"): repository_path += ".git" try: repository = gitutils.Repository.fromPath(db, repository_path) except gitutils.NoSuchRepository: continue else: db.close() try: repository.invokeGitHttpBackend(req, user, additional_path) except gitutils.GitHttpBackendNeedsUser: req.requestHTTPAuthentication() return True return False def handleDisplayMessage(db, req, message): user = db.user if user is None: user = dbutils.User.makeAnonymous() document = page.utils.displayMessage( db, req, user, title=message.title, message=message.body, review=message.review, is_html=message.html) req.setContentType("text/html") req.setStatus(message.status) req.start() return [str(document)] def handleDisplayFormattedText(db, req, formatted_text): user = db.user if user is None: user = dbutils.User.makeAnonymous() document = page.utils.displayFormattedText( db, req, user, formatted_text.source) req.setContentType("text/html") req.start() return [str(document)] def handleMissingWSGIRemoteUser(db, req): return handleDisplayMessage( db, req, request.DisplayMessage( title="Configuration error", body="""\

Critic was configured with "--auth-mode host", meaning the host web server will authenticate users, but there was no REMOTE_USER variable in the WSGI environment provided by the web server, indicating it is not actually configured to authenticate users.

To fix this you can either reinstall Critic using "--auth-mode critic" (to let Critic handle user authentication internally using its own user database), or you can configure user authentication properly in the web server. For Apache 2.x, the latter can be done by adding the something like the following to the apache site configuration for Critic:

  <Location />
    AuthType Basic
    AuthName "Authentication Required"
    AuthUserFile "/path/to/critic-main.htpasswd.users"
    Require valid-user
  </Location>

If you need more dynamic http authentication you can instead setup mod_wsgi with a custom WSGIAuthUserScript directive. This will cause the provided credentials to be passed to a Python function called check_password() that you can implement yourself. This way you can validate the user/pass via any existing database or for example an LDAP server.

For more information on setting up such authentication in Apache 2.x, see: Apache Authentication Provider.

""" % { "url": ("http://code.google.com/p/modwsgi/wiki/" "AccessControlMechanisms#Apache_Authentication_Provider") }, html=True, status=500)) def finishOAuth(db, req, provider): try: provider.finish(db, req) except (auth.InvalidRequest, auth.Failure): _, error_body = handleException( db, req, dbutils.User.makeAnonymous(), as_html=True) raise page.utils.DisplayMessage( title="Authentication failed", body="".join(error_body), html=True) def process_request(environ, start_response): request_start = time.time() critic = api.critic.startSession(for_user=True) db = critic.database user = None try: try: req = request.Request(db, environ, start_response) # Handle static resources very early. We don't bother with checking # for an authenticated user; static resources aren't sensitive, and # are referenced from special-case resources like the login page and # error messages that, that we want to display even if something # went wrong with the authentication. if req.path.startswith("static-resource/"): return handleStaticResource(req) if req.path.startswith("externalauth/"): provider_name = req.path[len("externalauth/"):] if provider_name in auth.PROVIDERS: provider = auth.PROVIDERS[provider_name] authorize_url = provider.start(db, req) if authorize_url: raise request.Found(authorize_url) if req.path.startswith("oauth/"): provider_name = req.path[len("oauth/"):] if provider_name in auth.PROVIDERS: provider = auth.PROVIDERS[provider_name] if isinstance(provider, auth.OAuthProvider): finishOAuth(db, req, provider) auth.checkSession(db, req) auth.AccessControl.accessHTTP(db, req) user = req.user user.loadPreferences(db) if user.status == 'retired': # If a retired user accesses the system, change the status back # to 'current' again. 
with db.updating_cursor("users") as cursor: cursor.execute("""UPDATE users SET status='current' WHERE id=%s""", (user.id,)) user.status = 'current' if not user.getPreference(db, "debug.profiling.databaseQueries"): db.disableProfiling() original_path = req.path if not req.path: if user.isAnonymous(): location = "tutorial" else: location = user.getPreference(db, "defaultPage") if req.query: location += "?" + req.query raise request.MovedTemporarily(location) if req.path == "redirect": target = req.getParameter("target", "/") raise request.SeeOther(target) if req.path == "findreview": # This raises either DisplayMessage or MovedTemporarily. findreview(req, db) # Require a .git suffix on HTTP(S) repository URLs unless the user- # agent starts with "git/" (as Git's normally does.) # # Our objectives are: # # 1) Not to require Git's user-agent to be its default value, since # the user might have to override it to get through firewalls. # 2) Never to send regular user requests to 'git http-backend' by # mistake. # # This is a compromise. if req.getRequestHeader("User-Agent", "").startswith("git/"): suffix = None else: suffix = ".git" if handleRepositoryPath(db, req, user, suffix): db = None return [] # Extension "page" roles. Prefixing a path with "!/" bypasses all # extensions. # # Also bypass extensions if the user is anonymous unless general # anonymous access is allowed. If it's not and the user is still # anonymous, access was allowed because of a path-based exception, # which was not intended to allow access to an extension. 
if req.path.startswith("!/"): req.path = req.path[2:] elif configuration.extensions.ENABLED: handled = extensions.role.page.execute(db, req, user) if isinstance(handled, basestring): req.start() return [handled] if req.path.startswith("r/"): req.updateQuery({ "id": [req.path[2:]] }) req.path = "showreview" if configuration.extensions.ENABLED: match = RE_EXTENSION_RESOURCE.match(req.path) if match: content_type, resource = extensions.resource.get(req, db, user, match.group(1)) if resource: req.setContentType(content_type) if content_type.startswith("image/"): req.addResponseHeader("Cache-Control", "max-age=3600") req.start() return [resource] else: req.setStatus(404) req.start() return [] if req.path.startswith("download/"): return handleDownload(db, req, user) if req.path == "api" or req.path.startswith("api/"): try: result = jsonapi.handleRequest(critic, req) except jsonapi.Error as error: req.setStatus(error.http_status) result = { "error": { "title": error.title, "message": error.message }} else: req.setStatus(200) accept_header = req.getRequestHeader("Accept") if accept_header == "application/vnd.api+json": default_indent = None else: default_indent = 2 indent = req.getParameter("indent", default_indent, filter=int) if indent == 0: # json.encode(..., indent=0) still gives line-breaks, just # no indentation. This is not so useful, so set indent to # None instead, which disables formatting entirely. 
indent = None req.setContentType("application/vnd.api+json") req.start() return [json_encode(result, indent=indent)] operationfn = OPERATIONS.get(req.path) if operationfn: result = operationfn(req, db, user) if isinstance(result, (OperationResult, OperationError)): req.setContentType("text/json") if isinstance(result, OperationResult): if db.profiling: result.set("__profiling__", formatDBProfiling(db)) result.set("__time__", time.time() - request_start) elif not req.hasContentType(): req.setContentType("text/plain") req.start() if isinstance(result, unicode): return [result.encode("utf8")] else: return [str(result)] impersonate_user = user if not user.isAnonymous(): user_parameter = req.getParameter("user", None) if user_parameter: impersonate_user = dbutils.User.fromName(db, user_parameter) while True: pagefn = PAGES.get(req.path) if pagefn: try: result = pagefn(req, db, impersonate_user) if db.profiling and not (isinstance(result, str) or isinstance(result, Document)): source = "" for fragment in result: source += fragment result = source if isinstance(result, page.utils.ResponseBody): req.setContentType(result.content_type) req.start() return [result.data] if isinstance(result, str) or isinstance(result, Document): req.setContentType("text/html") req.start() result = str(result) result += ("" % ((time.time() - request_start) * 1000)) if db.profiling: result += ("" % formatDBProfiling(db)) return [result] result = WrappedResult(db, req, user, result) req.setContentType("text/html") req.start() # Prevent the finally clause below from closing the # connection. WrappedResult does it instead. db = None return result except gitutils.NoSuchRepository as error: raise page.utils.DisplayMessage( title="Invalid URI Parameter!", body=error.message) except gitutils.GitReferenceError as error: if error.ref: raise page.utils.DisplayMessage( title="Specified ref not found", body=("There is no ref named \"%s\" in %s." 
% (error.ref, error.repository))) elif error.sha1: raise page.utils.DisplayMessage( title="SHA-1 not found", body=error.message) else: raise except dbutils.NoSuchUser as error: raise page.utils.DisplayMessage( title="Invalid URI Parameter!", body=error.message) except dbutils.NoSuchReview as error: raise page.utils.DisplayMessage( title="Invalid URI Parameter!", body=error.message) if "/" in req.path: repository_name, _, rest = req.path.partition("/") repository = gitutils.Repository.fromName(db, repository_name) if repository: req.path = rest else: repository = None def revparsePlain(item): try: return gitutils.getTaggedCommit(repository, repository.revparse(item)) except: raise revparse = revparsePlain if repository is None: review_id = req.getParameter("review", None, filter=int) if review_id: cursor = db.cursor() cursor.execute("""SELECT repository FROM branches JOIN reviews ON (reviews.branch=branches.id) WHERE reviews.id=%s""", (review_id,)) row = cursor.fetchone() if row: repository = gitutils.Repository.fromId(db, row[0]) def revparseWithReview(item): if re.match("^[0-9a-f]+$", item): cursor.execute("""SELECT sha1 FROM commits JOIN changesets ON (changesets.child=commits.id) JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) WHERE reviewchangesets.review=%s AND commits.sha1 LIKE %s""", (review_id, item + "%")) row = cursor.fetchone() if row: return row[0] else: return revparsePlain(item) revparse = revparseWithReview if repository is None: repository = user.getDefaultRepository(db) if gitutils.re_sha1.match(req.path): if repository and not repository.iscommit(req.path): repository = None if not repository: try: repository = gitutils.Repository.fromSHA1(db, req.path) except gitutils.GitReferenceError: repository = None if repository: try: items = filter(None, map(revparse, req.path.split(".."))) updated_query = {} if len(items) == 1: updated_query["repository"] = [repository.name] updated_query["sha1"] = [items[0]] elif len(items) == 2: 
updated_query["repository"] = [repository.name] updated_query["from"] = [items[0]] updated_query["to"] = [items[1]] if updated_query: req.updateQuery(updated_query) req.path = "showcommit" continue except gitutils.GitReferenceError: pass break raise page.utils.DisplayMessage( title="Not found!", body="Page not handled: /%s" % original_path, status=404) except GeneratorExit: raise except auth.AccessDenied as error: return handleDisplayMessage( db, req, request.DisplayMessage( title="Access denied", body=error.message, status=403)) except request.HTTPResponse as response: return response.execute(db, req) except request.MissingWSGIRemoteUser as error: return handleMissingWSGIRemoteUser(db, req) except page.utils.DisplayMessage as message: return handleDisplayMessage(db, req, message) except page.utils.DisplayFormattedText as formatted_text: return handleDisplayFormattedText(db, req, formatted_text) except Exception: # crash might be psycopg2.ProgrammingError so rollback to avoid # "InternalError: current transaction is aborted" inside handleException() if db and db.closed(): db = None elif db: db.rollback() error_title, error_body = handleException(db, req, user) error_body = reflow("\n\n".join(error_body)) error_message = "\n".join([error_title, "=" * len(error_title), "", error_body]) assert not req.isStarted() req.setStatus(500) req.setContentType("text/plain") req.start() return [error_message] finally: if db: db.close() if configuration.debug.COVERAGE_DIR: def main(environ, start_response): import coverage def do_process_request(environ, start_response): # Apply list() to force the request to be fully performed by this # call. It might return an iterator whose .next() does all the # work, and if we just return that from here, the actual work is not # subject to coverage measurement. 
return list(process_request(environ, start_response)) return coverage.call("wsgi", do_process_request, environ, start_response) else: main = process_request ================================================ FILE: src/data/preferences.json ================================================ { "comment.diff.contextLines": { "type": "integer", "default": 3, "description": "Default number of context lines added to diffs when displaying comment chains. Can be overridden by 'context' URI parameter." }, "commit.diff.collapseSimpleHunks": { "type": "boolean", "default": false, "description": "When a hunk in a diff contains only deleted or only inserted lines, collapse the \"other\" side, so that the hunk is displayed as a single wide column. NOT FULLY FUNCTIONAL!" }, "commit.diff.compactMode": { "type": "boolean", "default": true, "description": "Generate more compact HTML for diffs, and generate some HTML on-demand client-side. Faster download and initial rendering, and slower interaction." }, "commit.diff.contextLines": { "type": "integer", "default": 5, "description": "Default number of context lines added to diffs when displaying commits. Can be overridden by 'context' URI parameter." }, "commit.diff.highlightIllegalWhitespace": { "type": "boolean", "default": true, "description": "Use an angry red color scheme for illegal whitespace (trailing whitespace or tabs in files with \"indent-tabs-mode: nil\".)" }, "commit.diff.rulerColumn": { "type": "integer", "default": 0, "description": "The column at which a ruler is shown. Can be set to 0 to disable the ruler.", "relevance": { "repository": true } }, "commit.diff.visualTabs": { "type": "boolean", "default": false, "description": "Replace tab characters with U+2192 (RIGHTWARDS ARROW) styled to the correct width (taking the file's Emacs mode-line into account.)" }, "commit.expandAllFiles": { "type": "boolean", "default": false, "description": "On the 'showcommit' page, expand the diffs in all files on page load."
}, "commit.expandFilesLimit": { "type": "integer", "default": 0, "description": "If 'commit.expandAllFiles' is enabled, and this limit is non-zero, all files are initially collapsed if the diff contains changes in more files than this limit." }, "dashboard.defaultGroups": { "type": "string", "default": "owned,draft,active,watched", "description": "Review groups to show on the dashboard by default. Available groups are owned, draft, active, watched, open and closed." }, "debug.enabled": { "type": "boolean", "default": false, "description": "Enable debugging preferences." }, "debug.extensions.customProcessCommits": { "type": "boolean", "default": false, "description": "Enable button for performing installed ProcessCommits hooks on arbitrary sets of commits for testing." }, "debug.profiling.abortChanges": { "type": "boolean", "default": false, "description": "Enable profiling of /abortchanges." }, "debug.profiling.databaseQueries": { "type": "boolean", "default": false, "description": "Enable profiling of database queries." }, "debug.profiling.pageGeneration": { "type": "boolean", "default": false, "description": "Enable profiling of generation of various pages. Results are emitted as a comment at the end of the HTML." }, "debug.profiling.submitChanges": { "type": "boolean", "default": false, "description": "Enable profiling of /submitchanges." }, "defaultPage": { "type": "string", "default": "home", "description": "The default page displayed when accessing the system." }, "defaultRepository": { "type": "string", "default": "", "description": "Name of default repository. In situations where the repository is not implied, this is the one that is used, or preferred." }, "email.activated": { "type": "boolean", "default": true, "description": "Must be enabled before the system sends any emails to the user." 
}, "email.comment.contextLines": { "type": "integer", "default": 0, "description": "Default number of context lines added to code excerpts when displaying comment threads in emails." }, "email.enableAssociationRecipients": { "type": "boolean", "default": false, "description": "Add phony recipients to review emails representing your associations with the review.", "relevance": { "repository": true } }, "email.ignoreOwnChanges": { "type": "boolean", "default": false, "description": "Don't send emails about own changes (reviewing, commits added to reviews and rebased reviews.)" }, "email.lineLength": { "type": "integer", "default": 80, "description": "Maximum line length in emails sent. Plain text will be reflowed to adhere to this. Diffs and other non-prose items are never reflowed." }, "email.listId": { "type": "string", "default": "", "description": "Identifier used to construct a List-Id header for review emails. Used for the $id in \"<$id.$hostname>\" in the header value.", "relevance": { "repository": true } }, "email.newReview.diff.contextLines": { "type": "integer", "default": 3, "description": "Number of context lines added to diffs in the email sent when a new review is submitted." }, "email.newReview.diff.maxLines": { "type": "integer", "default": 250, "description": "Maximum number of lines of diffs to include in the email sent when a new review is submitted. If exceeded, no diffs are included at all." }, "email.newReview.displayCommits": { "type": "boolean", "default": true, "description": "Include a list of all commits to be reviewed in the email sent when a new review is submitted." }, "email.newReview.displayDiffs": { "type": "boolean", "default": true, "description": "Include diffs of each commit to be reviewed in the email sent when a new review is submitted." }, "email.newReview.displayReviewers": { "type": "boolean", "default": true, "description": "Include a list of all assigned reviewers in the email sent when a new review is submitted." 
}, "email.newReview.displayStats": { "type": "boolean", "default": false, "description": "Include --stat output for commits added to the review." }, "email.newReview.displayWatchers": { "type": "boolean", "default": true, "description": "Include a list of all additional watchers in the email sent when a new review is submitted." }, "email.newReview.stats.maxLines": { "type": "integer", "default": 250, "description": "Maximum number of lines of commit stats to include in the email sent when a review is submitted. If exceeded, no stats are included at all." }, "email.subjectLine.newReview": { "type": "string", "default": "New Review: %(summary)s", "description": "Python format string for subject line of email sent for newly created reviews." }, "email.subjectLine.newishReview": { "type": "string", "default": "New(ish) Review: %(summary)s", "description": "Python format string for subject line of email sent for new(ish) reviews." }, "email.subjectLine.pingedReview": { "type": "string", "default": "Pinged Review: %(summary)s", "description": "Python format string for subject line of email sent when someone pings a review." }, "email.subjectLine.updatedReview.assignmentsChanged": { "type": "string", "default": "Updated Review: %(summary)s", "description": "Python format string for subject line of email sent when someone changes review assignments." }, "email.subjectLine.updatedReview.commitsPushed": { "type": "string", "default": "Updated Review: %(summary)s", "description": "Python format string for subject line of email sent when someone pushes additional commits to a review." }, "email.subjectLine.updatedReview.parentFiltersApplied": { "type": "string", "default": "Updated Review: %(summary)s", "description": "Python format string for subject line of email sent when someone applies parent repository filters to a review." 
}, "email.subjectLine.updatedReview.reviewRebased": { "type": "string", "default": "Updated Review: %(summary)s", "description": "Python format string for subject line of email sent when someone rebases a review branch." }, "email.subjectLine.updatedReview.submittedChanges": { "type": "string", "default": "Updated Review: %(summary)s", "description": "Python format string for subject line of email sent when someone submits changes to a review." }, "email.updatedReview.commentThreading": { "type": "boolean", "default": false, "description": "Send emails containing comments so that comment threads form email threads. This will increase the number of emails sent from Critic to the user." }, "email.updatedReview.diff.contextLines": { "type": "integer", "default": 3, "description": "Number of context lines added to diffs in the email sent when a review is updated." }, "email.updatedReview.diff.maxLines": { "type": "integer", "default": 250, "description": "Maximum number of lines of diffs to include in the email sent when a review is updated. If exceeded, no diffs are included at all." }, "email.updatedReview.displayCommits": { "type": "boolean", "default": true, "description": "Include a list of all new commits to be reviewed in the email sent when a review is updated." }, "email.updatedReview.displayStats": { "type": "boolean", "default": false, "description": "Include --stat output for commits added to the review." }, "email.updatedReview.quotedComments": { "type": "string", "default": "all", "description": "Selection of comments in a comment thread that are quoted when new replies are submitted." }, "email.updatedReview.relevantChangesOnly": { "type": "boolean", "default": false, "description": "Only generate emails about reviewed files and written comments that are relevant." }, "email.updatedReview.stats.maxLines": { "type": "integer", "default": 250, "description": "Maximum number of lines of commit stats to include in the email sent when a review is updated. 
If exceeded, no stats are included at all." }, "email.urlType": { "type": "string", "default": "main", "description": "Type of URLs used in emails." }, "repository.urlType": { "type": "string", "default": "http", "description": "Type of repository URL to display." }, "review.applyUpstreamFilters": { "type": "boolean", "default": true, "description": "If enabled, filters from upstream repositories are applied when creating review via push." }, "review.branchArchiveDelay.closed": { "type": "integer", "default": 7, "description": "If non-zero, archive review branches belonging to closed reviews this number of days after the review was (last) closed.", "relevance": { "repository": true } }, "review.branchArchiveDelay.dropped": { "type": "integer", "default": 1, "description": "If non-zero, archive review branches belonging to dropped reviews this number of days after the review was (last) dropped.", "relevance": { "repository": true } }, "review.createViaPush": { "type": "boolean", "default": false, "description": "If enabled, reviews can be requested by pushing a new branch whose name starts with \"r/\" to the repository." }, "review.defaultOptOut": { "type": "boolean", "default": false, "description": "Opt out of receiving review emails (except for \"New review\" emails) by default.", "relevance": { "repository": true, "filter": true } }, "review.dropAnyReview": { "type": "boolean", "default": false, "description": "Show the \"Drop Review\" button on the front-page of every review, instead of only those you own." }, "review.pingAnyReview": { "type": "boolean", "default": false, "description": "Show the \"Ping Review\" button on the front-page of every review, instead of only those you own." }, "review.updateCheckInterval": { "type": "integer", "default": 300, "description": "Check for updates of reviews every N seconds while displaying review front pages. If zero, the check is disabled." 
}, "review.useMustRevalidate": { "type": "boolean", "default": true, "description": "Deliver review front-pages with \"Cache-Control: must-revalidate\" to prevent history navigation to stale versions." }, "style.defaultFont": { "type": "string", "default": "font-size: 10pt; font-family: sans-serif", "description": "Font setting applied to the BODY element on every page." }, "style.sourceFont": { "type": "string", "default": "font-size: 10pt; font-family: monospace", "description": "Font setting applied to source code text in diff display." }, "style.tutorialFont": { "type": "string", "default": "font-size: 11pt; font-family: serif", "description": "Font setting applied to tutorial text." }, "timezone": { "type": "string", "default": "Universal/UTC", "description": "Timezone to present (most) dates in." }, "ui.asynchronousReviewMarking": { "type": "boolean", "default": false, "description": "Mark changes as reviewed (or not reviewed) using an asynchronous XMLHttpRequest." }, "ui.convertIssueToNote": { "type": "boolean", "default": false, "description": "Enable \"Convert To Note\" operation for issues. This operation is considered an inferior alternative to resolving an issue; use of it is not recommended." }, "ui.keyboardShortcuts": { "type": "boolean", "default": true, "description": "Enabled keyboard shortcuts on those pages that define any." }, "ui.resolveIssueWarning": { "type": "boolean", "default": true, "description": "Show a warning when resolving an issue raised by another user." }, "ui.title.showReview": { "type": "string", "default": "%(id)s (%(progress)s) - %(summary)s - Opera Critic", "description": "Python format string for title of review front-page documents." 
}
}


================================================ FILE: src/dbaccess.py ================================================

# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

# Database driver abstraction layer: selects a DB-API driver based on the
# installed configuration and re-exports a uniform set of exception classes
# plus a connect() factory, so the rest of the code base never imports a
# driver module directly.
try:
    import configuration
except ImportError:
    # No installed configuration is available (e.g. running from a plain
    # source checkout).  Export placeholder exception types and a connect()
    # that always fails, so importing this module still succeeds.
    IntegrityError = ProgrammingError = OperationalError = Exception
    TransactionRollbackError = Exception

    def connect():
        raise Exception("not supported")
else:
    if configuration.database.DRIVER == "postgresql":
        import psycopg2 as driver
        # psycopg2 defines a dedicated serialization-failure exception.
        TransactionRollbackError = driver.extensions.TransactionRollbackError
    else:
        import sys
        import os
        # The SQLite shim lives under installation/qs/, which is reachable
        # from the directory two levels above this file.
        sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
        import installation.qs.sqlite as driver

        # SQLite doesn't appear to be throwing this type of error.
        class TransactionRollbackError(Exception):
            pass

    # Common DB-API exception classes, re-exported from whichever driver was
    # selected above.
    IntegrityError = driver.IntegrityError
    OperationalError = driver.OperationalError
    ProgrammingError = driver.ProgrammingError

    def connect():
        # Open a new connection using the configured connection parameters.
        return driver.connect(**configuration.database.PARAMETERS)


================================================ FILE: src/dbutils/__init__.py ================================================

# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. from dbaccess import (IntegrityError, OperationalError, ProgrammingError, TransactionRollbackError) from dbutils.session import Session from dbutils.database import (InvalidCursorError, FailedToLock, NOWAIT, Database, boolean) from dbutils.user import InvalidUserId, NoSuchUser, User from dbutils.review import NoSuchReview, ReviewState, Review from dbutils.branch import Branch from dbutils.paths import (InvalidFileId, InvalidPath, File, find_file, find_files, describe_file) from dbutils.timezones import (loadTimezones, updateTimezones, sortedTimezones, adjustTimestamp) from dbutils.system import (getInstalledSHA1, getURLPrefix, getAdministratorContacts) ================================================ FILE: src/dbutils/branch.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
class Branch(object):
    """A Git branch tracked by Critic.

    Mirrors a row in the 'branches' table.  Commits belonging to a branch are
    recorded in the 'reachable' table; a branch may have a 'base' branch, in
    which case commits reachable from the base (chain) are not considered part
    of this branch.  Head/tail commits and the commit list are loaded lazily
    and cached."""

    def __init__(self, id, repository, name, head_sha1, base, tail_sha1, branch_type, archived):
        self.id = id                      # branches.id
        self.repository = repository     # gitutils.Repository owning the branch
        self.name = name                 # branch name (without refs/heads/)
        self.head_sha1 = head_sha1       # SHA-1 of the head commit
        self.base = base                 # base Branch object, or None
        self.tail_sha1 = tail_sha1       # SHA-1 of the tail commit, or None
        self.type = branch_type          # branches.type
        self.archived = archived         # True if the ref has been archived
        self.review = None               # set by fromId(load_review=True)
        # Lazily populated caches:
        self.__commits = None
        self.__head = None
        self.__tail = None

    def __eq__(self, other):
        # Identity is the database id; assumes 'other' is a Branch.
        return self.id == other.id

    def __ne__(self, other):
        return self.id != other.id

    def contains(self, db, commit):
        """Return True if 'commit' (a gitutils.Commit or SHA-1 string) is
        recorded as reachable from this branch."""
        import gitutils
        cursor = db.cursor()
        if isinstance(commit, gitutils.Commit) and commit.id is not None:
            # Fast path: look up by the commit's database id directly.
            cursor.execute("SELECT 1 FROM reachable WHERE branch=%s AND commit=%s",
                           [self.id, commit.id])
        else:
            # Fall back to matching by SHA-1 via the commits table.
            cursor.execute("SELECT 1 FROM reachable, commits WHERE branch=%s AND commit=id AND sha1=%s",
                           [self.id, str(commit)])
        return cursor.fetchone() is not None

    def getHead(self, db):
        """Return the head commit as a gitutils.Commit (cached)."""
        import gitutils
        if not self.__head:
            self.__head = gitutils.Commit.fromSHA1(db, self.repository, self.head_sha1)
        return self.__head

    def getTail(self, db):
        """Return the tail commit as a gitutils.Commit (cached)."""
        import gitutils
        if not self.__tail:
            self.__tail = gitutils.Commit.fromSHA1(db, self.repository, self.tail_sha1)
        return self.__tail

    def getJSConstructor(self):
        """Return a JavaScript expression constructing this branch (and its
        base chain, recursively) client-side."""
        from htmlutils import jsify
        if self.base:
            return "new Branch(%d, %s, %s)" % (self.id, jsify(self.name), self.base.getJSConstructor())
        else:
            return "new Branch(%d, %s, null)" % (self.id, jsify(self.name))

    def getJS(self):
        """Return a JavaScript statement declaring the 'branch' variable."""
        return "var branch = %s;" % self.getJSConstructor()

    def getCommits(self, db):
        """Return the list of commits belonging to this branch, per the
        'reachable' table (cached after the first call)."""
        import gitutils
        if self.__commits is None:
            cursor = db.cursor()
            cursor.execute("""SELECT commits.id, commits.sha1
                                FROM reachable
                                JOIN commits ON (commits.id=reachable.commit)
                               WHERE reachable.branch=%s""",
                           (self.id,))
            self.__commits = [gitutils.Commit.fromSHA1(db, self.repository, sha1, commit_id=commit_id)
                              for commit_id, sha1 in cursor]
        return self.__commits

    def rebase(self, db, base):
        """Rebase this branch onto 'base' (another Branch).

        Recomputes the set of commits belonging to this branch as those
        reachable from its head but not from the new base branch chain, and
        rewrites the 'reachable' and 'branches' tables accordingly.  If 'base'
        is currently based on this branch, 'base' is first re-based onto this
        branch's old base to avoid a cycle.

        Returns (old_count, new_count, base_old_count, base_new_count) where
        the base_* values are None unless 'base' had to be re-based too."""
        import gitutils
        cursor = db.cursor()

        def findReachable(head, base_branch_id, force_include=set()):
            # Collect the database ids of all commits reachable from 'head'
            # that are not reachable from the branch 'base_branch_id' or any
            # branch in its base chain.  Commits whose SHA-1 is in
            # 'force_include' are included (and traversed) regardless.
            bases = [base_branch_id]

            # Follow the base chain to its root.
            while True:
                cursor.execute("SELECT base FROM branches WHERE id=%s", (bases[-1],))
                branch_id = cursor.fetchone()[0]
                if branch_id is None:
                    break
                bases.append(branch_id)

            expression = "SELECT 1 FROM reachable, commits WHERE branch IN (%s) AND commit=id AND sha1=%%s" % ", ".join(["%s"] * len(bases))

            def exclude(sha1):
                # True if the commit is already reachable from the base chain.
                cursor.execute(expression, bases + [sha1])
                return cursor.fetchone() is not None

            # Breadth-first walk from the head commit.
            stack = [head.sha1]
            processed = set()
            values = []

            while stack:
                sha1 = stack.pop(0)

                if sha1 not in processed:
                    processed.add(sha1)

                    commit = gitutils.Commit.fromSHA1(db, self.repository, sha1)

                    if sha1 in force_include or not exclude(sha1):
                        values.append(commit.getId(db))

                        for sha1 in commit.parents:
                            if sha1 not in processed and (sha1 in force_include or not exclude(sha1)):
                                stack.append(sha1)

            return values

        cursor.execute("SELECT COUNT(*) FROM reachable WHERE branch=%s", (self.id,))
        old_count = cursor.fetchone()[0]

        if base.base and base.base.id == self.id:
            # The new base is currently based on us: re-base it onto our old
            # base first, forcing our own commits to remain part of it.
            cursor.execute("SELECT count(*) FROM reachable WHERE branch=%s", (base.id,))
            base_old_count = cursor.fetchone()[0]

            base_reachable = findReachable(base.getHead(db), self.base.id,
                                           set(commit.sha1 for commit in self.getCommits(db)))
            base_new_count = len(base_reachable)

            cursor.execute("DELETE FROM reachable WHERE branch=%s", [base.id])
            cursor.executemany("INSERT INTO reachable (branch, commit) VALUES (%s, %s)",
                               [(base.id, commit) for commit in base_reachable])
            cursor.execute("UPDATE branches SET base=%s WHERE id=%s", [self.base.id, base.id])

            base.base = self.base
            base.__commits = None
        else:
            base_old_count = None
            base_new_count = None

        # Recompute our own commit set relative to the new base and rewrite
        # the database rows.
        our_reachable = findReachable(self.getHead(db), base.id)
        new_count = len(our_reachable)

        cursor.execute("DELETE FROM reachable WHERE branch=%s", [self.id])
        cursor.executemany("INSERT INTO reachable (branch, commit) VALUES (%s, %s)",
                           [(self.id, commit) for commit in our_reachable])
        cursor.execute("UPDATE branches SET base=%s WHERE id=%s", [base.id, self.id])

        self.base = base
        self.__commits = None

        return old_count, new_count, base_old_count, base_new_count

    def archive(self, db):
        """Archive the branch: keep its head commit alive, delete the ref from
        the repository, and mark the row as archived.  If the ref points at an
        unexpected commit, the ref is left alone and nothing is marked."""
        import gitutils

        try:
            head = self.getHead(db)
        except gitutils.GitReferenceError:
            # The head commit appears to be missing from the repository.
            head = None
        else:
            self.repository.keepalive(head.sha1)

        if head:
            try:
                self.repository.deleteref("refs/heads/" + self.name, head)
            except gitutils.GitError:
                # Branch either doesn't exist, or points to the wrong commit.
                try:
                    sha1 = self.repository.revparse("refs/heads/" + self.name)
                except gitutils.GitReferenceError:
                    # Branch doesn't exist. Pretend it's been archived already.
                    pass
                else:
                    # Branch points to the wrong commit. Don't delete the ref.
                    return

        cursor = db.cursor()
        cursor.execute("""UPDATE branches
                             SET archived=TRUE
                           WHERE id=%s""",
                       (self.id,))

        self.archived = True

    def resurrect(self, db):
        """Undo archive(): recreate the ref at the stored head commit and
        clear the archived flag."""
        self.repository.createref("refs/heads/" + self.name, self.getHead(db))

        cursor = db.cursor()
        cursor.execute("""UPDATE branches
                             SET archived=FALSE
                           WHERE id=%s""",
                       (self.id,))

        self.archived = False

    @staticmethod
    def fromId(db, branch_id, load_review=False, repository=None, for_update=False, profiler=None):
        """Load a Branch by database id, or return None if it doesn't exist.

        load_review:  also load the associated Review into branch.review.
        repository:   pass a pre-loaded gitutils.Repository (must match).
        for_update:   lock the branches row (SELECT ... FOR UPDATE).
        profiler:     optional profiler checked at each loading stage."""
        import gitutils
        cursor = db.cursor()
        cursor.execute("""SELECT name, repository, head, base, tail, branches.type, archived
                            FROM branches
                           WHERE branches.id=%s""",
                       (branch_id,),
                       for_update=for_update)
        row = cursor.fetchone()

        if not row:
            return None
        else:
            # NOTE: 'type' shadows the builtin here (pre-existing).
            branch_name, repository_id, head_commit_id, base_branch_id, tail_commit_id, type, archived = row

            def commit_sha1(commit_id):
                # Map a commits.id to its SHA-1.
                cursor.execute("SELECT sha1 FROM commits WHERE id=%s", (commit_id,))
                return cursor.fetchone()[0]

            head_commit_sha1 = commit_sha1(head_commit_id)
            tail_commit_sha1 = (commit_sha1(tail_commit_id)
                                if tail_commit_id is not None else None)

            if profiler:
                profiler.check("Branch.fromId: basic")

            if repository is None:
                repository = gitutils.Repository.fromId(db, repository_id)

            assert repository.id == repository_id

            if profiler:
                profiler.check("Branch.fromId: repository")

            # Load the base branch recursively (without review/lock options).
            base_branch = (Branch.fromId(db, base_branch_id, repository=repository)
                           if base_branch_id is not None else None)

            if profiler:
                profiler.check("Branch.fromId: base")

            branch = Branch(branch_id, repository, branch_name, head_commit_sha1,
                            base_branch, tail_commit_sha1, type, archived)

            if load_review:
                from dbutils import Review

                branch.review = Review.fromBranch(db, branch)

                if profiler:
                    profiler.check("Branch.fromId: review")

            return branch

    @staticmethod
    def fromName(db, repository, name, **kwargs):
        """Load a Branch by repository and name, or return None.  Extra
        keyword arguments are forwarded to Branch.fromId()."""
        cursor = db.cursor()
        cursor.execute("""SELECT id
                            FROM branches
                           WHERE repository=%s
                             AND name=%s""",
                       (repository.id, name))
        row = cursor.fetchone()
        if not row:
            return None
        else:
            return Branch.fromId(db, row[0], **kwargs)


================================================ FILE: src/dbutils/database.py ================================================

# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import contextlib
import re
import time

import base
import dbaccess

from dbutils.session import Session

# Raised on invalid use of one of the cursor wrappers below.
class InvalidCursorError(base.ImplementationError):
    pass

# Raised when "SELECT ... FOR UPDATE NOWAIT" fails to acquire row locks (without
# blocking.)
class FailedToLock(Exception):
    pass

# Singleton used as the value to Database.Cursor.execute()'s 'for_update'
# argument to request NOWAIT behavior (fail instead of blocking if rows are
# already locked.)
class NoWait: pass NOWAIT = NoWait() class _CursorIterator(object): def __init__(self, base): self.__base = base self.__invalid = False def next(self): if self.__invalid: raise InvalidCursorError("cursor re-used during iteration") return next(self.__base) def invalidate(self): self.__invalid = True class _CursorBase(object): def __init__(self, db, cursor, profiling): self.db = db self.__cursor = cursor self.__profiling = profiling is not None self.__rows = None self.__iterators = [] def __iter__(self): if self.__rows: base = iter(self.__rows) self.__rows = None else: base = iter(self.__cursor) iterator = _CursorIterator(base) self.__iterators.append(iterator) return iterator def __getitem__(self, index): if not self.__profiling: return self.__cursor[index] else: return self.__rows[index] @property def description(self): return self.__cursor.description def fetchone(self): if not self.__profiling: return self.__cursor.fetchone() elif self.__rows: row = self.__rows[0] self.__rows = self.__rows[1:] return row else: return None def fetchall(self): if not self.__profiling: return self.__cursor.fetchall() else: return self.__rows def execute(self, query, params=(), for_update=False): self.validate(query, for_update) if for_update: assert query.upper().startswith("SELECT ") query += " FOR UPDATE" if for_update is NOWAIT: query += " NOWAIT" try: if not self.__profiling: self.__cursor.execute(query, params) else: map(_CursorIterator.invalidate, self.__iterators) self.__iterators = [] before = time.time() self.__cursor.execute(query, params) try: self.__rows = self.__cursor.fetchall() except dbaccess.ProgrammingError: self.__rows = None after = time.time() self.db.recordProfiling(query, after - before, rows=len(self.__rows) if self.__rows else 0) except dbaccess.OperationalError: if for_update is NOWAIT: raise FailedToLock() raise def executemany(self, query, params=()): self.validate(query, False) if self.__profiling is None: self.__cursor.executemany(query, params) else: 
before = time.time() params = list(params) self.__cursor.executemany(query, params) after = time.time() self.db.recordProfiling(query, after - before, repetitions=len(params)) def mogrify(self, *args): return self.__cursor.mogrify(*args) def validate(self, query, for_update): raise InvalidCursorError("invalid use of _CursorBase") class _UnsafeCursor(_CursorBase): def validate(self, query, for_update): try: command, _ = Database.analyzeQuery(query) except ValueError: command = None if command != "SELECT" or for_update: self.db.unsafe_queries = True class _ReadOnlyCursor(_CursorBase): def validate(self, query, for_update): try: command, _ = Database.analyzeQuery(query) except ValueError as error: raise InvalidCursorError(error.message) if command != "SELECT" or for_update: raise InvalidCursorError( "invalid SQL query for read-only cursor: " + query.split(None, 1)[0]) class _UpdatingCursor(_ReadOnlyCursor): def __init__(self, tables, *args): super(_UpdatingCursor, self).__init__(*args) self.__disabled = False self.__tables = set(tables) @property def disabled(self): return self.__disabled def validate(self, query, for_update): if self.__disabled: raise InvalidCursorError("disabled updating cursor used") try: command, table = Database.analyzeQuery(query) except ValueError as error: raise InvalidCursorError(error.message) if command == "SELECT": return True elif command not in ("INSERT", "UPDATE", "DELETE"): raise InvalidCursorError( "invalid query for updating cursor: " + command) elif table not in self.__tables: raise InvalidCursorError( "invalid table for updating cursor: " + table) else: return True def disable(self): self.__disabled = True RE_COMMAND = re.compile( # Optional WITH clause first: r"(?:WITH\s+\w+\s+\(\)\s+AS\s+\(\)(?:\s*,\s*\w+\s+\(\)\s+AS\s+\(\))*\s*)?" # Then query start. r"(INSERT(?=\s+INTO)|UPDATE|DELETE(?=\s+FROM)|SELECT)\s+(.*)", # Let . match line breaks, and ignore case. 
class Database(Session):
    """Session-bound database connection.

    Wraps a dbaccess connection and hands out three kinds of cursors:
    unsafe (anything goes), read-only (SELECT only) and updating (writes
    restricted to declared tables, with automatic commit/rollback.)"""

    def __init__(self, critic=None, allow_unsafe_cursors=True):
        super(Database, self).__init__(critic)
        self.__connection = dbaccess.connect()
        self.__transaction_callbacks = []
        self.__allow_unsafe_cursors = allow_unsafe_cursors
        self.__updating_cursor = None
        # Set when an unsafe cursor runs a non-SELECT/locking query; reset
        # on commit/rollback.
        self.unsafe_queries = False

    def __call_transaction_callbacks(self, *args):
        # A callback returning true stays registered for the next
        # transaction; others are dropped.
        keep_transaction_callbacks = []
        for callback in self.__transaction_callbacks:
            if callback(*args):
                keep_transaction_callbacks.append(callback)
        self.__transaction_callbacks = keep_transaction_callbacks

    def cursor(self):
        """Return an unsafe cursor (any query allowed)."""
        if not self.__allow_unsafe_cursors:
            raise InvalidCursorError("unsafe cursors are not allowed")
        return _UnsafeCursor(self, self.__connection.cursor(), self.profiling)

    def readonly_cursor(self):
        """Return a cursor that only permits plain SELECTs."""
        return _ReadOnlyCursor(self, self.__connection.cursor(), self.profiling)

    @contextlib.contextmanager
    def updating_cursor(self, *tables):
        """Context manager yielding a cursor allowed to modify |tables|.

        Commits on clean exit, rolls back on any exception (which is
        re-raised.)  Cannot be nested, and cannot be mixed with unsafe
        queries in the same transaction."""
        if self.__updating_cursor:
            raise InvalidCursorError("concurrent updating cursor requested")
        if self.unsafe_queries:
            raise InvalidCursorError("mixed unsafe and updating cursors")

        # Commit the current transaction.  It's guaranteed to have made no
        # modifications at this point, but might hold locks from earlier
        # queries that could increase the likelihood of deadlocks.
        self.commit()

        self.__updating_cursor = _UpdatingCursor(
            tables, self, self.__connection.cursor(), self.profiling)

        rolled_back = False

        try:
            yield self.__updating_cursor
            if self.unsafe_queries:
                raise InvalidCursorError("mixed unsafe and updating cursors")
        except:
            # Yes, we really mean to handle *all* exceptions here.
            rolled_back = True
            self.rollback()
            raise
        finally:
            # A cursor disabled inside the block (e.g. by rollback()) must
            # not be committed; disable it now so later use fails loudly.
            do_commit = not (rolled_back or self.__updating_cursor.disabled)
            self.__updating_cursor.disable()
            self.__updating_cursor = None
            if do_commit:
                self.commit()

    def commit(self):
        if self.__updating_cursor:
            raise InvalidCursorError("manual commit when using updating cursor")
        before = time.time()
        self.__connection.commit()
        after = time.time()
        # NOTE(review): the empty label below looks like it may originally
        # have been a marker such as "<commit>" stripped by extraction —
        # confirm against upstream before relying on profiling output.
        self.recordProfiling("", after - before, 0)
        self.__call_transaction_callbacks("commit")
        self.unsafe_queries = False

    def rollback(self):
        if self.__updating_cursor:
            # Rolling back invalidates the active updating cursor.
            self.__updating_cursor.disable()
        before = time.time()
        self.__connection.rollback()
        after = time.time()
        self.recordProfiling("", after - before, 0)
        self.__call_transaction_callbacks("rollback")
        self.unsafe_queries = False

    def close(self):
        super(Database, self).close()
        if self.__connection:
            # Discard any uncommitted work before closing.
            self.__connection.rollback()
            self.__connection.close()
            self.__connection = None

    def closed(self):
        return self.__connection is None

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()
        return False

    def registerTransactionCallback(self, callback):
        """Register |callback(event)| to run on commit/rollback; it stays
        registered as long as it returns true."""
        self.__transaction_callbacks.append(callback)

    @staticmethod
    def analyzeQuery(query):
        """Extract the SQL command and affected table (if any) from a query

           Supported commands are SELECT, UPDATE, INSERT and DELETE.
           Any other kind of query will raise a ValueError."""
        # Strip everything inside parentheses so that sub-queries and
        # column lists don't confuse the command regexp.
        level = 0
        top_level = ""
        for part in re.split("([()])", query):
            if part == ")":
                level -= 1
            if level == 0:
                top_level += part
            if part == "(":
                level += 1

        match = RE_COMMAND.match(top_level)
        if not match:
            raise ValueError("unrecognized query: %s" % query.split()[0])

        command, rest = match.groups()

        if command in ("INSERT", "UPDATE", "DELETE"):
            rest = rest.split()
            if command in ("INSERT", "DELETE"):
                # INSERT INTO <table> / DELETE FROM <table>
                table = rest[1]
            else:
                # UPDATE <table>
                table = rest[0]
        else:
            table = None

        return command, table

    @staticmethod
    def forUser(critic=None):
        return Database(critic)

    @staticmethod
    def forSystem(critic=None):
        import dbutils
        db = Database(critic)
        db.setUser(dbutils.User.makeSystem())
        return db

    @staticmethod
    def forTesting(critic):
        try:
            import configuration
        except ImportError:
            # Not an installed system.
            pass
        else:
            assert configuration.debug.IS_TESTING
        return Database.forSystem(critic)

# This function performs a NULL-safe conversion from a "truth" value or
# arbitrary type to True/False (or None.)  It's a utility for working around the
# fact that SQLite stores booleans as integers (zero or one.)
def boolean(value):
    return None if value is None else bool(value)



================================================
FILE: src/dbutils/database_unittest.py
================================================
def cursors():
    """Unit test for the unsafe / read-only / updating cursor machinery."""
    import api
    import dbutils

    class TestException(Exception):
        pass

    critic = api.critic.startSession(for_testing=True)

    # Create some playground tables. We'll drop them later if all goes well,
    # but it doesn't really matter if we don't.
    with dbutils.Database.forTesting(critic) as db:
        db.cursor().execute(
            "CREATE TABLE playground1 ( x INTEGER PRIMARY KEY, y INTEGER )")
        db.cursor().execute(
            "CREATE TABLE playground2 ( x INTEGER PRIMARY KEY, y INTEGER )")
        db.commit()

    # Basic testing of read-only / updating cursors.
    with dbutils.Database.forTesting(critic) as db:
        ro_cursor = db.readonly_cursor()

        with db.updating_cursor("playground1") as cursor:
            cursor.executemany("INSERT INTO playground1 (x, y) VALUES (%s, %s)",
                               [(1, 1), (2, 2), (3, 3)])

        # The updating cursor committed on exit, so this rollback is a no-op
        # and the rows are expected to survive.
        db.rollback()

        ro_cursor.execute("SELECT x, y FROM playground1")
        assert len(list(ro_cursor)) == 3

        # An exception inside the block rolls the insert back.
        try:
            with db.updating_cursor("playground1") as cursor:
                cursor.execute("INSERT INTO playground1 (x, y) VALUES (%s, %s)",
                               (4, 4))
                raise TestException
        except TestException:
            pass

        db.commit()

        ro_cursor.execute("SELECT x, y FROM playground1")
        assert len(list(ro_cursor)) == 3

        # Writes to undeclared tables are rejected.
        try:
            with db.updating_cursor("playground1") as cursor:
                cursor.execute("INSERT INTO playground2 (x, y) VALUES (%s, %s)",
                               (1, 1))
        except dbutils.InvalidCursorError as error:
            assert error.message == "invalid table for updating cursor: playground2"

        db.commit()

        ro_cursor.execute("SELECT x, y FROM playground2")
        assert len(list(ro_cursor)) == 0

        try:
            with db.updating_cursor("playground2") as cursor:
                cursor.execute("DELETE FROM playground1 WHERE x=1")
        except dbutils.InvalidCursorError as error:
            assert error.message == "invalid table for updating cursor: playground1"

        db.commit()

        ro_cursor.execute("SELECT x, y FROM playground1")
        assert len(list(ro_cursor)) == 3

        with db.updating_cursor("playground1") as cursor:
            cursor.execute("DELETE FROM playground1 WHERE x=1")

        db.rollback()

        ro_cursor.execute("SELECT x, y FROM playground1")
        assert len(list(ro_cursor)) == 2

        with db.updating_cursor("playground1") as cursor:
            cursor.execute("UPDATE playground1 SET y=-2 WHERE x=2")

        db.rollback()

        ro_cursor.execute("SELECT y FROM playground1 WHERE x=2")
        assert ro_cursor.fetchone()[0] == -2

        # Nested updating cursors are rejected.
        with db.updating_cursor("playground1") as cursor:
            try:
                with db.updating_cursor("playground2"):
                    assert False
            except dbutils.InvalidCursorError as error:
                assert error.message == "concurrent updating cursor requested"

        # An updating cursor is disabled when its block exits.
        stored_cursor = None
        with db.updating_cursor("playground1") as cursor:
            stored_cursor = cursor
        try:
            stored_cursor.execute("UPDATE playground1 SET y=-3 WHERE x=3")
        except dbutils.InvalidCursorError as error:
            assert error.message == "disabled updating cursor used"

        db.commit()

        ro_cursor.execute("SELECT y FROM playground1 WHERE x=3")
        assert ro_cursor.fetchone()[0] == 3

        # DDL isn't recognized by analyzeQuery() and is thus rejected.
        try:
            with db.updating_cursor("playground1") as cursor:
                cursor.execute("DROP TABLE playground1")
        except dbutils.InvalidCursorError as error:
            assert error.message == "unrecognized query: DROP", error.message

        # Manual commit while an updating cursor is active is rejected.
        try:
            with db.updating_cursor("playground1") as cursor:
                cursor.execute("DELETE FROM playground1")
                db.commit()
        except dbutils.InvalidCursorError as error:
            assert error.message == "manual commit when using updating cursor", error.message

        db.commit()

        ro_cursor.execute("SELECT x, y FROM playground1")
        assert len(list(ro_cursor)) == 2

    # Test mixing of unsafe cursor and updating cursor.
    with dbutils.Database.forTesting(critic) as db:
        ro_cursor = db.readonly_cursor()
        unsafe_cursor = db.cursor()

        with db.updating_cursor("playground1") as cursor:
            cursor.execute("DELETE FROM playground1")
            cursor.executemany("INSERT INTO playground1 (x, y) VALUES (%s, %s)",
                               [(1, 1), (2, 2), (3, 3)])

        db.rollback()

        ro_cursor.execute("SELECT x, y FROM playground1")
        assert len(list(ro_cursor)) == 3

        # Can't create an updating cursor after executing an updating query
        # using an unsafe cursor.
        try:
            unsafe_cursor.execute("DELETE FROM playground1")
            with db.updating_cursor("playground1") as cursor:
                assert False
        except dbutils.InvalidCursorError as error:
            assert error.message == "mixed unsafe and updating cursors"

        db.rollback()

        # Can't commit an updating cursor after executing an updating query
        # using an unsafe cursor.
        try:
            with db.updating_cursor("playground1") as cursor:
                cursor.execute("INSERT INTO playground1 (x, y) VALUES (%s, %s)",
                               (4, 4))
                unsafe_cursor.execute("DELETE FROM playground1")
        except dbutils.InvalidCursorError as error:
            assert error.message == "mixed unsafe and updating cursors"

        db.commit()

        ro_cursor.execute("SELECT x, y FROM playground1")
        assert len(list(ro_cursor)) == 3

        # If the transaction is committed or rolled back after execution of
        # updating query using unsafe cursor, then use of updating cursor is
        # fine.
        unsafe_cursor.execute("DELETE FROM playground1")
        db.rollback()

        with db.updating_cursor("playground1") as cursor:
            cursor.execute("UPDATE playground1 SET y=-2 WHERE x=2")

        db.rollback()

        ro_cursor.execute("SELECT y FROM playground1")
        assert set(y for (y,) in ro_cursor) == set([1, -2, 3])

        # If the transaction is committed or rolled back after execution of
        # updating query using unsafe cursor, then use of updating cursor is
        # fine.
        unsafe_cursor.execute("UPDATE playground1 SET y=-1 WHERE x=1")
        db.commit()

        with db.updating_cursor("playground1") as cursor:
            cursor.execute("UPDATE playground1 SET y=-3 WHERE x=3")

        db.rollback()

        ro_cursor.execute("SELECT y FROM playground1")
        assert set(y for (y,) in ro_cursor) == set([-1, -2, -3])

    # Drop the playground table.
    with dbutils.Database.forTesting(critic) as db:
        db.cursor().execute("DROP TABLE playground1")
        db.cursor().execute("DROP TABLE playground2")
        db.commit()

    print "cursors: ok"

def analyzeQuery():
    """Unit test for Database.analyzeQuery()."""
    import dbutils

    # Trivial cases.
    assert dbutils.Database.analyzeQuery(
        "SELECT foo FROM bar WHERE fie") == ("SELECT", None)
    assert dbutils.Database.analyzeQuery(
        "UPDATE foo SET bar=10 WHERE fie") == ("UPDATE", "foo")
    assert dbutils.Database.analyzeQuery(
        "INSERT INTO foo (bar) VALUES (10)") == ("INSERT", "foo")
    assert dbutils.Database.analyzeQuery(
        "DELETE FROM foo WHERE bar AND fie") == ("DELETE", "foo")

    # Something more complex.
    assert dbutils.Database.analyzeQuery(
        """WITH allpaths (path) AS (VALUES (%s)),
                missingpaths (path) AS (SELECT allpaths.path
                                          FROM allpaths
                               LEFT OUTER JOIN files ON (MD5(files.path)=MD5(allpaths.path))
                                         WHERE files.path IS NULL)
           INSERT INTO files (path)
                SELECT path
                  FROM missingpaths""") == ("INSERT", "files")

    print "analyzeQuery: ok"
assert dbutils.Database.analyzeQuery( """WITH allpaths (path) AS (VALUES (%s)), missingpaths (path) AS (SELECT allpaths.path FROM allpaths LEFT OUTER JOIN files ON (MD5(files.path)=MD5(allpaths.path)) WHERE files.path IS NULL) INSERT INTO files (path) SELECT path FROM missingpaths""") == ("INSERT", "files") print "analyzeQuery: ok" ================================================ FILE: src/dbutils/paths.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. class InvalidFileId(Exception): def __init__(self, file_id): super(InvalidFileId, self).__init__("Invalid file id: %d" % file_id) class InvalidPath(Exception): pass class File(object): def __init__(self, file_id, path): self.id = file_id self.path = path def __int__(self): return self.id def __str__(self): return self.path @staticmethod def fromId(db, file_id): return File(file_id, describe_file(db, file_id)) @staticmethod def fromPath(db, path, insert=True): file_id = find_file(db, path, insert) if file_id is None: # Only happens when insert=False. 
raise InvalidPath("Path does not exist: %s" % path) return File(file_id, path) def find_file(db, path, insert=True): path = path.lstrip("/") if path.endswith("/"): raise InvalidPath("Trailing path separator: %r" % path) cursor = db.cursor() cursor.execute("SELECT id, path FROM files WHERE MD5(path)=MD5(%s)", (path,)) row = cursor.fetchone() if row: file_id, found_path = row assert path == found_path, "MD5 collision in files table: %r != %r" % (path, found_path) return file_id if insert: cursor.execute("INSERT INTO files (path) VALUES (%s) RETURNING id", (path,)) return cursor.fetchone()[0] return None def find_files(db, files): for file in files: file.id = find_file(db, file.path) def describe_file(db, file_id): cursor = db.cursor() cursor.execute("SELECT path FROM files WHERE id=%s", (file_id,)) row = cursor.fetchone() if not row: raise InvalidFileId(file_id) return row[0] ================================================ FILE: src/dbutils/review.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import time

import base

def countDraftItems(db, user, review):
    """Count |user|'s draft (unsubmitted) changes in |review|.

    Returns a dict of counters: reviewed/unreviewed line counts, the same
    for binary (zero-delta) files, written comments, and reopened, resolved
    and morphed comment chains."""
    cursor = db.cursor()

    # Draft reviewed/unreviewed line counts, grouped by target state.
    cursor.execute("""SELECT reviewfilechanges.to_state,
                             SUM(deleted) + SUM(inserted)
                        FROM reviewfiles
                        JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id)
                       WHERE reviewfiles.review=%s
                         AND reviewfilechanges.uid=%s
                         AND reviewfilechanges.state='draft'
                    GROUP BY reviewfilechanges.to_state""",
                   (review.id, user.id))

    reviewed = unreviewed = 0

    for to_state, lines in cursor:
        if to_state == "reviewed":
            reviewed = lines
        else:
            unreviewed = lines

    # Same thing for "binary" files: entries with no deleted/inserted lines.
    cursor.execute("""SELECT reviewfilechanges.to_state, COUNT(*)
                        FROM reviewfiles
                        JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id)
                       WHERE reviewfiles.review=%s
                         AND reviewfiles.deleted=0
                         AND reviewfiles.inserted=0
                         AND reviewfilechanges.uid=%s
                         AND reviewfilechanges.state='draft'
                    GROUP BY reviewfilechanges.to_state""",
                   (review.id, user.id))

    reviewedBinary = unreviewedBinary = 0

    for to_state, lines in cursor:
        if to_state == "reviewed":
            reviewedBinary = lines
        else:
            unreviewedBinary = lines

    # Draft comments written by the user.
    cursor.execute("SELECT count(*) FROM commentchains, comments WHERE commentchains.review=%s AND comments.chain=commentchains.id AND comments.uid=%s AND comments.state='draft'",
                   [review.id, user.id])
    comments = cursor.fetchone()[0]

    # Chains the user has a draft change reopening: either an explicit
    # addressed/closed -> open transition, or a change of addressing commit.
    cursor.execute("""SELECT DISTINCT commentchains.id
                        FROM commentchains
                        JOIN commentchainchanges ON (commentchainchanges.chain=commentchains.id)
                       WHERE commentchains.review=%s
                         AND commentchainchanges.uid=%s
                         AND commentchainchanges.state='draft'
                         AND ((commentchains.state=commentchainchanges.from_state
                           AND commentchainchanges.from_state IN ('addressed', 'closed')
                           AND commentchainchanges.to_state='open')
                          OR (commentchainchanges.from_addressed_by IS NOT NULL
                          AND commentchainchanges.to_addressed_by IS NOT NULL))""",
                   [review.id, user.id])
    reopened = len(cursor.fetchall())

    # Open chains the user has a draft open -> closed transition for.
    cursor.execute("""SELECT count(*) FROM commentchains, commentchainchanges
                       WHERE commentchains.review=%s
                         AND commentchains.state='open'
                         AND commentchainchanges.chain=commentchains.id
                         AND commentchainchanges.uid=%s
                         AND commentchainchanges.state='draft'
                         AND commentchainchanges.from_state='open'
                         AND commentchainchanges.to_state='closed'""",
                   [review.id, user.id])
    closed = cursor.fetchone()[0]

    # Chains with a draft type change (issue <-> note, "morphed".)
    cursor.execute("""SELECT count(*) FROM commentchains, commentchainchanges
                       WHERE commentchains.review=%s
                         AND commentchainchanges.chain=commentchains.id
                         AND commentchainchanges.uid=%s
                         AND commentchainchanges.state='draft'
                         AND commentchainchanges.from_type=commentchains.type
                         AND commentchainchanges.to_type!=commentchains.type""",
                   [review.id, user.id])
    morphed = cursor.fetchone()[0]

    return { "reviewedNormal": reviewed,
             "unreviewedNormal": unreviewed,
             "reviewedBinary": reviewedBinary,
             "unreviewedBinary": unreviewedBinary,
             "writtenComments": comments,
             "reopenedIssues": reopened,
             "resolvedIssues": closed,
             "morphedChains": morphed }

class NoSuchReview(base.Error):
    """Raised when a review id doesn't match any review."""

    def __init__(self, review_id):
        super(NoSuchReview, self).__init__("No such review: r/%d" % review_id)
        self.id = review_id
class ReviewState(object):
    """Snapshot of a review's progress: accepted flag, pending/reviewed
    line counts and number of open issues."""

    def __init__(self, review, accepted, pending, reviewed, issues):
        self.review = review
        self.accepted = accepted
        self.pending = pending
        self.reviewed = reviewed
        self.issues = issues

    def getPercentReviewed(self):
        if self.pending + self.reviewed:
            return 100.0 * self.reviewed / (self.pending + self.reviewed)
        else:
            # No reviewable changes at all; report an arbitrary 50 %.
            return 50.0

    def getProgress(self):
        """Return the review progress as a human-readable percentage."""
        if self.pending + self.reviewed == 0:
            return "?? %"
        percent = self.getPercentReviewed()
        if int(percent) > 0 and (percent < 99.0 or percent == 100.0):
            return "%d %%" % int(percent)
        elif percent > 0:
            # Between 99 and 100 %: show just enough decimals to be
            # distinguishable from 100 %.
            precision = 1
            while precision < 10:
                progress = ("%%.%df" % precision) % percent
                if progress[-1] != '0':
                    break
                precision += 1
            return progress + " %"
        else:
            return "No progress"

    def getIssues(self):
        if self.issues:
            return "%d issue%s" % (self.issues, "s" if self.issues > 1 else "")
        else:
            return ""

    def __str__(self):
        if self.review.state == 'dropped':
            return "Dropped..."
        elif self.review.state == 'closed':
            return "Finished!"
        elif self.accepted:
            return "Accepted!"
        else:
            progress = self.getProgress()
            issues = self.getIssues()
            if issues:
                return "%s and %s" % (progress, issues)
            else:
                return progress

class ReviewRebase(object):
    """One recorded rebase of a review branch."""

    def __init__(self, review, old_head, new_head, old_upstream, new_upstream,
                 user, equivalent_merge, replayed_rebase):
        self.review = review
        self.old_head = old_head
        self.new_head = new_head
        self.old_upstream = old_upstream
        self.new_upstream = new_upstream
        self.user = user
        self.equivalent_merge = equivalent_merge
        self.replayed_rebase = replayed_rebase

class ReviewRebases(list):
    """All finished rebases of a review, indexed by old and new head."""

    def __init__(self, db, review):
        import gitutils
        from dbutils import User

        self.__old_head_map = {}
        self.__new_head_map = {}

        cursor = db.cursor()
        cursor.execute("""SELECT old_head, new_head, old_upstream, new_upstream,
                                 uid, equivalent_merge, replayed_rebase
                            FROM reviewrebases
                           WHERE review=%s
                             AND new_head IS NOT NULL""",
                       (review.id,))

        for (old_head_id, new_head_id, old_upstream_id, new_upstream_id,
             user_id, equivalent_merge_id, replayed_rebase_id) in cursor:
            old_head = gitutils.Commit.fromId(db, review.repository, old_head_id)
            new_head = gitutils.Commit.fromId(db, review.repository, new_head_id)
            if old_upstream_id is not None and new_upstream_id is not None:
                old_upstream = gitutils.Commit.fromId(
                    db, review.repository, old_upstream_id)
                new_upstream = gitutils.Commit.fromId(
                    db, review.repository, new_upstream_id)
            else:
                old_upstream = new_upstream = None
            if equivalent_merge_id:
                equivalent_merge = gitutils.Commit.fromId(
                    db, review.repository, equivalent_merge_id)
            else:
                equivalent_merge = None
            if replayed_rebase_id:
                replayed_rebase = gitutils.Commit.fromId(
                    db, review.repository, replayed_rebase_id)
            else:
                replayed_rebase = None
            user = User.fromId(db, user_id)
            rebase = ReviewRebase(review, old_head, new_head, old_upstream,
                                  new_upstream, user, equivalent_merge,
                                  replayed_rebase)
            self.append(rebase)
            self.__old_head_map[old_head] = rebase
            self.__new_head_map[new_head] = rebase
            if equivalent_merge:
                # The merge stands in for the old head in the actual log.
                self.__old_head_map[equivalent_merge] = rebase

        # NOTE(review): indentation was lost in extraction; this block is
        # assumed to sit after the loop, at method level — confirm upstream.
        if review.performed_rebase:
            self.__old_head_map[review.performed_rebase.old_head] = review.performed_rebase
            self.__new_head_map[review.performed_rebase.new_head] = review.performed_rebase

    def fromOldHead(self, commit):
        """Return the rebase whose old head is |commit|, or None."""
        return self.__old_head_map.get(commit)

    def fromNewHead(self, commit):
        """Return the rebase whose new head is |commit|, or None."""
        return self.__new_head_map.get(commit)

class ReviewTrackedBranch(object):
    """Remote branch tracked by a review's local branch."""

    def __init__(self, review, trackedbranch_id, remote, name, disabled):
        self.id = trackedbranch_id
        self.review = review
        self.remote = remote
        self.name = name
        self.disabled = disabled

class Review(object):
    # NOTE: the class continues with further methods below.

    def __init__(self, review_id, owners, review_type, branch, state, serial,
                 summary, description, applyfilters, applyparentfilters):
        self.id = review_id
        self.owners = owners
        self.type = review_type
        self.repository = branch.repository
        self.branch = branch
        self.state = state
        self.serial = serial
        self.summary = summary
        self.description = description
        self.reviewers = []
        self.watchers = {}
        self.commentchains = None
        self.applyfilters = applyfilters
        self.applyparentfilters = applyparentfilters
        self.filters = None
        self.relevant_files = None
        self.draft_status = None
        self.performed_rebase = None

    @staticmethod
    def isAccepted(db, review_id):
        """A review is accepted when no pending file changes and no open
        issues remain."""
        cursor = db.cursor()

        cursor.execute("SELECT 1 FROM reviewfiles WHERE review=%s AND state='pending' LIMIT 1",
                       (review_id,))
        if cursor.fetchone():
            return False

        cursor.execute("SELECT 1 FROM commentchains WHERE review=%s AND type='issue' AND state='open' LIMIT 1",
                       (review_id,))
        if cursor.fetchone():
            return False

        return True

    def accepted(self, db):
        """Like isAccepted(), but only for reviews still open."""
        if self.state != 'open':
            return False
        else:
            return Review.isAccepted(db, self.id)
== "pending": pending = count else: reviewed = count cursor.execute("""SELECT count(id) FROM commentchains WHERE review=%s AND type='issue' AND state='open'""", (self.id,)) issues = cursor.fetchone()[0] return ReviewState(self, self.accepted(db), pending, reviewed, issues) def setPerformedRebase(self, old_head, new_head, old_upstream, new_upstream, user, equivalent_merge, replayed_rebase): self.performed_rebase = ReviewRebase(self, old_head, new_head, old_upstream, new_upstream, user, equivalent_merge, replayed_rebase) def getReviewRebases(self, db): return ReviewRebases(db, self) def getTrackedBranch(self, db): cursor = db.cursor() cursor.execute("""SELECT trackedbranches.id, remote, remote_name, disabled FROM trackedbranches JOIN branches ON (trackedbranches.repository=branches.repository AND trackedbranches.local_name=branches.name) JOIN reviews ON (branches.id=reviews.branch) WHERE reviews.id=%s""", (self.id,)) for trackedbranch_id, remote, name, disabled in cursor: return ReviewTrackedBranch(self, trackedbranch_id, remote, name, disabled) def getCommitSet(self, db): import gitutils import log.commitset cursor = db.cursor() cursor.execute("""SELECT DISTINCT commits.id, commits.sha1 FROM commits JOIN changesets ON (changesets.child=commits.id) JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) WHERE reviewchangesets.review=%s""", (self.id,)) commits = [] for commit_id, commit_sha1 in cursor: commits.append(gitutils.Commit.fromSHA1(db, self.repository, commit_sha1, commit_id)) return log.commitset.CommitSet(commits) def containsCommit(self, db, commit, include_head_and_tails=False, include_actual_log=False): import gitutils commit_id = None commit_sha1 = None if isinstance(commit, gitutils.Commit): commit_id = commit.id commit_sha1 = commit.sha1 elif isinstance(commit, str): commit_sha1 = self.repository.revparse(commit) commit = None elif isinstance(commit, int): commit_id = commit commit = None else: raise TypeError cursor = db.cursor() if 
commit_id is not None: cursor.execute("""SELECT 1 FROM reviewchangesets JOIN changesets ON (id=changeset) WHERE reviewchangesets.review=%s AND changesets.child=%s AND changesets.type!='conflicts'""", (self.id, commit_id)) else: cursor.execute("""SELECT 1 FROM reviewchangesets JOIN changesets ON (changesets.id=reviewchangesets.changeset) JOIN commits ON (commits.id=changesets.child) WHERE reviewchangesets.review=%s AND changesets.type!='conflicts' AND commits.sha1=%s""", (self.id, commit_sha1)) if cursor.fetchone() is not None: return True if include_head_and_tails: head_and_tails = set([self.branch.getHead(db)]) commitset = self.getCommitSet(db) if commitset: head_and_tails |= commitset.getTails() if commit_sha1 is None: if commit is None: commit = gitutils.Commit.fromId(db, self.repository, commit_id) commit_sha1 = commit.sha1 if commit_sha1 in head_and_tails: return True if include_actual_log: if commit_id is not None: cursor.execute("""SELECT 1 FROM reachable JOIN branches ON (branches.id=reachable.branch) JOIN reviews ON (reviews.branch=branches.id) WHERE reachable.commit=%s AND reviews.id=%s""", (commit_id, self.id)) else: cursor.execute("""SELECT 1 FROM commits JOIN reachable ON (reachable.commit=commits.id) JOIN branches ON (branches.id=reachable.branch) JOIN reviews ON (reviews.branch=branches.id) WHERE commits.sha1=%s AND reviews.id=%s""", (commit_sha1, self.id)) if cursor.fetchone() is not None: return True return False def getJS(self): return "var review = critic.review = { id: %d, branch: { id: %d, name: %r }, owners: [ %s ], serial: %d };" % (self.id, self.branch.id, self.branch.name, ", ".join(owner.getJSConstructor() for owner in self.owners), self.serial) def getETag(self, db, user=None): import configuration cursor = db.cursor() etag = "" if configuration.debug.IS_DEVELOPMENT: cursor.execute("SELECT installed_at FROM systemidentities WHERE name=%s", (configuration.base.SYSTEM_IDENTITY,)) installed_at = cursor.fetchone()[0] etag += "install%s." 
% time.mktime(installed_at.timetuple()) if user and not user.isAnonymous(): etag += "user%d." % user.id etag += "review%d.serial%d" % (self.id, self.serial) if user: items = self.getDraftStatus(db, user) if any(items.values()): etag += ".draft%d" % hash(tuple(sorted(items.items()))) cursor.execute("SELECT id FROM reviewrebases WHERE review=%s AND uid=%s AND new_head IS NULL", (self.id, user.id)) row = cursor.fetchone() if row: etag += ".rebase%d" % row[0] return '"%s"' % etag def getURL(self, db, user=None, indent=0, separator="\n"): import dbutils indent = " " * indent if user: url_prefixes = user.getCriticURLs(db) else: url_prefixes = [dbutils.getURLPrefix(db)] return separator.join(["%s%s/r/%d" % (indent, url_prefix, self.id) for url_prefix in url_prefixes]) def getRecipients(self, db): from dbutils import User cursor = db.cursor() cursor.execute("SELECT uid, include FROM reviewrecipientfilters WHERE review=%s", (self.id,)) default_include = True included = set(owner.id for owner in self.owners) excluded = set() for uid, include in cursor: if uid is None: default_include = include elif include: included.add(uid) elif uid not in self.owners: excluded.add(uid) cursor.execute("SELECT uid FROM reviewusers WHERE review=%s", (self.id,)) recipients = [] for (user_id,) in cursor: if user_id in excluded: continue elif user_id not in included and not default_include: continue user = User.fromId(db, user_id) if user.status != "retired": recipients.append(user) return recipients def getDraftStatus(self, db, user): if self.draft_status is None: self.draft_status = countDraftItems(db, user, self) return self.draft_status def incrementSerial(self, db): self.serial += 1 db.cursor().execute("UPDATE reviews SET serial=%s WHERE id=%s", [self.serial, self.id]) def scheduleBranchArchival(self, db, delay=None): import dbutils # First, cancel current scheduled archival, if there is one. 
self.cancelScheduledBranchArchival(db) # If review is not closed or dropped, don't schedule a branch archival. # Also don't schedule one if the branch has already been archived. if self.state not in ("closed", "dropped") or self.branch.archived: return if delay is None: # Configuration policy: # # Any owner of a review can, by having changed the relevant # preference setting, increase the time before a review branch is # archived, or disable archival entirely, but they can't make it # happen sooner than the system or repository default, or what any # other owner has requested. # Find configured value for each owner, and also the per-repository # (or per-system) default, in case each owner has changed the # setting. preference_item = "review.branchArchiveDelay." + self.state repository_default = dbutils.User.fetchPreference( db, preference_item, repository=self.repository) delays = set([repository_default]) for owner in self.owners: delays.add(owner.getPreference(db, preference_item, repository=self.repository)) # If configured to zero (by any owner,) don't schedule a branch # archival. if min(delays) <= 0: return # Otherwise, use maximum configured value for any owner. 
            delay = max(delays)

        cursor = db.cursor()
        # The INTERVAL operand is passed as a pre-formatted "<N> DAYS" string
        # parameter rather than interpolated into the SQL itself.
        cursor.execute("""INSERT INTO scheduledreviewbrancharchivals (review, deadline) VALUES (%s, NOW() + INTERVAL %s)""", (self.id, "%d DAYS" % delay))

        # Return the delay (in days) actually used, so callers can report it.
        return delay

    def cancelScheduledBranchArchival(self, db):
        # Remove any pending archival entry for this review; harmless no-op if
        # none is scheduled.
        cursor = db.cursor()
        cursor.execute("""DELETE FROM scheduledreviewbrancharchivals WHERE review=%s""", (self.id,))

    def close(self, db, user):
        # Mark the review closed by 'user', bump the serial, and schedule the
        # review branch for archival.
        self.serial += 1
        self.state = "closed"
        db.cursor().execute("UPDATE reviews SET state='closed', serial=%s, closed_by=%s WHERE id=%s", (self.serial, user.id, self.id))
        self.scheduleBranchArchival(db)

    def drop(self, db, user):
        # Mark the review dropped by 'user', bump the serial, and schedule the
        # review branch for archival.
        self.serial += 1
        self.state = "dropped"
        db.cursor().execute("UPDATE reviews SET state='dropped', serial=%s, closed_by=%s WHERE id=%s", (self.serial, user.id, self.id))
        self.scheduleBranchArchival(db)

    def reopen(self, db, user):
        # Reopen a closed/dropped review, resurrecting its branch if it has
        # already been archived, and cancel any pending archival.
        #
        # NOTE(review): unlike close()/drop(), self.state is not updated to
        # "open" here, only the database row -- verify that callers reload the
        # Review object afterwards.
        self.serial += 1
        if self.branch.archived:
            self.branch.resurrect(db)
        db.cursor().execute("UPDATE reviews SET state='open', serial=%s, closed_by=NULL WHERE id=%s", (self.serial, self.id))
        self.cancelScheduledBranchArchival(db)

    def disableTracking(self, db):
        # Stop mirroring the remote branch that this review's branch tracks.
        db.cursor().execute("UPDATE trackedbranches SET disabled=TRUE WHERE repository=%s AND local_name=%s", (self.repository.id, self.branch.name))

    def setSummary(self, db, summary):
        # Update the review's one-line summary and bump the serial.
        self.serial += 1
        self.summary = summary
        db.cursor().execute("UPDATE reviews SET summary=%s, serial=%s WHERE id=%s", [self.summary, self.serial, self.id])

    def setDescription(self, db, description):
        # Update the review's long description and bump the serial.
        self.serial += 1
        self.description = description
        db.cursor().execute("UPDATE reviews SET description=%s, serial=%s WHERE id=%s", [self.description, self.serial, self.id])

    def addOwner(self, db, owner):
        # Add 'owner' as a review owner (no-op if already one) and, below,
        # subscribe them to the review's tracked branch, if any.
        if not owner in self.owners:
            self.serial += 1
            self.owners.append(owner)

            cursor = db.cursor()
            cursor.execute("SELECT 1 FROM reviewusers WHERE review=%s AND uid=%s", (self.id, owner.id))

            if cursor.fetchone():
                cursor.execute("UPDATE reviewusers SET owner=TRUE WHERE review=%s AND uid=%s", (self.id, owner.id))
            else:
cursor.execute("INSERT INTO reviewusers (review, uid, owner) VALUES (%s, %s, TRUE)", (self.id, owner.id)) cursor.execute("SELECT id FROM trackedbranches WHERE repository=%s AND local_name=%s", (self.repository.id, self.branch.name)) row = cursor.fetchone() if row: trackedbranch_id = row[0] cursor.execute("INSERT INTO trackedbranchusers (branch, uid) VALUES (%s, %s)", (trackedbranch_id, owner.id)) def removeOwner(self, db, owner): if owner in self.owners: self.serial += 1 self.owners.remove(owner) cursor = db.cursor() cursor.execute("UPDATE reviewusers SET owner=FALSE WHERE review=%s AND uid=%s", (self.id, owner.id)) cursor.execute("SELECT id FROM trackedbranches WHERE repository=%s AND local_name=%s", (self.repository.id, self.branch.name)) row = cursor.fetchone() if row: trackedbranch_id = row[0] cursor.execute("DELETE FROM trackedbranchusers WHERE branch=%s AND uid=%s", (trackedbranch_id, owner.id)) def getReviewFilters(self, db): cursor = db.cursor() cursor.execute("SELECT uid, path, type, NULL FROM reviewfilters WHERE review=%s", (self.id,)) return cursor.fetchall() or None def getFilteredTails(self, db): import log.commitset commitset = log.commitset.CommitSet(self.branch.getCommits(db)) return commitset.getFilteredTails(self.branch.repository) def getRelevantFiles(self, db, user): if not self.filters: from reviewing.filters import Filters self.filters = Filters() self.filters.setFiles(db, review=self) self.filters.load(db, review=self) self.relevant_files = self.filters.getRelevantFiles() cursor = db.cursor() cursor.execute("SELECT assignee, file FROM fullreviewuserfiles WHERE review=%s", (self.id,)) for user_id, file_id in cursor: self.relevant_files.setdefault(user_id, set()).add(file_id) return self.relevant_files.get(user.id, set()) def getUserAssociation(self, db, user): cursor = db.cursor() association = [] if user in self.owners: association.append("owner") cursor.execute("""SELECT 1 FROM reviewchangesets JOIN changesets ON 
(changesets.id=reviewchangesets.changeset) JOIN commits ON (commits.id=changesets.child) JOIN gitusers ON (gitusers.id=commits.author_gituser) JOIN usergitemails USING (email) WHERE reviewchangesets.review=%s AND usergitemails.uid=%s""", (self.id, user.id)) if cursor.fetchone(): association.append("author") cursor.execute("SELECT COUNT(*) FROM fullreviewuserfiles WHERE review=%s AND assignee=%s", (self.id, user.id)) if cursor.fetchone()[0] != 0: association.append("reviewer") elif user not in self.owners: cursor.execute("SELECT 1 FROM reviewusers WHERE review=%s AND uid=%s", (self.id, user.id)) if cursor.fetchone(): association.append("watcher") if not association: association.append("none") return ", ".join(association) @staticmethod def fromId(db, review_id, branch=None, profiler=None): from dbutils import User cursor = db.cursor() cursor.execute("SELECT type, branch, state, serial, summary, description, applyfilters, applyparentfilters FROM reviews WHERE id=%s", [review_id]) row = cursor.fetchone() if not row: raise NoSuchReview(review_id) type, branch_id, state, serial, summary, description, applyfilters, applyparentfilters = row if profiler: profiler.check("Review.fromId: basic") if branch is None: from dbutils import Branch branch = Branch.fromId(db, branch_id, load_review=False, profiler=profiler) cursor.execute("SELECT uid FROM reviewusers WHERE review=%s AND owner", (review_id,)) owners = User.fromIds(db, [user_id for (user_id,) in cursor]) if profiler: profiler.check("Review.fromId: owners") review = Review(review_id, owners, type, branch, state, serial, summary, description, applyfilters, applyparentfilters) branch.review = review # Reviewers: all users that have at least one review file assigned to them. 
cursor.execute("""SELECT DISTINCT uid, assignee IS NOT NULL, type FROM reviewusers LEFT OUTER JOIN fullreviewuserfiles ON (fullreviewuserfiles.review=reviewusers.review AND assignee=uid) WHERE reviewusers.review=%s""", (review_id,)) reviewers = [] watchers = [] watcher_types = {} for user_id, is_reviewer, user_type in cursor.fetchall(): if is_reviewer: reviewers.append(user_id) elif user_id not in review.owners: watchers.append(user_id) watcher_types[user_id] = user_type review.reviewers = User.fromIds(db, reviewers) for watcher in User.fromIds(db, watchers): review.watchers[watcher] = watcher_types[watcher] if profiler: profiler.check("Review.fromId: users") return review @staticmethod def fromBranch(db, branch): if branch: cursor = db.cursor() cursor.execute("SELECT id FROM reviews WHERE branch=%s", [branch.id]) row = cursor.fetchone() if not row: return None else: return Review.fromId(db, row[0], branch) else: return None @staticmethod def fromName(db, repository, name): from dbutils import Branch return Review.fromBranch(db, Branch.fromName(db, repository, name)) @staticmethod def fromArgument(db, argument): try: return Review.fromId(db, int(argument)) except: from dbutils import Branch branch = Branch.fromName(db, str(argument)) if not branch: return None return Review.fromBranch(db, branch) @staticmethod def fromAPI(api_review): return Review.fromId(api_review.critic.database, api_review.id) ================================================ FILE: src/dbutils/session.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. class Session(object): def __init__(self, critic): self.critic = critic self.__atexit = [] self.storage = { "Repository": {}, "User": {}, "Commit": {}, "CommitUserTime": {}, "Timezones": {} } self.profiling = {} self.__user = None self.__authentication_labels = set() self.__profiles = set() def atexit(self, fn): self.__atexit.append(fn) def close(self): for fn in self.__atexit: try: fn(self) except: pass self.__atexit = [] def disableProfiling(self): self.profiling = None def recordProfiling(self, item, duration, rows=None, repetitions=1): if self.profiling is not None: count, accumulated_ms, maximum_ms, accumulated_rows, maximum_rows = self.profiling.get(item, (0, 0.0, 0.0, None, None)) count += repetitions accumulated_ms += 1000 * duration maximum_ms = max(maximum_ms, 1000 * duration) if rows is not None: if accumulated_rows is None: accumulated_rows = 0 if maximum_rows is None: maximum_rows = 0 accumulated_rows += rows maximum_rows = max(maximum_rows, rows) self.profiling[item] = count, accumulated_ms, maximum_ms, accumulated_rows, maximum_rows @property def user(self): return self.__user @property def authentication_labels(self): return self.__authentication_labels @property def profiles(self): return frozenset(self.__profiles) def setUser(self, user, authentication_labels=()): import auth import api assert not self.__user or self.__user.isAnonymous() self.__user = user self.__authentication_labels.update(authentication_labels) self.__profiles.add(auth.AccessControlProfile.forUser( self, user, self.__authentication_labels)) if self.critic and not 
(user.isAnonymous() or user.isSystem()): self.critic.setActualUser(api.user.fetch(self.critic, user_id=user.id)) def addProfile(self, profile): self.__profiles.add(profile) ================================================ FILE: src/dbutils/system.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Martin Olsson # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. def getInstalledSHA1(db): import configuration cursor = db.cursor() cursor.execute("SELECT installed_sha1 FROM systemidentities WHERE name=%s", (configuration.base.SYSTEM_IDENTITY,)) return cursor.fetchone()[0] def getURLPrefix(db, user=None): import configuration cursor = db.cursor() cursor.execute("""SELECT anonymous_scheme, authenticated_scheme, hostname FROM systemidentities WHERE name=%s""", (configuration.base.SYSTEM_IDENTITY,)) anonymous_scheme, authenticated_scheme, hostname = cursor.fetchone() if user and not user.isAnonymous(): scheme = authenticated_scheme else: scheme = anonymous_scheme return "%s://%s" % (scheme, hostname) def getAdministratorContacts(db, indent=0, as_html=False): import dbutils administrators = dbutils.User.withRole(db, "administrator") # Sort by id, IOW, by user creation time. Probably gives "primary" # administrator first and auxiliary administrators second, but might also # just be arbitrary. If nothing else, it makes the order stable. 
administrators = sorted(administrators, key=lambda user: user.id) # Skip administrators with no email addresses, since those are unhelpful in # this context. administrators = filter(lambda user: user.email, administrators) if as_html: result = "the system administrator" if not administrators: return result if len(administrators) > 1: result += "s" result += " (%s)" mailto_links = \ [("%(fullname)s" % { "email": user.email, "fullname": user.fullname }) for user in administrators] if len(mailto_links) == 1: return result % mailto_links[0] else: return result % ("one of %s or %s" % (", ".join(mailto_links[:-1]), mailto_links[-1])) else: if not administrators: return "" administrators = ["%s <%s>" % (user.fullname, user.email) for user in administrators] prefix = " " * indent return prefix + ("\n" + prefix).join(administrators) ================================================ FILE: src/dbutils/timezones.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import time import datetime def loadTimezones(db): """ Insert (interesting) timezones from 'pg_timezone_names' into 'timezones' The 'pg_timezone_names' table contains all the information we want (but typically unnecessarily many different timezones) but is very slow to query, so can't be used during normal operations. 
""" import configuration def add(name, abbrev, utc_offset): cursor.execute("""INSERT INTO timezones (name, abbrev, utc_offset) VALUES (%s, %s, %s)""", (name, abbrev, utc_offset)) if configuration.database.DRIVER == "postgresql": cursor = db.cursor() cursor.execute("DELETE FROM timezones") add("Universal/UTC", "UTC", datetime.timedelta()) cursor.execute("SELECT name, abbrev, utc_offset FROM pg_timezone_names") for full_name, abbrev, utc_offset in cursor.fetchall(): region, _, name = full_name.partition("/") if region not in ("posix", "Etc") and name and "/" not in name: add(full_name, abbrev, utc_offset) db.commit() def updateTimezones(db): """ Update UTC offses in 'timezones' with values in 'pg_timezone_names' The UTC offsets in 'pg_timezone_names' are DST adjusted (for the timezones we care about) so we need to copy the values regularly to keep the cached values in 'timezones' up-to-date. """ import configuration if configuration.database.DRIVER == "postgresql": cursor = db.cursor() cursor.execute("""UPDATE timezones SET utc_offset=pg_timezone_names.utc_offset FROM pg_timezone_names WHERE pg_timezone_names.name=timezones.name""") db.commit() def __fetchTimezones(db): groups = db.storage["Timezones"].get(None, {}) if not groups: cursor = db.cursor() cursor.execute("SELECT name, abbrev, utc_offset FROM timezones") for full_name, abbrev, utc_offset in cursor.fetchall(): group, name = full_name.split("/") if isinstance(utc_offset, int): utc_offset = datetime.timedelta(utc_offset) groups.setdefault(group, {})[name] = (abbrev, utc_offset) db.storage["Timezones"][None] = groups return groups def sortedTimezones(db): groups = __fetchTimezones(db) result = [] for key in sorted(groups.keys()): result.append((key, sorted([(name, abbrev, utc_offset) for name, (abbrev, utc_offset) in groups[key].items()]))) return result def __fetchUTCOffset(db, timezone): utc_offset = db.storage["Timezones"].get(timezone) if utc_offset is None: groups = db.storage["Timezones"].get(None) if 
groups: group, name = timezone.split("/") utc_offset = groups[group][name][2] else: cursor = db.cursor() cursor.execute("SELECT utc_offset FROM timezones WHERE name=%s", (timezone,)) row = cursor.fetchone() if row: utc_offset = row[0] else: return 0 db.storage["Timezones"][timezone] = utc_offset return utc_offset def adjustTimestamp(db, timestamp, timezone): return timestamp + __fetchUTCOffset(db, timezone) def formatTimestamp(db, timestamp, timezone): utc_offset = __fetchUTCOffset(db, timezone) seconds = utc_offset.total_seconds() offset = " %s%02d:%02d" % ("-" if seconds < 0 else "+", seconds / 3600, (seconds % 3600) / 60) return time.strftime("%Y-%m-%d %H:%M", (timestamp + utc_offset).timetuple()) + offset def validTimezone(db, timezone): cursor = db.cursor() cursor.execute("SELECT 1 FROM timezones WHERE name=%s", (timezone,)) return bool(cursor.fetchone()) ================================================ FILE: src/dbutils/unittest.py ================================================ def independence(): # Simply check that dbutils can be imported. This is run in a test flagged # as "local" since we want dbutils to be possible to import in standalone # unit tests. # # Hardly anything in dbutils can actually be used, of course, but that's not # a problem; the unit tests simply need to make sure not to depend on that. import dbutils print "independence: ok" ================================================ FILE: src/dbutils/user.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import os import base def _preferenceCacheKey(item, repository, filter_id): cache_key = item if filter_id is not None: cache_key += ":f%d" % filter_id if repository is not None: cache_key += ":r%d" % repository.id return cache_key class InvalidUserId(base.Error): def __init__(self, user_id): super(InvalidUserId, self).__init__("Invalid user id: %d" % user_id) self.user_id = user_id class NoSuchUser(base.Error): def __init__(self, name): super(NoSuchUser, self).__init__("No such user: %s" % name) self.name = name class User(object): def __init__(self, user_id, name, fullname, status, email, email_verified): self.id = user_id self.name = name self.email = email self.email_verified = email_verified self.fullname = fullname self.status = status self.preferences = {} self.__resources = {} def __eq__(self, other): if self.isAnonymous(): return False elif isinstance(other, User): if other.isAnonymous(): return False else: return self.id == other.id elif isinstance(other, int): return self.id == other elif isinstance(other, basestring): return self.name == other else: raise base.Error("invalid comparison") def __ne__(self, other): return not (self == other) def __int__(self): assert not self.isAnonymous() return self.id def __str__(self): assert not self.isAnonymous() return self.name def __repr__(self): return "User(%r, %r, %r, %r)" % (self.id, self.name, self.email, self.fullname) def __hash__(self): return hash(self.id) def isAnonymous(self): return self.status == 'anonymous' def isSystem(self): return self.status == 'system' def hasRole(self, db, role): cursor = 
db.cursor() cursor.execute("SELECT 1 FROM userroles WHERE uid=%s AND role=%s", (self.id, role)) return bool(cursor.fetchone()) def loadPreferences(self, db): if not self.preferences: cursor = db.cursor() cursor.execute("""SELECT uid, item, type, integer, string FROM preferences JOIN userpreferences USING (item) WHERE (uid=%s OR uid IS NULL) AND repository IS NULL AND filter IS NULL""", (self.id,)) rows = sorted(cursor, key=lambda row: row[0], reverse=True) for _, item, preference_type, integer, string in rows: cache_key = _preferenceCacheKey(item, None, None) if cache_key not in self.preferences: if preference_type == "boolean": self.preferences[cache_key] = bool(integer) elif preference_type == "integer": self.preferences[cache_key] = integer else: self.preferences[cache_key] = string @staticmethod def fetchPreference(db, item, user=None, repository=None, filter_id=None): cursor = db.cursor() cursor.execute("SELECT type FROM preferences WHERE item=%s", (item,)) row = cursor.fetchone() if not row: raise base.ImplementationError("invalid preference: %s" % item) preference_type = row[0] arguments = [item] where = ["item=%s"] if preference_type in ("boolean", "integer"): columns = ["integer"] else: columns = ["string"] if user is not None and not user.isAnonymous(): arguments.append(user.id) where.append("uid=%s OR uid IS NULL") columns.append("uid") else: where.append("uid IS NULL") if repository is not None: arguments.append(repository.id) where.append("repository=%s OR repository IS NULL") columns.append("repository") else: where.append("repository IS NULL") if filter_id is not None: arguments.append(filter_id) where.append("filter=%s OR filter IS NULL") columns.append("filter") else: where.append("filter IS NULL") query = ("""SELECT %(columns)s FROM userpreferences WHERE %(where)s""" % { "columns": ", ".join(columns), "where": " AND ".join("(%s)" % condition for condition in where) }) cursor.execute(query, arguments) rows = cursor.fetchall() if not rows: raise 
base.ImplementationError( "invalid preference read: %s (no value found)" % item) value = sorted(rows, key=lambda row: row[1:])[-1][0] if preference_type == "boolean": return bool(value) return value @staticmethod def storePreference(db, item, value, user=None, repository=None, filter_id=None): # A preference value can be set for either a repository or a filter, but # not for both at the same time. A filter implies a repository anyway, # so there would be no point. assert repository is None or filter_id is None assert filter_id is None or user is not None # If all are None, we'd be deleting the global default and not setting a # new one, which would be bad. if value is None and user is None \ and repository is None and filter is None: raise base.ImplementationError("attempted to delete global default") if User.fetchPreference(db, item, user, repository, filter_id) != value: cursor = db.cursor() arguments = [item] where = ["item=%s"] user_id = repository_id = None if user is not None: user_id = user.id arguments.append(user_id) where.append("uid=%s") else: where.append("uid IS NULL") if repository is not None: repository_id = repository.id arguments.append(repository_id) where.append("repository=%s") else: where.append("repository IS NULL") if filter_id is not None: arguments.append(filter_id) where.append("filter=%s") else: where.append("filter IS NULL") query = ("DELETE FROM userpreferences WHERE %s" % (" AND ".join("(%s)" % condition for condition in where))) cursor.execute(query, arguments) if value is not None: cursor.execute("SELECT type FROM preferences WHERE item=%s", (item,)) (value_type,) = cursor.fetchone() integer = string = None if value_type == "boolean": value = bool(value) integer = int(value) elif value_type == "integer": integer = int(value) else: string = str(value) cursor.execute("""INSERT INTO userpreferences (item, uid, repository, filter, integer, string) VALUES (%s, %s, %s, %s, %s, %s)""", (item, user_id, repository_id, filter_id, integer, 
string)) if user is not None: cache_key = _preferenceCacheKey(item, repository, filter_id) if cache_key in user.preferences: del user.preferences[cache_key] return True else: return False def getPreference(self, db, item, repository=None, filter_id=None): cache_key = _preferenceCacheKey(item, repository, filter_id) if cache_key not in self.preferences: self.preferences[cache_key] = User.fetchPreference( db, item, self, repository, filter_id) return self.preferences[cache_key] def setPreference(self, db, item, value, repository=None, filter_id=None): return User.storePreference(db, item, value, self, repository, filter_id) def getDefaultRepository(self, db): import auth import gitutils default_repo = self.getPreference(db, "defaultRepository") if not default_repo: return None try: return gitutils.Repository.fromName(db, default_repo) except auth.AccessDenied: return None def getResource(self, db, name): import configuration if name in self.__resources: return self.__resources[name] cursor = db.cursor() cursor.execute("SELECT revision, source FROM userresources WHERE uid=%s AND name=%s ORDER BY revision DESC FETCH FIRST ROW ONLY", (self.id, name)) row = cursor.fetchone() if row and row[1] is not None: resource = self.__resources[name] = ("\"critic.rev.%d\"" % row[0], row[1]) return resource path = os.path.join(configuration.paths.INSTALL_DIR, "resources", name) mtime = os.stat(path).st_mtime resource = self.__resources[name] = ("\"critic.mtime.%d\"" % mtime, open(path).read()) return resource def adjustTimestamp(self, db, timestamp): import dbutils.timezones return dbutils.timezones.adjustTimestamp(db, timestamp, self.getPreference(db, "timezone")) def formatTimestamp(self, db, timestamp): import dbutils.timezones return dbutils.timezones.formatTimestamp(db, timestamp, self.getPreference(db, "timezone")) def getCriticURLs(self, db): url_types = self.getPreference(db, 'email.urlType').split(",") cursor = db.cursor() cursor.execute("""SELECT key, anonymous_scheme, 
authenticated_scheme, hostname FROM systemidentities""") url_prefixes = dict((row[0], row[1:]) for row in cursor) urls = [] for url_type in url_types: if url_prefixes.has_key(url_type): anonymous_scheme, authenticated_scheme, hostname = url_prefixes[url_type] if self.isAnonymous(): scheme = anonymous_scheme else: scheme = authenticated_scheme urls.append("%s://%s" % (scheme, hostname)) return urls def getFirstName(self): return self.fullname.split(" ")[0] def getJSConstructor(self, db=None): from htmlutils import jsify if self.isAnonymous(): return "new User(null, null, null, null, null, { ui: {} })" if db: options = ("{ ui: { keyboardShortcuts: %s, resolveIssueWarning: %s, convertIssueToNote: %s, asynchronousReviewMarking: %s } }" % ("true" if self.getPreference(db, "ui.keyboardShortcuts") else "false", "true" if self.getPreference(db, "ui.resolveIssueWarning") else "false", "true" if self.getPreference(db, "ui.convertIssueToNote") else "false", "true" if self.getPreference(db, "ui.asynchronousReviewMarking") else "false")) else: options = "{ ui: {} }" return "new User(%d, %s, %s, %s, %s, %s)" % (self.id, jsify(self.name), jsify(self.email), jsify(self.fullname), jsify(self.status), options) def getJS(self, db=None, name="user"): return "var %s = %s;" % (name, self.getJSConstructor(db)) def getJSON(self): return { "id": self.id, "name": self.name, "email": self.email, "displayName": self.fullname } def getAbsence(self, db): cursor = db.cursor() cursor.execute("SELECT until FROM userabsence WHERE uid=%s", (self.id,)) row = cursor.fetchone() if row[0] is None: return "absent" else: return "absent until %04d-%02d-%02d" % (row[0].year, row[0].month, row[0].day) def hasGitEmail(self, db, address): cursor = db.cursor() cursor.execute("""SELECT 1 FROM usergitemails WHERE email=%s AND uid=%s""", (address, self.id)) return bool(cursor.fetchone()) @staticmethod def cache(db, user): storage = db.storage["User"] storage[user.id] = user if user.name: storage["n:" + user.name] 
= user if user.email: storage["e:" + user.email] = user return user @staticmethod def makeAnonymous(): return User(None, None, None, 'anonymous', None, None) @staticmethod def makeSystem(): import configuration return User(0, configuration.base.SYSTEM_USER_NAME, "Critic System", "system", configuration.base.SYSTEM_USER_EMAIL, None) @staticmethod def _fromQuery(db, where, *values): cursor = db.cursor() cursor.execute("""SELECT users.id, name, fullname, status, useremails.email, verified FROM users LEFT OUTER JOIN useremails ON (useremails.id=users.email) """ + where, values) return [User.cache(db, User(*row)) for row in cursor] @staticmethod def fromId(db, user_id): cached_user = db.storage["User"].get(user_id) if cached_user: return cached_user else: found = User._fromQuery(db, "WHERE users.id=%s", user_id) if not found: raise InvalidUserId(user_id) return found[0] @staticmethod def fromIds(db, user_ids): need_fetch = [] cache = db.storage["User"] for user_id in user_ids: if user_id not in cache: need_fetch.append(user_id) if need_fetch: User._fromQuery(db, "WHERE users.id=ANY (%s)", need_fetch) return [cache.get(user_id) for user_id in user_ids] @staticmethod def fromName(db, name): cached_user = db.storage["User"].get("n:" + name) if cached_user: return cached_user else: found = User._fromQuery(db, "WHERE users.name=%s", name) if not found: raise NoSuchUser(name) return found[0] @staticmethod def fromAPI(api_user): if api_user.is_anonymous: return User.makeAnonymous() return User.fromId(api_user.critic.database, api_user.id) @staticmethod def withRole(db, role): cursor = db.cursor() cursor.execute("""SELECT uid FROM userroles WHERE role=%s""", (role,)) return User.fromIds(db, [user_id for user_id, in cursor]) @staticmethod def create(db, name, fullname, email, email_verified, password=None, status="current", external_user_id=None): tables = ["users"] if email is not None: tables.extend(["useremails", "usergitemails"]) if external_user_id is not None: 
tables.append("externalusers") with db.updating_cursor(*tables) as cursor: cursor.execute( """INSERT INTO users (name, fullname, password, status) VALUES (%s, %s, %s, %s) RETURNING id""", (name, fullname, password, status)) user_id, = cursor.fetchone() if email is not None: cursor.execute( """INSERT INTO useremails (uid, email, verified) VALUES (%s, %s, %s) RETURNING id""", (user_id, email, email_verified)) email_id = cursor.fetchone()[0] cursor.execute("UPDATE users SET email=%s WHERE id=%s", (email_id, user_id)) cursor.execute("""INSERT INTO usergitemails (email, uid) VALUES (%s, %s)""", (email, user_id)) if external_user_id is not None: cursor.execute("""UPDATE externalusers SET uid=%s WHERE id=%s""", (user_id, external_user_id)) return User.fromId(db, user_id) def sendUserCreatedMail(self, source, external=None): import mailutils if self.email_verified is False: email_status = " (pending verification)" else: email_status = "" message = """\ A new user has been created: User name: %(username)r Full name: %(fullname)r Email: %(email)r%(email_status)s """ % { "username": self.name, "fullname": self.fullname, "email": self.email, "email_status": email_status } if external: import auth provider = auth.PROVIDERS[external["provider"]] message += """\ External: %(provider)s %(account)r """ % { "provider": provider.getTitle(), "account": external["account"] } message += """\ -- critic """ mailutils.sendAdministratorMessage( source, "User '%s' registered" % self.name, message) ================================================ FILE: src/diff/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import re

import gitutils
import diff.analyze
import syntaxhighlight
import syntaxhighlight.request
import htmlutils
import textutils

# Emacs file-local variable line: "-*- mode: python; tab-width: 4 -*-".
re_modeline = re.compile(r"-\*-\s*(.*?)\s*-\*-")
re_tabwidth = re.compile(r"(?:^|[ \t;])tab-width:\s*([0-9]+)(?:$|;)", re.I)
re_indent_tabs_mode = re.compile(r"(?:^|[ \t;])indent-tabs-mode:\s*(t|nil)(?:$|;)", re.I)
re_mode = re.compile(r"(?:^|[ \t;])mode:\s*([^;]+)(?:$|;)", re.I)

# Low-level chunk of difference between two versions of a file.  One chunk
# represents a possibly empty set of consecutive lines in the old version of the
# file being replaced by another possibly empty set of consecutive lines in the
# new version of the file.  (Both sets are never empty, of course.)
class Chunk:
    def __init__(self, delete_offset, delete_count, insert_offset, insert_count,
                 **kwargs):
        # Primary information: identifying the line numbers of deleted lines and
        # the line numbers of inserted lines.  If lines are only inserted
        # delete_count is zero and delete_offset marks where, in the old version
        # of the file, this chunks adds its lines.  If lines are only deleted,
        # insert_count is zero and insert_offset marks where, in the new version
        # of the file, the deleted lines would have been.
        #
        # Line numbers are 1-based, that is, the first line is number 1.
        self.delete_offset = delete_offset
        self.delete_count = delete_count
        self.insert_offset = insert_offset
        self.insert_count = insert_count

        # Optional: the ID of the chunk.
        self.id = kwargs.get("id")

        # Optional: True if the chunk contains only white-space changes.
        self.is_whitespace = kwargs.get("is_whitespace")

        # Optional: the actual deleted and/or inserted lines.
        self.deleted_lines = kwargs.get("deleted_lines")
        self.inserted_lines = kwargs.get("inserted_lines")

        # Optional: chunk analysis, linking together "matching" lines within the
        # chunk and describing how such matching lines changed from the old
        # version to the new version.
        self.analysis = kwargs.get("analysis")

    def copy(self):
        """Return a new Chunk with the same offsets/counts, lines and analysis.

        Note: 'id' and 'is_whitespace' are intentionally not carried over."""
        return Chunk(self.delete_offset, self.delete_count,
                     self.insert_offset, self.insert_count,
                     deleted_lines=self.deleted_lines,
                     inserted_lines=self.inserted_lines,
                     analysis=self.analysis)

    def isBinary(self):
        # A chunk that neither deletes nor inserts lines is the marker used
        # for binary (or empty-file) changes.
        return self.delete_count == self.insert_count == 0

    def analyze(self, file, last_chunk=False, reanalyze=False):
        """Compute and store self.analysis, loading lines from 'file' if needed.

        Only chunks that both delete and insert lines are analyzed; a pure
        delete or pure insert has nothing to pair up."""
        if (reanalyze or not self.analysis) and self.delete_count != 0 and self.insert_count != 0:
            if not self.deleted_lines:
                file.loadOldLines()
                self.deleted_lines = file.getOldLines(self)
            if not self.inserted_lines:
                file.loadNewLines()
                self.inserted_lines = file.getNewLines(self)
            if self.is_whitespace:
                # NOTE(review): the third argument ('at_eof') receives
                # "last_chunk and <delete_offset + delete_count + oldCount()>",
                # i.e. a (truthy) integer whenever last_chunk is set.  The
                # arithmetic looks vestigial (possibly a lost comparison
                # operator in this expression) -- confirm against history.
                self.analysis = diff.analyze.analyzeWhiteSpaceChanges(
                    self.deleted_lines, self.inserted_lines,
                    last_chunk and self.delete_offset + self.delete_count + file.oldCount())
            else:
                self.analysis = diff.analyze.analyzeChunk(
                    self.deleted_lines, self.inserted_lines)

    def deleteEnd(self):
        # One past the last deleted line (old file, 1-based).
        return self.delete_offset + self.delete_count

    def insertEnd(self):
        # One past the last inserted line (new file, 1-based).
        return self.insert_offset + self.insert_count

    def delta(self):
        # Net change in line count caused by this chunk.
        return self.insert_count - self.delete_count

    def __str__(self):
        return "@@ -%d,%d +%d,%d @@" % (self.delete_offset, self.delete_count,
                                        self.insert_offset, self.insert_count)

    def __repr__(self):
        if self.analysis:
            analysis = ", analysis=%r" % self.analysis
        else:
            analysis = ""
        return "Chunk(delete_offset=%d, delete_count=%d, insert_offset=%d, insert_count=%d%s)" % (
            self.delete_offset, self.delete_count,
            self.insert_offset, self.insert_count, analysis)

    def __eq__(self, other):
        # NOTE(review): equality considers only the two offsets, not the
        # counts, and assumes 'other' is a Chunk (AttributeError otherwise).
        return self.delete_offset == other.delete_offset and self.insert_offset == other.insert_offset

    def getLines(self):
        """Expand this chunk into a list of Line objects.

        Requires deleted_lines/inserted_lines to be loaded.  The analysis
        string ("old=new[:ops][;...]") pairs up matching lines; a synthetic
        terminator mapping at (delete_count, insert_count) flushes whatever
        follows the last real mapping."""
        assert not (self.deleted_lines is None or self.inserted_lines is None)

        lines = []

        # Sentinel mapping just past the end of the chunk.
        terminator = "%d=%d" % (self.delete_count, self.insert_count)

        if self.analysis:
            analysis = self.analysis + ";" + terminator
        else:
            analysis = terminator

        mappings = analysis.split(";")

        old_offset = self.delete_offset
        new_offset = self.insert_offset

        for mapping in mappings:
            # Mapping offsets are relative to the chunk start.
            old_line, new_line = mapping.split(":")[0].split("=")
            old_line = self.delete_offset + int(old_line)
            new_line = self.insert_offset + int(new_line)

            # Lines on both sides before the mapped pair: pair them up
            # positionally as CONTEXT (identical) or REPLACED (different).
            while old_offset < old_line and new_offset < new_line:
                old_value = self.deleted_lines[old_offset - self.delete_offset]
                new_value = self.inserted_lines[new_offset - self.insert_offset]
                line_type = Line.CONTEXT if old_value == new_value else Line.REPLACED
                lines.append(Line(line_type, old_offset, old_value, new_offset, new_value))
                old_offset += 1
                new_offset += 1

            # Remaining unmatched old lines are deletions ...
            while old_offset < old_line:
                old_value = self.deleted_lines[old_offset - self.delete_offset]
                lines.append(Line(Line.DELETED, old_offset, old_value, new_offset, None))
                old_offset += 1

            # ... and remaining unmatched new lines are insertions.
            while new_offset < new_line:
                new_value = self.inserted_lines[new_offset - self.insert_offset]
                lines.append(Line(Line.INSERTED, old_offset, None, new_offset, new_value))
                new_offset += 1

            # The terminator mapping points at deleteEnd(); it produces no
            # MODIFIED line of its own.
            if old_line == self.deleteEnd():
                break

            old_value = self.deleted_lines[old_line - self.delete_offset]
            new_value = self.inserted_lines[new_line - self.insert_offset]
            lines.append(Line(Line.MODIFIED, old_line, old_value, new_line, new_value))
            old_offset += 1
            new_offset += 1

        return lines

# Matches merge conflict marker lines.
# NOTE(review): the third alternative "(?:]*>=){7}" looks garbled (a "|||||||"
# marker would be expected here) -- confirm against repository history.
re_conflict = re.compile("^(?:(?:<){7}|={7}|(?:]*>=){7}|(?:>){7})")
# Line in "macro chunk".  Representing either a context line, or a line that
# has been changed (modified, deleted or inserted.)
class Line:
    CONTEXT = 1
    DELETED = 2
    MODIFIED = 3
    REPLACED = 4
    INSERTED = 5
    WHITESPACE = 6
    CONFLICT = 7

    # Human-readable names for __repr__; covers every declared type.
    TYPE_NAMES = {CONTEXT: "CONTEXT",
                  DELETED: "DELETED",
                  MODIFIED: "MODIFIED",
                  REPLACED: "REPLACED",
                  INSERTED: "INSERTED",
                  WHITESPACE: "WHITESPACE",
                  CONFLICT: "CONFLICT"}

    def __init__(self, type, old_offset, old_value, new_offset, new_value,
                 **kwargs):
        # The type of line.  One of the constants declared above.
        self.type = type

        # The line number of this line in the old and new versions of the file,
        # and the actual line value.  If the line represents an inserted line,
        # old_offset will be the line number of the next non-deleted line in the
        # old version of the file and old_value will be None.  If the line
        # represents a deleted line, new_offset will be the line number of the
        # next non-inserted line in the new version of the file and new_value
        # will be None.
        self.old_offset = old_offset
        self.old_value = old_value
        self.new_offset = new_offset
        self.new_value = new_value

        # The difference between old_value and new_value is only in white-space.
        self.is_whitespace = kwargs.get("is_whitespace", False)
        # Optional per-line analysis (list of intra-line operations).
        self.analysis = kwargs.get("analysis", None)

    def __repr__(self):
        # Bug fix: the previous if/elif chain had no branches for WHITESPACE
        # and CONFLICT, so repr() of such a line raised UnboundLocalError.
        type_string = Line.TYPE_NAMES.get(self.type, "UNKNOWN(%r)" % self.type)
        return "Line(%s, %d:%d)" % (type_string, self.old_offset, self.new_offset)

    def isConflictMarker(self):
        # True if the old side of the line is a merge conflict marker.
        return self.old_value and bool(re_conflict.match(self.old_value))
# Higher-level chunk of differences between two versions of a file.
# Constructed by padding low-level chunks with a variable number of context
# lines.  Chunks whose contexts overlap are merged into a single "macro chunk."
class MacroChunk:
    def __init__(self, chunks, lines):
        # The low-level chunks that were merged into this macro chunk.
        self.chunks = chunks
        # Every line in the macro chunk, context lines included.
        self.lines = lines

        first_line = lines[0]
        last_line = lines[-1]

        # Position and extent of this macro chunk in the old and new versions
        # of the file.  These spans include the context lines, so they do not
        # represent only actual changes.
        self.old_offset = first_line.old_offset
        self.old_count = last_line.old_offset - first_line.old_offset + 1
        self.new_offset = first_line.new_offset
        self.new_count = last_line.new_offset - first_line.new_offset + 1
self.code_comment_chains = [] self.move_source_file = kwargs.get("move_source_file") self.move_target_file = kwargs.get("move_target_file") self.modeline = {} self.interpreter = {} def clean(self): self.chunks = None self.macro_chunks = None self.cleanLines() def cleanLines(self): self.old_plain = None self.new_plain = None self.old_highlighted = None self.new_highlighted = None def __hash__(self): return hash(self.id) def __int__(self): return self.id def __repr__(self): return "diff.File(id=%d, path=%r)" % (self.id or -1, self.path) def hasChanges(self): return self.old_sha1 is not None and self.new_sha1 is not None def isEmptyChanges(self): """Return true if empty diff information is recorded.""" return self.hasChanges() \ and len(self.chunks) == 1 \ and self.chunks[0].delete_count == 0 \ and self.chunks[0].insert_count == 0 def isEmptyFile(self): """Return true if this is an added or deleted empty (zero-length) file.""" if self.isEmptyChanges(): if self.wasAdded() and self.newSize() == 0: return True elif self.wasRemoved() and self.oldSize() == 0: return True return False def isBinaryChanges(self): """Return true if this is a binary file.""" return self.isEmptyChanges() and not self.isEmptyFile() def wasAdded(self): """Return true if this file was added.""" return self.old_sha1 == '0' * 40 def wasRemoved(self): """Return true if this file was deleted.""" return self.new_sha1 == '0' * 40 def oldSize(self): """Return size of old version of file, or None if file was deleted.""" if self.old_sha1 != '0' * 40: return self.repository.fetch(self.old_sha1, fetchData=False).size else: return None def newSize(self): """Return size of new version of file, or None if file was deleted.""" if self.new_sha1 != '0' * 40: return self.repository.fetch(self.new_sha1, fetchData=False).size else: return None def ensureHighlight(self, highlight_mode="legacy"): """Ensure that the old and new version are syntax highlighted If they are, True is returned. 
    def ensureHighlight(self, highlight_mode="legacy"):
        """Ensure that the old and new version are syntax highlighted

        If they are, True is returned.  If they are not, an asynchronous
        request to syntax highlight them is made, and False is returned."""
        sha1s = {}
        # Skip missing/null SHA-1s (added or removed side) and submodule
        # entries (git mode 160000), which have no blob content to highlight.
        if self.old_sha1 \
                and self.old_sha1 != "0" * 40 \
                and self.old_mode != "160000":
            old_language = self.getLanguage(use_content="old")
            if old_language:
                sha1s[self.old_sha1] = (self.path, old_language)
        if self.new_sha1 \
                and self.new_sha1 != "0" * 40 \
                and self.new_mode != "160000":
            new_language = self.getLanguage(use_content="new")
            if new_language:
                sha1s[self.new_sha1] = (self.path, new_language)
        # NOTE: 'async' as a keyword argument name marks this as Python 2
        # code ('async' is a reserved word from Python 3.7).
        return not syntaxhighlight.request.requestHighlights(
            self.repository, sha1s, highlight_mode, async=True)

    def loadOldLines(self, highlighted=False, request_highlight=False,
                     highlight_mode="legacy"):
        """Load the lines of the old version of the file, optionally highlighted."""
        from diff.parse import splitlines
        if self.old_sha1 is None or self.old_sha1 == '0' * 40:
            # File was added: the old version has no lines.
            self.old_plain = []
            self.old_highlighted = []
            return
        elif self.old_mode and self.old_mode == "160000":
            # Submodule entry: synthesize a single descriptive line.
            # NOTE(review): old_plain is assigned a *string* here, not a list
            # of lines as in every other branch -- confirm downstream
            # consumers tolerate this.
            self.old_plain = "Subproject commit %s" % self.old_sha1
            self.old_highlighted = splitlines(syntaxhighlight.wrap(
                self.old_plain, highlight_mode))
            return
        if highlighted:
            # Already loaded and genuinely highlighted: nothing to do.
            if self.old_highlighted and self.old_is_highlighted:
                return
            else:
                self.old_is_highlighted = True
                language = self.getLanguage(use_content="old")
                data = syntaxhighlight.readHighlight(
                    self.repository, self.old_sha1, self.path, language,
                    request=request_highlight, mode=highlight_mode)
                self.old_highlighted = splitlines(data)
                # Whether the file ends with a linebreak.
                self.old_eof_eol = data and data[-1] in "\n\r"
        else:
            if self.old_plain:
                return
            else:
                data = self.repository.fetch(self.old_sha1).data
                self.old_plain = splitlines(data)
                self.old_eof_eol = data and data[-1] in "\n\r"

    def loadNewLines(self, highlighted=False, request_highlight=False,
                     highlight_mode="legacy"):
        """Load the lines of the new version of the file, optionally highlighted."""
        from diff.parse import splitlines
        if self.new_sha1 is None or self.new_sha1 == '0' * 40:
            # File was removed: the new version has no lines.
            self.new_plain = []
            self.new_highlighted = []
            return
        elif self.new_mode and self.new_mode == "160000":
            # Submodule entry; see the matching note in loadOldLines().
            self.new_plain = "Subproject commit %s" % self.new_sha1
            self.new_highlighted = splitlines(syntaxhighlight.wrap(
                self.new_plain, highlight_mode))
            return
        if highlighted:
            if self.new_highlighted and self.new_is_highlighted:
                return
            else:
                self.new_is_highlighted = True
                language = self.getLanguage(use_content="new")
                data = syntaxhighlight.readHighlight(
                    self.repository, self.new_sha1, self.path, language,
                    request=request_highlight, mode=highlight_mode)
                self.new_highlighted = splitlines(data)
                self.new_eof_eol = data and data[-1] in "\n\r"
        else:
            if self.new_plain:
                return
            else:
                data = self.repository.fetch(self.new_sha1).data
                self.new_plain = splitlines(data)
                self.new_eof_eol = data and data[-1] in "\n\r"

    def getOldLines(self, chunk, highlighted=False):
        """Return the old-side lines covered by 'chunk' (1-based offsets)."""
        begin = chunk.delete_offset - 1
        end = begin + chunk.delete_count
        return self.oldLines(highlighted)[begin:end]

    def getNewLines(self, chunk, highlighted=False):
        """Return the new-side lines covered by 'chunk' (1-based offsets)."""
        begin = chunk.insert_offset - 1
        end = begin + chunk.insert_count
        return self.newLines(highlighted)[begin:end]

    def oldLines(self, highlighted):
        # Select the highlighted or plain variant; assumes it is loaded.
        if highlighted:
            return self.old_highlighted
        else:
            return self.old_plain

    def oldCount(self):
        # Number of lines in the old version, preferring the highlighted copy.
        if self.old_highlighted is not None:
            return len(self.old_highlighted)
        else:
            return len(self.old_plain)

    def newLines(self, highlighted):
        # Select the highlighted or plain variant; assumes it is loaded.
        if highlighted:
            return self.new_highlighted
        else:
            return self.new_plain

    def newCount(self):
        # Number of lines in the new version, preferring the highlighted copy.
        if self.new_highlighted is not None:
            return len(self.new_highlighted)
        else:
            return len(self.new_plain)

    def canHighlight(self):
        # True if a syntax-highlighting language could be determined.
        return self.getLanguage() is not None
self.path.endswith(".java"): return "java" elif self.path.endswith(".rb"): return "ruby" elif self.path.endswith(".js"): return "javascript" elif self.path.endswith(".php"): return "php" elif (self.path.endswith(".mk") or self.path.endswith("/Makefile")): return "makefile" elif (self.path.endswith(".m") or self.path.endswith(".mm")): return "objective-c" elif self.path.endswith(".sql"): return "sql" # XML syntax highlighting is disabled due to issues (the pygments # lexer messes with the line-endings in the file.) #elif self.path.endswith(".xml"): # return "xml" if use_content: interpreter = self.getInterpreter(use_content) if interpreter: executable = interpreter.split("/")[-1] if executable.startswith("python"): return "python" elif executable.startswith("perl"): return "perl" modeline = self.getModeLine(use_content) if modeline: match = re_mode.search(modeline) if match: mode = match.group(1).strip() if mode in ("c++", "python", "perl", "java", "ruby", "js", "php", "makefile"): return mode return None def getInterpreter(self, side="new"): if side not in self.interpreter: if side == "new": self.loadNewLines() lines = self.new_plain else: self.loadOldLines() lines = self.old_plain self.interpreter[side] = "" for line in lines: if line.startswith("#!"): words = line[2:].split() if re.search("(^|/)env$", words[0]): self.interpreter[side] = words[1] else: self.interpreter[side] = words[0] break return self.interpreter[side] def getModeLine(self, side="new"): if side not in self.modeline: if side == "new": self.loadNewLines() lines = self.new_plain else: self.loadOldLines() lines = self.old_plain self.modeline[side] = "" for line in lines: if line.startswith("#!"): continue match = re_modeline.search(line) if match: self.modeline[side] = match.group(1) break return self.modeline[side] def getTabWidth(self, side="new", default=8): modeline = self.getModeLine(side) try: return int(re_tabwidth.search(modeline).group(1)) except: return default def getIndentTabsMode(self, 
    def getIndentTabsMode(self, side="new", default=True):
        """Return the modeline's indent-tabs-mode setting as a bool, or
        'default' if the modeline has none."""
        modeline = self.getModeLine(side)
        try:
            return re_indent_tabs_mode.search(modeline).group(1) == "t"
        except:
            # Bare except: a missing match raises AttributeError and falls
            # back on the default.  NOTE(review): this also hides unrelated
            # errors.
            return default

    @staticmethod
    def sorted(files, key=lambda file: file.path):
        """Sort 'files' by path, placing a header (.h) immediately before
        its same-named source file (.cpp/.cc).

        NOTE: uses cmp() and sorted(cmp=...), which is Python 2 only."""
        def compareFilenames(a, b):
            def isSource(name):
                return name.endswith(".cpp") or name.endswith(".cc")
            def isHeader(name):
                return name.endswith(".h")
            if isHeader(a) and isSource(b) and a.rsplit(".", 1)[0] == b.rsplit(".", 1)[0]:
                return -1
            elif isSource(a) and isHeader(b) and a.rsplit(".", 1)[0] == b.rsplit(".", 1)[0]:
                return 1
            else:
                return cmp(a, b)
        # Both 'cmp' and 'key' given: the comparator compares key values
        # (i.e. paths), per Python 2 sorted() semantics.
        return sorted(files, key=key, cmp=compareFilenames)

    @staticmethod
    def eliminateCommonPrefixes(files, text=False, getpath=None, setpath=None):
        """Replace the directory prefix each path shares with its predecessor
        by padding plus ".../" (text mode) or an HTML-escaped "…/".

        'files' is mutated in place unless getpath/setpath accessors are
        supplied (they must be supplied together)."""
        assert (getpath is None) == (setpath is None)

        if getpath is None:
            def defaultGetPath(x):
                return x
            getpath = defaultGetPath
        if setpath is None:
            # NOTE(review): relies on the enclosing loop's 'index' variable at
            # call time; the 'x' parameter is unused.
            def defaultSetPath(x, p):
                files[index] = p
            setpath = defaultSetPath

        def commonPrefixLength(pathA, pathB):
            # Length (in characters, including slashes) of the longest common
            # leading directory-component prefix.
            componentsA = pathA.split('/')
            componentsB = pathB.split('/')
            for index in range(min(len(componentsA), len(componentsB))):
                if componentsA[index] != componentsB[index]:
                    return sum(map(len, componentsA[:index])) + index
            # NOTE(review): falls through and returns None when one path is a
            # component-wise prefix of the other (e.g. duplicates).  The
            # "length > N" comparisons below then evaluate False under
            # Python 2 ordering rules; under Python 3 this would raise.

        if files:
            previous = None
            for index in range(len(files)):
                current = getpath(files[index])
                if index > 0:
                    length = commonPrefixLength(previous, current)
                else:
                    length = 0
                if text:
                    # Plain-text output: ".../" is 4 characters wide.
                    if length > 4:
                        updated = (" " * (length - 4) + ".../" +
                                   textutils.escape(current[length:]))
                    else:
                        updated = textutils.escape(current)
                else:
                    # HTML output: the ellipsis "…/" replaces 2 characters.
                    if length > 2:
                        updated = (" " * (length - 2) + "…/" +
                                   htmlutils.htmlify(textutils.escape(current[length:])))
                    else:
                        updated = htmlutils.htmlify(textutils.escape(current))
                if updated != current:
                    setpath(files[index], updated)
                # Compare against the original (unelided) path next round.
                previous = current

        return files
class Changeset:
    def __init__(self, id, parent, child, type, files=None, commits=None):
        self.id = id
        self.parent = parent
        self.child = child
        # One of e.g. "direct", "merge", "conflicts" (see getReviewFiles()).
        self.type = type
        self.files = files
        self.conflicts = False
        # For a direct changeset the commit list defaults to just the child;
        # otherwise it is computed lazily by commits().
        self.__commits = commits if commits else [child] if type == "direct" else None
        # Lazily populated cache mapping file id => diff.File.
        self.__file_by_id = {}

    def __hash__(self):
        return hash(self.id)

    def __eq__(self, other):
        # NOTE(review): assumes 'other' is a Changeset.
        return self.id == other.id

    def commits(self, db):
        """Return the linear list of commits from child back to (but not
        including) parent, or [] if the history is not linear."""
        if self.__commits is None:
            iter = self.child
            self.__commits = [iter]
            while self.parent not in iter.parents:
                # A merge commit means the range is not linear: give up.
                if len(iter.parents) != 1:
                    return []
                iter = gitutils.Commit.fromSHA1(db, iter.repository, iter.parents[0])
                self.__commits.append(iter)
        return self.__commits

    def setCommits(self, commits):
        self.__commits = commits

    def getFile(self, file_id):
        """Return the diff.File with id 'file_id', or None."""
        if self.files and not self.__file_by_id:
            for file in self.files:
                self.__file_by_id[file.id] = file
        return self.__file_by_id.get(file_id)

    def getReviewFiles(self, db, user, review):
        """Return {file_id: (is_reviewer, state, reviewers)} for this
        changeset's files within 'review', as seen by 'user'.

        'state' is the review state, or "mixed" when rows disagree;
        'user's own draft state changes are applied on top."""
        files = {}

        if self.files and not self.__file_by_id:
            for file in self.files:
                self.__file_by_id[file.id] = file

        def process(cursor):
            for file_id, state, reviewer, is_reviewer, draft_from, draft_to in cursor:
                # Ignore rows for files not part of this changeset.
                if file_id not in self.__file_by_id:
                    continue
                # Apply the user's pending draft state change, if any.
                if draft_from == state:
                    state = draft_to
                if files.has_key(file_id):
                    existing = files[file_id]
                    reviewers = existing[2]
                    # Merge with previous rows: 'mixed' when states disagree.
                    files[file_id] = (existing[0] or is_reviewer,
                                      state if existing[1] == state else "mixed",
                                      existing[2])
                else:
                    reviewers = set()
                    files[file_id] = (is_reviewer, state, reviewers)
                if reviewer is not None:
                    reviewers.add(reviewer)

        if self.type in ("merge", "conflicts"):
            # Merge changesets map directly to reviewfiles rows via their
            # changeset id.
            cursor = db.cursor()
            cursor.execute("""SELECT reviewfiles.file, reviewfiles.state, reviewfiles.reviewer,
                                     reviewuserfiles.uid IS NOT NULL,
                                     reviewfilechanges.from_state, reviewfilechanges.to_state
                                FROM reviewfiles
                     LEFT OUTER JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id
                                                     AND reviewuserfiles.uid=%s)
                     LEFT OUTER JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id
                                                       AND reviewfilechanges.uid=%s
                                                       AND reviewfilechanges.state='draft')
                               WHERE reviewfiles.review=%s
                                 AND reviewfiles.changeset=%s""",
                           (user.id, user.id, review.id, self.id))
            process(cursor)
        elif self.__commits:
            # Otherwise collect rows for every per-commit changeset whose
            # child is one of this changeset's commits.
            cursor = db.cursor()
            cursor.execute("""SELECT reviewfiles.file, reviewfiles.state, reviewfiles.reviewer,
                                     reviewuserfiles.uid IS NOT NULL,
                                     reviewfilechanges.from_state, reviewfilechanges.to_state
                                FROM reviewfiles
                                JOIN changesets ON (changesets.id=reviewfiles.changeset)
                     LEFT OUTER JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id
                                                     AND reviewuserfiles.uid=%s)
                     LEFT OUTER JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id
                                                       AND reviewfilechanges.uid=%s
                                                       AND reviewfilechanges.state='draft')
                               WHERE reviewfiles.review=%s
                                 AND changesets.child=ANY (%s)""",
                           (user.id, user.id, review.id,
                            [commit.getId(db) for commit in self.__commits]))
            process(cursor)

        return files

    @staticmethod
    def fromId(db, repository, id):
        """Load a Changeset (without files) from the database by id."""
        cursor = db.cursor()
        cursor.execute("SELECT parent, child, type FROM changesets WHERE id=%s", [id])
        parent_id, child_id, type = cursor.fetchone()
        # Some changesets (e.g. for root commits) have no parent.
        parent = gitutils.Commit.fromId(db, repository, parent_id) if parent_id else None
        child = gitutils.Commit.fromId(db, repository, child_id)
        return Changeset(id, parent, child, type)
import difflib
import re

import textutils

# Lines too trivial to pair up by similarity (blank, lone brace, "else", ...).
re_ignore = re.compile("^\\s*(?:[{}*]|else|do|\\*/)?\\s*$")
# Tokenizer splitting a line into numbers, CamelCase fragments, brackets,
# white-space runs and single characters.
re_words = re.compile("([0-9]+|[A-Z][a-z]+|[A-Z]+|[a-z]+|[\\[\\]{}()]|\\s+|.)")
re_ws = re.compile("\\s+")
re_conflict = re.compile("^<<<<<<< .*$|^=======$|^>>>>>>> .*$")

def analyzeChunk(deletedLines, insertedLines, moved=False):
    """Pair up deleted and inserted lines and describe their differences.

    Returns a ';'-separated analysis string of "old=new[:ops]" mappings
    (offsets relative to the chunk), or None if there is nothing to map."""
    # Pure delete or pure insert, nothing to analyze.
    if not deletedLines or not insertedLines:
        return None

    # Python 2: map() returns lists, which are indexed/len()'d below.
    deletedLines = map(textutils.decode, deletedLines)
    insertedLines = map(textutils.decode, insertedLines)

    # Small chunk (and not a move): analyze all pairs directly.  Otherwise
    # the quadratic pairing would be too expensive, so first find blocks of
    # white-space-equal matching lines and only deep-analyze the gaps.
    if len(deletedLines) * len(insertedLines) <= 10000 and not moved:
        analysis = analyzeChunk1(deletedLines, insertedLines)
    else:
        deletedLinesNoWS = [re_ws.sub(" ", line.strip()) for line in deletedLines]
        insertedLinesNoWS = [re_ws.sub(" ", line.strip()) for line in insertedLines]

        sm = difflib.SequenceMatcher(None, deletedLinesNoWS, insertedLinesNoWS)
        blocks = sm.get_matching_blocks()

        analysis = []
        pi = 0
        pj = 0

        for i, j, n in blocks:
            if not n:
                continue
            # Deep-analyze the non-matching gap before this block.
            if i > pi and j > pj:
                analysis.append(analyzeChunk1(deletedLines[pi:i],
                                              insertedLines[pj:j],
                                              offsetA=pi, offsetB=pj))
            # Matching block: only white-space can differ within it.
            analysis.append(analyzeWhiteSpaceChanges(deletedLines[i:i+n],
                                                     insertedLines[j:j+n],
                                                     offsetA=i, offsetB=j,
                                                     full=moved))
            pi = i + n
            pj = j + n

        # Trailing gap after the last matching block.
        if pi < len(deletedLines) and pj < len(insertedLines):
            analysis.append(analyzeChunk1(deletedLines[pi:],
                                          insertedLines[pj:],
                                          offsetA=pi, offsetB=pj))

        analysis = ";".join(filter(None, analysis))

    if analysis:
        return analysis
    else:
        return None

def analyzeChunk1(deletedLines, insertedLines, offsetA=0, offsetB=0):
    """Pair similar lines between two small line lists (quadratic).

    Returns a ';'-separated string of "old=new[:ops]" mappings with offsets
    relative to offsetA/offsetB, or "" when no pairing was found."""
    matches = []
    equals = []

    # Guard against quadratic blow-up; callers normally pre-check this.
    if len(deletedLines) * len(insertedLines) > 10000:
        return ""

    def ratio(sm, a, b, aLength, bLength):
        # Similarity of two tokenized lines measured in matched non-space
        # characters.  NOTE: 'unicode' makes this Python 2 only.
        matching = 0
        for i, j, n in sm.get_matching_blocks():
            matching += sum(map(len, map(unicode.strip, a[i:i+n])))
        # A single contiguous match in a reasonably long line is rated
        # against the deleted side alone (get_matching_blocks() always
        # appends one zero-length terminator block).
        if aLength > 5 and len(sm.get_matching_blocks()) == 2:
            return float(matching) / aLength
        else:
            return 2.0 * matching / (aLength + bLength)

    for deletedIndex, deleted in enumerate(deletedLines):
        deletedStripped = deleted.strip()
        deletedNoWS = re_ws.sub("", deletedStripped)

        # Don't match conflict lines against anything.
        if re_conflict.match(deleted):
            continue

        if not re_ignore.match(deleted):
            deletedWords = re_words.findall(deleted)

            for insertedIndex, inserted in enumerate(insertedLines):
                insertedStripped = inserted.strip()
                insertedNoWS = re_ws.sub("", insertedStripped)

                if not re_ignore.match(inserted):
                    insertedWords = re_words.findall(inserted)
                    sm = difflib.SequenceMatcher(None, deletedWords, insertedWords)
                    r = ratio(sm, deletedWords, insertedWords,
                              len(deletedNoWS), len(insertedNoWS))
                    # Keep candidate pairs above the similarity threshold.
                    if r > 0.5:
                        matches.append((r, deletedIndex, insertedIndex,
                                        deletedWords, insertedWords, sm))
                elif deletedStripped == insertedStripped:
                    # Trivial line equal to a non-trivial one after stripping.
                    equals.append((deletedIndex, insertedIndex))
        else:
            # Trivial deleted line: only exact (stripped) equality counts.
            for insertedIndex, inserted in enumerate(insertedLines):
                if deletedStripped == inserted.strip():
                    equals.append((deletedIndex, insertedIndex))

    if matches:
        # Greedily accept the best-scoring pair, then discard candidates
        # that reuse either line or would cross the accepted pair.
        matches.sort(key=lambda x: x[0], reverse=True)

        final = []

        while matches:
            r, deletedIndex, insertedIndex, deletedWords, insertedWords, sm = matches.pop(0)
            final.append((deletedIndex, insertedIndex, deletedWords, insertedWords, sm))
            matches = filter(lambda data: data[1] != deletedIndex
                                      and data[2] != insertedIndex
                                      and (data[1] < deletedIndex) == (data[2] < insertedIndex),
                             matches)
            equals = filter(lambda data: (data[0] < deletedIndex) == (data[1] < insertedIndex),
                            equals)

        final.sort()
        equals.sort()

        result = []
        previousDeletedIndex = -1
        previousInsertedIndex = -1

        # Sentinel entry past the end flushes remaining 'equals'.
        final.append((len(deletedLines), len(insertedLines), None, None, None))

        for deletedIndex, insertedIndex, deletedWords, insertedWords, sm in final:
            # Emit exact-equal pairs that fall strictly between the previous
            # and current accepted match.
            while equals and (equals[0][0] < deletedIndex or equals[0][1] < insertedIndex):
                di, ii = equals.pop(0)
                if previousDeletedIndex < di < deletedIndex and previousInsertedIndex < ii < insertedIndex:
                    deletedLine = deletedLines[di]
                    insertedLine = insertedLines[ii]
                    lineDiff = analyzeWhiteSpaceLine(deletedLine, insertedLine)
                    if lineDiff:
                        result.append("%d=%d:ws,%s" % (di + offsetA, ii + offsetB, lineDiff))
                    else:
                        result.append("%d=%d" % (di + offsetA, ii + offsetB))
                    previousDeletedIndex = di
                    previousInsertedIndex = ii
                    # Drop other equal-pairs that reuse either line.
                    while equals and (di == equals[0][0] or ii == equals[0][1]):
                        equals.pop(0)

            # Sentinel reached: no more real matches.
            if sm is None:
                break

            lineDiff = []

            deletedLine = deletedLines[deletedIndex]
            insertedLine = insertedLines[insertedIndex]

            if deletedLine != insertedLine and deletedLine.strip() == insertedLine.strip():
                # Only white-space changed.
                lineDiff.append("ws")
                lineDiff.append(analyzeWhiteSpaceLine(deletedLine, insertedLine))
            else:
                # Describe intra-line edits as byte-offset ranges.
                for tag, i1, i2, j1, j2 in sm.get_opcodes():
                    if tag == 'replace':
                        lineDiff.append("r%d-%d=%d-%d" % (offsetInLine(deletedWords, i1),
                                                          offsetInLine(deletedWords, i2),
                                                          offsetInLine(insertedWords, j1),
                                                          offsetInLine(insertedWords, j2)))
                    elif tag == 'delete':
                        lineDiff.append("d%d-%d" % (offsetInLine(deletedWords, i1),
                                                    offsetInLine(deletedWords, i2)))
                    elif tag == 'insert':
                        lineDiff.append("i%d-%d" % (offsetInLine(insertedWords, j1),
                                                    offsetInLine(insertedWords, j2)))

            if lineDiff:
                result.append("%d=%d:%s" % (deletedIndex + offsetA,
                                            insertedIndex + offsetB,
                                            ",".join(lineDiff)))
            else:
                result.append("%d=%d" % (deletedIndex + offsetA, insertedIndex + offsetB))

            previousDeletedIndex = deletedIndex
            previousInsertedIndex = insertedIndex

        return ";".join(result)
    elif deletedLines[-1] == insertedLines[-1]:
        # No similarity matches, but a common tail: map the identical
        # trailing lines onto each other.
        ndeleted = len(deletedLines)
        ninserted = len(insertedLines)

        result = []
        index = 1

        while index <= ndeleted and index <= ninserted and deletedLines[-index] == insertedLines[-index]:
            result.append("%d=%d" % (ndeleted - index + offsetA,
                                     ninserted - index + offsetB))
            index += 1

        return ";".join(reversed(result))
    else:
        return ""

def offsetInLine(words, offset):
    """Byte offset (UTF-8) of token 'offset' within the tokenized line."""
    return sum(map(lambda word: len(word.encode("utf-8")), words[0:offset]))

# Tokenizer for white-space analysis: single space, single tab, other
# white-space runs, and non-space runs.
re_ws_words = re.compile("( |\t|\\s+|\\S+)")
def analyzeWhiteSpaceLine(deletedLine, insertedLine):
    """Describe the intra-line difference between two lines.

    Returns a comma-separated list of replace/delete/insert operations
    ("rA-B=C-D", "dA-B", "iA-B") expressed as byte offsets via
    offsetInLine()."""
    old_line = textutils.decode(deletedLine)
    new_line = textutils.decode(insertedLine)

    old_words = [word for word in re_ws_words.findall(old_line) if word]
    new_words = [word for word in re_ws_words.findall(new_line) if word]

    matcher = difflib.SequenceMatcher(None, old_words, new_words)
    operations = []

    for tag, i1, i2, j1, j2 in matcher.get_opcodes():
        if tag == 'replace':
            operations.append("r%d-%d=%d-%d" % (offsetInLine(old_words, i1),
                                                offsetInLine(old_words, i2),
                                                offsetInLine(new_words, j1),
                                                offsetInLine(new_words, j2)))
        elif tag == 'delete':
            operations.append("d%d-%d" % (offsetInLine(old_words, i1),
                                          offsetInLine(old_words, i2)))
        elif tag == 'insert':
            operations.append("i%d-%d" % (offsetInLine(new_words, j1),
                                          offsetInLine(new_words, j2)))

    return ",".join(operations)

def analyzeWhiteSpaceChanges(deletedLines, insertedLines,
                             at_eof=False, offsetA=0, offsetB=0, full=False):
    """Map pairs of lines in a white-space-only chunk onto each other.

    Lines are paired positionally.  Differing pairs get an intra-line
    analysis; the last pair is tagged ":eol" when 'at_eof' is set; with
    'full', identical pairs are mapped as well.  If nothing was emitted
    but an offset is non-zero, a single anchor mapping is returned."""
    parts = []
    last_index = len(deletedLines) - 1

    for index, (old_line, new_line) in enumerate(zip(deletedLines, insertedLines)):
        old_offset = index + offsetA
        new_offset = index + offsetB

        if old_line != new_line:
            parts.append("%d=%d:%s" % (old_offset, new_offset,
                                       analyzeWhiteSpaceLine(old_line, new_line)))
        elif at_eof and index == last_index:
            # Identical text but a linebreak-at-EOF difference.
            parts.append("%d=%d:eol" % (old_offset, new_offset))
        elif full:
            parts.append("%d=%d" % (old_offset, new_offset))

    if not parts and (offsetA or offsetB):
        parts.append("%d=%d" % (offsetA, offsetB))

    return ";".join(parts)
import diff
import diff.html

class ContextLines:
    """Expands a file's diff chunks into displayable runs of lines.

    getMacroChunks() turns the raw diff.Chunk list of a diff.File into
    diff.MacroChunk objects: each macro chunk is a run of changed lines plus
    surrounding context lines.  Comment chains ('chains') can force extra
    lines to stay visible, and 'conflicts' enables special treatment of
    conflict-marker lines."""

    def __init__(self, file, chunks, chains=None, merge=False, conflicts=False):
        self.file = file            # diff.File whose old/new lines are read
        self.chunks = chunks        # [diff.Chunk, ...] for this file
        self.chains = chains        # optional [(comment chain, use_old), ...]
        self.merge = merge          # True when rendering a merge commit
        self.conflicts = conflicts  # True => rewrite conflict-marker lines

    def getMacroChunks(self, context_lines=3, minimum_gap=3, highlight=True,
                       lineFilter=None, skip_interline_diff=False):
        """Return a list of diff.MacroChunk objects for the file.

        context_lines -- context rows emitted before/after each changed run
        minimum_gap -- runs closer than this (plus context) are merged
        highlight -- use syntax-highlighted lines and interline diff markup
        lineFilter -- optional predicate; lines it rejects are dropped
        skip_interline_diff -- suppress per-character diff markup"""
        old_lines = self.file.oldLines(highlight)
        new_lines = self.file.newLines(highlight)

        lines = []

        # NOTE: 'lines' is rebound further down for each macro chunk; this
        # closure always appends to the current list.
        def addLine(line):
            if not lineFilter or lineFilter(line):
                lines.append(line)

        # Phase 1: convert every chunk into individual diff.Line objects.
        for chunk in self.chunks:
            old_offset = chunk.delete_offset
            new_offset = chunk.insert_offset

            if chunk.analysis:
                # Analysis maps old lines to new lines within the chunk:
                # "old=new[:ops];..." with ops describing intra-line edits.
                mappings = chunk.analysis.split(';')

                for mapping in mappings:
                    if ':' in mapping:
                        mapped_lines, ops = mapping.split(':')
                        ops_list = ops.split(",")
                    else:
                        mapped_lines = mapping
                        ops = None
                        ops_list = None

                    old_line, new_line = mapped_lines.split('=')
                    old_line = chunk.delete_offset + int(old_line)
                    new_line = chunk.insert_offset + int(new_line)

                    # Unmapped lines before this mapping: pair them up as
                    # context/replaced until one side runs out.
                    while old_offset < old_line and new_offset < new_line:
                        if old_lines[old_offset - 1] == new_lines[new_offset - 1]:
                            line_type = diff.Line.CONTEXT
                        else:
                            line_type = diff.Line.REPLACED
                        line = diff.Line(line_type,
                                         old_offset, old_lines[old_offset - 1],
                                         new_offset, new_lines[new_offset - 1],
                                         is_whitespace=chunk.is_whitespace)
                        if self.conflicts and line_type == diff.Line.REPLACED and line.isConflictMarker():
                            # Conflict marker on the old side: emit as a pure
                            # deletion and leave the new line to be emitted as
                            # an insertion below.
                            addLine(diff.Line(diff.Line.DELETED,
                                              old_offset, old_lines[old_offset - 1],
                                              new_offset, None))
                        else:
                            addLine(line)
                            new_offset += 1
                        old_offset += 1

                    while old_offset < old_line:
                        addLine(diff.Line(diff.Line.DELETED,
                                          old_offset, old_lines[old_offset - 1],
                                          new_offset, None))
                        old_offset += 1

                    while new_offset < new_line:
                        addLine(diff.Line(diff.Line.INSERTED,
                                          old_offset, None,
                                          new_offset, new_lines[new_offset - 1]))
                        new_offset += 1

                    try:
                        deleted_line = old_lines[old_offset - 1]
                        inserted_line = new_lines[new_offset - 1]
                    except:
                        # NOTE(review): raising a string/repr is not a valid
                        # exception (TypeError on Python >= 2.6); presumably
                        # meant to raise an exception carrying this debug
                        # tuple -- confirm and fix upstream.
                        raise repr((self.file.path, self.file.old_sha1, self.file.new_sha1, new_offset, len(new_lines)))

                    if deleted_line == inserted_line:
                        line_type = diff.Line.CONTEXT
                        is_whitespace = False
                    else:
                        # A leading "ws" op flags a whitespace-only change.
                        if ops_list and ops_list[0] == "ws":
                            is_whitespace = True
                            if len(ops_list) > 1:
                                ops_list = ops_list[1:]
                            else:
                                ops_list = None
                        else:
                            is_whitespace = False
                        line_type = diff.Line.MODIFIED
                        if highlight and ops_list and not skip_interline_diff:
                            if len(ops_list) == 1 and ops_list[0] == "eol":
                                # Only the end-of-line state changed.
                                line_type = diff.Line.REPLACED
                                # NOTE(review): inner 'if highlight' is
                                # redundant (outer condition includes it),
                                # and both branches append to deleted_line --
                                # the second presumably should target
                                # inserted_line; confirm against upstream.
                                if highlight:
                                    if not self.file.old_eof_eol:
                                        deleted_line += "[missing linebreak]"
                                    if not self.file.new_eof_eol:
                                        deleted_line += "[missing linebreak]"
                            else:
                                deleted_line, inserted_line = diff.html.lineDiffHTML(ops_list, deleted_line, inserted_line)

                    addLine(diff.Line(line_type,
                                      old_offset, deleted_line,
                                      new_offset, inserted_line,
                                      is_whitespace=chunk.is_whitespace or is_whitespace,
                                      analysis=ops_list))

                    old_offset += 1
                    new_offset += 1

            # Remainder of the chunk past the last mapping (or the whole
            # chunk when there is no analysis).
            old_line = chunk.delete_offset + chunk.delete_count
            new_line = chunk.insert_offset + chunk.insert_count

            while old_offset < old_line and new_offset < new_line:
                if old_lines[old_offset - 1] == new_lines[new_offset - 1]:
                    line_type = diff.Line.CONTEXT
                else:
                    line_type = diff.Line.REPLACED
                line = diff.Line(line_type,
                                 old_offset, old_lines[old_offset - 1],
                                 new_offset, new_lines[new_offset - 1],
                                 is_whitespace=chunk.is_whitespace)
                if self.conflicts and line_type == diff.Line.REPLACED and line.isConflictMarker():
                    addLine(diff.Line(diff.Line.DELETED,
                                      old_offset, old_lines[old_offset - 1],
                                      new_offset, None))
                else:
                    addLine(line)
                    new_offset += 1
                old_offset += 1

            while old_offset < old_line:
                try:
                    addLine(diff.Line(diff.Line.DELETED,
                                      old_offset, old_lines[old_offset - 1],
                                      new_offset, None))
                except:
                    # NOTE(review): bare except papering over an out-of-range
                    # offset with an empty line -- confirm intent.
                    addLine(diff.Line(diff.Line.DELETED,
                                      old_offset, "",
                                      new_offset, None))
                old_offset += 1

            while new_offset < new_line:
                try:
                    addLine(diff.Line(diff.Line.INSERTED,
                                      old_offset, None,
                                      new_offset, new_lines[new_offset - 1]))
                except:
                    addLine(diff.Line(diff.Line.INSERTED,
                                      old_offset, None,
                                      new_offset, ""))
                new_offset += 1

        # Phase 2: index the generated lines by old and new offsets.
        old_table = {}
        new_table = {}

        for line in lines:
            if line.old_value is not None:
                old_table[line.old_offset] = line
            if line.new_value is not None:
                new_table[line.new_offset] = line

        def translateInChunk(chunk, old_delta=None, new_delta=None):
            """Translate a line delta within 'chunk' between old and new
            sides using the chunk's analysis mappings (best effort)."""
            if chunk.analysis:
                mappings = chunk.analysis.split(';')
                previous_old_line = 0
                previous_new_line = 0
                for mapping in mappings:
                    if ':' in mapping:
                        mapped_lines, ops = mapping.split(':')
                    else:
                        mapped_lines = mapping
                    old_line, new_line = mapped_lines.split('=')
                    old_line = int(old_line)
                    new_line = int(new_line)
                    if old_delta is not None:
                        if old_line == old_delta:
                            return new_line
                        elif old_line > old_delta:
                            return previous_new_line
                    else:
                        if new_line == new_delta:
                            return old_line
                        elif new_line > new_delta:
                            return previous_old_line
                    previous_old_line = old_line
                    previous_new_line = new_line
            if old_delta is not None:
                return min(old_delta, chunk.insert_count)
            else:
                return min(new_delta, chunk.delete_count)

        def findMatchingOldOffset(offset):
            """Map a new-side line offset to the corresponding old side."""
            precedingChunk = None
            for chunk in self.chunks:
                if chunk.insert_offset + chunk.insert_count > offset:
                    if chunk.insert_offset <= offset:
                        delta = translateInChunk(chunk, new_delta=offset - chunk.insert_offset)
                        offset = chunk.delete_offset + delta
                        return offset
                    break
                precedingChunk = chunk
            if precedingChunk:
                offset -= precedingChunk.insert_offset + precedingChunk.insert_count
                offset += precedingChunk.delete_offset + precedingChunk.delete_count
            return offset

        def findMatchingNewOffset(offset):
            """Map an old-side line offset to the corresponding new side."""
            precedingChunk = None
            for chunk in self.chunks:
                if chunk.delete_offset + chunk.delete_count > offset:
                    if chunk.delete_offset <= offset:
                        delta = translateInChunk(chunk, old_delta=offset - chunk.delete_offset)
                        offset = chunk.insert_offset + delta
                        return offset
                    break
                precedingChunk = chunk
            if precedingChunk:
                offset -= precedingChunk.delete_offset + precedingChunk.delete_count
                offset += precedingChunk.insert_offset + precedingChunk.insert_count
            return offset

        # Phase 3: make sure every line a comment chain touches is present,
        # adding plain context lines where the diff itself had none.
        if self.chains and not self.merge:
            for (chain, use_old) in self.chains:
                if chain.comments:
                    if not use_old and self.file.new_sha1 in chain.lines_by_sha1:
                        chain_offset, chain_count = chain.lines_by_sha1[self.file.new_sha1]
                        old_offset = findMatchingOldOffset(chain_offset)
                        new_offset = chain_offset
                        first_line = new_table.get(new_offset)
                    else:
                        chain_offset, chain_count = chain.lines_by_sha1[self.file.old_sha1]
                        old_offset = chain_offset
                        new_offset = findMatchingNewOffset(chain_offset)
                        first_line = old_table.get(old_offset)

                    count = chain_count

                    while count:
                        if old_offset not in old_table and new_offset not in new_table:
                            try:
                                line = diff.Line(diff.Line.CONTEXT,
                                                 old_offset, old_lines[old_offset - 1],
                                                 new_offset, new_lines[new_offset - 1])
                            except IndexError:
                                break
                            if not lineFilter or lineFilter(line):
                                if not first_line:
                                    first_line = line
                                old_table[old_offset] = line
                                new_table[new_offset] = line
                        if old_offset in old_table:
                            old_offset += 1
                        if new_offset in new_table:
                            new_offset += 1
                        count -= 1

        # Simple FIFO view over a sorted list; pop() is O(1) because items
        # are never removed, only an offset advanced.
        class queue:
            def __init__(self, iterable):
                self.__list = list(iterable)
                self.__offset = 0
            def __getitem__(self, index):
                return self.__list[self.__offset + index]
            def __nonzero__(self):
                return self.__offset < len(self.__list)
            def __len__(self):
                return len(self.__list) - self.__offset
            def __str__(self):
                return str(self.__list[self.__offset:])
            def __repr__(self):
                return repr(self.__list[self.__offset:])
            def pop(self):
                self.__offset += 1
                return self.__list[self.__offset - 1]

        # All lines ordered by (old offset, new offset); pure insertions are
        # taken from new_table, everything else from old_table.
        all_lines = queue(sorted([(key, value.new_offset, value) for key, value in old_table.items()] +
                                 [(value.old_offset, key, value) for key, value in new_table.items()
                                  if value.type not in (diff.Line.CONTEXT, diff.Line.MODIFIED, diff.Line.REPLACED)]))
        all_chunks = self.chunks[:]
        all_chains = self.chains and self.chains[:] or None
        macro_chunks = []

        def lineOrNone(lines, index):
            try:
                return lines[index]
            except IndexError:
                return None

        # Phase 4: group lines into macro chunks with leading/trailing
        # context, merging groups whose gap is small enough.
        while all_lines:
            old_offset, new_offset, first_line = all_lines.pop()

            count = min(context_lines, max(old_offset - 1, new_offset - 1))
            old_offset = max(1, old_offset - count)
            new_offset = max(1, new_offset - count)

            lines = []

            # Leading context.
            while count:
                if old_offset <= len(old_lines) and new_offset <= len(new_lines):
                    addLine(diff.Line(diff.Line.CONTEXT,
                                      old_offset, old_lines[old_offset - 1],
                                      new_offset, new_lines[new_offset - 1]))
                    old_offset += 1
                    new_offset += 1
                elif old_offset <= len(old_lines):
                    old_offset += 1
                else:
                    new_offset += 1
                count -= 1

            lines.append(first_line)

            if first_line.type != diff.Line.INSERTED:
                old_offset += 1
            if first_line.type != diff.Line.DELETED:
                new_offset += 1

            # Consume consecutive/nearby lines into the same macro chunk.
            while all_lines:
                while all_lines and (old_offset == all_lines[0][0] or new_offset == all_lines[0][1]):
                    line = all_lines.pop()[2]
                    lines.append(line)
                    if line.type != diff.Line.INSERTED:
                        old_offset += 1
                    if line.type != diff.Line.DELETED:
                        new_offset += 1
                if all_lines and all_lines[0][1] - new_offset <= 2 * context_lines + minimum_gap:
                    # Gap small enough: bridge with plain context lines.
                    while old_offset != all_lines[0][0] and new_offset != all_lines[0][1]:
                        line = diff.Line(diff.Line.CONTEXT,
                                         old_offset, lineOrNone(old_lines, old_offset - 1),
                                         new_offset, lineOrNone(new_lines, new_offset - 1))
                        addLine(line)
                        if line.old_value is not None:
                            old_offset += 1
                        if line.new_value is not None:
                            new_offset += 1
                else:
                    break

            # Trailing context.
            count = context_lines
            while count:
                if old_offset <= len(old_lines) and new_offset <= len(new_lines):
                    addLine(diff.Line(diff.Line.CONTEXT,
                                      old_offset, old_lines[old_offset - 1],
                                      new_offset, new_lines[new_offset - 1]))
                    old_offset += 1
                    new_offset += 1
                elif old_offset <= len(old_lines):
                    old_offset += 1
                else:
                    new_offset += 1
                count -= 1

            # Chunks fully consumed by this macro chunk.
            chunks = []
            while all_chunks and (all_chunks[0].delete_offset < old_offset or
                                  all_chunks[0].insert_offset < new_offset):
                chunks.append(all_chunks.pop(0))

            # Chains whose lines fall inside this macro chunk.
            chains = []
            if all_chains:
                index = 0
                while index < len(all_chains):
                    chain, use_old = all_chains[index]
                    if not use_old and self.file.new_sha1 in chain.lines_by_sha1:
                        chain_offset, chain_count = chain.lines_by_sha1[self.file.new_sha1]
                        compare_offset = new_offset
                    else:
                        chain_offset, chain_count = chain.lines_by_sha1[self.file.old_sha1]
                        compare_offset = old_offset
                    if chain_offset < compare_offset:
                        chains.append(chain)
                        del all_chains[index]
                    else:
                        index += 1

            macro_chunks.append(diff.MacroChunk(chunks, lines))

        if not lineFilter:
            # Drop chain-only macro chunks (no actual diff chunks).
            return filter(lambda macro_chunk: bool(macro_chunk.chunks), macro_chunks)
        else:
            return macro_chunks
import re

import textutils
from htmlutils import htmlify

# HTML-aware intra-line diff helpers.  A "line" here is an already
# syntax-highlighted HTML source line: splitTags() separates markup from
# text (decoding HTML entities so offsets count real characters),
# insertTag() splices new markers at text-column offsets without breaking
# existing markup, and joinTags() re-escapes the text and reassembles.

re_tag = re.compile("(<[^>]*>)")
re_decimal_entity = re.compile("&#([0-9]+);")

class Tag:
    """A piece of HTML markup split out of a highlighted line.

    Tags are falsy so the items returned by splitTags() can be told apart
    by truth value: text string => true, markup => false."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return self.value
    def __nonzero__(self):
        return False
    def __repr__(self):
        return "Tag(%r)" % self.value

def splitTags(line):
    """Split an HTML line into a list of Tag objects and text strings.

    Text parts are entity-decoded (numeric entities plus &lt;/&gt;/&amp;)
    so that the column offsets used by insertTag() count actual
    characters, not entity source characters."""
    def process(token):
        if token[0] == '<':
            return Tag(token)
        else:
            def replace_decimal(match):
                return unichr(int(match.group(1)))
            token = textutils.decode(token)
            token = re_decimal_entity.sub(replace_decimal, token)
            token = token.encode("utf-8")
            # Decode the named entities produced by HTML escaping; &amp;
            # must come last so freshly produced '&' aren't re-examined.
            return token.replace("&lt;", "<").replace("&gt;", ">").replace("&amp;", "&")
    return map(process, filter(None, re_tag.split(line)))

def joinTags(tags):
    """Inverse of splitTags(): re-escape text parts and concatenate."""
    def process(token):
        if token:
            return htmlify(token)
        else:
            return str(token)
    return "".join(map(process, tags))

def insertTag(tags, offset, newTag):
    """Insert markup 'newTag' at text-column 'offset' in a splitTags() list.

    Only text characters advance the column (Tag items are zero-width).
    Splits a text token in two when the offset falls inside it; appends at
    the end when the offset is past all text."""
    newTag = Tag(newTag)
    index = 0
    while index < len(tags):
        tag = tags[index]
        if tag:
            if len(tag) < offset:
                offset -= len(tag)
            else:
                if len(tag) == offset:
                    tags.insert(index + 1, newTag)
                elif not offset:
                    tags.insert(index, newTag)
                else:
                    tags[index:index+1] = tag[:offset], newTag, tag[offset:]
                return
        index += 1
    tags.append(newTag)

def lineDiffHTML(ops, old, new):
    """Wrap intra-line diff regions of two highlighted lines in markup.

    'ops' is a list of operation strings: "rA-B=C-D" (replaced), "dA-B"
    (deleted), "iC-D" (inserted); A-B are old-side and C-D new-side
    character columns.  Returns (old_html, new_html) with each affected
    range wrapped in <i class='r'/'d'/'i'>...</i>.

    NOTE: the marker strings were corrupted to "" in this copy of the
    file ("" % oldType raises TypeError); restored to the <i> markers the
    splitTags/joinTags machinery is built to carry."""
    old = splitTags(old)
    new = splitTags(new)
    for op in ops:
        old_lines = None
        oldType = None
        new_lines = None
        newType = None
        if op[0] == 'r':
            old_lines, new_lines = op[1:].split('=')
            oldType = 'r'
            newType = 'r'
        elif op[0] == 'd':
            old_lines = op[1:]
            oldType = 'd'
        else:
            new_lines = op[1:]
            newType = 'i'
        if old_lines:
            start, end = old_lines.split('-')
            insertTag(old, int(start), "<i class='%s'>" % oldType)
            insertTag(old, int(end), "</i>")
        if new_lines:
            start, end = new_lines.split('-')
            insertTag(new, int(start), "<i class='%s'>" % newType)
            insertTag(new, int(end), "</i>")
    return joinTags(old), joinTags(new)
import diff
import diff.parse

import gitutils

# Maximum number of lines allowed between a two chunks to consider
# them near enough to warrant inclusion.
PROXIMITY_LIMIT = 3

def filterChunks(log, file_on_branch, file_in_merge, path):
    """Return the chunks from 'file_in_merge' that are relevant to a merge.

    A chunk of file_in_merge (parent..merge) is kept when it is within
    PROXIMITY_LIMIT lines of, or intersects, some chunk of file_on_branch
    (mergebase..parent).  Both chunk lists are assumed sorted by offset;
    the two iterators are advanced in lock-step like a merge join.

    'log' and 'path' are currently unused (kept for interface stability)."""
    result = []

    on_branch = iter(file_on_branch.chunks)
    in_merge = iter(file_in_merge.chunks)

    try:
        chunk_on_branch = on_branch.next()
        chunk_in_merge = in_merge.next()

        while True:
            if chunk_in_merge.delete_offset - chunk_on_branch.insertEnd() > PROXIMITY_LIMIT:
                # Chunk_on_branch is significantly earlier than chunk_in_merge,
                # so continue to next one from on_branch.
                chunk_on_branch = on_branch.next()
            elif chunk_on_branch.insert_offset - chunk_in_merge.deleteEnd() > PROXIMITY_LIMIT:
                chunk_in_merge = in_merge.next()
            else:
                # The two chunks are near each other, or intersects, so include
                # the one from the merge
                result.append(chunk_in_merge)
                # ... and continue to the next one from in_merge.
                chunk_in_merge = in_merge.next()
    except StopIteration:
        # We ran out of chunks from either on_branch or in_merge.  If we ran out
        # of chunks from in_merge, we obviously don't need to include any more
        # chunks in the result.  If we ran out of chunks from on_branch, we
        # don't either, because the previous one was apparently significant
        # earlier than the current, and thus all following, chunks from in_merge.
        pass

    return result

def parseMergeDifferences(db, repository, commit):
    """Compute per-parent relevant differences for a merge commit.

    Returns { parent_sha1: [diff.File, ...] }.  For the parent that equals
    the merge base, the full diff is used; for other parents only files and
    chunks near changes made on the merged branch are kept (see
    filterChunks)."""
    mergebase = gitutils.Commit.fromSHA1(db, repository, repository.mergebase(commit, db=db))
    result = {}
    log = [""]

    for parent_sha1 in commit.parents:
        parent = gitutils.Commit.fromSHA1(db, repository, parent_sha1)

        # NOTE(review): compares a sha1 string to a Commit object; presumably
        # gitutils.Commit implements equality against sha1 strings -- confirm.
        if parent_sha1 == mergebase:
            result[parent_sha1] = diff.parse.parseDifferences(repository, from_commit=parent, to_commit=commit)[parent_sha1]
        else:
            # Only files touched both on the branch (mergebase..parent) and by
            # the merge (parent..commit) can contain interesting overlap.
            paths_on_branch = set(repository.run('diff', '--name-only', "%s..%s" % (mergebase, parent)).splitlines())
            paths_in_merge = set(repository.run('diff', '--name-only', "%s..%s" % (parent, commit)).splitlines())

            filter_paths = paths_on_branch & paths_in_merge

            on_branch = diff.parse.parseDifferences(repository, from_commit=mergebase, to_commit=parent, filter_paths=filter_paths)[mergebase.sha1]
            in_merge = diff.parse.parseDifferences(repository, from_commit=parent, to_commit=commit, filter_paths=filter_paths)[parent_sha1]

            files_on_branch = dict([(file.path, file) for file in on_branch])
            result_for_parent = []

            for file_in_merge in in_merge:
                file_on_branch = files_on_branch.get(file_in_merge.path)
                if file_on_branch:
                    filtered_chunks = filterChunks(log, file_on_branch, file_in_merge, file_in_merge.path)
                    if filtered_chunks:
                        result_for_parent.append(diff.File(id=None,
                                                           repository=repository,
                                                           path=file_in_merge.path,
                                                           old_sha1=file_in_merge.old_sha1,
                                                           new_sha1=file_in_merge.new_sha1,
                                                           old_mode=file_in_merge.old_mode,
                                                           new_mode=file_in_merge.new_mode,
                                                           chunks=filtered_chunks))

            result[parent_sha1] = result_for_parent

    return result
import configuration

import subprocess
import gitutils
import diff
import re
import itertools

import analyze

# SHA-1 of git's well-known empty tree object; used to diff a root commit
# against "nothing".
GIT_EMPTY_TREE = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"

def demunge(path):
    """Undo git's C-style quoting of a path (backslash escapes and
    three-digit octal byte escapes)."""
    special = { "a": "\a", "b": "\b", "t": "\t", "n": "\n", "v": "\v",
                "f": "\f", "r": "\r", '"': '"', "'": "'", "/": "/",
                "\\": "\\" }
    def unescape(match):
        escaped = match.group(1)
        if escaped in special:
            return special[escaped]
        else:
            return chr(int(escaped, 8))
    return re.sub(r"""\\([abtnvfr"'/\\]|[0-9]{3})""", unescape, path)

def splitlines(source):
    """Split blob data into lines, dropping a single trailing newline so a
    terminated file does not yield a spurious empty last line."""
    if not source:
        return source
    elif source[-1] == "\n":
        return source[:-1].split("\n")
    else:
        return source.split("\n")

def detectWhiteSpaceChanges(file, old_lines, begin_old_offset, end_old_offset, old_ending_linebreak,
                            new_lines, begin_new_offset, end_new_offset, new_ending_linebreak):
    """Scan paired old/new line ranges (equal length) for lines that differ
    and append whitespace-only diff.Chunk objects to file.chunks.

    Because the diff was produced with --ignore-space-change, any pairwise
    difference found here is by construction a whitespace change.  A change
    in trailing-linebreak state of the last line also counts."""
    start_old_offset = None

    for old_offset, new_offset in itertools.izip(xrange(begin_old_offset, end_old_offset),
                                                 xrange(begin_new_offset, end_new_offset)):
        if old_lines[old_offset - 1] != new_lines[new_offset - 1] or (old_offset == len(old_lines) and old_ending_linebreak != new_ending_linebreak):
            if start_old_offset is None:
                start_old_offset = old_offset
                start_new_offset = new_offset
        elif start_old_offset is not None:
            assert old_offset - start_old_offset != 0 and new_offset - start_new_offset != 0
            chunk = diff.Chunk(start_old_offset, old_offset - start_old_offset,
                               start_new_offset, new_offset - start_new_offset,
                               is_whitespace=True)
            # NOTE(review): redundant -- is_whitespace=True was already
            # passed to the constructor above.
            chunk.is_whitespace = True
            file.chunks.append(chunk)
            start_old_offset = None

    # Flush a run still open at the end of the range.
    if start_old_offset is not None:
        assert end_old_offset - start_old_offset != 0 and end_new_offset - start_new_offset != 0
        chunk = diff.Chunk(start_old_offset, end_old_offset - start_old_offset,
                           start_new_offset, end_new_offset - start_new_offset,
                           is_whitespace=True)
        chunk.is_whitespace = True
        file.chunks.append(chunk)

ws = re.compile("\\s+")

def isWhitespaceChange(deleted_line, inserted_line):
    """True when the two lines differ only in amount/kind of whitespace
    (after stripping the ends and collapsing internal runs)."""
    return ws.sub(" ", deleted_line.strip()) == ws.sub(" ", inserted_line.strip())

def createChunks(delete_offset, deleted_lines, insert_offset, inserted_lines):
    """Build diff.Chunk objects for one hunk's pending deletions/insertions.

    Whitespace-only line pairs at the head and tail of the hunk are split
    off into separate is_whitespace chunks; the middle (if any) becomes a
    regular chunk.  NOTE: mutates the passed-in line lists (del slices)."""
    ws_before = None
    ws_after = None

    if deleted_lines and inserted_lines and isWhitespaceChange(deleted_lines[0], inserted_lines[0]):
        ws_lines = 1
        max_lines = min(len(deleted_lines), len(inserted_lines))
        while ws_lines < max_lines and isWhitespaceChange(deleted_lines[ws_lines], inserted_lines[ws_lines]):
            ws_lines += 1
        ws_before = diff.Chunk(delete_offset, ws_lines,
                               insert_offset, ws_lines,
                               is_whitespace=True)
        delete_offset += ws_lines
        del deleted_lines[:ws_lines]
        insert_offset += ws_lines
        del inserted_lines[:ws_lines]

    if deleted_lines and inserted_lines and isWhitespaceChange(deleted_lines[-1], inserted_lines[-1]):
        ws_lines = 1
        max_lines = min(len(deleted_lines), len(inserted_lines))
        while ws_lines < max_lines and isWhitespaceChange(deleted_lines[-(ws_lines + 1)], inserted_lines[-(ws_lines + 1)]):
            ws_lines += 1
        ws_after = diff.Chunk(delete_offset + len(deleted_lines) - ws_lines, ws_lines,
                              insert_offset + len(inserted_lines) - ws_lines, ws_lines,
                              is_whitespace=True)
        del deleted_lines[-ws_lines:]
        del inserted_lines[-ws_lines:]

    if deleted_lines or inserted_lines:
        chunks = [diff.Chunk(delete_offset, len(deleted_lines),
                             insert_offset, len(inserted_lines))]
    else:
        chunks = []

    if ws_before:
        chunks.insert(0, ws_before)
    if ws_after:
        chunks.append(ws_after)

    return chunks

def mergeChunks(file):
    """Merge adjacent chunks separated only by "ignorable" lines (per
    analyze.re_ignore) and trim identical trailing lines off each chunk."""
    if len(file.chunks) > 1:
        file.loadOldLines(False)
        old_lines = file.oldLines(False)
        file.loadNewLines(False)
        new_lines = file.newLines(False)

        merged = []
        previous = file.chunks[0]

        for chunk in file.chunks[1:]:
            assert previous.delete_count != 0 or previous.insert_count != 0
            offset = previous.delete_offset + previous.delete_count
            # If every old line between the two chunks is ignorable, the
            # while loop completes and the 'else' merges them.
            while offset < chunk.delete_offset:
                if not analyze.re_ignore.match(old_lines[offset - 1]):
                    break
                offset += 1
            else:
                previous.delete_count = (chunk.delete_offset - previous.delete_offset) + chunk.delete_count
                previous.insert_count = (chunk.insert_offset - previous.insert_offset) + chunk.insert_count
                assert previous.delete_count != 0 or previous.insert_count != 0
                previous.is_whitespace = previous.is_whitespace and chunk.is_whitespace
                continue
            merged.append(previous)
            previous = chunk

        merged.append(previous)

        # Shrink chunks whose last old/new lines are identical.
        for chunk in merged:
            while chunk.insert_count > 1 and chunk.delete_count > 1:
                insert_last = new_lines[chunk.insert_offset + chunk.insert_count - 2]
                delete_last = old_lines[chunk.delete_offset + chunk.delete_count - 2]
                if insert_last == delete_last:
                    chunk.delete_count -= 1
                    chunk.insert_count -= 1
                else:
                    break

        file.clean()
        file.chunks = merged

def parseDifferences(repository, commit=None, from_commit=None, to_commit=None,
                     filter_paths=None, selected_path=None, simple=False):
    """parseDifferences(repository, [commit] | [from_commit, to_commit][, selected_path]) =>
         dict(parent_sha1 => [diff.File, ...]) (if selected_path is None)
         diff.File (if selected_path is not None)

    Runs 'git diff'/'git show' with --ignore-space-change (unless 'simple')
    and parses the patch output into diff.File/diff.Chunk objects,
    re-detecting whitespace-only changes separately."""

    options = []

    if to_commit:
        command = 'diff'
        if from_commit:
            what = [from_commit.sha1 + ".." + to_commit.sha1]
        else:
            what = [GIT_EMPTY_TREE, to_commit.sha1]
    elif not commit.parents:
        # Root commit.
        command = "show"
        what = [commit.sha1]
        options.append("--pretty=format:")
    else:
        assert len(commit.parents) == 1
        command = 'diff'
        what = [commit.parents[0] + '..' + commit.sha1]

    # Full file list up-front, so files whose non-whitespace diff is empty
    # (paths - included, below) can still be checked for whitespace changes.
    if filter_paths is None and selected_path is None and not simple:
        names = repository.run(command, *(options + ["--name-only"] + what))
        paths = set(filter(None, map(str.strip, names.splitlines())))
    else:
        paths = set()

    if not simple:
        options.append('--ignore-space-change')

    options.extend(what)

    if filter_paths is not None:
        options.append('--')
        options.extend(filter_paths)
    elif selected_path is not None:
        options.append('--')
        options.append(selected_path)

    stdout = repository.run(command, '--full-index', '--unified=1', '--patience', *options)
    selected_file = None

    re_chunk = re.compile('^@@ -(\\d+)(?:,\\d+)? \\+(\\d+)(?:,\\d+)? @@')
    re_binary = re.compile('^Binary files (["\']?)(?:a/(.+)\\1|/dev/null) and (["\']?)(?:b/(.+)\\3|/dev/null) differ')
    re_diff = re.compile("^diff --git ([\"']?)a/(.*)\\1 ([\"']?)b/(.*)\\3$")
    re_old_path = re.compile("--- ([\"']?)a/(.*?)\\1\t?$")
    re_new_path = re.compile("\\+\\+\\+ ([\"']?)b/(.*?)\\1\t?$")

    def isplitlines(text):
        # Lazy splitlines: the diff output can be very large.
        start = 0
        length = len(text)
        while start < length:
            try:
                end = text.index('\n', start)
                yield text[start:end]
                start = end + 1
            except ValueError:
                yield text[start:]
                break

    lines = isplitlines(stdout)

    included = set()
    files = []
    files_by_path = {}

    def addFile(new_file):
        assert new_file.path not in files_by_path, "duplicate path: %s" % new_file.path
        files.append(new_file)
        files_by_path[new_file.path] = new_file
        included.add(new_file.path)

    old_mode = None
    new_mode = None

    try:
        line = lines.next()
        names = None

        while True:
            old_mode = None
            new_mode = None

            # Scan to the 'index ..' line that marks the beginning
            # of the differences in one file.
            while not line.startswith("index "):
                match = re_diff.match(line)
                if match:
                    if old_mode is not None and new_mode is not None:
                        # Previous entry was a pure mode change (no
                        # 'index' line followed).
                        addFile(diff.File(None, names[0], None, None, repository,
                                          old_mode=old_mode, new_mode=new_mode,
                                          chunks=[]))
                    old_name = match.group(2)
                    if match.group(1):
                        old_name = demunge(old_name)
                    new_name = match.group(4)
                    if match.group(3):
                        new_name = demunge(new_name)
                    names = (old_name, new_name)
                elif line.startswith("old mode "):
                    old_mode = line[9:]
                elif line.startswith("new mode "):
                    new_mode = line[9:]
                elif line.startswith("new file mode "):
                    new_mode = line[14:]
                elif line.startswith("deleted file mode "):
                    old_mode = line[18:]
                line = lines.next()

            is_submodule = False

            # "index <old>..<new>[ <mode>]" -- a mode of 160000 marks a
            # submodule (gitlink) entry.
            try:
                sha1range, mode = line[6:].split(' ', 2)
                if mode == "160000":
                    is_submodule = True
                    old_mode = new_mode = mode
                old_sha1, new_sha1 = sha1range.split('..')
            except:
                old_sha1, new_sha1 = line[6:].split(' ', 1)[0].split("..")

            try:
                line = lines.next()
            except StopIteration:
                if old_mode is not None or new_mode is not None:
                    assert names[0] == names[1]
                    addFile(diff.File(None, names[0], old_sha1, new_sha1, repository,
                                      old_mode=old_mode, new_mode=new_mode,
                                      chunks=[diff.Chunk(0, 0, 0, 0)]))
                    old_mode = new_mode = None
                raise

            if re_diff.match(line):
                # Next 'diff --git' immediately after 'index': no textual
                # hunks (whitespace-only change or added/removed file).
                new_file = diff.File(None, names[0] or names[1], old_sha1, new_sha1, repository,
                                     old_mode=old_mode, new_mode=new_mode)
                if '0' * 40 == old_sha1 or '0' * 40 == new_sha1:
                    new_file.chunks = [diff.Chunk(0, 0, 0, 0)]
                else:
                    new_file.loadOldLines()
                    new_file.loadNewLines()
                    new_file.chunks = []
                    detectWhiteSpaceChanges(new_file,
                                            new_file.oldLines(False), 1, new_file.oldCount() + 1, True,
                                            new_file.newLines(False), 1, new_file.newCount() + 1, True)
                addFile(new_file)
                # NOTE(review): False where every other reset uses None --
                # presumably equivalent here (only truthiness/None-ness is
                # tested); confirm.
                old_mode = new_mode = False
                continue

            binary = re_binary.match(line)
            if binary:
                path = (binary.group(2) or binary.group(4)).strip()
                if path in files_by_path:
                    new_file = files_by_path[path]
                    if old_sha1 != '0' * 40:
                        assert new_file.old_sha1 == '0' * 40
                        new_file.old_sha1 = old_sha1
                        new_file.old_mode = old_mode
                    if new_sha1 != '0' * 40:
                        assert new_file.new_sha1 == '0' * 40
                        new_file.new_sha1 = new_sha1
                        new_file.new_mode = new_mode
                    new_file.chunks = [diff.Chunk(0, 0, 0, 0)]
                else:
                    new_file = diff.File(None, path, old_sha1, new_sha1, repository,
                                         old_mode=old_mode, new_mode=new_mode)
                    new_file.chunks = [diff.Chunk(0, 0, 0, 0)]
                    addFile(new_file)
                continue

            match = re_old_path.match(line)
            if match:
                old_path = match.group(2)
                if match.group(1):
                    old_path = demunge(old_path)
            else:
                old_path = None

            line = lines.next()

            match = re_new_path.match(line)
            if match:
                new_path = match.group(2)
                if match.group(1):
                    new_path = demunge(new_path)
            else:
                new_path = None

            # /dev/null on a side corresponds to the null sha1.
            assert (old_path is None) == ('0' * 40 == old_sha1)
            assert (new_path is None) == ('0' * 40 == new_sha1)

            if old_path:
                path = old_path
            else:
                path = new_path

            if is_submodule:
                # Submodule diff is a fixed three-line hunk; represent it as
                # a single chunk with a canned interline analysis.
                line = lines.next()
                match = re_chunk.match(line)
                assert match, repr(line)
                assert match.group(1) == match.group(2) == "1", repr(match.groups())
                line = lines.next()
                assert line == "-Subproject commit %s" % old_sha1, repr(line)
                line = lines.next()
                assert line == "+Subproject commit %s" % new_sha1, repr(line)
                new_file = diff.File(None, path, old_sha1, new_sha1, repository,
                                     old_mode=old_mode, new_mode=new_mode,
                                     chunks=[diff.Chunk(1, 1, 1, 1, analysis="0=0:r18-58=18-58")])
                if path not in files_by_path:
                    addFile(new_file)
                old_mode = new_mode = None
                continue

            try:
                line = lines.next()

                delete_offset = 1
                deleted_lines = []

                insert_offset = 1
                inserted_lines = []

                if old_path and new_path and not simple:
                    old_lines = splitlines(repository.fetch(old_sha1).data)
                    new_lines = splitlines(repository.fetch(new_sha1).data)
                else:
                    old_lines = None
                    new_lines = None

                if path in files_by_path:
                    new_file = files_by_path[path]
                    if old_sha1 != '0' * 40:
                        assert new_file.old_sha1 == '0' * 40
                        new_file.old_sha1 = old_sha1
                        new_file.old_mode = old_mode
                    if new_sha1 != '0' * 40:
                        assert new_file.new_sha1 == '0' * 40
                        new_file.new_sha1 = new_sha1
                        new_file.new_mode = new_mode
                    new_file.chunks = []
                else:
                    new_file = diff.File(None, path, old_sha1, new_sha1, repository,
                                         old_mode=old_mode, new_mode=new_mode, chunks=[])

                old_mode = new_mode = None

                if selected_path is not None and selected_path == path:
                    selected_file = new_file

                if path not in files_by_path:
                    addFile(new_file)

                # Track the context span between hunks in which whitespace
                # changes (hidden by --ignore-space-change) are re-detected.
                previous_delete_offset = 1
                previous_insert_offset = 1

                while True:
                    match = re_chunk.match(line)
                    if not match:
                        break

                    groups = match.groups()

                    delete_offset = int(groups[0])
                    deleted_lines = []

                    insert_offset = int(groups[1])
                    inserted_lines = []

                    while True:
                        line = lines.next()
                        if line == "\\ No newline at end of file":
                            continue
                        if line[0] not in (' ', '-', '+'):
                            break
                        if line[0] != ' ' and previous_delete_offset is not None and old_lines and new_lines and not simple:
                            detectWhiteSpaceChanges(files[-1],
                                                    old_lines, previous_delete_offset, delete_offset, True,
                                                    new_lines, previous_insert_offset, insert_offset, True)
                            previous_delete_offset = None
                        if line[0] == ' ' and previous_delete_offset is None:
                            previous_delete_offset = delete_offset
                            previous_insert_offset = insert_offset
                        type = line[0]
                        if type == '-':
                            delete_offset += 1
                            deleted_lines.append(line[1:])
                        elif type == '+':
                            insert_offset += 1
                            inserted_lines.append(line[1:])
                        else:
                            # Context line: flush any accumulated chunk.
                            if deleted_lines or inserted_lines:
                                chunks = createChunks(delete_offset - len(deleted_lines), deleted_lines,
                                                      insert_offset - len(inserted_lines), inserted_lines)
                                files[-1].chunks.extend(chunks)
                                deleted_lines = []
                                inserted_lines = []
                            delete_offset += 1
                            insert_offset += 1

                    if deleted_lines or inserted_lines:
                        chunks = createChunks(delete_offset - len(deleted_lines), deleted_lines,
                                              insert_offset - len(inserted_lines), inserted_lines)
                        files[-1].chunks.extend(chunks)
                        deleted_lines = []
                        inserted_lines = []

                if previous_delete_offset is not None and old_lines and new_lines and not simple:
                    detectWhiteSpaceChanges(files[-1],
                                            old_lines, previous_delete_offset, len(old_lines) + 1, True,
                                            new_lines, previous_insert_offset, len(new_lines) + 1, True)
                    previous_delete_offset = None
            except StopIteration:
                # Output ended mid-file: flush what we have, then re-raise so
                # the outer handler finalizes.
                if deleted_lines or inserted_lines:
                    chunks = createChunks(delete_offset - len(deleted_lines), deleted_lines,
                                          insert_offset - len(inserted_lines), inserted_lines)
                    files[-1].chunks.extend(chunks)
                    deleted_lines = []
                    inserted_lines = []
                if previous_delete_offset is not None and old_lines and new_lines and not simple:
                    detectWhiteSpaceChanges(files[-1],
                                            old_lines, previous_delete_offset, len(old_lines) + 1, True,
                                            new_lines, previous_insert_offset, len(new_lines) + 1, True)
                raise
    except StopIteration:
        if old_mode is not None and new_mode is not None:
            # Trailing pure mode change.
            assert names[0] == names[1]
            addFile(diff.File(None, names[0], None, None, repository,
                              old_mode=old_mode, new_mode=new_mode, chunks=[]))

    # Paths that changed but produced no hunks under --ignore-space-change:
    # diff them individually to pick up their whitespace-only changes.
    for path in (paths - included):
        lines = isplitlines(repository.run(command, '--full-index', '--unified=1', *(what + ['--', path])))
        try:
            line = lines.next()
            while not line.startswith("index "):
                line = lines.next()
            try:
                sha1range, mode = line[6:].split(' ')
                if mode == "160000":
                    continue
                old_sha1, new_sha1 = sha1range.split("..")
            except:
                old_sha1, new_sha1 = line[6:].split(' ', 1)[0].split("..")
            if old_sha1 == '0' * 40 or new_sha1 == '0' * 40:
                # Added or removed empty file.
                continue
            addFile(diff.File(None, path, old_sha1, new_sha1, repository, chunks=[]))
            old_data = repository.fetch(old_sha1).data
            old_lines = splitlines(old_data)
            new_data = repository.fetch(new_sha1).data
            new_lines = splitlines(new_data)
            assert len(old_lines) == len(new_lines), "%s:%d != %s:%d" % (old_sha1, len(old_lines), new_sha1, len(new_lines))
            def endsWithLinebreak(data):
                return data and data[-1] in "\n\r"
            detectWhiteSpaceChanges(files[-1],
                                    old_lines, 1, len(old_lines) + 1, endsWithLinebreak(old_data),
                                    new_lines, 1, len(new_lines) + 1, endsWithLinebreak(new_data))
        except StopIteration:
            pass

    if not simple:
        for file in files:
            mergeChunks(file)

    if to_commit:
        if selected_path is not None:
            return selected_file
        elif from_commit:
            return { from_commit.sha1: files }
        else:
            return { None: files }
    elif not commit.parents:
        return { None: files }
    else:
        return { commit.parents[0]: files }
from difflib import SequenceMatcher
from itertools import izip, repeat

import diff
import diff.html

def expandWithContext(chunks, old_lines, new_lines, context_lines, highlight=True):
    """Expand diff.Chunk objects into diff.MacroChunk objects with context.

    Chunks whose surrounding context would overlap (gap minus twice
    context_lines below 3) are grouped into a single macro chunk.  Lines
    are classified per the chunk's analysis (MODIFIED/DELETED/INSERTED)
    or, without analysis, as REPLACED/DELETED/INSERTED."""
    if not chunks:
        return []

    groups = []
    group = []

    chunks = iter(chunks)

    # Group chunks that are close enough to share context.
    try:
        previousChunk = chunks.next()
        group.append(previousChunk)

        while True:
            nextChunk = chunks.next()
            distance = nextChunk.delete_offset - (previousChunk.delete_offset + previousChunk.delete_count)
            gap_between = distance - 2 * context_lines
            if gap_between >= 3:
                groups.append(group)
                group = []
            group.append(nextChunk)
            previousChunk = nextChunk
    except StopIteration:
        pass

    groups.append(group)

    macro_chunks = []

    for group in groups:
        delete_offset = max(1, group[0].delete_offset - context_lines)
        insert_offset = max(1, group[0].insert_offset - context_lines)

        lines = []

        for chunk in group:
            # Context (and inter-chunk) lines up to the chunk start.
            while delete_offset < chunk.delete_offset:
                lines.append(diff.Line(diff.Line.CONTEXT,
                                       delete_offset, old_lines[delete_offset - 1],
                                       insert_offset, new_lines[insert_offset - 1]))
                delete_offset += 1
                insert_offset += 1

            if chunk.analysis:
                mappings = chunk.analysis.split(';')

                for mapping in mappings:
                    if ':' in mapping:
                        mapped_lines, ops = mapping.split(':')
                    else:
                        mapped_lines = mapping
                        ops = None

                    delete_line, insert_line = mapped_lines.split('=')
                    delete_line = chunk.delete_offset + int(delete_line)
                    insert_line = chunk.insert_offset + int(insert_line)

                    while delete_offset < delete_line and insert_offset < insert_line:
                        lines.append(diff.Line(diff.Line.MODIFIED,
                                               delete_offset, old_lines[delete_offset - 1],
                                               insert_offset, new_lines[insert_offset - 1],
                                               is_whitespace=chunk.is_whitespace))
                        delete_offset += 1
                        insert_offset += 1

                    while delete_offset < delete_line:
                        lines.append(diff.Line(diff.Line.DELETED,
                                               delete_offset, old_lines[delete_offset - 1],
                                               insert_offset, None))
                        delete_offset += 1

                    while insert_offset < insert_line:
                        lines.append(diff.Line(diff.Line.INSERTED,
                                               delete_offset, None,
                                               insert_offset, new_lines[insert_offset - 1]))
                        insert_offset += 1

                    deleted_line = old_lines[delete_offset - 1]
                    inserted_line = new_lines[insert_offset - 1]

                    # NOTE(review): 'ops' is the raw comma-separated string
                    # here, while ContextLines.getMacroChunks splits it into
                    # a list before calling lineDiffHTML (which iterates its
                    # argument per operation) -- looks like a latent bug on
                    # this path; confirm whether it is ever hit.
                    if highlight and ops:
                        deleted_line, inserted_line = diff.html.lineDiffHTML(ops, deleted_line, inserted_line)

                    lines.append(diff.Line(diff.Line.MODIFIED,
                                           delete_offset, deleted_line,
                                           insert_offset, inserted_line,
                                           is_whitespace=chunk.is_whitespace))
                    delete_offset += 1
                    insert_offset += 1

            # Remainder of the chunk past the last mapping.
            deleteStop = chunk.delete_offset + chunk.delete_count
            insertStop = chunk.insert_offset + chunk.insert_count

            while delete_offset < deleteStop and insert_offset < insertStop:
                lines.append(diff.Line(diff.Line.REPLACED,
                                       delete_offset, old_lines[delete_offset - 1],
                                       insert_offset, new_lines[insert_offset - 1],
                                       is_whitespace=chunk.is_whitespace))
                delete_offset += 1
                insert_offset += 1

            while delete_offset < deleteStop:
                lines.append(diff.Line(diff.Line.DELETED,
                                       delete_offset, old_lines[delete_offset - 1],
                                       insert_offset, None))
                delete_offset += 1

            while insert_offset < insertStop:
                lines.append(diff.Line(diff.Line.INSERTED,
                                       delete_offset, None,
                                       insert_offset, new_lines[insert_offset - 1]))
                insert_offset += 1

        # Trailing context, bounded by the old file's length only.
        # NOTE(review): new_lines is indexed unguarded here; presumably safe
        # because old/new tails run in lock-step after the last chunk --
        # confirm for files whose new side is shorter.
        deleteStop = min(len(old_lines) + 1, delete_offset + context_lines)

        while delete_offset < deleteStop:
            lines.append(diff.Line(diff.Line.CONTEXT,
                                   delete_offset, old_lines[delete_offset - 1],
                                   insert_offset, new_lines[insert_offset - 1]))
            delete_offset += 1
            insert_offset += 1

        macro_chunks.append(diff.MacroChunk(group, lines))

    return macro_chunks
def getExtensionPath(author_name, extension_name):
    """Return the filesystem path of the named author's extension.

    Thin convenience wrapper: builds an extension.Extension and asks it
    for its path."""
    ext = extension.Extension(author_name, extension_name)
    return ext.getPath()

def getExtensionInstallPath(sha1):
    """Return the directory into which the extension version identified
    by 'sha1' is installed.

    The import is local to avoid importing configuration at module load."""
    import configuration
    install_dir = configuration.extensions.INSTALL_DIR
    return os.path.join(install_dir, sha1)
import errno import os import socket import subprocess import time import configuration import auth import dbutils from extensions.extension import Extension from textutils import json_encode, json_decode def startProcess(flavor): executable = configuration.extensions.FLAVORS[flavor]["executable"] library = configuration.extensions.FLAVORS[flavor]["library"] process = subprocess.Popen( [executable, "critic-launcher.js"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=library) return process class ProcessException(Exception): pass class ProcessError(ProcessException): def __init__(self, message): super(ProcessError, self).__init__( "Failed to execute process: %s" % message) class ProcessTimeout(ProcessException): def __init__(self, timeout): super(ProcessTimeout, self).__init__( "Process timed out after %d seconds" % timeout) class ProcessFailure(ProcessException): def __init__(self, returncode, stderr): super(ProcessFailure, self).__init__( "Process returned non-zero exit status %d" % returncode) self.returncode = returncode self.stderr = stderr def executeProcess(db, manifest, role_name, script, function, extension_id, user_id, argv, timeout, stdin=None, rlimit_rss=256): # If |user_id| is not the same as |db.user|, then one user's access of the # system is triggering an extension on behalf of another user. This will # for instance happen when one user is adding changes to a review, # triggering an extension filter hook set up by another user. # # In this case, we need to check that the other user can access the # extension. # # If |user_id| is the same as |db.user|, we need to use |db.profiles|, which # may contain a profile associated with an access token that was used to # authenticate the user. 
if user_id != db.user.id: user = dbutils.User.fromId(db, user_id) authentication_labels = auth.DATABASE.getAuthenticationLabels(user) profiles = [auth.AccessControlProfile.forUser( db, user, authentication_labels)] else: authentication_labels = db.authentication_labels profiles = db.profiles extension = Extension.fromId(db, extension_id) if not auth.AccessControlProfile.isAllowedExtension( profiles, "execute", extension): raise auth.AccessDenied("Access denied to extension: execute %s" % extension.getKey()) flavor = manifest.flavor if manifest.flavor not in configuration.extensions.FLAVORS: flavor = configuration.extensions.DEFAULT_FLAVOR stdin_data = "%s\n" % json_encode({ "library_path": configuration.extensions.FLAVORS[flavor]["library"], "rlimit": { "rss": rlimit_rss }, "hostname": configuration.base.HOSTNAME, "dbname": configuration.database.PARAMETERS["database"], "dbuser": configuration.database.PARAMETERS["user"], "git": configuration.executables.GIT, "python": configuration.executables.PYTHON, "python_path": "%s:%s" % (configuration.paths.CONFIG_DIR, configuration.paths.INSTALL_DIR), "repository_work_copy_path": configuration.extensions.WORKCOPY_DIR, "changeset_address": configuration.services.CHANGESET["address"], "branchtracker_pid_path": configuration.services.BRANCHTRACKER["pidfile_path"], "maildelivery_pid_path": configuration.services.MAILDELIVERY["pidfile_path"], "is_development": configuration.debug.IS_DEVELOPMENT, "extension_path": manifest.path, "extension_id": extension_id, "user_id": user_id, "authentication_labels": list(authentication_labels), "role": role_name, "script_path": script, "fn": function, "argv": argv }) if stdin is not None: stdin_data += stdin # Double the timeout. Timeouts are primarily handled by the extension runner # service, which returns an error response on timeout. This deadline here is # thus mostly to catch the extension runner service itself timing out. 
deadline = time.time() + timeout * 2 try: connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) connection.settimeout(max(0, deadline - time.time())) connection.connect(configuration.services.EXTENSIONRUNNER["address"]) connection.sendall(json_encode({ "stdin": stdin_data, "flavor": flavor, "timeout": timeout })) connection.shutdown(socket.SHUT_WR) data = "" while True: connection.settimeout(max(0, deadline - time.time())) try: received = connection.recv(4096) except socket.error as error: if error.errno == errno.EINTR: continue raise if not received: break data += received connection.close() except socket.timeout as error: raise ProcessTimeout(timeout) except socket.error as error: raise ProcessError("failed to read response: %s" % error) try: data = json_decode(data) except ValueError as error: raise ProcessError("failed to decode response: %s" % error) if data["status"] == "timeout": raise ProcessTimeout(timeout) if data["status"] == "error": raise ProcessError(data["error"]) if data["returncode"] != 0: raise ProcessFailure(data["returncode"], data["stderr"]) return data["stdout"] ================================================ FILE: src/extensions/extension.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import os import subprocess import pwd import dbutils import htmlutils from extensions.manifest import Manifest, ManifestError from extensions import getExtensionInstallPath class ExtensionError(Exception): def __init__(self, message, extension=None): super(ExtensionError, self).__init__(message) self.extension = extension class Extension(object): def __init__(self, author_name, extension_name): import configuration if os.path.sep in extension_name: raise ExtensionError( "Invalid extension name: %s" % extension_name) self.__author_name = author_name self.__extension_name = extension_name self.__manifest = {} if author_name: try: user_home_dir = pwd.getpwnam(author_name).pw_dir except KeyError: raise ExtensionError( "No such system user: %s" % author_name, extension=self) self.__path = os.path.join( user_home_dir, configuration.extensions.USER_EXTENSIONS_DIR, extension_name) else: self.__path = os.path.join( configuration.extensions.SYSTEM_EXTENSIONS_DIR, extension_name) if not (os.path.isdir(self.__path) and os.access(self.__path, os.R_OK | os.X_OK)): raise ExtensionError( "Invalid or inaccessible extension dir: %s" % self.__path, extension=self) def isSystemExtension(self): return self.__author_name is None def getAuthorName(self): if self.isSystemExtension(): return None return self.__author_name def getName(self): return self.__extension_name def getTitle(self, db, html=False): if html: title = "%s" % htmlutils.htmlify(self.getName()) else: title = self.getName() if not self.isSystemExtension(): author = self.getAuthor(db) try: manifest = self.getManifest() except ManifestError: # Can't access information from the manifest, so assume "yes". 
is_author = True else: is_author = manifest.isAuthor(db, author) if is_author: title += " by " else: title += " hosted by " if html: title += htmlutils.htmlify(author.fullname) else: title += author.fullname return title def getKey(self): if self.isSystemExtension(): return self.__extension_name else: return "%s/%s" % (self.__author_name, self.__extension_name) def getPath(self): return self.__path def getVersions(self): import configuration try: output = subprocess.check_output( [configuration.executables.GIT, "for-each-ref", "--format=%(refname)", "refs/heads/version/"], stderr=subprocess.STDOUT, cwd=self.__path) except subprocess.CalledProcessError: # Not a git repository => no versions (except "Live"). return [] versions = [] for ref in output.splitlines(): if ref.startswith("refs/heads/version/"): versions.append(ref[len("refs/heads/version/"):]) return versions def getManifest(self, version=None, sha1=None): import configuration path = self.__path source = None if sha1 is not None: if sha1 in self.__manifest: return self.__manifest[sha1] install_path = getExtensionInstallPath(sha1) with open(os.path.join(install_path, "MANIFEST")) as manifest_file: source = manifest_file.read() path = "" % sha1[:8] elif version in self.__manifest: return self.__manifest[version] if source is None and version is not None: source = subprocess.check_output( [configuration.executables.GIT, "cat-file", "blob", "version/%s:MANIFEST" % version], cwd=self.__path) manifest = Manifest(path, source) manifest.read() if sha1 is not None: self.__manifest[sha1] = manifest else: self.__manifest[version] = manifest return manifest def getCurrentSHA1(self, version): import configuration return subprocess.check_output( [configuration.executables.GIT, "rev-parse", "--verify", "version/%s" % version], cwd=self.__path).strip() def prepareVersionSnapshot(self, version): import configuration sha1 = self.getCurrentSHA1(version) if not os.path.isdir(getExtensionInstallPath(sha1)): git_archive = 
subprocess.Popen( [configuration.executables.GIT, "archive", "--format=tar", "--prefix=%s/" % sha1, sha1], stdout=subprocess.PIPE, cwd=self.__path) subprocess.check_call( [configuration.executables.TAR, "x"], stdin=git_archive.stdout, cwd=configuration.extensions.INSTALL_DIR) return sha1 def getAuthor(self, db): if self.isSystemExtension(): return None return dbutils.User.fromName(db, self.getAuthorName()) def getExtensionID(self, db, create=False): cursor = db.cursor() if self.isSystemExtension(): author_id = None cursor.execute("""SELECT extensions.id FROM extensions WHERE extensions.author IS NULL AND extensions.name=%s""", (self.__extension_name,)) else: author_id = self.getAuthor(db).id cursor.execute("""SELECT extensions.id FROM extensions WHERE extensions.author=%s AND extensions.name=%s""", (author_id, self.__extension_name)) row = cursor.fetchone() if row: return row[0] elif create: cursor.execute("""INSERT INTO extensions (author, name) VALUES (%s, %s) RETURNING id""", (author_id, self.__extension_name)) return cursor.fetchone()[0] else: return None def getInstalledVersion(self, db, user): """Return (sha1, name) of the version currently installed by the user. If the user doesn't have the extension installed, return (False, False). If the user has the "live" version installed, return (None, None). """ extension_id = self.getExtensionID(db) if extension_id is None: # An extension is recorded in the database and assigned an ID the # first time it's installed. If it doesn't have an ID, then no user # can have any version of it installed. 
return (False, False) cursor = db.cursor() if user is None: cursor.execute("""SELECT extensionversions.sha1, extensionversions.name FROM extensioninstalls LEFT OUTER JOIN extensionversions ON (extensionversions.id=extensioninstalls.version) WHERE extensioninstalls.uid IS NULL AND extensioninstalls.extension=%s""", (extension_id,)) else: cursor.execute("""SELECT extensionversions.sha1, extensionversions.name FROM extensioninstalls LEFT OUTER JOIN extensionversions ON (extensionversions.id=extensioninstalls.version) WHERE extensioninstalls.uid=%s AND extensioninstalls.extension=%s""", (user.id, extension_id)) row = cursor.fetchone() if row: return row else: return (False, False) @staticmethod def fromId(db, extension_id): cursor = db.cursor() cursor.execute("""SELECT users.name, extensions.name FROM extensions LEFT OUTER JOIN users ON (users.id=extensions.author) WHERE extensions.id=%s""", (extension_id,)) row = cursor.fetchone() if not row: raise ExtensionError("Invalid extension id: %d" % extension_id) author_name, extension_name = row return Extension(author_name, extension_name) @staticmethod def getInstalls(db, user): """ Return a list of extension installs in effect for the specified user If 'user' is None, all universal extension installs are listed. Each install is returned as a tuple containing four elements, the extension id, the version id, the version SHA-1 and a boolean which is true if the install is universal. For a LIVE version, the version id and the version SHA-1 are None. The list of installs is ordered by precedence; most significant install first, least significant install last. 
""" cursor = db.cursor() cursor.execute("""SELECT extensioninstalls.id, extensioninstalls.extension, extensionversions.id, extensionversions.sha1, extensioninstalls.uid IS NULL FROM extensioninstalls LEFT OUTER JOIN extensionversions ON (extensionversions.id=extensioninstalls.version) WHERE uid=%s OR uid IS NULL ORDER BY uid NULLS FIRST""", (user.id if user else None,)) install_per_extension = {} # Since we ordered by 'uid' with nulls ("universal installs") first, # we'll overwrite universal installs with per-user installs, as intended. for install_id, extension_id, version_id, version_sha1, is_universal in cursor: install_per_extension[extension_id] = (install_id, version_id, version_sha1, is_universal) installs = [(install_id, extension_id, version_id, version_sha1, is_universal) for extension_id, (install_id, version_id, version_sha1, is_universal) in install_per_extension.items()] # Sort installs by install id, higher first. This means a later install # takes precedence over an earlier, if they both handle the same path. installs.sort(reverse=True) # Drop the install_id; it is not relevant past this point. 
return [(extension_id, version_id, version_sha1, is_universal) for _, extension_id, version_id, version_sha1, is_universal in installs] @staticmethod def getUpdatedExtensions(db, user): cursor = db.cursor() cursor.execute("""SELECT users.name, users.fullname, extensions.name, extensionversions.name, extensionversions.sha1 FROM users JOIN extensions ON (extensions.author=users.id) JOIN extensionversions ON (extensionversions.extension=extensions.id) JOIN extensioninstalls ON (extensioninstalls.version=extensionversions.id) WHERE extensioninstalls.uid=%s""", (user.id,)) updated = [] for author_name, author_fullname, extension_name, version_name, version_sha1 in cursor: extension = Extension(author_name, extension_name) if extension.getCurrentSHA1(version_name) != version_sha1: updated.append((author_fullname, extension_name)) return updated @staticmethod def find(db): import configuration def search(user_name, search_dir): if not (os.path.isdir(search_dir) and os.access(search_dir, os.X_OK | os.R_OK)): return [] extensions = [] for extension_name in os.listdir(search_dir): extension_dir = os.path.join(search_dir, extension_name) manifest_path = os.path.join(extension_dir, "MANIFEST") if not (os.path.isdir(extension_dir) and os.access(extension_dir, os.X_OK | os.R_OK) and os.access(manifest_path, os.R_OK)): continue extensions.append(Extension(user_name, extension_name)) return extensions extensions = search(None, configuration.extensions.SYSTEM_EXTENSIONS_DIR) if configuration.extensions.USER_EXTENSIONS_DIR: cursor = db.cursor() cursor.execute("SELECT name FROM users WHERE status!='retired' ORDER BY name ASC") for (user_name,) in cursor: try: pwd_entry = pwd.getpwnam(user_name) except KeyError: continue user_dir = os.path.join( pwd_entry.pw_dir, configuration.extensions.USER_EXTENSIONS_DIR) extensions.extend(search(user_name, user_dir)) return extensions ================================================ FILE: src/extensions/installation.py 
================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import auth from extensions.extension import Extension, ExtensionError class InstallationError(Exception): def __init__(self, title, message, is_html=False): self.title = title self.message = message self.is_html = is_html def doInstallExtension(db, user, extension, version): auth.AccessControl.accessExtension(db, "install", extension) is_universal = user is None extension_id = extension.getExtensionID(db, create=True) manifest = extension.getManifest(version) # Detect conflicting extension installs. current_installs = Extension.getInstalls(db, user) for current_extension_id, _, _, current_is_universal in current_installs: # Two installs never conflict if one is universal and one is not. if is_universal != current_is_universal: continue try: current_extension = Extension.fromId(db, current_extension_id) except ExtensionError as error: # Invalid extension => no conflict. # # But if there would be a conflict, should the installed extension # later become valid again, then delete the installation. if extension.getName() == error.extension.getName(): doUninstallExtension(db, user, error.extension) continue # Same extension => conflict # # The web UI will typically not let you try to do this; if the extension # is already installed the UI will only let you uninstall or upgrade it. 
# But you never know. Also, there's a UNIQUE constraint in the database # that would prevent this, but with a significantly worse error message, # of course. if extension_id == current_extension_id: raise InstallationError( title="Conflicting install", message=("The extension %s is already " "%sinstalled." % (current_extension.getTitle(db), "universally " if is_universal else "")), is_html=True) # Different extensions, same name => also conflict # # Two extensions with the same name are probably simply two forks of the # same extension, and are very likely to have overlapping and # conflicting functionality. Also, extension resource paths only # contain the extension name as an identifier, and thus will conflict # between the two extensions, even if they are actually completely # unrelated. if extension.getName() == current_extension.getName(): raise InstallationError( title="Conflicting install", message=("The extension %s is already " "%sinstalled, and conflicts with the extension " "%s since they have the same name." 
% (current_extension.getTitle(db), "universally " if is_universal else "", extension.getTitle(db))), is_html=True) cursor = db.cursor() if is_universal: user_id = None else: user_id = user.id if version is not None: sha1 = extension.prepareVersionSnapshot(version) cursor.execute("""SELECT id FROM extensionversions WHERE extension=%s AND name=%s AND sha1=%s""", (extension_id, version, sha1)) row = cursor.fetchone() if row: (version_id,) = row else: cursor.execute("""INSERT INTO extensionversions (extension, name, sha1) VALUES (%s, %s, %s) RETURNING id""", (extension_id, version, sha1)) (version_id,) = cursor.fetchone() for role in manifest.roles: role.install(db, version_id) else: version_id = None cursor.execute("""INSERT INTO extensioninstalls (uid, extension, version) VALUES (%s, %s, %s)""", (user_id, extension_id, version_id)) def doUninstallExtension(db, user, extension): extension_id = extension.getExtensionID(db) if extension_id is None: return cursor = db.cursor() if user is not None: cursor.execute("""DELETE FROM extensioninstalls WHERE uid=%s AND extension=%s""", (user.id, extension_id)) else: cursor.execute("""DELETE FROM extensioninstalls WHERE uid IS NULL AND extension=%s""", (extension_id,)) def getExtension(author_name, extension_name): """Create an Extension object ignoring whether it is valid""" try: return Extension(author_name, extension_name) except ExtensionError as error: if error.extension is None: raise error return error.extension def installExtension(db, user, author_name, extension_name, version): doInstallExtension(db, user, Extension(author_name, extension_name), version) db.commit() def uninstallExtension(db, user, author_name, extension_name, version): doUninstallExtension(db, user, getExtension(author_name, extension_name)) db.commit() def reinstallExtension(db, user, author_name, extension_name, version): doUninstallExtension(db, user, getExtension(author_name, extension_name)) doInstallExtension(db, user, Extension(author_name, 
extension_name), version) db.commit() ================================================ FILE: src/extensions/manifest.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import os import re from textutils import json_decode RE_ROLE_Page = re.compile(r"^\[Page\s+(.*?)\s*\]$", re.IGNORECASE) RE_ROLE_Inject = re.compile(r"^\[Inject\s+(.*?)\s*\]$", re.IGNORECASE) RE_ROLE_ProcessCommits = re.compile(r"^\[ProcessCommits\]$", re.IGNORECASE) RE_ROLE_FilterHook = re.compile(r"^\[FilterHook\s+(.*?)\s*\]$", re.IGNORECASE) RE_ROLE_Scheduled = re.compile(r"^\[Scheduled\]$", re.IGNORECASE) class ManifestError(Exception): pass class Role: def __init__(self, location): self.script = None self.function = None self.description = None self.location = location def install(self, db, version_id): cursor = db.cursor() cursor.execute("""INSERT INTO extensionroles (version, script, function) VALUES (%s, %s, %s) RETURNING id""", (version_id, self.script, self.function)) return cursor.fetchone()[0] def process(self, name, value, location): if name == "description": self.description = value return True elif name == "script": self.script = value return True elif name == "function": self.function = value return True else: return False def check(self): if not self.description: raise ManifestError("%s: manifest error: expected role description" % self.location) elif not 
self.script: raise ManifestError("%s: manifest error: expected role script" % self.location) elif not self.function: raise ManifestError("%s: manifest error: expected role function" % self.location) class URLRole(Role): def __init__(self, location, pattern): Role.__init__(self, location) self.pattern = pattern self.regexp = "^" + re.sub(r"[\|\[\](){}^$+]", lambda match: '\\' + match.group(0), pattern.replace('.', '\\.').replace('?', '.').replace('*', '.*')) + "$" def check(self): Role.check(self) if self.pattern.startswith("/"): raise ManifestError("%s: manifest error: path pattern should not start with a '/'" % self.location) class PageRole(URLRole): def __init__(self, location, pattern): URLRole.__init__(self, location, pattern) def name(self): return "Page" def install(self, db, version_id): role_id = Role.install(self, db, version_id) cursor = db.cursor() cursor.execute("""INSERT INTO extensionpageroles (role, path) VALUES (%s, %s)""", (role_id, self.regexp)) return role_id class InjectRole(URLRole): def __init__(self, location, pattern): URLRole.__init__(self, location, pattern) def name(self): return "Inject" def process(self, name, value, location): if Role.process(self, name, value, location): return True if name == "cached": # Ignored for compatibility with extensions that use it. 
return True return False def install(self, db, version_id): role_id = Role.install(self, db, version_id) cursor = db.cursor() cursor.execute("""INSERT INTO extensioninjectroles (role, path) VALUES (%s, %s)""", (role_id, self.regexp)) return role_id class ProcessCommitsRole(Role): def __init__(self, location): Role.__init__(self, location) def name(self): return "ProcessCommits" def install(self, db, version_id): role_id = Role.install(self, db, version_id) cursor = db.cursor() cursor.execute("""INSERT INTO extensionprocesscommitsroles (role) VALUES (%s)""", (role_id,)) return role_id class FilterHookRole(Role): def __init__(self, location, name): Role.__init__(self, location) self.name = name self.title = name self.data_description = None def name(self): return "FilterHook" def process(self, name, value, location): if Role.process(self, name, value, location): return True if name == "title": self.title = value return True if name == "datadescription": self.data_description = value return True return False def check(self): Role.check(self) if not re.match("^[a-z0-9_]+$", self.name, re.IGNORECASE): raise ManifestError("%s: manifest error: invalid filter hook name: " "should contain only ASCII letters and numbers " "and underscores" % self.location) def install(self, db, version_id): role_id = Role.install(self, db, version_id) cursor = db.cursor() cursor.execute("""INSERT INTO extensionfilterhookroles (role, name, title, role_description, data_description) VALUES (%s, %s, %s, %s, %s)""", (role_id, self.name, self.title, self.description, self.data_description)) return role_id class ScheduledRole(Role): def __init__(self, location): Role.__init__(self, location) self.frequency = None self.at = None def name(self): return "Scheduled" def install(self, db, version_id): role_id = Role.install(self, db, version_id) cursor = db.cursor() cursor.execute("""INSERT INTO extensionscheduledroles (role, frequency, at) VALUES (%s, %s, %s)""", (role_id, self.frequency, self.at)) 
return role_id def process(self, name, value, location): if Role.process(self, name, value, location): return True elif name == "frequency": if value in ("monthly", "weekly", "daily", "hourly"): self.frequency = value.lower() else: raise ManifestError("%s: invalid frequency: must be one of 'monthly', 'weekly', 'daily' and 'hourly'" % location) elif name == "at": self.at = value.lower() else: return False return True def check(self): Role.check(self) if not self.frequency: raise ManifestError("%s: manifest error: expected role parameter 'frequency'" % self.location) if not self.at: raise ManifestError("%s: manifest error: expected role parameter 'at'" % self.location) if self.frequency == "monthly": match = re.match("(\d+) (\d{2}):(\d{2})$", self.at) if match: date = int(match.group(1).lstrip("0")) hour = int(match.group(2).lstrip("0")) minute = int(match.group(3).lstrip("0")) if (1 <= date <= 31) and (0 <= hour <= 23) and (0 <= minute <= 59): return raise ManifestError("invalid at specification for monthly trigger, must be 'D HH:MM' (D = day in month; 1-31), is '%s'" % self.at) elif self.frequency == "weekly": match = re.match("(?:mon(?:day)?|tue(?:sday)?|wed(?:nesday)?|thu(?:rsday)?|fri(?:day)?|sat(?:urday)?|sun(?:day)?) 
(\d{2}):(\d{2})$", self.at) if match: hour = int(match.group(1).lstrip("0")) minute = int(match.group(2).lstrip("0")) if (0 <= hour <= 23) and (0 <= minute <= 59): return raise ManifestError("invalid at specification for weekly trigger, must be 'WEEKDAY HH:MM' (WEEKDAY = mon|tue|wed|thu|fri|sat|sun), is '%s'" % self.at) elif self.frequency == "daily": match = re.match("(\d{2}):(\d{2})$", self.at) if match: hour = int(match.group(1).lstrip("0")) minute = int(match.group(2).lstrip("0")) if (0 <= hour <= 23) and (0 <= minute <= 59): return raise ManifestError("invalid at specification for daily trigger, must be 'HH:MM'") elif self.frequency == "hourly": match = re.match("(\d{2})$", self.at) if match: minute = int(match.group(1).lstrip("0")) if (0 <= minute <= 59): return raise ManifestError("invalid at specification for hourly trigger, must be 'MM'") class Author(object): def __init__(self, value): match = re.match(r"\s*(.*?)\s+<(.+?)>\s*$", value) if match: self.name, self.email = match.groups() else: self.name = value.strip() self.email = None class Manifest(object): def __init__(self, path, source=None): import configuration self.path = path self.source = source self.authors = [] self.description = None self.flavor = configuration.extensions.DEFAULT_FLAVOR self.roles = [] self.status = None self.hidden = False def isAuthor(self, db, user): for author in self.authors: if author.name in (user.name, user.fullname) \ or user.hasGitEmail(db, author.email): return True return False def getAuthors(self): return self.authors def read(self): import configuration path = os.path.join(self.path, "MANIFEST") if self.source: lines = self.source.splitlines() else: try: lines = open(path).readlines() except IOError: raise ManifestError("%s: file not found" % path) lines = map(str.strip, lines) def process(value): value = value.strip() if value[0] == '"' == value[-1]: return json_decode(value) else: return value role = None for index, line in enumerate(lines): if not line or 
line.lstrip().startswith("#"): continue location = "%s:%d" % (path, index + 1) if not role: try: name, value = line.split("=", 1) if name.strip().lower() == "author": for value in process(value).split(","): self.authors.append(Author(value)) continue elif name.strip().lower() == "description": self.description = process(value) continue elif name.strip().lower() == "flavor": self.flavor = process(value) if self.flavor not in configuration.extensions.FLAVORS: raise ManifestError("%s: manifest error: unsupported 'flavor', supported values are: %s" % (location, ", ".join(map(repr, configuration.extensions.FLAVORS.keys())))) continue elif name.strip().lower() == "hidden": value = process(value).lower() if value in ("true", "yes"): self.hidden = True elif value not in ("false", "no"): raise ManifestError("%s: manifest error: valid values for 'hidden' are 'true'/'yes' and 'false'/'no'" % location) continue except: pass if not self.authors: raise ManifestError("%s: manifest error: expected extension author" % location) elif not self.description: raise ManifestError("%s: manifest error: expected extension description" % location) if role: if "=" in line: name, value = line.split("=", 1) if role.process(name.strip().lower(), process(value), location): continue role.check() self.roles.append(role) match = RE_ROLE_Page.match(line) if match: role = PageRole(location, match.group(1)) continue match = RE_ROLE_Inject.match(line) if match: role = InjectRole(location, match.group(1)) continue match = RE_ROLE_ProcessCommits.match(line) if match: role = ProcessCommitsRole(location) continue match = RE_ROLE_FilterHook.match(line) if match: role = FilterHookRole(location, match.group(1)) continue match = RE_ROLE_Scheduled.match(line) if match: role = ScheduledRole(location) continue raise ManifestError("%s: manifest error: unexpected line: %r" % (location, line)) if not self.authors: raise ManifestError("%s: manifest error: expected extension author" % path) elif not self.description: 
raise ManifestError("%s: manifest error: expected extension description" % path) if role: role.check() self.roles.append(role) if not self.roles: raise ManifestError("%s: manifest error: no roles defined" % path) @staticmethod def load(extension_path): manifest = Manifest(extension_path) manifest.read() return manifest ================================================ FILE: src/extensions/resource.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import os
import errno

from extensions import getExtensionPath, getExtensionInstallPath

def get(req, db, user, path):
    """Serve a static resource belonging to an installed extension.

    `path` has the form "<extension-name>/<resource-path>".  Returns a
    (content_type, data) pair, or (None, None) when the extension is not
    installed for `user` or the resource file is missing/unreadable.
    `req` is unused here; it is part of the common handler signature.
    """
    # Local import, kept as in the surrounding code.
    import configuration

    extension_name, resource_path = path.split("/", 1)

    cursor = db.cursor()
    # Prefer the user's own install over a universal (uid IS NULL) one:
    # "ORDER BY extensioninstalls.uid ASC NULLS LAST LIMIT 1" picks the
    # user-specific row first when both exist.
    cursor.execute("""SELECT users.name, extensionversions.sha1
                        FROM extensions
                        JOIN extensioninstalls ON (extensioninstalls.extension=extensions.id)
             LEFT OUTER JOIN extensionversions ON (extensionversions.id=extensioninstalls.version)
             LEFT OUTER JOIN users ON (users.id=extensions.author)
                       WHERE extensions.name=%s
                         AND (extensioninstalls.uid=%s OR extensioninstalls.uid IS NULL)
                    ORDER BY extensioninstalls.uid ASC NULLS LAST
                       LIMIT 1""",
                   (extension_name, user.id))

    row = cursor.fetchone()

    if not row:
        return None, None

    author_name, version_sha1 = row

    # A NULL version SHA-1 means the "live" version in the author's own
    # extension directory; otherwise use the installed snapshot.
    if version_sha1 is None:
        extension_path = getExtensionPath(author_name, extension_name)
    else:
        extension_path = getExtensionInstallPath(version_sha1)

    resource_path = os.path.join(extension_path, "resources", resource_path)

    def guessContentType(name):
        # Try successively shorter compound extensions ("tar.gz", then
        # "gz") until one matches a configured MIME type.
        extension_parts = name.split(".")[1:]
        while extension_parts:
            extension = ".".join(extension_parts)
            mimetype = configuration.mimetypes.MIMETYPES.get(extension)
            if mimetype:
                return mimetype
            extension_parts = extension_parts[1:]
        else:
            return "application/octet-stream"

    try:
        with open(resource_path) as resource_file:
            resource = resource_file.read()
    except IOError as error:
        # A missing or unreadable file is reported as "no such resource";
        # anything else is unexpected and propagated.
        if error.errno in (errno.ENOENT, errno.EACCES):
            return None, None
        raise
    else:
        return guessContentType(os.path.basename(resource_path)), resource

================================================
FILE: src/extensions/role/__init__.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import page
import inject
import filterhook
import processcommits

================================================
FILE: src/extensions/role/filterhook.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import re
import os
import signal
import traceback

import configuration
import dbutils
import gitutils
import mailutils
import htmlutils

from extensions.extension import Extension, ExtensionError
from extensions.manifest import Manifest, ManifestError, FilterHookRole
from extensions.execute import ProcessException, ProcessFailure, executeProcess

def signalExtensionTasksService():
    """Wake the background extension tasks service by sending it SIGHUP.

    Best-effort: any failure (missing/stale pid-file, dead process, no
    permission) is logged and otherwise ignored.
    """
    try:
        with open(configuration.services.EXTENSIONTASKS["pidfile_path"]) as pidfile:
            pid = int(pidfile.read().strip())
            os.kill(pid, signal.SIGHUP)
    except Exception:
        # Print traceback to stderr. Might end up in web server's error log,
        # where it has a chance to be noticed.
        traceback.print_exc()

def listFilterHooks(db, user):
    """Return the filter hooks available to `user`.

    Result: list of (extension, manifest, roles) tuples, where `roles`
    holds the extension's FilterHookRole objects sorted by title; the
    outer list is sorted by extension key.  Broken or inaccessible
    extensions and extensions without filter hook roles are skipped.
    """
    cursor = db.cursor()

    installs = Extension.getInstalls(db, user)

    filterhooks = []

    for extension_id, version_id, version_sha1, is_universal in installs:
        if version_id is not None:
            # Installed snapshot: the database records which roles each
            # version has, so check there before loading the manifest.
            cursor.execute("""SELECT 1
                                FROM extensionroles
                                JOIN extensionfilterhookroles ON (role=id)
                               WHERE version=%s""",
                           (version_id,))

            if not cursor.fetchone():
                continue

            extension = Extension.fromId(db, extension_id)
            manifest = extension.getManifest(sha1=version_sha1)
        else:
            # "Live" install: consult the on-disk MANIFEST.
            try:
                extension = Extension.fromId(db, extension_id)
            except ExtensionError:
                # If the author/hosting user no longer exists, or the extension
                # directory no longer exists or is inaccessible, ignore the
                # extension.
                continue

            try:
                manifest = Manifest.load(extension.getPath())
            except ManifestError:
                # If the MANIFEST is missing or invalid, we can't know whether
                # the extension has any filter hook roles, so assume it doesn't
                # and ignore it.
                continue

        if not any(isinstance(role, FilterHookRole)
                   for role in manifest.roles):
            continue

        filterhooks.append((extension, manifest, sorted(
            (role for role in manifest.roles
             if isinstance(role, FilterHookRole)),
            key=lambda role: role.title)))

    return sorted(filterhooks,
                  key=lambda (extension, manifest, roles): extension.getKey())

def getFilterHookRole(db, filter_id):
    """Resolve the FilterHookRole a stored hook filter refers to.

    Returns None (implicitly) when the filter owner's installed version
    of the extension no longer defines a hook with the stored name, or
    when the extension is not installed at all.
    """
    cursor = db.cursor()

    cursor.execute("""SELECT extension, uid, name
                        FROM extensionhookfilters
                       WHERE id=%s""",
                   (filter_id,))

    extension_id, user_id, filterhook_name = cursor.fetchone()

    extension = Extension.fromId(db, extension_id)
    user = dbutils.User.fromId(db, user_id)

    installed_sha1, _ = extension.getInstalledVersion(db, user)

    # False signals "not installed" (None would mean the live version).
    if installed_sha1 is False:
        return

    manifest = extension.getManifest(sha1=installed_sha1)

    for role in manifest.roles:
        if isinstance(role, FilterHookRole) and role.name == filterhook_name:
            return role

def queueFilterHookEvent(db, filter_id, review, user, commits, file_ids):
    """Record a filter hook event for asynchronous processing.

    Inserts the event plus its associated commits and files, and arranges
    for the extension tasks service to be signalled once the enclosing
    database transaction commits.
    """
    cursor = db.cursor()
    cursor.execute("""SELECT data
                        FROM extensionhookfilters
                       WHERE id=%s""",
                   (filter_id,))

    data, = cursor.fetchone()

    cursor.execute("""INSERT INTO extensionfilterhookevents
                                    (filter, review, uid, data)
                           VALUES (%s, %s, %s, %s)
                        RETURNING id""",
                   (filter_id, review.id, user.id, data))

    event_id, = cursor.fetchone()

    cursor.executemany("""INSERT INTO extensionfilterhookcommits
                                        (event, commit)
                               VALUES (%s, %s)""",
                       [(event_id, commit.getId(db)) for commit in commits])

    cursor.executemany("""INSERT INTO extensionfilterhookfiles
                                        (event, file)
                               VALUES (%s, %s)""",
                       [(event_id, file_id) for file_id in file_ids])

    def transactionCallback(event):
        # Only poke the service when the transaction actually committed.
        if event == "commit":
            signalExtensionTasksService()

    db.registerTransactionCallback(transactionCallback)

def processFilterHookEvent(db, event_id, logfn):
    """Execute the extension script for one queued filter hook event.

    Invalid events (extension no longer installed for the filter's owner,
    or installed version lacking the named filter hook role) are dropped
    silently; the caller deletes processed events.  On process failure an
    error report is mailed to the filter owner plus the extension author
    (or the administrators when there is no author).
    """
    cursor = db.cursor()
    cursor.execute("""SELECT filters.extension, filters.uid, filters.path,
                             filters.name, events.review, events.uid,
                             events.data
                        FROM extensionfilterhookevents AS events
                        JOIN extensionhookfilters AS filters ON (filters.id=events.filter)
                       WHERE events.id=%s""",
                   (event_id,))

    # Note:
    # - filter_user_id / filter_user represent the user whose filter was
    #   triggered.
    # - user_id / user represent the user that added commits and thereby
    #   triggered the filter.
    (extension_id, filter_user_id, filter_path, filterhook_name,
     review_id, user_id, filter_data) = cursor.fetchone()

    extension = Extension.fromId(db, extension_id)
    filter_user = dbutils.User.fromId(db, filter_user_id)

    installed_sha1, _ = extension.getInstalledVersion(db, filter_user)

    if installed_sha1 is False:
        # Invalid event (user doesn't have extension installed); do nothing.
        # The event will be deleted by the caller.
        return

    manifest = extension.getManifest(sha1=installed_sha1)

    for role in manifest.roles:
        if isinstance(role, FilterHookRole) and role.name == filterhook_name:
            break
    else:
        # Invalid event (installed version of extension doesn't have the named
        # filter hook role); do nothing. The event will be deleted by the
        # caller.
        return

    cursor.execute("""SELECT commit
                        FROM extensionfilterhookcommits
                       WHERE event=%s""",
                   (event_id,))

    commit_ids = [commit_id for (commit_id,) in cursor]

    cursor.execute("""SELECT file
                        FROM extensionfilterhookfiles
                       WHERE event=%s""",
                   (event_id,))

    file_ids = [file_id for (file_id,) in cursor]

    # JavaScript expression evaluated in the extension sandbox to build
    # the argument list for the filter hook function.  Whitespace is
    # collapsed below, so layout here is for readability only.
    argv = """
 (function () {
 var review = new critic.Review(%(review_id)d);
 var user = new critic.User(%(user_id)d);
 var repository = review.repository;
 var commits = new critic.CommitSet(
 %(commit_ids)r.map(
 function (commit_id) {
 return repository.getCommit(commit_id);
 }));
 var files = %(file_ids)r.map(
 function (file_id) {
 return critic.File.find(file_id);
 });
 return [%(filter_data)s, review, user, commits, files];
 })()
 """ % { "filter_data": htmlutils.jsify(filter_data),
         "review_id": review_id,
         "user_id": user_id,
         "commit_ids": commit_ids,
         "file_ids": file_ids }

    argv = re.sub("[ \n]+", " ", argv.strip())

    logfn("argv=%r" % argv)
    logfn("script=%r" % role.script)
    logfn("function=%r" % role.function)

    try:
        executeProcess(
            db, manifest, "filterhook", role.script, role.function,
            extension_id, filter_user_id, argv,
            configuration.extensions.LONG_TIMEOUT)
    except ProcessException as error:
        review = dbutils.Review.fromId(db, review_id)

        recipients = set([filter_user])

        author = extension.getAuthor(db)
        if author is None:
            # System extension with no author: notify administrators.
            recipients.update(dbutils.User.withRole(db, "administrator"))
        else:
            recipients.add(author)

        body = """\
An error occurred while processing an extension hook filter event!

Filter details:

  Extension: %(extension.title)s
  Filter hook: %(role.title)s
  Repository: %(repository.name)s
  Path: %(filter.path)s
  Data: %(filter.data)s

Event details:

  Review: r/%(review.id)d "%(review.summary)s"
  Commits: %(commits)s

Error details:

  Error: %(error.message)s
  Output:%(error.output)s

-- critic"""

        commits = (gitutils.Commit.fromId(db, review.repository, commit_id)
                   for commit_id in commit_ids)
        commits_text = "\n ".join(
            ('%s "%s"' % (commit.sha1[:8], commit.niceSummary())
             for commit in commits))

        if isinstance(error, ProcessFailure):
            error_output = "\n\n " + "\n ".join(error.stderr.splitlines())
        else:
            error_output = " %s" % error.message

        body = body % { "extension.title": extension.getTitle(db),
                        "role.title": role.title,
                        "repository.name": review.repository.name,
                        "filter.path": filter_path,
                        "filter.data": htmlutils.jsify(filter_data),
                        "review.id": review.id,
                        "review.summary": review.summary,
                        "commits": commits_text,
                        "error.message": error.message,
                        "error.output": error_output }

        mailutils.sendMessage(
            recipients=list(recipients),
            subject="Failed: " + role.title,
            body=body)

================================================
FILE: src/extensions/role/inject.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import re
import urlparse

import configuration

from auth import AccessDenied
from htmlutils import jsify
from request import decodeURIComponent
from textutils import json_decode, json_encode

from extensions import getExtensionInstallPath
from extensions.extension import Extension, ExtensionError
from extensions.execute import ProcessTimeout, ProcessFailure, executeProcess
from extensions.manifest import Manifest, ManifestError, InjectRole

class InjectError(Exception):
    # Raised when an extension's inject hook emits invalid output; caught
    # locally in execute() and reported as an HTML comment.
    pass

class InjectIgnored(Exception):
    # Raised to skip an extension silently (e.g. on access denial).
    pass

def processLine(paths, line):
    """Parse and validate one line of an inject hook's output.

    Each line is "<command> <json-value>", where command is one of
    "link", "script", "stylesheet" or "preference".  Returns a
    (command, value) tuple with `value` normalized; raises InjectError
    on any malformed input.  `paths` is the request path; the
    "preference" command is only allowed when it contains "config".
    """
    try:
        command, value = line.split(" ", 1)
    except ValueError:
        raise InjectError("Invalid line in output: %r" % line)

    if command not in ("link", "script", "stylesheet", "preference"):
        raise InjectError("Invalid command: %r" % command)

    value = value.strip()

    try:
        value = json_decode(value)
    except ValueError:
        raise InjectError("Invalid JSON: %r" % value)

    def is_string(value):
        return isinstance(value, basestring)

    if command in ("script", "stylesheet") and not is_string(value):
        raise InjectError("Invalid value for %r: %r (expected string)"
                          % (command, value))
    elif command == "link":
        if isinstance(value, dict):
            if "label" not in value or not is_string(value["label"]):
                raise InjectError("Invalid value for %r: %r (expected attribute 'label' of type string)"
                                  % (command, value))
            elif "url" not in value or not is_string(value["url"]) or value["url"] is None:
                raise InjectError("Invalid value for %r: %r (expected attribute 'url' of type string or null)"
                                  % (command, value))
        # Alternatively support [label, url] (backwards compatibility).
        elif not isinstance(value, list) or len(value) != 2:
            raise InjectError("Invalid value for %r: %r (expected object { \"label\": LABEL, \"url\": URL })"
                              % (command, value))
        elif not is_string(value[0]):
            raise InjectError("Invalid value for %r: %r (expected string at array[0])"
                              % (command, value))
        elif not (is_string(value[1]) or value[1] is None):
            raise InjectError("Invalid value for %r: %r (expected string or null at array[1])"
                              % (command, value))
        else:
            # Normalize the legacy array form into the object form.
            value = { "label": value[0],
                      "url": value[1] }
    elif command == "preference":
        if "config" not in paths:
            raise InjectError("Invalid command: %r only valid on /config page"
                              % command)
        elif not isinstance(value, dict):
            raise InjectError("Invalid value for %r: %r (expected object)"
                              % (command, value))

        for name in ("url", "name", "type", "value", "default", "description"):
            if name not in value:
                raise InjectError("Invalid value for %r: %r (missing attribute %r)"
                                  % (command, value, name))

        preference_url = value["url"]
        preference_name = value["name"]
        preference_type = value["type"]
        preference_value = value["value"]
        preference_default = value["default"]
        preference_description = value["description"]

        if not is_string(preference_url):
            raise InjectError("Invalid value for %r: %r (expected attribute 'url' of type string)"
                              % (command, value))
        elif not is_string(preference_name):
            raise InjectError("Invalid value for %r: %r (expected attribute 'name' of type string)"
                              % (command, value))
        elif not is_string(preference_description):
            raise InjectError("Invalid value for %r: %r (expected attribute 'description' of type string)"
                              % (command, value))

        if is_string(preference_type):
            # Simple scalar preference: value/default must match the type.
            if preference_type not in ("boolean", "integer", "string"):
                raise InjectError("Invalid value for %r: %r (unsupported preference type)"
                                  % (command, value))

            if preference_type == "boolean":
                type_check = lambda value: isinstance(value, bool)
            elif preference_type == "integer":
                type_check = lambda value: isinstance(value, int)
            else:
                type_check = is_string

            if not type_check(preference_value):
                raise InjectError("Invalid value for %r: %r (type mismatch between 'value' and 'type')"
                                  % (command, value))
            if not type_check(preference_default):
                raise InjectError("Invalid value for %r: %r (type mismatch between 'default' and 'type')"
                                  % (command, value))
        else:
            # Enumerated preference: "type" is a list of choice objects,
            # each with string attributes "value" and "title".
            if not isinstance(preference_type, list):
                raise InjectError("Invalid value for %r: %r (invalid 'type', expected string or array)"
                                  % (command, value))

            for index, choice in enumerate(preference_type):
                if not isinstance(choice, dict) \
                        or not isinstance(choice.get("value"), basestring) \
                        or not isinstance(choice.get("title"), basestring):
                    raise InjectError("Invalid value for %r: %r (invalid preference choice: %r)"
                                      % (command, value, choice))

            choices = set([choice["value"] for choice in preference_type])

            if not is_string(preference_value) or preference_value not in choices:
                raise InjectError("Invalid value for %r: %r ('value' not among valid choices)"
                                  % (command, value))
            if not is_string(preference_default) or preference_default not in choices:
                raise InjectError("Invalid value for %r: %r ('default' not among valid choices)"
                                  % (command, value))

    return (command, value)

def execute(db, req, user, document, links, injected, profiler=None):
    """Run every installed extension's inject hooks for the current request.

    Matching extensions may add scripts/stylesheets to `document`,
    add/replace/remove entries in `links`, and (on the config page)
    register preference UI entries in `injected`.  Per-extension errors
    are reported as HTML comments in `document` instead of propagating.
    """
    cursor = db.cursor()

    installs = Extension.getInstalls(db, user)

    def get_matching_path(path_regexp):
        # The request may have been rewritten internally; try both the
        # effective and the original path.
        if re.match(path_regexp, req.path):
            return (req.path, req.query)
        elif re.match(path_regexp, req.original_path):
            return (req.original_path, req.original_query)
        else:
            return None, None

    query = None

    for extension_id, version_id, version_sha1, is_universal in installs:
        handlers = []

        try:
            if version_id is not None:
                # Installed snapshot: roles are recorded in the database.
                cursor.execute("""SELECT script, function, path
                                    FROM extensionroles
                                    JOIN extensioninjectroles ON (role=id)
                                   WHERE version=%s
                                ORDER BY id ASC""",
                               (version_id,))

                for script, function, path_regexp in cursor:
                    path, query = get_matching_path(path_regexp)
                    if path is not None:
                        handlers.append((path, query, script, function))

                if not handlers:
                    continue

                extension = Extension.fromId(db, extension_id)
                manifest = Manifest.load(getExtensionInstallPath(version_sha1))
            else:
                # "Live" install: consult the on-disk MANIFEST.
                extension = Extension.fromId(db, extension_id)
                manifest = Manifest.load(extension.getPath())

                for role in manifest.roles:
                    if isinstance(role, InjectRole):
                        path, query = get_matching_path(role.regexp)
                        if path is not None:
                            handlers.append((path, query, role.script, role.function))

                if not handlers:
                    continue

            def construct_query(query):
                # Build the JS "query" argument: null, or an object with
                # the raw query string plus parsed parameters.
                if not query:
                    return "null"

                params = urlparse.parse_qs(query, keep_blank_values=True)

                for key in params:
                    values = params[key]
                    if len(values) == 1:
                        # Single occurrence: flatten to the value itself,
                        # or null when the value is empty.
                        if not values[0]:
                            params[key] = None
                        else:
                            params[key] = values[0]

                return ("Object.freeze({ raw: %s, params: Object.freeze(%s) })"
                        % (json_encode(query), json_encode(params)))

            preferences = None
            commands = []

            for path, query, script, function in handlers:
                argv = "[%s, %s]" % (jsify(path), construct_query(query))

                try:
                    stdout_data = executeProcess(
                        db, manifest, "inject", script, function,
                        extension_id, user.id, argv,
                        configuration.extensions.SHORT_TIMEOUT)
                except ProcessTimeout as error:
                    raise InjectError(error.message)
                except ProcessFailure as error:
                    if error.returncode < 0:
                        raise InjectError("Process terminated by signal %d."
                                          % -error.returncode)
                    else:
                        raise InjectError("Process returned %d.\n%s"
                                          % (error.returncode, error.stderr))
                except AccessDenied:
                    raise InjectIgnored()

                for line in stdout_data.splitlines():
                    if line.strip():
                        commands.append(processLine(path, line.strip()))

            # Only apply the commands once all handlers succeeded.
            for command, value in commands:
                if command == "script":
                    document.addExternalScript(value, use_static=False, order=1)
                elif command == "stylesheet":
                    document.addExternalStylesheet(value, use_static=False, order=1)
                elif command == "link":
                    # Replace (or, with a null url, remove) an existing
                    # link with the same label; otherwise append.
                    for index, (_, label, _, _) in enumerate(links):
                        if label == value["label"]:
                            if value["url"] is None:
                                del links[index]
                            else:
                                links[index][0] = value["url"]
                            break
                    else:
                        if value["url"] is not None:
                            links.append([value["url"], value["label"], None, None])
                elif command == "preference":
                    if not preferences:
                        preferences = []
                        injected.setdefault("preferences", []).append(
                            (extension.getName(), extension.getAuthor(db),
                             preferences))
                    preferences.append(value)

            if profiler:
                profiler.check("inject: %s" % extension.getKey())
        except ExtensionError as error:
            document.comment("\n\n[%s] Extension error:\nInvalid extension:\n%s\n\n"
                             % (error.extension.getKey(), error.message))
        except ManifestError as error:
            document.comment("\n\n[%s] Extension error:\nInvalid MANIFEST:\n%s\n\n"
                             % (extension.getKey(), error.message))
        except InjectError as error:
            document.comment("\n\n[%s] Extension error:\n%s\n\n"
                             % (extension.getKey(), error.message))
        except InjectIgnored:
            pass

================================================
FILE: src/extensions/role/page.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import time import re import configuration from htmlutils import jsify from request import decodeURIComponent from extensions import getExtensionInstallPath from extensions.extension import Extension, ExtensionError from extensions.execute import ProcessTimeout, ProcessFailure, executeProcess from extensions.manifest import Manifest, ManifestError, PageRole from extensions.utils import renderTutorial def execute(db, req, user): cursor = db.cursor() installs = Extension.getInstalls(db, user) argv = None stdin_data = None for extension_id, version_id, version_sha1, is_universal in installs: handlers = [] if version_id is not None: cursor.execute("""SELECT script, function, path FROM extensionroles JOIN extensionpageroles ON (role=id) WHERE version=%s ORDER BY id ASC""", (version_id,)) for script, function, path_regexp in cursor: if re.match(path_regexp, req.path): handlers.append((script, function)) if not handlers: continue extension_path = getExtensionInstallPath(version_sha1) manifest = Manifest.load(extension_path) else: try: extension = Extension.fromId(db, extension_id) except ExtensionError: # If the author/hosting user no longer exists, or the extension # directory no longer exists or is inaccessible, ignore the # extension. continue try: manifest = Manifest.load(extension.getPath()) except ManifestError: # If the MANIFEST is missing or invalid, we can't know whether # the extension has a page role handling the path, so assume it # doesn't and ignore it. 
continue for role in manifest.roles: if isinstance(role, PageRole) and re.match(role.regexp, req.path): handlers.append((role.script, role.function)) if not handlers: continue if argv is None: def param(raw): parts = raw.split("=", 1) if len(parts) == 1: return "%s: null" % jsify(decodeURIComponent(raw)) else: return "%s: %s" % (jsify(decodeURIComponent(parts[0])), jsify(decodeURIComponent(parts[1]))) if req.query: query = ("Object.freeze({ raw: %s, params: Object.freeze({ %s }) })" % (jsify(req.query), ", ".join(map(param, req.query.split("&"))))) else: query = "null" headers = ("Object.freeze({ %s })" % ", ".join(("%s: %s" % (jsify(name), jsify(value))) for name, value in req.getRequestHeaders().items())) argv = ("[%(method)s, %(path)s, %(query)s, %(headers)s]" % { 'method': jsify(req.method), 'path': jsify(req.path), 'query': query, 'headers': headers }) if req.method == "POST": if stdin_data is None: stdin_data = req.read() for script, function in handlers: before = time.time() try: stdout_data = executeProcess( db, manifest, "page", script, function, extension_id, user.id, argv, configuration.extensions.LONG_TIMEOUT, stdin=stdin_data) except ProcessTimeout as error: req.setStatus(500, "Extension Timeout") return error.message except ProcessFailure as error: req.setStatus(500, "Extension Failure") if error.returncode < 0: return ("Extension failure: terminated by signal %d\n" % -error.returncode) else: return ("Extension failure: returned %d\n%s" % (error.returncode, error.stderr)) after = time.time() status = None headers = {} if not stdout_data: return False while True: try: line, stdout_data = stdout_data.split("\n", 1) except: req.setStatus(500, "Extension Error") return "Extension error: output format error.\n%r\n" % stdout_data if status is None: try: status = int(line.strip()) except: req.setStatus(500, "Extension Error") return "Extension error: first line should contain only a numeric HTTP status code.\n%r\n" % line elif not line: break else: try: 
name, value = line.split(":", 1) except: req.setStatus(500, "Extension Error") return "Extension error: header line should be on 'name: value' format.\n%r\n" % line headers[name.strip()] = value.strip() if status is None: req.setStatus(500, "Extension Error") return "Extension error: first line should contain only a numeric HTTP status code.\n" content_type = "text/plain" for name, value in headers.items(): if name.lower() == "content-type": content_type = value del headers[name] else: headers[name] = value req.setStatus(status) req.setContentType(content_type) for name, value in headers.items(): req.addResponseHeader(name, value) if content_type == "text/tutorial": req.setContentType("text/html") return renderTutorial(db, user, stdout_data) if content_type.startswith("text/html"): stdout_data += "\n\n\n" % (after - before) return stdout_data return False ================================================ FILE: src/extensions/role/processcommits.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import re import configuration import gitutils import log.commitset import changeset.utils from extensions import getExtensionInstallPath from extensions.extension import Extension from extensions.execute import ProcessTimeout, ProcessFailure, executeProcess from extensions.manifest import Manifest, ManifestError, ProcessCommitsRole def execute(db, user, review, all_commits, old_head, new_head, output): cursor = db.cursor() installs = Extension.getInstalls(db, user) data = None for extension_id, version_id, version_sha1, is_universal in installs: handlers = [] extension = Extension.fromId(db, extension_id) if version_id is not None: cursor.execute("""SELECT script, function FROM extensionroles JOIN extensionprocesscommitsroles ON (role=id) WHERE version=%s ORDER BY id ASC""", (version_id,)) handlers.extend(cursor) if not handlers: continue extension_path = getExtensionInstallPath(version_sha1) manifest = Manifest.load(extension_path) else: manifest = Manifest.load(extension.getPath()) for role in manifest.roles: if isinstance(role, ProcessCommitsRole): handlers.append((role.script, role.function)) if not handlers: continue if data is None: commitset = log.commitset.CommitSet(all_commits) assert old_head is None or old_head in commitset.getTails() assert new_head in commitset.getHeads() assert len(commitset.getHeads()) == 1 tails = commitset.getFilteredTails(review.repository) if len(tails) == 1: tail = gitutils.Commit.fromSHA1(db, review.repository, tails.pop()) changeset_id = changeset.utils.createChangeset( db, user, review.repository, from_commit=tail, to_commit=new_head)[0].id changeset_arg = "repository.getChangeset(%d)" % changeset_id else: changeset_arg = "null" commits_arg = "[%s]" % ",".join( [("repository.getCommit(%d)" % commit.getId(db)) for commit in all_commits]) data = { "review_id": review.id, "changeset": changeset_arg, "commits": commits_arg } for script, function in handlers: class Error(Exception): pass def print_header(): header = "%s::%s()" % 
(script, function) print >>output, ("\n[%s] %s\n[%s] %s" % (extension.getName(), header, extension.getName(), "=" * len(header))) try: argv = """ (function () { var review = new critic.Review(%(review_id)d); var repository = review.repository; var changeset = %(changeset)s; var commitset = new critic.CommitSet(%(commits)s); return [review, changeset, commitset]; })() """ % data argv = re.sub("[ \n]+", " ", argv.strip()) try: stdout_data = executeProcess( db, manifest, "processcommits", script, function, extension_id, user.id, argv, configuration.extensions.SHORT_TIMEOUT) except ProcessTimeout as error: raise Error(error.message) except ProcessError as error: if error.returncode < 0: raise Error("Process terminated by signal %d." % -error.returncode) else: raise Error("Process returned %d.\n%s" % (error.returncode, error.stderr)) if stdout_data.strip(): print_header() for line in stdout_data.splitlines(): print >>output, "[%s] %s" % (extension.getName(), line) except Error as error: print_header() print >>output, "[%s] Extension error: %s" % (extension.getName(), error.message) ================================================ FILE: src/extensions/unittest.py ================================================ def independence(): # Simply check that extensions can be imported. This is run in a test # flagged as "local" since we want extensions to be possible to import in # standalone unit tests. # # Nothing in extensions can actually be used, of course, but that's not a # problem; the unit tests simply need to make sure not to depend on that. import extensions print "independence: ok" ================================================ FILE: src/extensions/utils.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import htmlutils
import page.utils
import textformatting

def renderTutorial(db, user, source):
    """Render tutorial markup as a complete HTML page string.

    `source` is the raw tutorial text; the result includes the standard
    page header and a table of contents.
    """
    document = htmlutils.Document()

    document.addExternalStylesheet("resource/tutorial.css")
    document.addExternalScript("resource/tutorial.js")
    document.addInternalStylesheet("div.main table td.text { %s }"
                                   % user.getPreference(db, "style.tutorialFont"))

    html = document.html()
    # NOTE(review): `head` is never read afterwards -- presumably the
    # html.head() call has the side effect of creating the document's
    # <head> element.  Confirm against htmlutils before removing.
    head = html.head()
    body = html.body()

    page.utils.generateHeader(body, db, user)

    table = body.div("main").table("paleyellow", align="center")

    textformatting.renderFormatted(db, user, table, source.splitlines(),
                                   toc=True)

    return str(document)

================================================
FILE: src/gitutils.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# Matches Git "author"/"committer" header values: "Name <email> epoch zone".
# Groups: (1) full name, (2) email address, (3) "epoch utc-offset".
re_author_committer = re.compile("(.*) <(.*)> ([0-9]+ [-+][0-9]+)")

# A full SHA-1 object name is exactly 40 *hexadecimal* digits.  The previous
# character class, [A-Za-z0-9], also accepted non-hex letters (e.g. 'z'), so
# strings that cannot possibly name a Git object passed validation.
re_sha1 = re.compile("^[0-9A-Fa-f]{40}$")
class GitObject:
    """A raw object fetched via 'git cat-file': sha1, type, size and data.

    `data` is None when only the header was fetched (--batch-check).  For
    backwards compatibility the object can also be indexed like the old
    (type, size, data) tuple: [0] => type, [1] => size, [2] => data."""

    def __init__(self, sha1, type, size, data):
        self.sha1 = sha1
        self.type = type
        self.size = size
        self.data = data

    def __getitem__(self, index):
        # Emulate the legacy tuple interface.  Only the exact indices 0-2 are
        # valid; anything else (including negative indices) is an error.
        if index == 0:
            return self.type
        if index == 1:
            return self.size
        if index == 2:
            return self.data
        raise IndexError("GitObject index out of range: %d" % index)
+ os.path.join(configuration.paths.GIT_DIR, "%s") else: import dbutils url_prefix = dbutils.getURLPrefix(db, user) return "%s/%s" % (url_prefix, path) return url_format % (configuration.base.HOSTNAME, path) def enableBlobCache(self): assert self.__db self.__cacheBlobs = True def disableCache(self): self.__cacheDisabled = True def checkAccess(self, db, access_type): import auth assert access_type in ("read", "modify") auth.AccessControl.accessRepository(db, access_type, self) @staticmethod def fromId(db, repository_id, for_modify=False): if repository_id in db.storage["Repository"]: repository = db.storage["Repository"][repository_id] else: cursor = db.cursor() cursor.execute("SELECT parent, name, path FROM repositories WHERE id=%s", (repository_id,)) parent_id, name, path = cursor.fetchone() parent = None if parent_id is None else Repository.fromId(db, parent_id) repository = Repository(db, repository_id=repository_id, parent=parent, name=name, path=path) # Raises auth.AccessDenied if access should not be allowed. 
repository.checkAccess(db, "modify" if for_modify else "read") db.storage["Repository"][repository_id] = repository db.storage["Repository"][repository.name] = repository return repository @staticmethod def fromName(db, name, for_modify=False): if name in db.storage["Repository"]: return db.storage["Repository"][name] else: cursor = db.cursor() cursor.execute("SELECT id FROM repositories WHERE name=%s", (name,)) row = cursor.fetchone() if not row: return None repository_id, = row return Repository.fromId(db, repository_id, for_modify) @staticmethod def fromParameter(db, parameter): try: repository = Repository.fromId(db, int(parameter)) except: repository = Repository.fromName(db, parameter) if repository: return repository else: raise NoSuchRepository(parameter) @staticmethod def fromSHA1(db, sha1): cursor = db.cursor() cursor.execute("SELECT id FROM repositories ORDER BY id ASC") for (repository_id,) in cursor: repository = Repository.fromId(db, repository_id) if repository.iscommit(sha1): return repository raise GitReferenceError( "Couldn't find commit %s in any repository." 
    def stopBatch(self):
        """Terminate the cached 'git cat-file --batch[-check]' helper processes.

        Best-effort cleanup: each helper is killed with SIGKILL and then
        reaped.  Every step deliberately ignores errors, since the process
        may already have exited (or never have started successfully)."""
        if self.__batch:
            # SIGKILL (9): the helper holds no state worth flushing.
            try:
                os.kill(self.__batch.pid, 9)
            except:
                pass
            # Reap the child to avoid leaving a zombie behind.
            try:
                self.__batch.wait()
            except:
                pass
            self.__batch = None
        if self.__batchCheck:
            try:
                os.kill(self.__batchCheck.pid, 9)
            except:
                pass
            try:
                self.__batchCheck.wait()
            except:
                pass
            self.__batchCheck = None
db.cursor() cursor.execute("SELECT id FROM repositories WHERE path=%s", (path,)) row = cursor.fetchone() if row: return Repository.fromId(db, row[0]) else: return None else: return None def fetch(self, sha1, fetchData=True): if self.__db: cache = self.__db.storage["Repository"] cached_object = cache.get("object:" + sha1) if cached_object: self.__db.recordProfiling("fetch: " + cached_object.type + " (cached)", 0) return cached_object before = time.time() if fetchData: self.__startBatch() stdin, stdout = self.__batch.stdin, self.__batch.stdout else: self.__startBatchCheck() stdin, stdout = self.__batchCheck.stdin, self.__batchCheck.stdout try: stdin.write(sha1 + '\n') except: raise GitError("failed when writing to 'git cat-file' stdin: %s" % stdout.read()) line = stdout.readline() if line == ("%s missing\n" % sha1): raise GitReferenceError("%s missing from %s" % (sha1[:8], self.path), sha1=sha1, repository=self) try: sha1, type, size = line.split() except: raise GitError("unexpected output from 'git cat-file --batch': %s" % line) size = int(size) if fetchData: data = stdout.read(size) stdout.read(1) else: data = None git_object = GitObject(sha1, type, size, data) after = time.time() if not self.__cacheDisabled and (type != "blob" or self.__cacheBlobs): cache["object:" + sha1] = git_object if self.__db: self.__db.recordProfiling("fetch: " + type, after - before) return git_object def run(self, command, *arguments, **kwargs): return self.runCustom(self.path, command, *arguments, **kwargs) def runCustom(self, cwd, command, *arguments, **kwargs): argv = [configuration.executables.GIT, command] argv.extend(arguments) stdin_data = kwargs.get("input") if stdin_data is None: stdin = None else: stdin = subprocess.PIPE env = {} env.update(os.environ) env.update(configuration.executables.GIT_ENV) env.update(kwargs.get("env", {})) if "GIT_DIR" in env: del env["GIT_DIR"] git = subprocess.Popen(argv, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env) 
    def mergebase(self, commit_or_commits, db=None):
        """Return the SHA-1 of the merge-base of two or more commits.

        `commit_or_commits` is either a Commit (whose parents are used) or an
        iterable of at least two commits/SHA-1s.  When a database session and
        a Commit are given, the result is cached in the `mergebases` table.

        Raises GitCommandError if 'git merge-base' fails."""
        if db and isinstance(commit_or_commits, Commit):
            cursor = db.cursor()
            cursor.execute("SELECT mergebase FROM mergebases WHERE commit=%s",
                           (commit_or_commits.getId(db),))
            # EAFP: fetchone() returns None on a cache miss, making the
            # subscript below fail; compute the value and insert it into the
            # cache table in that case.
            try:
                return cursor.fetchone()[0]
            except:
                result = self.mergebase(commit_or_commits)
                cursor.execute("INSERT INTO mergebases (commit, mergebase) VALUES (%s, %s)",
                               (commit_or_commits.getId(db), result))
                return result
        # A Commit exposes .parents; anything else is assumed to be an
        # iterable of commits/SHA-1s.
        try:
            sha1s = commit_or_commits.parents
        except:
            sha1s = map(str, commit_or_commits)
        assert len(sha1s) >= 2
        argv = [configuration.executables.GIT, 'merge-base'] + sha1s
        git = subprocess.Popen(argv, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, cwd=self.path)
        stdout, stderr = git.communicate()
        if git.returncode == 0:
            return stdout.strip()
        else:
            cmdline = " ".join(argv)
            output = stderr.strip()
            raise GitCommandError(cmdline, output, self.path)
    def packKeepaliveRefs(self):
        """ Pack the repository's keepalive refs into a single chain

        Folds the individual refs under refs/keepalive/ into a single chain
        of empty-tree commits referenced by refs/internal/keepalive-chain
        (each chain link's second parent is a kept-alive commit), then
        deletes the loose refs.  Returns True on success, False if updating
        the chain failed, and None if there was nothing to pack."""
        def splitRefs(output):
            # Each input line is "<sha1>:<raw committer date>"; return a list
            # of (epoch, sha1, raw-date) tuples, sortable by timestamp.
            return [(int(timestamp.split()[0]), sha1, timestamp)
                    for sha1, _, timestamp
                    in (line.partition(":") for line in output.splitlines())
                    # Skip the root commit, which has summary "Root".
                    if len(sha1) == 40]
        loose_keepalive_refs = splitRefs(
            self.run("for-each-ref", "--sort=committerdate",
                     "--format=%(objectname):%(committerdate:raw)",
                     KEEPALIVE_REF_PREFIX))
        if not loose_keepalive_refs:
            # No loose refs => no need to (re)pack.
            return
        try:
            old_value = self.revparse(KEEPALIVE_REF_CHAIN)
        except GitReferenceError:
            # No chain yet: create one from scratch.
            old_value = "0" * 40
            packed_keepalive_refs = []
        else:
            # Commits already on the chain: their subject line (%s) is the
            # kept-alive SHA-1 (see the commit-tree input below) and their
            # committer date mirrors its timestamp.
            packed_keepalive_refs = splitRefs(
                self.run("log", "--first-parent", "--date=raw",
                         "--format=%s:%cd", old_value))
        keepalive_refs = sorted(packed_keepalive_refs + loose_keepalive_refs)
        env = getGitEnvironment()
        def withDates(env, timestamp):
            # Pin both dates so repacking regenerates identical commit
            # objects for unchanged links.
            env["GIT_AUTHOR_DATE"] = timestamp
            env["GIT_COMMITTER_DATE"] = timestamp
            return env
        # Note: we don't keep the generated commits alive by updating refs
        # while doing this.  Since commit-tree itself produces unreferenced
        # objects, it seems unlikely it will ever run an automatic GC, and if
        # someone else triggers a GC while we're working, and it prunes our
        # objects, then we'll fail, which is no big deal (we'd just leave the
        # existing keepalive refs unmodified.)
        #
        # Also note: in most cases, the repacked keepalive chain will end up
        # reusing the commit objects from the existing keepalive chain, since
        # all meta-data in the generated commits come from the commits that
        # we keep alive, and the order is stable.
        try:
            processed = set()
            # Root of the chain: an empty-tree commit with message "Root".
            new_value = self.run(
                "commit-tree", EMPTY_TREE_SHA1,
                input="Root",
                env=withDates(env, keepalive_refs[0][2])).strip()
            for _, sha1, timestamp in keepalive_refs:
                if sha1 in processed:
                    continue
                processed.add(sha1)
                # Each link: first parent is the previous link, second parent
                # is the kept-alive commit; message is its SHA-1.
                new_value = self.run(
                    "commit-tree", EMPTY_TREE_SHA1,
                    "-p", new_value, "-p", sha1,
                    input=sha1,
                    env=withDates(env, timestamp)).strip()
            self.updateref(KEEPALIVE_REF_CHAIN, new_value, old_value)
        except GitCommandError:
            # No big deal if this fails here; this is just a maintenance
            # operation.  We'll try again another day.
            return False
        for _, sha1, _ in loose_keepalive_refs:
            try:
                self.deleteref(KEEPALIVE_REF_PREFIX + sha1, sha1)
            except GitCommandError:
                # Ignore failures to delete loose keepalive refs.
                pass
        return True
    def replaymerge(self, db, user, commit):
        """Re-perform the merge that produced `commit` in a temporary work
        copy and return the resulting Commit (pushed to a keepalive ref in
        the main repository).  Conflicts are committed as-is, with conflict
        markers, so the replay can be diffed against the real merge.

        NOTE(review): `user` is unused here -- confirm whether it is kept
        for interface symmetry with callers."""
        with self.workcopy(commit.sha1) as workcopy:
            with self.temporaryref(commit) as ref_name:
                # Fetch the merge to replay from the main repository into the
                # work copy.
                workcopy.run('fetch', 'origin', ref_name)
            parent_sha1s = commit.parents
            # Create and check out a branch at first parent.
            workcopy.run('checkout', '-b', 'replay', parent_sha1s[0])
            # Then perform the merge with the other parents.
            returncode, stdout, stderr = workcopy.run(
                "merge", *parent_sha1s[1:],
                env=getGitEnvironment(author=commit.author),
                check_errors=False)
            # If the merge produced conflicts, just stage and commit them:
            if returncode != 0:
                # Reset any submodule gitlinks with conflicts: since we don't
                # have the submodules checked out, "git commit --all" below
                # may fail to index them.
                for line in stdout.splitlines():
                    if line.startswith("CONFLICT (submodule):"):
                        submodule_path = line.split()[-1]
                        workcopy.run("reset", "--", submodule_path,
                                     check_errors=False)
                # Then stage and commit the result, with conflict markers and
                # all.
                workcopy.run("commit", "--all",
                             "--message=replay of merge that produced %s"
                                 % commit.sha1,
                             env=getGitEnvironment(author=commit.author))
            sha1 = workcopy.run("rev-parse", "HEAD").strip()
            # Then push the commit to the main repository.
            workcopy.run('push', 'origin', 'HEAD:refs/keepalive/' + sha1)
            commit = Commit.fromSHA1(db, self, sha1)
            # Finally, return the resulting commit.
            return commit
% (branch_name, remote), ref=branch_name, repository=remote) raise relay.run("push", "-f", "origin", "FETCH_HEAD:%s" % branch_name) @contextlib.contextmanager def fetchTemporaryFromRemote(self, db, remote, ref): import index with self.temporaryref() as temporary_ref: try: self.run("fetch", remote, "%s:%s" % (ref, temporary_ref)) except GitCommandError as error: if error.output.startswith("fatal: Couldn't find remote ref "): raise GitReferenceError("Couldn't find ref %s in %s." % (ref, remote), ref=ref, repository=remote) elif error.output.startswith("fatal: Invalid refspec "): raise GitReferenceError("Invalid ref %r." % ref, ref=ref) raise sha1 = self.run("rev-parse", "--verify", temporary_ref).strip() index.processCommits(db, self, sha1) yield sha1 @staticmethod def readObject(repository_path, object_type, object_sha1): argv = [configuration.executables.GIT, 'cat-file', object_type, object_sha1] git = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=repository_path) stdout, stderr = git.communicate() if git.returncode != 0: raise GitCommandError(" ".join(argv), stderr.strip(), repository_path) return stdout @staticmethod def lsremote(remote, include_heads=False, include_tags=False, pattern=None, regexp=None): if regexp: name_check = lambda item: bool(regexp.match(item[1])) else: name_check = lambda item: True argv = [configuration.executables.GIT, 'ls-remote'] if include_heads: argv.append("--heads") if include_tags: argv.append("--tags") argv.append(remote) if pattern: argv.append(pattern) git = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = git.communicate() if git.returncode == 0: return filter(name_check, (line.split() for line in stdout.splitlines())) else: cmdline = " ".join(argv) output = stderr.strip() cwd = os.getcwd() raise GitCommandError(cmdline, output, cwd) def findInterestingTag(self, db, sha1): cursor = db.cursor() cursor.execute("SELECT name FROM tags WHERE repository=%s AND 
sha1=%s", (self.id, sha1)) tags = [tag for (tag,) in cursor] try: from customization.filtertags import filterTags tags = filterTags(self, tags) except ImportError: pass if tags: return tags[0] else: return None def getHead(self, db): return Commit.fromSHA1(db, self, self.revparse("HEAD")) def getHeadBranch(self, db): """Return the branch that HEAD references None is returned if HEAD is not a symbolic ref or if it references a ref not under refs/heads/.""" import dbutils try: ref_name = self.run("symbolic-ref", "--quiet", "HEAD").strip() except GitCommandError: # HEAD is not a symbolic ref. pass else: if ref_name.startswith("refs/heads/"): branch_name = ref_name[len("refs/heads/"):] return dbutils.Branch.fromName(db, self, branch_name) def isEmpty(self): try: self.revparse("HEAD") return False except GitError: return True def invokeGitHttpBackend(self, req, user, path): request_environ = req.getEnvironment() environ = { "GIT_HTTP_EXPORT_ALL": "true", "REMOTE_ADDR": request_environ.get("REMOTE_ADDR", "unknown"), "PATH_TRANSLATED": os.path.join(self.path, path), "REQUEST_METHOD": req.method, "QUERY_STRING": req.query } if "CONTENT_TYPE" in request_environ: environ["CONTENT_TYPE"] = request_environ["CONTENT_TYPE"] for name, value in req.getEnvironment().items(): if name.startswith("HTTP_"): environ[name] = value if not user.isAnonymous(): environ["REMOTE_USER"] = user.name elif not configuration.base.ALLOW_ANONYMOUS_USER \ or path == "git-receive-pack" \ or req.getParameter("service", None) == "git-receive-pack": # The git-receive-pack service fails without a user, so request # authorization. 
raise GitHttpBackendNeedsUser git_http_backend = communicate.Communicate(subprocess.Popen( [configuration.executables.GIT, "http-backend"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=environ)) def produceInput(): if req.method not in ("POST", "PUT"): return None else: data = req.read(65536) if not data: return None return data def handleHeaderLine(line): line = line.strip() if not line: req.start() git_http_backend.setCallbacks(stdout=handleOutput) return name, _, value = line.partition(":") name = name.strip() value = value.strip() if name.lower() == "status": status_code, _, status_text = value.partition(" ") req.setStatus(int(status_code), status_text.strip()) elif name.lower() == "content-type": req.setContentType(value) else: req.addResponseHeader(name, value) def handleOutput(data): req.write(data) git_http_backend.setInput(produceInput) git_http_backend.setCallbacks(stdout_line=handleHeaderLine) try: _, stderr = git_http_backend.run() except communicate.ProcessError as error: raise GitHttpBackendError(error.process.returncode, error.stderr) def describe(self, db, sha1): tag = self.findInterestingTag(db, sha1) if tag: return tag commit = Commit.fromSHA1(db, self, sha1) for branch in self.getSignificantBranches(db): if commit == branch.head_sha1: return "tip of " + branch.name elif commit.isAncestorOf(branch.head_sha1): return branch.name return None class CommitUserTime(object): def __init__(self, name, email, time): self.name = name self.email = email self.time = time def __getIds(self, db): cache = db.storage["CommitUserTime"] cache_key = (self.name, self.email) if cache_key not in cache: cursor = db.cursor() cursor.execute("""SELECT id FROM gitusers WHERE fullname=%s AND email=%s""", (self.name, self.email)) row = cursor.fetchone() if not row: cursor.execute("""INSERT INTO gitusers (fullname, email) VALUES (%s, %s) RETURNING id""", (self.name, self.email)) row = cursor.fetchone() gituser_id, = row cursor.execute("""SELECT 
    @staticmethod
    def fromValue(value):
        """Parse a Git "author"/"committer" header value.

        `value` has the form "Name <email> <epoch> <utc-offset>".  Name and
        email are decoded and re-encoded as UTF-8; the timestamp is produced
        with time.gmtime() from the epoch part only (the UTC offset is
        discarded)."""
        match = re_author_committer.match(value)
        return CommitUserTime(
            textutils.decode(match.group(1)).encode("utf-8"),
            textutils.decode(match.group(2)).encode("utf-8"),
            time.gmtime(int(match.group(3).split(" ")[0])))
repository.fetch(sha1), commit_id) @staticmethod def fromId(db, repository, commit_id): commit = db.storage["Commit"].get(commit_id) if not commit: cursor = db.cursor() cursor.execute("SELECT sha1 FROM commits WHERE id=%s", (commit_id,)) sha1 = cursor.fetchone()[0] commit = Commit.fromSHA1(db, repository, sha1, commit_id) return commit @staticmethod def fromAPI(api_commit): return Commit.fromSHA1(api_commit.critic.database, Repository.fromAPI(api_commit.repository), api_commit.sha1, api_commit.id) def __hash__(self): return hash(self.sha1) def __eq__(self, other): return self.sha1 == str(other) def __ne__(self, other): return self.sha1 != str(other) def __str__(self): return self.sha1 def __repr__(self): if self.id is None: return "Commit(sha1=%r)" % self.sha1 else: return "Commit(sha1=%r, id=%d)" % (self.sha1, self.id) def summary(self, maxlen=None): summary = self.message.split("\n", 1)[0].strip() if maxlen and len(summary) > maxlen: summary = summary[:maxlen - 3].strip() + "..." return summary def niceSummary(self, include_tag=True): try: summary, _, rest = self.message.partition("\n") if summary.startswith("fixup! ") or summary.startswith("squash! 
"): fixup_summary = rest.strip().partition("\n")[0].strip() if fixup_summary: what = summary[:summary.index("!")] if include_tag: return "[%s] %s" % (what, fixup_summary) else: return fixup_summary return summary except: return self.summary() def getId(self, db): if self.id is None: cursor = db.cursor() cursor.execute("SELECT id FROM commits WHERE sha1=%s", (self.sha1,)) self.id = cursor.fetchone()[0] self.__cache(db) return self.id def findInterestingTag(self, db): return self.repository.findInterestingTag(db, self.sha1) def describe(self, db): if db: tag = self.findInterestingTag(db) if tag: return tag return self.sha1[:8] def oneline(self, db, decorate=False): line = "%s %s" % (self.sha1[:8], self.niceSummary()) if decorate: decorations = [] if self == self.repository.getHead(db): decorations.append("HEAD") cursor = db.cursor() cursor.execute("""SELECT branches.name FROM branches JOIN reachable ON (reachable.branch=branches.id) JOIN commits ON (commits.id=reachable.commit) WHERE commits.sha1=%s""", (self.sha1,)) decorations.extend(branch for (branch,) in cursor) if decorations: line += " (%s)" % ", ".join(decorations) return line def isAncestorOf(self, other): if isinstance(other, Commit): if self.repository != other.repository: return False other_sha1 = other.sha1 else: other_sha1 = str(other) try: mergebase_sha1 = self.repository.mergebase([self.sha1, other_sha1]) except GitCommandError: # Merge-base fails if there is no common ancestor. And if two # commits have no common ancestor, neither can be an ancestor of the # other, obviously. 
# Parses one line of "git ls-tree -l" output:
#   "<mode> <type> <sha1> <size-or-dash>\t<name>"
# where the name may be surrounded by quotes (munged by Git).  The named
# groups had been lost from the pattern (it read "(?P[0-9]{6})" etc., which
# is not even a valid regular expression); they are reconstructed here from
# the match.group("mode"/"type"/"sha1"/"size"/"quote"/"name") usage in
# Tree.fromPath.
RE_LSTREE_LINE = re.compile(
    "(?P<mode>[0-9]{6}) (?P<type>blob|tree|commit) (?P<sha1>[0-9a-f]{40}) +"
    "(?P<size>[0-9]+|-)\t(?P<quote>[\"']?)(?P<name>.*)(?P=quote)$")
self.__entries_dict.items() def values(self): return self.__entries_dict.values() def get(self, key, default=None): return self.__entries_dict.get(key, default) @staticmethod def fromPath(commit, path): assert path[0] == "/" if path == "/": what = commit.sha1 else: if path[-1] != "/": path += "/" what = "%s:%s" % (commit.sha1, path[1:]) entries = [] try: lstree_output = commit.repository.run("ls-tree", "-l", what) except GitCommandError as error: if error.output == "fatal: Not a valid object name %s" % what: return None raise for line in lstree_output.splitlines(): match = RE_LSTREE_LINE.match(line) assert match, "Unexpected output from 'git ls-tree': %r" % line name = match.group("name") if match.group("quote"): name = diff.parse.demunge(name) if match.group("type") == "blob": size = int(match.group("size")) else: size = None entries.append(Tree.Entry(name=name, mode=match.group("mode"), type=match.group("type"), sha1=match.group("sha1"), size=size)) return Tree(entries) @staticmethod def fromSHA1(repository, sha1): data = repository.fetch(sha1).data entries = [] while len(data): space = data.index(" ") null = data.index("\0", space + 1) mode = data[:space] name = data[space + 1:null] sha1_binary = data[null + 1:null + 21] sha1 = "".join([("%02x" % ord(c)) for c in sha1_binary]) entry_object = repository.fetch(sha1, fetchData=False) entries.append(Tree.Entry(name, mode, entry_object.type, sha1, entry_object.size)) data = data[null + 21:] return Tree(entries) def getTaggedCommit(repository, sha1): """Returns the SHA-1 of the tagged commit. 
    If the supplied SHA-1 sum is a commit object, then it is returned,
    otherwise it must be a tag object, which is parsed to retrieve the tagged
    object SHA-1 sum."""
    while True:
        git_object = repository.fetch(sha1)
        if git_object.type == "commit":
            return sha1
        elif git_object.type != "tag":
            # Tag of something that isn't a commit or another tag (tree/blob):
            # there is no tagged commit, so return None (implicitly).
            return
        # Peel one level of tagging: the first line of a tag object's body is
        # "object <sha1>".
        sha1 = git_object.data.split("\n", 1)[0].split(" ", 1)[-1]

class Blame:
    """Runs `git blame` over a commit range and collects per-line
    attribution."""

    def __init__(self, from_commit, to_commit):
        assert from_commit.repository == to_commit.repository
        self.repository = from_commit.repository
        self.from_commit = from_commit
        self.to_commit = to_commit
        self.commits = []        # unique commits seen, in encounter order
        self.__commit_ids = {}   # sha1 -> index into self.commits

    def blame(self, db, path, first_line, last_line):
        # Blame lines [first_line, last_line] of |path| over the range
        # from_commit..to_commit; --porcelain output gives a stable format.
        output = self.repository.run(
            "blame", "--porcelain",
            "-L", "%d,%d" % (first_line, last_line),
            "%s..%s" % (self.from_commit.sha1, self.to_commit.sha1),
            "--", path)
        inlines = iter(output.splitlines())
        lines = []
        try:
            while True:
                # Header line: "<sha1> <original-line> <current-line> ...".
                sha1, original_line, current_line = \
                    inlines.next().split(" ")[:3]
                original_line = int(original_line)
                current_line = int(current_line)
                author = None
                author_email = None
                # Metadata lines follow, until the tab-prefixed content line.
                line = inlines.next()
                while not line.startswith("\t"):
                    if line.startswith("author "):
                        author = line[7:]
                    elif line.startswith("author-mail "):
                        # Strip the surrounding "<...>" (hence the [13:-1]).
                        author_email = line[13:-1]
                    elif line.startswith("summary "):
                        pass
                    line = inlines.next()
                if sha1 not in self.__commit_ids:
                    commit = Commit.fromSHA1(db, self.repository, sha1)
                    self.__commit_ids[sha1] = len(self.commits)
                    self.commits.append(
                        { "sha1": sha1,
                          "author_name": author,
                          "author_email": author_email,
                          "summary": commit.niceSummary(),
                          "message": commit.message,
                          "original": sha1 == self.from_commit.sha1,
                          "current": sha1 == self.to_commit.sha1 })
                lines.append({ "offset": current_line,
                               "commit": self.__commit_ids[sha1] })
        except StopIteration:
            # End of blame output.
            pass
        return lines

class FetchCommits(threading.Thread):
    """Background thread that bulk-fetches commit objects using a single
    `git cat-file --batch` process."""

    def __init__(self, repository, sha1s):
        super(FetchCommits, self).__init__()

        self.repository = repository
        self.sha1s = sha1s       # mapping: sha1 -> commit id
        self.gitobjects = []
        self.commits = None
        self.error = None
        self.joined = False

        # Start fetching immediately; the caller collects results via
        # getCommits().
        self.start()

    def run(self):
        try:
            batch = subprocess.Popen(
                [configuration.executables.GIT, 'cat-file', '--batch'],
                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT, cwd=self.repository.path)

            stdout, stderr = batch.communicate(
                "\n".join(self.sha1s.keys()) + "\n")

            gitobjects = []

            for sha1, commit_id in self.sha1s.items():
                # Each object is preceded by a "<sha1> <type> <size>" header.
                line, stdout = stdout.split("\n", 1)

                try:
                    object_sha1, object_type, object_size = line.split(" ")
                except ValueError:
                    raise SyntaxError("unexpected line: %r" % line)

                assert object_sha1 == sha1, "%s != %s" % (object_sha1, sha1)
                assert object_type == "commit"

                object_size = int(object_size)

                # The object data is followed by a single newline separator.
                object_data = stdout[:object_size]
                stdout = stdout[object_size + 1:]

                gitobjects.append(
                    (GitObject(object_sha1, object_type, object_size,
                               object_data),
                     commit_id))

            self.gitobjects = gitobjects
        except Exception:
            # Record the traceback; consumers can inspect self.error after
            # joining the thread.
            self.error = traceback.format_exc()

    def getCommits(self, db):
        # Wait for the fetch to finish, then intern the commits via the
        # regular Commit cache.
        self.join()

        for gitobject, commit_id in self.gitobjects:
            Commit.fromGitObject(db, self.repository, gitobject, commit_id)

================================================
FILE: src/gitutils_unittest.py
================================================
def keepalives():
    # Run Repository.packKeepaliveRefs() and make sure it seems to do its job
    # correctly. Since it's run as a nightly maintenance task, it would
    # otherwise not be exercised by testing.

    import api
    import gitutils

    critic = api.critic.startSession(for_testing=True)

    for repository in api.repository.fetchAll(critic):
        # Fetch the "internal" repository object. This is a bit ugly, but we
        # can live with it in a test case.
        repository = repository._impl.getInternal(critic)

        # Make sure there's at least one loose keepalive ref.
        repository.keepalive(repository.revparse("HEAD"))

        loose_keepalive_refs_before = repository.run(
            "for-each-ref", "--format=%(objectname)",
            gitutils.KEEPALIVE_REF_PREFIX).splitlines()

        assert len(loose_keepalive_refs_before) > 0

        repository.packKeepaliveRefs()

        loose_keepalive_refs_after = repository.run(
            "for-each-ref", "--format=%(objectname)",
            gitutils.KEEPALIVE_REF_PREFIX).splitlines()

        # Packing must leave no loose keepalive refs behind.
        assert len(loose_keepalive_refs_after) == 0

        chain_before = repository.revparse(gitutils.KEEPALIVE_REF_CHAIN)

        # Check that all previous loose keepalive refs are now ancestors of the
        # keepalive chain ref (IOW, are being kept alive by it.)
        for sha1 in loose_keepalive_refs_before:
            mergebase = repository.mergebase([sha1, chain_before])
            assert mergebase == sha1

        # Make sure there's a loose keepalive ref again.
        repository.keepalive(repository.revparse("HEAD"))

        repository.packKeepaliveRefs()

        chain_after = repository.revparse(gitutils.KEEPALIVE_REF_CHAIN)

        # Make sure the chain didn't change.
        assert chain_before == chain_after, ("%s != %s" % (chain_before,
                                                           chain_after))

    print "keepalives: ok"

================================================
FILE: src/hooks/pre-receive
================================================
#!/usr/bin/env python
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from sys import stdin, stdout, path, exit
from os import getcwd, getuid, environ
from pwd import getpwuid
from socket import socket, AF_UNIX, SOCK_STREAM, SHUT_WR
from subprocess import Popen, PIPE

def gitconfig(name):
    # Read a single value from this repository's git config; returns None if
    # the key is unset (non-zero `git config` exit status).
    process = Popen(["git", "config", name], stdout=PIPE)
    stdout, stderr = process.communicate()
    if process.returncode == 0:
        return stdout.strip()
    else:
        return None

socket_path = gitconfig("critic.socket")
repository_name = gitconfig("critic.name")

if not socket_path or not repository_name:
    # NOTE(review): the placeholder text after "socket =" / "name =" appears
    # to have been stripped during extraction (likely "<path>"/"<name>"-style
    # hints); verify against the upstream source.
    print """Repository is not configured properly!  Please add

[critic]
\tsocket =
\tname =

to the repository's configuration file."""
    exit(1)

server_socket = socket(AF_UNIX, SOCK_STREAM)

try:
    server_socket.connect(socket_path)
except:
    print "Failed to connect to Critic's githook service!"
    exit(1)

# Line 1: user name.
data = getpwuid(getuid()).pw_name + "\n"

# Line 2: $REMOTE_USER or empty string if undefined.  This will only be used
# if the actual user (line 1) is the Critic system user.
data += environ.get("REMOTE_USER", "") + "\n"

# Line 3: repository name.
data += repository_name + "\n"

# Line 4: flags from $CRITIC_FLAGS or empty string if undefined.
data += environ.get("CRITIC_FLAGS", "") + "\n"

# Line 5-N: input to the git hook.
data += stdin.read()

try:
    server_socket.sendall(data)
    # Half-close so the service knows the request is complete.
    server_socket.shutdown(SHUT_WR)
except:
    print "Failed to send command to Critic!"
    exit(1)

data = ""

try:
    while True:
        received = server_socket.recv(4096)
        if not received:
            break
        data += received
        # Relay complete output lines as they arrive; the final "ok\n" is the
        # success marker and is not relayed.
        while "\n" in data and data != "ok\n":
            line_length = data.index("\n") + 1
            line = data[:line_length]
            data = data[line_length:]
            stdout.write(line)
            stdout.flush()
    server_socket.close()
except:
    print "Failed to read result from Critic!"
    exit(1)

# Exit status mirrors the service's verdict: "ok\n" => accept the push.
if data == "ok\n":
    exit(0)
else:
    exit(1)

================================================
FILE: src/htmlutils.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import re
import time
import os
import json
import urllib

import textutils

from cStringIO import StringIO
from linkify import ALL_LINKTYPES, Context

# Collect the regexp fragments of all link types into one alternation used to
# split text when auto-linkifying.
fragments = []

for linktype in ALL_LINKTYPES:
    if linktype.fragment:
        fragments.append(linktype.fragment)

re_linkify = re.compile("(?:^|\\b|(?=\\W))(" + "|".join(fragments) +
                        ")([.,:;!?)]*(?:\\s|\\b|$))")

re_simple = re.compile("^[^ \t\r\n&<>/=`'\"]+$")
re_nonascii = re.compile("[^\t\n\r -\x7f]")
re_control = re.compile("[\x01-\x1f\x7f]")

def htmlify(text, attributeValue=False, pretty=False):
    # HTML-escape |text|; with attributeValue=True, additionally quote it for
    # use as an attribute value.
    #
    # NOTE(review): the .replace() targets below appear to have had their
    # HTML-entity replacements decoded during extraction -- presumably
    # '&'=>'&amp;', '<'=>'&lt;', '>'=>'&gt;' and "'"=>'&apos;' (the "'''"
    # token below is broken as written).  Verify against upstream before use.
    if isinstance(text, unicode):
        # Also encode non-ASCII characters as numeric character references.
        text = re_nonascii.sub(lambda x: "&#%d;" % ord(x.group()),
                               text.replace('&', '&').replace('<', '<').replace('>', '>'))
    else:
        text = str(text).replace('&', '&').replace('<', '<').replace('>', '>')
    if attributeValue:
        # Simple values need no quoting at all (unless pretty-printing).
        if not pretty and re_simple.match(text):
            return text
        elif "'" in text:
            if '"' not in text:
                text = '"' + text + '"'
            else:
                text = "'" + text.replace("'", ''') + "'"
        else:
            text = "'" + text + "'"
    # Control characters are never valid verbatim; encode them numerically.
    text = re_control.sub(lambda match: "&#%d;" % ord(match.group()), text)
    return text

def jsify(what, as_json=False):
    # Serialize a Python value as a JavaScript literal.
    if what is None:
        return "null"
    elif isinstance(what, bool):
        return "true" if what else "false"
elif isinstance(what, int) or isinstance(what, long): return str(what) else: what = textutils.decode(what) result = json.dumps(what) if not as_json: quote = result[0] return result.replace("]+>") def tabify(line, tabwidth=8, indenttabsmode=True): index = 0 length = len(line) column = 0 result = "" try: leading = True while index < length: tabindex = line.index("\t", index) nontabbed = line[index:tabindex] nontabbed_length = len(re_tag.sub("", nontabbed)) illegal = "" if leading: if nontabbed_length != 0: leading = False elif not indenttabsmode: illegal = " ill" width = tabwidth - (column + nontabbed_length) % tabwidth result += nontabbed + "" % (width, illegal) index = tabindex + 1 column = column + nontabbed_length + width except: result += line[index:] return result BLOCK_ELEMENTS = set(["html", "head", "body", "section", "table", "thead", "tbody", "tfoot", "tr", "td", "th", "div", "p", "ol", "li", "label", "select", "option", "link", "script"]) EMPTY_ELEMENTS = set(["br", "hr", "input", "link", "base", "col"]) def isBlockElement(name): return name in BLOCK_ELEMENTS def isEmptyElement(name): return name in EMPTY_ELEMENTS def mtime(path): try: return long(os.stat(path).st_mtime) except: raise def base36(n): s = "" while n: s = "0123456789abcdefghijklmnopqrstuvwxyz"[n % 36] + s n = n // 36 return s def getStaticResourceURI(name): import configuration uri = "/static-resource/" + name ts = mtime(os.path.join(configuration.paths.INSTALL_DIR, "resources", name)) if ts: uri += "?" + base36(ts) return uri class URL(object): def __init__(self, path, fragment=None, **query): assert path.startswith("/") assert "?" not in path assert "#" not in path self.value = path if query: self.value += "?" 
+ urllib.urlencode( [(name, str(value)) for name, value in query.items()]) if fragment: self.value += "#" + fragment.lstrip("#") def __str__(self): return self.value class MetaInformation(object): def __init__(self): self.__orderIndices = set() self.__externalStylesheetList = [] self.__externalStylesheetSet = set() self.__internalStylesheetList = [] self.__internalStylesheetSet = set() self.__externalScriptList = [] self.__externalScriptSet = set() self.__internalScriptList = [] self.__links = {} self.__title = None self.__finished = False self.__request = None self.__base = "/" def addExternalStylesheet(self, uri, use_static=True, order=0): if use_static: uri = getStaticResourceURI(uri.split("/", 1)[1]) if uri not in self.__externalStylesheetSet: self.__orderIndices.add(order) self.__externalStylesheetList.append((order, uri)) self.__externalStylesheetSet.add(uri) def addInternalStylesheet(self, text, order=0): if text not in self.__internalStylesheetSet: self.__orderIndices.add(order) self.__internalStylesheetList.append((order, text)) self.__internalStylesheetSet.add(text) def addExternalScript(self, uri, use_static=True, order=0): if use_static: uri = getStaticResourceURI(uri.split("/", 1)[1]) if uri not in self.__externalScriptSet: self.__orderIndices.add(order) self.__externalScriptList.append((order, uri)) self.__externalScriptSet.add(uri) def addInternalScript(self, text, order=0): self.__orderIndices.add(order) self.__internalScriptList.append((order, text)) def hasTitle(self): return self.__title is not None def setTitle(self, title): self.__title = title def setLink(self, rel, href): return self.__links.setdefault(rel, href) def setBase(self, base): self.__base = base def setRequest(self, req): self.__request = req def getRequest(self): return self.__request def render(self, target): import configuration if not self.__finished: if self.__title: target.title().text(self.__title) if self.__base: target.base(href=self.__base) for rel, href in 
self.__links.items(): target.link(rel=rel, href=href) for index in sorted(self.__orderIndices): def filtered(items): return [data for order, data in items if order==index] for uri in filtered(self.__externalStylesheetList): target.link(rel="stylesheet", type="text/css", href=uri) for uri in filtered(self.__externalScriptList): target.script(type="text/javascript", src=uri) for text in filtered(self.__internalStylesheetList): target.style(type="text/css").text(text.strip(), cdata=True) for text in filtered(self.__internalScriptList): target.script(type="text/javascript").text(text.strip(), cdata=True) if configuration.debug.IS_DEVELOPMENT: favicon = "/static-resource/favicon-dev.png" else: favicon = "/static-resource/favicon.png" target.link(rel="icon", type="image/png", href=favicon) self.__finished = True class PausedRendering: pass class Fragment(object): def __init__(self, is_element=False, req=None): self.__children = [] self.__metaInformation = not is_element and MetaInformation() or None def appendChild(self, child): self.__children.append(child) return child def insertChild(self, child, offset=0): self.__children.insert(offset, child) return child def removeChild(self, child): assert child in self.__children self.__children.remove(child) def metaInformation(self): return self.__metaInformation def __len__(self): return len(self.__children) def __getitem__(self, index): return self.__children[index] def __str__(self): return "".join(map(str, self.__children)) def render(self, output, level=0, indent_before=True, stop=None, pretty=True): for child in self.__children: child.render(output, level, indent_before, stop=stop, pretty=pretty) if pretty: output.write("\n") def deleteChildren(self, count=None): if count is None: self.__children = [] else: del self.__children[:count] def hasChildren(self): return bool(self.__children) class Element(Fragment): def __init__(self, name): super(Element, self).__init__(True) self.__name = name self.__attributes = {} 
self.__empty = isEmptyElement(name) self.__preformatted = False self.__metaInformation = None self.__rendered = False self.__disabled = False def setAttribute(self, name, value): self.__attributes[name] = value def addClass(self, *names): classes = set(self.__attributes.get("class").split()) classes.update(names) self.setAttribute("class", " ".join(classes)) def setPreFormatted(self): self.__preformatted = True def setMetaInformation(self, metaInformation): self.__metaInformation = metaInformation def appendChild(self, child): assert not self.__empty return Fragment.appendChild(self, child) def remove(self): self.__disabled = True def removeIfEmpty(self): self.__disabled = not self.hasChildren() def __str__(self): attributes = "".join([(" %s=%s" % (name, htmlify(value, True))) for name, value in self.__attributes.items()]) if isEmptyElement(self.__name): return "<%s%s>" % (self.__name, attributes) else: return "<%s%s>%s" % (self.__name, attributes, Fragment.__str__(self), self.__name) def render(self, output, level=0, indent_before=True, stop=None, pretty=True): if self.__disabled: return if self.__metaInformation: self.__metaInformation.render(Generator(self, None)) if pretty: indent = " " * level else: indent = "" if indent_before: startindent = indent else: startindent = "" for child in self: if isinstance(child, Element) and isBlockElement(child.__name) or (isinstance(child, Text) or isinstance(child, Comment)) and '\n' in str(child): linebreak = "\n" endindent = indent break else: indent_before = False linebreak = "" endindent = "" if not pretty or self.__preformatted: child_level = 0 linebreak = "" endindent = "" else: child_level = level + 1 attributes = "".join([(" %s=%s" % (name, htmlify(value, True, pretty))) for name, value in self.__attributes.items()]) if self.__empty: if not self.__rendered: output.write("%s<%s%s>" % (startindent, self.__name, attributes)) self.__rendered = True else: if not self.__rendered: output.write("%s<%s%s>%s" % (startindent, 
self.__name, attributes, linebreak)) self.__rendered = True children_rendered = 0 for child in self: if self.__preformatted: child.setPreFormatted() try: child.render(output, child_level, indent_before, stop, pretty) output.write(linebreak) children_rendered += 1 except PausedRendering: self.deleteChildren(children_rendered) raise self.deleteChildren() if self == stop: raise PausedRendering else: output.write("%s" % (endindent, self.__name)) def empty(self): self.__empty = True class Text(object): def __init__(self, value, preformatted=False, cdata=False): if cdata: self.__value = value elif value is None: self.__value = " " else: self.__value = htmlify(value) self.__preformatted = preformatted def setPreFormatted(self): self.__preformatted = True def render(self, output, level=0, indent_before=True, stop=None, pretty=True): if pretty and level and not self.__preformatted and '\n' in self.__value: indent = " " * level if indent_before: startindent = indent else: startindent = "" output.write(startindent + ('\n' + indent).join([line for line in self.__value.strip().splitlines()])) else: output.write(self.__value) def __str__(self): return self.__value class Comment(object): def __init__(self, value): self.__value = value.replace("--", "- -") def setPreFormatted(self): pass def render(self, output, level=0, indent_before=True, stop=None, pretty=True): if pretty and level and '\n' in self.__value: indent = " " * level if indent_before: startindent = indent else: startindent = "" output.write(startindent + "") else: output.write("") def __str__(self): return self.__value class HTML(object): def __init__(self, value): self.__value = value def setPreFormatted(self): pass def render(self, output, level=0, indent_before=True, stop=None, pretty=True): output.write(self.__value) def __str__(self): return self.__value def safestr(value): try: return str(value) except: return unicode(value) class Generator(object): def __init__(self, target, metaInformation): self.__target = 
target self.__metaInformation = metaInformation def __enter__(self): return self def __exit__(self, *args): return False def __eq__(self, other): return other == self.__target def __open(self, __name, **attributes): target = self.__target.appendChild(Element(__name)) if "__generator__" in attributes: del attributes["__generator__"] generator = True else: generator = __name not in EMPTY_ELEMENTS for name, value in attributes.items(): if value is not None: target.setAttribute(name.strip("_").replace("_", "-"), safestr(value)) if not generator: return self else: return Generator(target, self.__metaInformation) def __getattr__(self, name): def open(*className, **attributes): assert len(className) == 0 or len(className) == 1 if className: return self.__open(name, _class=className[0], **attributes) else: return self.__open(name, **attributes) return open def head(self, **attributes): target = self.__target.appendChild(Element("head")) for name, value in attributes.items(): if value is not None: target.setAttribute(name.strip("_").replace("_", "-"), safestr(value)) target.setMetaInformation(self.__metaInformation) return Generator(target, self.__metaInformation) def append(self, fragment): if fragment is not None: if isinstance(fragment, Generator): self.__target.appendChild(fragment.__target) else: self.__target.appendChild(fragment) def remove(self): self.__target.remove() def removeIfEmpty(self): self.__target.removeIfEmpty() def text(self, value=None, preformatted=False, cdata=False, linkify=False, repository=None, escape=False): if linkify: assert not cdata if isinstance(linkify, Context): context = linkify else: context = Context(repository=repository) for linktype in ALL_LINKTYPES: if linktype.match(value): url = linktype.linkify(value, context) if url: self.a(href=url).text(value, escape=escape) break else: for word in re_linkify.split(value): if word: for linktype in ALL_LINKTYPES: if linktype.match(word): url = linktype.linkify(word, context) if url: 
self.a(href=url).text(word, escape=escape) break else: self.text(word, preformatted, escape=escape) else: if escape: value = textutils.escape(value) self.__target.appendChild(Text(value, preformatted, cdata)) return self def comment(self, value): self.__target.appendChild(Comment(safestr(value))) return self def commentFirst(self, value): self.__target.insertChild(Comment(safestr(value)), offset=0) return self def innerHTML(self, value=" "): self.__target.appendChild(HTML(safestr(value))) return self def setAttribute(self, name, value): self.__target.setAttribute(name, value) return self def addClass(self, *names): self.__target.addClass(*names) return self def render(self, output, level=0, stop=None, pretty=True): self.__target.render(output, level, stop=stop, pretty=pretty) def empty(self): self.__target.empty() return self def preformatted(self): self.__target.setPreFormatted() return self def addExternalStylesheet(self, uri, use_static=True, order=0): self.__metaInformation.addExternalStylesheet(uri, use_static, order=order) def addInternalStylesheet(self, text, order=0): self.__metaInformation.addInternalStylesheet(text, order=order) def addExternalScript(self, uri, use_static=True, order=0): self.__metaInformation.addExternalScript(uri, use_static, order=order) def addInternalScript(self, text, here=False, order=0): if here: self.script(type="text/javascript").text(text.strip().replace("") self.__doctype = False before = time.time() try: Generator.render(self, output, stop=stop, pretty=pretty) finished = True except PausedRendering: finished = False after = time.time() self.__rendering += after - before if not plain and finished: output.write("\n" % (self.__generation * 1000, self.__rendering * 1000)) self.__start = time.time() return output.getvalue() def __str__(self): return self.render() def stripStylesheet(text, compact): if compact: text = re.sub(r"/\*(?:[^*]|\*[^/])*\*/", "", text) text = re.sub(r"\s*([,:;{}])\s*", lambda m: m.group(1), text) text = 
re.sub(r"\s+", " ", text) return text if __name__ == "__main__": generator = Document() row = generator.html().body().table(border=1).tbody().tr() row.td(_class="left").div(id="foo").text("Column 1\nMore text") row.td(_class="right").text("text").comment("comment") print generator.render() ================================================ FILE: src/htmlutils_unittest.py ================================================ def independence(): # Simply check that htmlutils can be imported. import htmlutils print "independence: ok" ================================================ FILE: src/index.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import sys

from subprocess import Popen as process, PIPE
from re import compile, split
from time import gmtime, strftime
from pwd import getpwuid
from os import getuid

import gitutils
from log.commitset import CommitSet
import dbutils
import reviewing.utils
import reviewing.mail
import reviewing.rebase
import configuration
import log.commitset
import textutils

if configuration.extensions.ENABLED:
    import extensions.role.processcommits

# Site-local customization hooks are optional; fall back to no-op versions.
try:
    from customization.githook import Reject, update
except ImportError:
    class Reject(Exception):
        pass
    def update(_repository, _ref, _old, _new):
        pass

def reflow(message):
    # Wrap so lines still fit after git prefixes each one with "remote: ".
    return textutils.reflow(message, line_length=80 - len("remote: "))

def timestamp(time):
    # Format a time.struct_time for storage in the database.
    return strftime("%Y-%m-%d %H:%M:%S", time)

class IndexException(Exception):
    # Raised to reject a push; the message is relayed to the pushing user.
    pass

def processCommits(db, repository, sha1):
    """Record |sha1| and all its not-yet-known ancestors in the commits/edges
    tables."""
    sha1 = repository.run("rev-parse", "--verify", "--quiet",
                          sha1 + "^{commit}").strip()

    stack = []
    edges_values = []

    cursor = db.cursor()
    cursor.execute("SELECT 1 FROM commits LIMIT 1")

    # With an empty commits table we can skip per-commit existence queries.
    emptydb = cursor.fetchone() is None

    cursor.execute("""SELECT commits.sha1
                        FROM commits
                        JOIN branches ON (branches.head=commits.id)
                       WHERE branches.repository=%s
                         AND branches.type='normal'
                         AND branches.base IS NULL
                    ORDER BY branches.id ASC""",
                   (repository.id,))

    # Sanity check: count how many commits this push would add relative to the
    # repository's oldest base branch head; 0 if that can't be determined.
    try:
        base_sha1 = cursor.fetchone()[0]
        count = int(repository.run("rev-list", "--count",
                                   "%s..%s" % (base_sha1, sha1)).strip())
    except:
        count = 0

    if count > configuration.limits.PUSH_COMMIT_LIMIT:
        raise IndexException("""\
You're trying to add %d new commits to this repository.
Are you perhaps pushing to the wrong repository?""" % count)

    commits_values = []
    commits = set()

    # Breadth-first walk from |sha1| towards the roots, stopping at commits
    # already present in the database.
    while True:
        if sha1 not in commits:
            commit = gitutils.Commit.fromSHA1(db, repository, sha1)

            if commit.author.email:
                author_id = commit.author.getGitUserId(db)
            else:
                author_id = 0

            if commit.committer.email:
                committer_id = commit.committer.getGitUserId(db)
            else:
                committer_id = 0

            if emptydb:
                row = None
            else:
                cursor.execute("SELECT id FROM commits WHERE sha1=%s",
                               (commit.sha1,))
                row = cursor.fetchone()

            new_commit = False

            if not row:
                commits_values.append((commit.sha1, author_id, committer_id,
                                       timestamp(commit.author.time),
                                       timestamp(commit.committer.time)))
                new_commit = True

            commits.add(sha1)

            if new_commit:
                # set() deduplicates parents (octopus/duplicate parents).
                edges_values.extend([(parent_sha1, commit.sha1)
                                     for parent_sha1 in set(commit.parents)])
                stack.extend(set(commit.parents))

        if not stack:
            break

        sha1 = stack.pop(0)

    cursor.executemany("""INSERT INTO commits (sha1, author_gituser,
                                               commit_gituser, author_time,
                                               commit_time)
                               VALUES (%s, %s, %s, %s, %s)""",
                       commits_values)

    # Edges are inserted by joining on the SHA-1s just recorded.
    cursor.executemany("""INSERT INTO edges (parent, child)
                               SELECT parents.id, children.id
                                 FROM commits AS parents,
                                      commits AS children
                                WHERE parents.sha1=%s
                                  AND children.sha1=%s""",
                       edges_values)

    db.commit()

def createBranches(db, user, repository, branches, flags):
    """Create the pushed branches (name, head sha1) in a sensible order."""
    if len(branches) > 1:
        try:
            from customization.branches import compareBranchNames
        except ImportError:
            def compareBranchNames(name1, name2):
                # "Natural" component-wise comparison: split on separators and
                # compare numeric components as integers where possible.
                name1 = split("([-_+])", name1)
                name2 = split("([-_+])", name2)
                # map(None, ...) is the Python 2 zip-longest idiom.
                for name1, name2 in map(None, name1, name2):
                    if name1 is None:
                        return -1
                    elif name2 is None:
                        return 1
                    elif name1 != name2:
                        try:
                            return cmp(int(name1), int(name2))
                        except:
                            return cmp(name1, name2)
                else:
                    return 0

        def compareBranches(branch1, branch2):
            name1, head1 = branch1
            name2, head2 = branch2

            # Same name ought not occur twice, but just to be on the safe side.
            if name1 == name2:
                return 0

            # Special case for master.  Mostly redundant, because it's quite
            # unlikely that master would be created along with other branches.
            elif name1 == "master":
                return -1
            elif name2 == "master":
                return 1

            # Try a natural ordering based on the relationships of the head
            # commits of the two branches, unless the heads are the same:
            if head1 != head2:
                base = repository.mergebase([head1, head2])

                # If either head is an ancestor of the other head, merge-base
                # between them will be the ancestor head, and in that case,
                # process that branch first.  Otherwise, then that would be
                # guaranteed to show up as empty, and that's probably not the
                # intention.
                if base == head1:
                    return -1
                elif base == head2:
                    return 1

            # Two non-taskbranch branches that seem "unrelated".  Process them
            # ordered by name, mostly so that this comparison function is well-
            # behaved.
            return compareBranchNames(name1, name2)

        branches.sort(cmp=compareBranches)

    multiple = len(branches) > 1

    for name, head in branches:
        createBranch(db, user, repository, name, head, multiple, flags)

def createBranch(db, user, repository, name, head, multiple, flags):
    # Give the site-local hook a chance to reject the branch creation; any
    # other exception from it is deliberately ignored (best-effort hook).
    try:
        update(repository.path, "refs/heads/" + name, None, head)
    except Reject as rejected:
        raise IndexException(str(rejected))
    except Exception:
        pass

    cursor = db.cursor()

    # Check if a branch with this name already "exists".
    branch = dbutils.Branch.fromName(db, repository, name, load_review=True)

    if branch is not None:
        if branch.archived:
            # This is a (review) branch that has been archived.  It's expected
            # that Git thinks the user is creating a new branch.
            message = """\
This repository already contains a branch named '%s', but it has been
archived, meaning it has been hidden from view to reduce the number of
visible refs in this repository.""" % name

            if branch.review:
                message += """

To continue working on this branch, you need to first reopen the review
that is associated with the branch.
You can do this from the review's front-page: %s""" % branch.review.getURL(db, user, indent=2) raise IndexException(reflow(message)) else: # This is a branch that's not supposed to have been archived, # meaning it appears to have just gone missing from the repository. # Handle this the same way we handle updates where Git's idea of the # branches current value doesn't match what we think it should be. # # We can trigger that handling by calling updateBranch() with any # "wrong" old value. updateBranch(db, user, repository, name, "0" * 40, head, multiple, flags) return def commit_id(sha1): cursor.execute("SELECT id FROM commits WHERE sha1=%s", [sha1]) return cursor.fetchone()[0] components = name.split("/") for index in range(1, len(components)): try: repository.revparse("refs/heads/%s" % "/".join(components[:index])) except: continue message = ("Cannot create branch with name '%s' since there is already a branch named '%s' in the repository." % (name, "/".join(components[:index]))) raise IndexException(reflow(message)) if name.startswith("r/"): try: review_id = int(name[2:]) cursor.execute("SELECT branches.name FROM reviews JOIN branches ON (branches.id=reviews.branch) WHERE reviews.id=%s", (review_id,)) row = cursor.fetchone() message = "Refusing to create review named as a number." if row: message += "\nDid you mean to push to the branch '%s', perhaps?" 
% row[0] raise IndexException(message) except ValueError: pass if user.isSystem(): raise IndexException("Refusing to create review this way.") elif user.getPreference(db, "review.createViaPush"): the_commit = gitutils.Commit.fromSHA1(db, repository, head, commit_id(head)) all_commits = [the_commit] review = reviewing.utils.createReview( db, user, repository, all_commits, name, the_commit.niceSummary(include_tag=False), None, via_push=True) print "Submitted review:" print review.getURL(db, user, indent=2) if review.reviewers: print " Reviewers:" for reviewer in review.reviewers: print " %s <%s>" % (reviewer.fullname, reviewer.email) if review.watchers: print " Watchers:" for watcher in review.watchers: print " %s <%s>" % (watcher.fullname, watcher.email) if configuration.extensions.ENABLED: if extensions.role.processcommits.execute(db, user, review, all_commits, None, the_commit, sys.stdout): print print "Thank you!" return True else: raise IndexException("Refusing to create review; user preference 'review.createViaPush' is not enabled.") sha1 = head base = None tail = None cursor.execute("""SELECT 1 FROM reachable JOIN branches ON (branches.id=reachable.branch) JOIN repositories ON (repositories.id=branches.repository) WHERE repositories.id=%s LIMIT 1""", (repository.id,)) if cursor.fetchone(): def reachable(sha1): cursor.execute("""SELECT branches.id FROM branches JOIN reachable ON (reachable.branch=branches.id) JOIN commits ON (commits.id=reachable.commit) WHERE branches.repository=%s AND branches.type='normal' AND commits.sha1=%s ORDER BY reachable.branch ASC LIMIT 1""", (repository.id, sha1)) return cursor.fetchone() else: def reachable(sha1): return None commit_map = {} commit_list = [] row = reachable(sha1) if row: # Head of branch is reachable from an existing branch. Could be because # this branch is actually empty (just created with no "own" commits) or # it could have been merged into some other already existing branch. 
We # can't tell, so we just record it as empty. base = row[0] tail = sha1 else: stack = [] while True: if sha1 not in commit_map: commit = gitutils.Commit.fromSHA1(db, repository, sha1) commit_map[sha1] = commit commit_list.append(commit) for sha1 in commit.parents: if sha1 not in commit_map: row = reachable(sha1) if not row: stack.append(sha1) elif base is None: base = row[0] tail = sha1 base_chain = [base] while True: cursor.execute("SELECT base FROM branches WHERE id=%s", (base_chain[-1],)) next = cursor.fetchone()[0] if next is None: break else: base_chain.append(next) def reachable(sha1): cursor.execute("""SELECT 1 FROM reachable JOIN commits ON (commits.id=reachable.commit) WHERE reachable.branch=ANY (%s) AND commits.sha1=%s""", (base_chain, sha1)) return cursor.fetchone() if stack: sha1 = stack.pop(0) else: break if not base: cursor.execute("INSERT INTO branches (repository, name, head) VALUES (%s, %s, %s) RETURNING id", (repository.id, name, commit_id(head))) branch_id = cursor.fetchone()[0] else: cursor.execute("INSERT INTO branches (repository, name, head, base, tail) VALUES (%s, %s, %s, %s, %s) RETURNING id", (repository.id, name, commit_id(head), base, commit_id(tail))) branch_id = cursor.fetchone()[0] # Suppress the "user friendly" feedback if the push is performed by the # Critic system user, since there wouldn't be a human being reading it. 
    if not user.isSystem():
        # NOTE(review): |base| may be None here (root branch), in which case
        # this SELECT returns no row and fetchone()[0] would fail -- confirm
        # whether this path is only reached for based branches.
        cursor.execute("SELECT name FROM branches WHERE id=%s", [base])

        print "Added branch based on %s containing %d commit%s:" % (
            cursor.fetchone()[0], len(commit_list),
            "s" if len(commit_list) > 1 else "")
        for url_prefix in user.getCriticURLs(db):
            print " %s/log?repository=%d&branch=%s" % (url_prefix, repository.id, name)
        if len(commit_list) > 1:
            print "To create a review of all %d commits:" % len(commit_list)
        else:
            print "To create a review of the commit:"
        for url_prefix in user.getCriticURLs(db):
            print " %s/createreview?repository=%d&branch=%s" % (url_prefix, repository.id, name)

    # Record every commit on the new branch as reachable from it.
    reachable_values = [(branch_id, commit.sha1) for commit in commit_list]
    cursor.executemany("INSERT INTO reachable (branch, commit) SELECT %s, id FROM commits WHERE sha1=%s",
                       reachable_values)

def updateBranch(db, user, repository, name, old, new, multiple, flags):
    """Handle an update of refs/heads/<name> from <old> to <new>.

       Covers plain branch updates, tracked-branch updates performed by the
       system user, review branch updates (including prepared rebases and
       history rewrites), and rejects confused/non-fast-forward cases."""
    try:
        update(repository.path, "refs/heads/" + name, old, new)
    except Reject as rejected:
        raise IndexException(str(rejected))
    except Exception:
        pass

    try:
        # Lock the branch row; fail immediately if another push holds it.
        branch = dbutils.Branch.fromName(db, repository, name,
                                         for_update=dbutils.NOWAIT)
    except dbutils.FailedToLock:
        raise IndexException(reflow(
            "The branch '%s' is currently locked since it is being updated "
            "by another push. Please fetch and try again." % name))
    else:
        if not branch:
            # FIXME: We should handle this better. Maybe just redirect to
            # createBranch()?
            raise IndexException("The branch '%s' is not in the database!" % name)
        base_branch_id = branch.base.id if branch.base else None

    if branch.head_sha1 != old:
        if new == branch.head_sha1:
            # This is what we think the ref ought to be already. Do nothing,
            # and let the repository "catch up."
            return
        else:
            data = { "name": name,
                     "old": old[:8],
                     "new": new[:8],
                     "current": branch.head_sha1[:8] }

            message = """CONFUSED!  Git thinks %(name)s points to %(old)s, but
Critic thinks it points to %(current)s.  Rejecting push since it would only
makes matters worse.  To resolve this problem, use

  git push -f critic %(current)s:%(name)s

to resynchronize the Git repository with Critic's database.  Note that
'critic' above must be replaced by the actual name of your Critic remote, if
not 'critic'.""" % data

            raise IndexException(textutils.reflow(message, line_length=80 - len("remote: ")))

    cursor = db.cursor()

    # Is this branch a locally tracked copy of a branch in another repository?
    cursor.execute("""SELECT id, remote, remote_name, forced, updating
                        FROM trackedbranches
                       WHERE repository=%s
                         AND local_name=%s
                         AND NOT disabled""",
                   (repository.id, name))
    row = cursor.fetchone()

    if row:
        trackedbranch_id, remote, remote_name, forced, updating = row
        tracked_branch = "%s in %s" % (remote_name, remote)

        assert not forced or not name.startswith("r/")

        # Only the system user, performing the scheduled update identified
        # by the flag, may push a tracked branch.
        if not user.isSystem() \
                or flags.get("trackedbranch_id") != str(trackedbranch_id):
            raise IndexException("""\
The branch '%s' is set up to track '%s' in
  %s
Please don't push it manually to this repository.""" % (name, remote_name, remote))

        assert updating

        if not name.startswith("r/"):
            conflicting = repository.revlist([branch.head_sha1], [new])
            added = repository.revlist([new], [branch.head_sha1])

            if conflicting:
                if forced:
                    if branch.base is None:
                        # Root branch: just drop the no-longer-reachable
                        # commits from the reachable table.
                        cursor.executemany("""DELETE FROM reachable
                                                    WHERE branch=%s
                                                      AND commit IN (SELECT id
                                                                       FROM commits
                                                                      WHERE sha1=%s)""",
                                           [(branch.id, sha1) for sha1 in conflicting])
                    else:
                        print "Non-fast-forward update detected; deleting and recreating branch."
                        deleteBranch(db, user, repository, branch.name, old)
                        createBranches(db, user, repository, [(branch.name, new)], flags)
                        return
                else:
                    raise IndexException("""\
Rejecting non-fast-forward update of branch.
To perform the update, you can delete the branch using
  git push critic :%s
first, and then repeat this push.""" % name)

            cursor.executemany("""INSERT INTO reachable (branch, commit)
                                       SELECT %s, commits.id
                                         FROM commits
                                        WHERE sha1=%s""",
                               [(branch.id, sha1) for sha1 in added])

            new_head = gitutils.Commit.fromSHA1(db, repository, new)

            cursor.execute("UPDATE branches SET head=%s WHERE id=%s",
                           (new_head.getId(db), branch.id))

            output = []

            if conflicting:
                output.append("Pruned %d conflicting commits." % len(conflicting))
            if added:
                output.append("Added %d new commits." % len(added))

            if output:
                print "\n".join(output)

            return
    else:
        tracked_branch = False

    cursor.execute("SELECT id FROM reviews WHERE branch=%s", (branch.id,))
    row = cursor.fetchone()

    is_review = bool(row)

    if is_review:
        if multiple:
            raise IndexException("""\
Refusing to update review in push of multiple refs.  Please push one review
branch at a time.""")

        review_id = row[0]

        # Is there a prepared (pending) rebase of this review?
        cursor.execute("""SELECT id, old_head, old_upstream, new_upstream, uid, branch
                            FROM reviewrebases
                           WHERE review=%s AND new_head IS NULL""",
                       (review_id,))
        row = cursor.fetchone()

        if row:
            if tracked_branch:
                raise IndexException("Refusing to perform a review rebase via an automatic update.")

            rebase_id, old_head_id, old_upstream_id, new_upstream_id, rebaser_id, onto_branch = row

            review = dbutils.Review.fromId(db, review_id)
            rebaser = dbutils.User.fromId(db, rebaser_id)

            if rebaser.id != user.id:
                if user.isSystem():
                    # Attribute the rebase to the user who prepared it.
                    user = rebaser
                else:
                    raise IndexException("""\
This review is currently being rebased by
  %s <%s>
and can't be otherwise updated right now.""" % (rebaser.fullname, rebaser.email))

            old_head = gitutils.Commit.fromId(db, repository, old_head_id)
            old_commitset = log.commitset.CommitSet(review.branch.getCommits(db))

            if old_head.sha1 != old:
                raise IndexException("""\
Unexpected error.  The branch appears to have been updated since your rebase
was prepared.  You need to cancel the rebase via the review front-page and
then try again, and/or report a bug about this error.""")

            if old_upstream_id is not None:
                # Rebase onto a (possibly new) upstream commit.
                new_head = gitutils.Commit.fromSHA1(db, repository, new)
                old_upstream = gitutils.Commit.fromId(db, repository, old_upstream_id)
                if new_upstream_id is not None:
                    new_upstream = gitutils.Commit.fromId(db, repository, new_upstream_id)
                else:
                    # Deduce the new upstream from the pushed head's parent.
                    if len(new_head.parents) != 1:
                        raise IndexException("Invalid rebase: New head can't be a merge commit.")
                    new_upstream = gitutils.Commit.fromSHA1(db, repository, new_head.parents[0])
                    if new_upstream in old_commitset.getTails():
                        # Upstream didn't actually change: treat as a plain
                        # history rewrite instead.
                        old_upstream = new_upstream = None
            else:
                old_upstream = None

            if old_upstream:
                unrelated_move = False

                if not new_upstream.isAncestorOf(new):
                    raise IndexException("""\
Invalid rebase: The new upstream commit you specified when the rebase was
prepared is not an ancestor of the commit now pushed.  You may want to cancel
the rebase via the review front-page, and prepare another one specifying the
correct new upstream commit; or rebase the branch onto the new upstream
specified and then push that instead.""")

                if not old_upstream.isAncestorOf(new_upstream):
                    # Moving to an unrelated upstream (e.g. another branch.)
                    unrelated_move = True

                equivalent_merge = replayed_rebase = None

                if unrelated_move:
                    replayed_rebase = reviewing.rebase.replayRebase(
                        db, review, user, old_head, old_upstream, new_head,
                        new_upstream, onto_branch)
                else:
                    equivalent_merge = reviewing.rebase.createEquivalentMergeCommit(
                        db, review, user, old_head, old_upstream, new_head,
                        new_upstream, onto_branch)

                new_sha1s = repository.revlist([new_head.sha1], [new_upstream.sha1], '--topo-order')
                rebased_commits = [gitutils.Commit.fromSHA1(db, repository, sha1)
                                   for sha1 in new_sha1s]
                reachable_values = [(review.branch.id, sha1) for sha1 in new_sha1s]

                pending_mails = []
                recipients = review.getRecipients(db)
                for to_user in recipients:
                    pending_mails.extend(reviewing.mail.sendReviewRebased(
                        db, user, to_user, recipients, review, new_upstream,
                        rebased_commits, onto_branch))
                print "Rebase performed."

                review.setPerformedRebase(old_head, new_head, old_upstream, new_upstream,
                                          user, equivalent_merge, replayed_rebase)

                if unrelated_move:
                    # Add the replayed rebase commit; its changes are
                    # attributed against the new head.
                    reviewing.utils.addCommitsToReview(
                        db, user, review, [replayed_rebase],
                        pending_mails=pending_mails,
                        silent_if_empty=set([replayed_rebase]),
                        replayed_rebases={ replayed_rebase: new_head })

                    # Keep the old head and the replayed commit from being
                    # garbage collected by Git.
                    repository.keepalive(old_head)
                    repository.keepalive(replayed_rebase)

                    cursor.execute("""UPDATE reviewrebases
                                         SET replayed_rebase=%s
                                       WHERE id=%s""",
                                   (replayed_rebase.getId(db), rebase_id))
                else:
                    reviewing.utils.addCommitsToReview(
                        db, user, review, [equivalent_merge],
                        pending_mails=pending_mails,
                        silent_if_empty=set([equivalent_merge]),
                        full_merges=set([equivalent_merge]))

                    repository.keepalive(equivalent_merge)

                    cursor.execute("""UPDATE reviewrebases
                                         SET equivalent_merge=%s
                                       WHERE id=%s""",
                                   (equivalent_merge.getId(db), rebase_id))

                # Finish the rebase record and swap the branch's reachable
                # set for the rebased commits.
                cursor.execute("""UPDATE reviewrebases
                                     SET new_head=%s, new_upstream=%s
                                   WHERE id=%s""",
                               (new_head.getId(db), new_upstream.getId(db), rebase_id))
                cursor.execute("""INSERT INTO previousreachable (rebase, commit)
                                       SELECT %s, commit
                                         FROM reachable
                                        WHERE branch=%s""",
                               (rebase_id, review.branch.id))
                cursor.execute("DELETE FROM reachable WHERE branch=%s",
                               (review.branch.id,))
                cursor.executemany("""INSERT INTO reachable (branch, commit)
                                           SELECT %s, commits.id
                                             FROM commits
                                            WHERE commits.sha1=%s""",
                                   reachable_values)
                cursor.execute("UPDATE branches SET head=%s WHERE id=%s",
                               (new_head.getId(db), review.branch.id))
            else:
                # History rewrite: same upstream, rewritten commits with an
                # identical resulting tree.
                old_commitset = log.commitset.CommitSet(review.branch.getCommits(db))

                new_sha1s = repository.revlist([new], old_commitset.getTails(), '--topo-order')

                if old_head.sha1 in new_sha1s:
                    raise IndexException("""\
Invalid history rewrite: Old head of the branch reachable from the pushed
ref; no history rewrite performed.  (Cancel the rebase via the review
front-page if you've changed your mind.)""")

                # Find the first pushed commit whose tree matches the old
                # head's tree; that becomes the rewritten head.
                for new_sha1 in new_sha1s:
                    new_head = gitutils.Commit.fromSHA1(db, repository, new_sha1)
                    if new_head.tree == old_head.tree:
                        break
                else:
                    raise IndexException("""\
Invalid history rewrite: The rebase introduced unexpected code changes.  Use
  git diff
between the review branch in Critic's repository and the rebased local branch
to see what those changes are.""")

                rebased_commits = [gitutils.Commit.fromSHA1(db, repository, sha1)
                                   for sha1 in repository.revlist([new_head], old_commitset.getTails(), '--topo-order')]
                new_commits = [gitutils.Commit.fromSHA1(db, repository, sha1)
                               for sha1 in repository.revlist([new], [new_head], '--topo-order')]

                reachable_values = [(review.branch.id, sha1) for sha1 in new_sha1s]

                pending_mails = []
                recipients = review.getRecipients(db)
                for to_user in recipients:
                    pending_mails.extend(reviewing.mail.sendReviewRebased(db, user, to_user, recipients, review, None, rebased_commits))

                print "History rewrite performed."

                if new_commits:
                    # Commits pushed on top of the rewritten history.
                    reviewing.utils.addCommitsToReview(db, user, review, new_commits,
                                                       pending_mails=pending_mails)
                else:
                    reviewing.mail.sendPendingMails(pending_mails)

                cursor.execute("""UPDATE reviewrebases
                                     SET new_head=%s
                                   WHERE id=%s""",
                               (new_head.getId(db), rebase_id))
                cursor.execute("""INSERT INTO previousreachable (rebase, commit)
                                       SELECT %s, commit
                                         FROM reachable
                                        WHERE branch=%s""",
                               (rebase_id, review.branch.id))
                cursor.execute("DELETE FROM reachable WHERE branch=%s",
                               (review.branch.id,))
                cursor.executemany("""INSERT INTO reachable (branch, commit)
                                           SELECT %s, commits.id
                                             FROM commits
                                            WHERE commits.sha1=%s""",
                                   reachable_values)
                cursor.execute("UPDATE branches SET head=%s WHERE id=%s",
                               (gitutils.Commit.fromSHA1(db, repository, new).getId(db), review.branch.id))

                repository.keepalive(old)

            review.incrementSerial(db)
            return True
        elif old != repository.mergebase([old, new]):
            raise IndexException("Rejecting non-fast-forward update of review branch.")
    elif old != repository.mergebase([old, new]):
        raise IndexException("""\
Rejecting non-fast-forward update of branch.
To perform the update, you can delete the branch using
  git push critic :%s
first, and then repeat this push.""" % name)

    cursor.execute("SELECT id FROM branches WHERE repository=%s AND base IS NULL ORDER BY id ASC LIMIT 1",
                   (repository.id,))
    root_branch_id = cursor.fetchone()[0]

    def isreachable(sha1):
        # True if |sha1| is already recorded as reachable from this branch,
        # its base branch, or the repository's root branch.
        if is_review and sha1 == branch.tail_sha1:
            return True
        if base_branch_id:
            cursor.execute("""SELECT 1
                                FROM commits
                                JOIN reachable ON (reachable.commit=commits.id)
                               WHERE commits.sha1=%s
                                 AND reachable.branch IN (%s, %s, %s)""",
                           (sha1, branch.id, base_branch_id, root_branch_id))
        else:
            cursor.execute("""SELECT 1
                                FROM commits
                                JOIN reachable ON (reachable.commit=commits.id)
                               WHERE commits.sha1=%s
                                 AND reachable.branch IN (%s, %s)""",
                           (sha1, branch.id, root_branch_id))
        return cursor.fetchone() is not None

    # Collect the pushed commits that are new to this branch.
    stack = [new]
    commits = set()
    commit_list = []
    processed = set()

    while stack:
        sha1 = stack.pop()

        if sha1 not in commits and not isreachable(sha1):
            commits.add(sha1)
            commit_list.append(sha1)

            stack.extend([parent_sha1 for parent_sha1 in gitutils.Commit.fromSHA1(db, repository, sha1).parents
                          if parent_sha1 not in processed])

        processed.add(sha1)

    branch = dbutils.Branch.fromName(db, repository, name)
    review = dbutils.Review.fromBranch(db, branch)

    if review:
        if review.state != "open":
            raise IndexException("""\
The review is closed and can't be extended.  You need to reopen it at
%s
before you can add commits to it.""" % review.getURL(db, user, 2))

        all_commits = [gitutils.Commit.fromSHA1(db, repository, sha1)
                       for sha1 in reversed(commit_list)]

        tails = CommitSet(all_commits).getTails()

        if old not in tails:
            raise IndexException("""\
Push rejected; would break the review.

It looks like some of the pushed commits are reachable from the repository's
main branch, and thus consequently the commits currently included in the
review are too.

Perhaps you should request a new review of the follow-up commits?""")

        reviewing.utils.addCommitsToReview(db, user, review, all_commits,
                                           commitset=commits,
                                           tracked_branch=tracked_branch)

    reachable_values = [(branch.id, sha1) for sha1 in reversed(commit_list)
                        if sha1 in commits]
    cursor.executemany("INSERT INTO reachable (branch, commit) SELECT %s, commits.id FROM commits WHERE commits.sha1=%s",
                       reachable_values)
    cursor.execute("UPDATE branches SET head=%s WHERE id=%s",
                   (gitutils.Commit.fromSHA1(db, repository, new).getId(db), branch.id))

    db.commit()

    if configuration.extensions.ENABLED and review:
        extensions.role.processcommits.execute(db, user, review, all_commits,
                                               gitutils.Commit.fromSHA1(db, repository, old),
                                               gitutils.Commit.fromSHA1(db, repository, new),
                                               sys.stdout)

def deleteBranch(db, user, repository, name, old):
    """Handle deletion of refs/heads/<name>, previously pointing at <old>.

       Review branches may not be deleted.  Branches based on the deleted
       branch are re-parented onto its base."""
    try:
        update(repository.path, "refs/heads/" + name, old, None)
    except Reject as rejected:
        raise IndexException(str(rejected))
    except Exception:
        pass

    branch = dbutils.Branch.fromName(db, repository, name)

    if branch:
        review = dbutils.Review.fromBranch(db, branch)
        if review:
            raise IndexException("This is Critic refusing to delete a branch that belongs to a review.")

        cursor = db.cursor()
        cursor.execute("SELECT COUNT(*) FROM reachable WHERE branch=%s",
                       (branch.id,))

        ncommits = cursor.fetchone()[0]

        if branch.base:
            # Re-parent branches that were based on the deleted branch.
            cursor.execute("UPDATE branches SET base=%s WHERE base=%s",
                           (branch.base.id, branch.id))
        cursor.execute("DELETE FROM branches WHERE id=%s", (branch.id,))

        # Suppress the "user friendly" feedback if the push is performed by the
        # Critic system user, since there wouldn't be a human being reading it.
        if not user.isSystem():
            print "Deleted branch containing %d commit%s." \
                % (ncommits, "s" if ncommits > 1 else "")

def createTag(db, user, repository, name, sha1):
    # Record a new tag in the database.  Annotated tags are resolved to the
    # commit they point at; tags not pointing at a commit are ignored.
    sha1 = gitutils.getTaggedCommit(repository, sha1)
    if sha1:
        cursor = db.cursor()
        cursor.execute("INSERT INTO tags (name, repository, sha1) VALUES (%s, %s, %s)",
                       (name, repository.id, sha1))

def updateTag(db, user, repository, name, old_sha1, new_sha1):
    # Update the database record of a tag; drop it if the new target is not
    # (or does not resolve to) a commit.
    sha1 = gitutils.getTaggedCommit(repository, new_sha1)
    cursor = db.cursor()
    if sha1:
        cursor.execute("UPDATE tags SET sha1=%s WHERE name=%s AND repository=%s",
                       (sha1, name, repository.id))
    else:
        cursor.execute("DELETE FROM tags WHERE name=%s AND repository=%s",
                       (name, repository.id))

def deleteTag(db, user, repository, name):
    # Forget a deleted tag.
    cursor = db.cursor()
    cursor.execute("DELETE FROM tags WHERE name=%s AND repository=%s",
                   (name, repository.id))

================================================
FILE: src/inpututils.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import sys
import os

# Try to import the readline module to augment raw_input(), used below, which
# automatically uses readline for line editing if it has been loaded. We don't
# really care if it fails; that just means raw_input() is a bit dumber.
try:
    import readline
except:
    pass

__doc__ = "Helper functions for prompting for and reading input."
def apply_check(check, input):
    """Run a caller-supplied validation function over |input|.

       The check returns None for valid input, True for generically invalid
       input, or a string describing why the input is invalid.  Returns True
       if the input passed, False (after printing a message) otherwise."""
    result = check(input)
    if result is None:
        return True
    elif result is True:
        print "Invalid input."
        print
    else:
        print "Invalid input: %s." % result
        print
    return False

def yes_or_no(prompt, default=None):
    """Prompt until the user answers yes or no; return True/False.

       An empty answer returns |default| when one is given."""
    prompt = "%s [%s/%s] " % (prompt,
                              "Y" if default is True else "y",
                              "N" if default is False else "n")

    while True:
        try:
            input = raw_input(prompt)
        except KeyboardInterrupt:
            print
            raise

        if input.lower() in ("y", "yes"):
            return True
        elif input.lower() in ("n", "no"):
            return False
        elif input or default is None:
            print "Please answer 'y'/'yes' or 'n'/'no'."
            print
        else:
            return default

def string(prompt, default=None, check=None):
    """Prompt for a non-empty string, optionally validated by |check|."""
    prompt = "%s%s " % (prompt, (" [%s]" % default) if default is not None else "")

    while True:
        try:
            input = raw_input(prompt)
        except KeyboardInterrupt:
            print
            raise

        if default and not input:
            input = default

        if check:
            if apply_check(check, input):
                return input
        elif not input:
            print "Invalid input: empty."
        else:
            return input

def password(prompt, default=None, twice=True):
    """Prompt for a password with terminal echo disabled.

       When |twice| is true, asks for the password twice and retries until
       both entries match.  An empty entry returns |default| if given."""
    import termios

    prompt = "%s%s " % (prompt, " [****]" if default is not None else "")

    def internal(prompt):
        if os.isatty(sys.stdin.fileno()):
            # Temporarily turn off the terminal's ECHO flag while reading.
            old = termios.tcgetattr(sys.stdin)
            new = old[:]
            new[3] = new[3] & ~termios.ECHO
            try:
                termios.tcsetattr(sys.stdin, termios.TCSADRAIN, new)
                try:
                    password = raw_input(prompt)
                except KeyboardInterrupt:
                    print
                    raise
            finally:
                termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old)
        else:
            # Not a terminal: read a line without any echo handling.
            password = sys.stdin.readline().rstrip("\n")
        print
        if default and not password:
            return default
        else:
            return password

    while True:
        password = internal(prompt)
        if twice:
            andagain = internal("And again: ")
            if password == andagain:
                return password
            else:
                print
                print "Passwords differ. Please try again."
                print
        else:
            return password

================================================
FILE: src/jsonapi/__init__.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import contextlib
import itertools
import re

import api
import auth
import request
import textutils

class Error(Exception):
    # Base class of all JSON API errors.
    pass

class PathError(Error):
    """Raised for valid paths that don't match a resource

       Results in a 404 "Not Found" response.

       Note: A "valid" path is one that could have returned a resource, had
             the system's dynamic state (database + repositories) been
             different."""
    http_status = 404
    title = "No such resource"

class UsageError(Error):
    """Raised for invalid paths and/or query parameters

       Results in a 400 "Bad Request" response.
       Note: An "invalid" path is one that could never (in this version of
             Critic) return any other response, regardless of the system's
             dynamic state (database + repositories.)"""
    http_status = 400
    title = "Invalid API request"

class InputError(Error):
    # Invalid request body / input data: 400 "Bad Request".
    http_status = 400
    title = "Invalid API input"

class PermissionDenied(Error):
    # The signed-in (or anonymous) user may not perform the operation.
    http_status = 403
    title = "Permission denied"

class ResultDelayed(Error):
    # The result is being computed; the client should retry later.
    http_status = 202
    title = "Resource temporarily unavailable"

class InternalRedirect(Exception):
    """Raised by a resource's create() to continue processing as if another
       resource (path) had been requested."""
    def __init__(self, resource_path, subresource_path=None, value=None,
                 values=None):
        self.resource_path = resource_path
        self.subresource_path = subresource_path or []
        self.value = value
        self.values = values

class ResourceSkipped(Exception):
    """Raised by a resource class's json() to skip the resource

       The message should explain why it was skipped, which may be sent to
       the client in a "404 Not Found" response."""
    pass

# Query parameters handled generically, not by individual resource classes.
SPECIAL_QUERY_PARAMETERS = frozenset(["fields", "include", "debug"])

def _process_fields(value):
    # Parse a comma-separated "fields" parameter value into a set of field
    # names (including all dotted prefixes of each name.)
    fields = set()
    for field in value.split(","):
        fields.add(field)
        # For fields on the form 'a.b.c', add the prefixes 'a' and 'a.b' as
        # well, so that one can request inclusion of fields in sub-objects
        # without having to explicitly request the sub-object be included.
        while True:
            field, _, _ = field.rpartition(".")
            if not field:
                break
            fields.add(field)
    return fields

class Parameters(object):
    """Processed query parameters and per-request state of an API request."""

    def __init__(self, critic, req):
        self.critic = critic
        self.req = req
        self.debug = req.getParameter(
            "debug", set(), filter=lambda value: set(value.split(",")))
        self.fields = req.getParameter(
            "fields", set(), filter=_process_fields)
        self.fields_per_type = {}
        # All remaining (resource-specific) query parameters.
        self.__query_parameters = {
            name: value
            for name, value in req.getParameters().items()
            if name not in SPECIAL_QUERY_PARAMETERS
        }
        self.__resource_name = None
        self.range_accessed = False
        self.context = {}
        self.subresource_path = []
        self.output_format = self.__query_parameters.get(
            "output_format", "default")

    def __prepareType(self, resource_type):
        # "fields[<type>]" overrides the generic "fields" parameter for one
        # resource type; the result is cached per type.
        if resource_type not in self.fields_per_type:
            self.fields_per_type[resource_type] = self.req.getParameter(
                "fields[%s]" % resource_type, self.fields,
                filter=_process_fields)
        return self.fields_per_type[resource_type]

    def hasField(self, resource_type, key):
        # An empty field set means "all fields".
        fields = self.__prepareType(resource_type)
        return not fields or key in fields

    def filtered(self, resource_type, resource_json):
        # Recursively drop JSON keys not requested via "fields".
        fields = self.__prepareType(resource_type)
        if fields:
            def filter_json(prefix, key, json):
                if isinstance(json, dict):
                    if key:
                        prefix += key + "."
                    return {
                        key: filter_json(prefix, key, value)
                        for key, value in json.items()
                        if prefix + key in fields
                    }
                else:
                    return json
            return filter_json("", "", resource_json)
        return resource_json

    @contextlib.contextmanager
    def forResource(self, resource):
        # While active, getQueryParameter() prefers "name[<resource>]" over
        # plain "name".
        assert self.__resource_name is None
        self.__resource_name = resource.name
        yield
        self.__resource_name = None

    def getQueryParameter(self, name, converter=None, exceptions=()):
        if self.__resource_name:
            value = self.__query_parameters.get(
                "%s[%s]" % (name, self.__resource_name))
        else:
            value = None
        if value is None:
            value = self.__query_parameters.get(name)
        if value is not None and converter:
            try:
                value = converter(value)
            except exceptions:
                raise UsageError("Invalid %s parameter: %r" % (name, value))
        return value

    def getRange(self):
        # Return (begin, end) slice indices from "offset"/"count", either of
        # which may be None.
        self.range_accessed = True
        offset = self.getQueryParameter(
            "offset", converter=int, exceptions=ValueError)
        if offset is not None:
            if offset < 0:
                raise UsageError("Invalid offset parameter: %r" % offset)
        count = self.getQueryParameter(
            "count", converter=int, exceptions=ValueError)
        if count is not None:
            if count < 1:
                raise UsageError("Invalid count parameter: %r" % count)
        if offset and count:
            return offset, offset + count
        return offset, count

    def setContext(self, key, value):
        # Record a context value; conflicting values collapse to None.
        if key in self.context:
            existing = self.context[key]
            if existing is None or existing != value:
                self.context[key] = None
        else:
            self.context[key] = value

class Linked(object):
    """Tracks linked resource objects requested via the "include" parameter."""

    def __init__(self, req=None):
        if req is not None:
            include = req.getParameter(
                "include", [], filter=lambda value: value.split(","))
            self.linked_per_type = {
                resource_type: set()
                for resource_type in include
            }

    def __getitem__(self, resource_type):
        return self.linked_per_type[resource_type]

    def __setitem__(self, resource_type, value):
        self.linked_per_type[resource_type] = value

    def isEmpty(self):
        return not any(self.linked_per_type.values())

    def add(self, resource_path, *values):
        # Register |values| as linked objects (if their type was included.)
        resource_class = lookup(resource_path)
        assert all(isinstance(value, resource_class.value_class)
                   for value in values)
        linked = self.linked_per_type.get(resource_class.name)
        if linked is not None:
            linked.update(values)
        return resource_class

    def filter_referenced(self, json):
        # Recursively replace API value objects in |json| with their ids,
        # collecting them as linked objects along the way.
        if isinstance(json, dict):
            return {
                key: self.filter_referenced(value)
                for key, value in json.items()
            }
        elif isinstance(json, list):
            return [self.filter_referenced(value) for value in json]
        elif type(json) in VALUE_CLASSES:
            resource_path = VALUE_CLASSES[type(json)]
            resource_class = self.add(resource_path, json)
            return resource_class.resource_id(json)
        else:
            return json

    def copy(self):
        linked = Linked()
        linked.linked_per_type = {
            resource_type: set(linked_objects)
            for resource_type, linked_objects in self.linked_per_type.items()
        }
        return linked

# Registered resource classes by path, and value class => path mapping.
HANDLERS = {}
VALUE_CLASSES = {}

def registerHandler(path, resource_class):
    HANDLERS[path] = resource_class
    if not path.startswith("..."):
        if isinstance(resource_class.value_class, tuple):
            for value_class in resource_class.value_class:
                VALUE_CLASSES[value_class] = path
        else:
            VALUE_CLASSES[resource_class.value_class] = path

def PrimaryResource(resource_class):
    """Class decorator registering a top-level API resource class.

       Fills in defaults for optional class attributes and registers the
       class under "v1/<name>" and/or ".../<context>/<name>" paths."""
    assert hasattr(resource_class, "name")
    assert hasattr(resource_class, "value_class")
    for name in ("single", "multiple", "create", "update", "delete"):
        if not hasattr(resource_class, name):
            setattr(resource_class, name, None)
    for name in ("exceptions", "objects", "lists", "maps"):
        if not hasattr(resource_class, name):
            setattr(resource_class, name, ())
    for name in ("anonymous_create", "anonymous_update", "anonymous_delete"):
        if not hasattr(resource_class, name):
            setattr(resource_class, name, False)
    if not hasattr(resource_class, "resource_id"):
        resource_class.resource_id = staticmethod(lambda value: value.id)
    contexts = getattr(resource_class, "contexts", (None,))
    if None in contexts:
        registerHandler("v1/" + resource_class.name, resource_class)
    for context in filter(None, contexts):
        registerHandler(".../%s/%s" % (context, resource_class.name),
                        resource_class)
    return \
        resource_class

def lookup(resource_path):
    """Map a resource path ("v1/reviews" or ["v1", "reviews"]) to the
       registered resource class, or raise PathError."""
    if not isinstance(resource_path, list):
        resource_path = resource_path.split("/")
    for offset in range(len(resource_path) - 1):
        # Try the exact path first, then "..."-prefixed context paths.
        if offset:
            resource_id = "/".join(["..."] + resource_path[offset:])
        else:
            resource_id = "/".join(resource_path)
        try:
            return HANDLERS[resource_id]
        except KeyError:
            continue
    else:
        raise PathError("Invalid resource: %r" % "/".join(resource_path))

def find(resource_name):
    # All registered resource classes whose path ends with |resource_name|.
    suffix = "/" + resource_name
    return (resource_class
            for resource_id, resource_class in HANDLERS.items()
            if resource_id.endswith(suffix))

def id_or_name(argument):
    # Return (id, None) for numeric arguments, (None, name) otherwise.
    try:
        return int(argument), None
    except ValueError:
        return None, argument

def numeric_id(argument):
    # Parse a strictly positive integer id, or raise UsageError.
    try:
        value = int(argument)
        if value < 1:
            raise ValueError
        return value
    except ValueError:
        raise UsageError("Invalid numeric id: %r" % argument)

def deduce(resource_path, parameters):
    # Deduce a resource value from request context/parameters.
    resource_class = lookup(resource_path)
    try:
        return resource_class.deduce(parameters)
    except resource_class.exceptions as error:
        raise PathError("Resource not found: %s" % error.message)

def from_parameter(resource_path, parameter_name, parameters):
    # Resolve a resource value named by a query parameter, if present.
    resource_class = lookup(resource_path)
    parameter_value = parameters.getQueryParameter(parameter_name)
    if parameter_value is None:
        return None
    try:
        return resource_class.fromParameter(parameter_value, parameters)
    except resource_class.exceptions as error:
        raise PathError("Invalid parameter: %s=%s: %s"
                        % (parameter_name, parameter_value, error.message))

def sorted_by_id(items):
    return sorted(items, key=lambda item: item.id)

# Imported here (late) since these modules use names defined above.
import check
from check import convert, ensure
import v1
import documentation

def getAPIVersion(req):
    # Extract and validate the "/api/<version>/..." component of the path;
    # None if the path is just "/api".
    path = req.path.split("/")
    assert len(path) >= 1 and path[0] == "api"
    if len(path) < 2:
        return None
    api_version = path[1]
    if api_version != "v1":
        raise PathError("Unsupported API version: %r" % api_version)
    return api_version

def finishGET(critic, req, parameters, resource_class, value, values):
    """Produce the response JSON for a fetched resource (or list thereof),
       including requested linked resources and debug information."""
    # Exactly one of |value| and |values| must be provided.
    assert (value is None) != (values is None)
    api_version = getAPIVersion(req)

    try:
        if values is not None:
            values_json = []
            for value in values:
                try:
                    values_json.append(resource_class.json(value, parameters))
                except ResourceSkipped:
                    # Skipped items are silently dropped from lists.
                    pass
            resource_json = { resource_class.name: values_json }
        else:
            try:
                resource_json = resource_class.json(value, parameters)
            except ResourceSkipped as error:
                raise PathError("Resource not found: %s" % error.message)
            if parameters.output_format == "static":
                resource_json = { resource_class.name: [resource_json] }
    except resource_class.exceptions as error:
        raise PathError("Resource not found: %s" % error.message)
    except IndexError:
        raise PathError("List index out of range")

    if req.method != "DELETE" and parameters.subresource_path:
        # Drill into the JSON to return only the addressed sub-resource.
        subresource_json = resource_json
        for component in parameters.subresource_path:
            subresource_json = subresource_json[component]
        resource_json = { "/".join(parameters.subresource_path): subresource_json }

    linked = Linked(req)

    resource_json = linked.filter_referenced(resource_json)

    if linked.linked_per_type:
        all_linked = linked.copy()

        linked_json = resource_json["linked"] = {
            resource_type: []
            for resource_type in linked.linked_per_type
        }

        # Linked resources may themselves reference further linked
        # resources; iterate until no new objects are found.
        while not linked.isEmpty():
            additional_linked = Linked(req)

            for resource_type, linked_values in linked.linked_per_type.items():
                resource_class = lookup([api_version, resource_type])
                for linked_value in linked_values:
                    try:
                        linked_value_json = resource_class.json(linked_value, parameters)
                    except ResourceSkipped:
                        continue
                    linked_json[resource_type].append(
                        additional_linked.filter_referenced(linked_value_json))

            for resource_type in linked.linked_per_type.keys():
                # Only process objects we haven't already emitted.
                additional_linked[resource_type] -= all_linked[resource_type]
                all_linked[resource_type] |= linked[resource_type]

            linked = additional_linked

        for linked_items in linked_json.values():
            if linked_items and "id" in linked_items[0]:
                linked_items.sort(key=lambda item: item["id"])

    if critic.database.profiling and "dbqueries" in parameters.debug:
        import profiling

        # Sort items by
accumulated time. items = sorted(critic.database.profiling.items(), key=lambda item: item[1][1], reverse=True) resource_json.setdefault("debug", {})["dbqueries"] = { "formatted": profiling.formatDBProfiling(critic.database), "items": [ { "query": re.sub(r"\s+", " ", query), "count": count, "accumulated": { "time": accumulated_ms, "rows": accumulated_rows }, "maximum": { "time": maximum_ms, "rows": maximum_rows } } for query, (count, accumulated_ms, maximum_ms, accumulated_rows, maximum_rows) in items ] } return resource_json def requireSignIn(critic): if critic.actual_user is None: raise UsageError("Sign-in required") def finishPOST(critic, req, parameters, resource_class, value, values, data): if not resource_class.anonymous_create: requireSignIn(critic) if (value or values) and not parameters.subresource_path: raise UsageError("Invalid POST request") if not resource_class.create: raise UsageError("Resource class does not support creating: " % resource_class.name) while True: try: value, values = resource_class.create( parameters, value, values, data) except resource_class.exceptions as error: raise UsageError(error.message) except InternalRedirect as redirect: resource_class = lookup(redirect.resource_path) parameters.subresource_path = redirect.subresource_path value = redirect.value values = redirect.values else: break return finishGET(critic, req, parameters, resource_class, value, values) def finishPUT(critic, req, parameters, resource_class, value, values, data): if not resource_class.anonymous_update: requireSignIn(critic) if not resource_class.update: raise UsageError("Resource class does not support updating: " % resource_class.name) if value or values: try: resource_class.update(parameters, value, values, data) except resource_class.exceptions as error: raise UsageError(error.message) return finishGET(critic, req, parameters, resource_class, value, values) def finishDELETE(critic, req, parameters, resource_class, value, values): if not 
resource_class.anonymous_delete: requireSignIn(critic) if not resource_class.delete: raise UsageError("Resource class does not support deleting: " % resource_class.name) if parameters.output_format == "static": if value is not None: resource_ids = [value.id] else: resource_ids = [resource.id for resource in values] try: return_value = resource_class.delete(parameters, value, values) except resource_class.exceptions as error: raise UsageError(error.message) if return_value is None: if parameters.output_format == "static": return { "deleted": { resource_class.name: resource_ids }, } raise request.NoContent() value, values = return_value return finishGET(critic, req, parameters, resource_class, value, values) def handleRequestInternal(critic, req): api_version = getAPIVersion(req) if not api_version: if req.method == "GET": documentation.describeRoot() else: raise UsageError("Invalid %s request" % req.method) prefix = [api_version] parameters = Parameters(critic, req) path = req.path.rstrip("/").split("/")[2:] if not path: if req.method == "GET": describe_parameter = parameters.getQueryParameter("describe") if describe_parameter: v1.documentation.describeResource(describe_parameter) v1.documentation.describeVersion() else: raise UsageError("Invalid %s request" % req.method) if req.method in ("POST", "PUT"): try: data = textutils.json_decode(req.read()) except ValueError: raise UsageError("Invalid %s request body" % req.method) context = None resource_class = None while True: next_component = path.pop(0) if resource_class and (next_component in resource_class.objects or next_component in resource_class.lists or next_component in resource_class.maps): subresource_id = [] subresource_path = [] while True: subresource_id.append(next_component) subresource_path.append(next_component) if "/".join(subresource_id) in resource_class.objects: pass elif "/".join(subresource_id) in resource_class.lists: if path: try: subresource_path.append(int(path[0])) except ValueError: raise 
UsageError( "Item identifier must be an integer: %r" % path[0]) else: del path[0] elif "/".join(subresource_id) in resource_class.maps: if path: subresource_path.append(path[0]) else: raise PathError("Invalid resource: %r / %r" % ("/".join(resource_path), "/".join(subresource_id))) if not path: break next_component = path.pop(0) parameters.subresource_path = subresource_path break resource_path = prefix + [next_component] resource_class = lookup(resource_path) prefix.append(resource_class.name) value = None values = None resource_id = "/".join(resource_path) try: if path and resource_class.single: arguments = filter(None, path.pop(0).split(",")) if len(arguments) == 0 or (len(arguments) > 1 and path): raise UsageError("Invalid resource path: %s" % req.path) if len(arguments) == 1: with parameters.forResource(resource_class): value = resource_class.single(parameters, arguments[0]) assert isinstance(value, resource_class.value_class) if not path: break else: with parameters.forResource(resource_class): values = [resource_class.single(parameters, argument) for argument in arguments] assert all(isinstance(value, resource_class.value_class) for value in values) break elif not path: if req.method == "POST": break if not resource_class.multiple: raise UsageError("Resource requires an argument: %s" % resource_id) with parameters.forResource(resource_class): values = resource_class.multiple(parameters) if isinstance(values, resource_class.value_class): value, values = values, None elif not parameters.range_accessed: begin, end = parameters.getRange() values = itertools.islice(values, begin, end) break except resource_class.exceptions as error: raise PathError("Resource not found: %s" % error.message) if values and not isinstance(values, list): values = list(values) if req.method == "GET": return finishGET(critic, req, parameters, resource_class, value, values) elif req.method == "POST": return finishPOST( critic, req, parameters, resource_class, value, values, data) elif 
req.method == "PUT": return finishPUT( critic, req, parameters, resource_class, value, values, data) elif req.method == "DELETE": return finishDELETE( critic, req, parameters, resource_class, value, values) def handleRequest(critic, req): try: return handleRequestInternal(critic, req) except (api.PermissionDenied, auth.AccessDenied) as error: raise PermissionDenied(error.message) except api.ResultDelayedError: raise ResultDelayed("Please try again later") ================================================ FILE: src/jsonapi/check.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import contextlib import re import api import jsonapi def ishashable(value): try: hash(value) except TypeError: return False else: return True class TypeCheckerContext(object): def __init__(self, parameters): self.critic = parameters.critic self.__repository = parameters.context.get("repositories") self.__review = parameters.context.get("reviews") self.__path = ["data"] @contextlib.contextmanager def push(self, element): if isinstance(element, int): self.__path.append("[%d]" % element) else: self.__path.append("." 
+ str(element)) yield self.__path.pop() def __str__(self): return "".join(self.__path) @property def review(self): return self.__review @review.setter def review(self, review): assert self.__review is None or self.__review == review self.__review = review if review is not None: self.repository = review.repository @property def repository(self): return self.__repository @repository.setter def repository(self, repository): assert self.__repository is None or self.__repository == repository self.__repository = repository class TypeChecker(object): convert_exception = () def check_compatibility(self, context, value): if hasattr(self, "required_isinstance"): return isinstance(value, self.required_isinstance) return True def __call__(self, context, value): if not self.check_compatibility(context, value): raise jsonapi.InputError("%s: expected %s" % (context, self)) message = self.check(context, value) if message is not None: raise jsonapi.InputError("%s: %s" % (context, message)) if hasattr(self, "convert"): try: value = self.convert(context, value) except self.convert_exception as error: raise jsonapi.InputError("%s: %s" % (context, error.message)) if hasattr(self, "process"): self.process(context, value) return value def __str__(self): return self.expected_type def check(self, context, value): pass @staticmethod def make(value): if ishashable(value) and value in CHECKER_MAP: value = CHECKER_MAP[value] if isinstance(value, TypeChecker): return value if isinstance(value, type) and issubclass(value, TypeChecker): return value() if isinstance(value, list): assert(len(value) == 1) return ListChecker(value[0]) if isinstance(value, (set, frozenset, tuple)): if all(isinstance(item, str) for item in value): return EnumerationChecker(*value) return VariantChecker(value) if isinstance(value, dict): return ObjectChecker(value) raise Exception("invalid checked type: %r" % value) class ListChecker(TypeChecker): required_isinstance = list def __init__(self, checker): self.checker = 
TypeChecker.make(checker) self.expected_type = "list of %s" % self.checker.expected_type def convert(self, context, value): result = [] for index, element in enumerate(value): with context.push(index): result.append(self.checker(context, element)) return result class VariantChecker(TypeChecker): def __init__(self, checkers=None): if checkers is None: checkers = self.types self.checkers = map(TypeChecker.make, checkers) self.matched = None self.expected_type = "%s or %s" % (", ".join(map(str, self.checkers[:-1])), self.checkers[-1]) def check_compatibility(self, context, value): for checker in self.checkers: if checker.check_compatibility(context, value): self.matched = checker self.convert_exception = checker.convert_exception return True return False def convert(self, context, value): try: return self.matched(context, value) finally: self.matched = None class ObjectChecker(TypeChecker): required_isinstance = dict expected_type = "object" def __init__(self, attributes): self.attributes = {} self.prioritized = set() for attribute_name, attribute_type in attributes.items(): required = False default = False if attribute_name.endswith("=null"): default = True attribute_name = attribute_name[:-5] elif attribute_name.endswith("?"): attribute_name = attribute_name[:-1] else: required = True if attribute_name.endswith("!"): attribute_name = attribute_name[:-1] self.prioritized.add(attribute_name) self.attributes[attribute_name] = (required, default, TypeChecker.make(attribute_type)) def convert(self, context, value): result = {} def convert_attributes(attributes): for attribute_name, attribute_value in attributes: with context.push(attribute_name): if attribute_name not in self.attributes: raise jsonapi.InputError( "%s: unexpected attribute" % context) result[attribute_name] = self.attributes[attribute_name][2]( context, attribute_value) convert_attributes((attribute_name, attribute_value) for attribute_name, attribute_value in value.items() if attribute_name in 
self.prioritized) convert_attributes((attribute_name, attribute_value) for attribute_name, attribute_value in value.items() if attribute_name not in self.prioritized) for attribute_name, (required, default, _) in self.attributes.items(): if attribute_name not in result: if required: with context.push(attribute_name): raise jsonapi.InputError("%s: missing attribute" % context) elif default: result[attribute_name] = None return result class IntegerChecker(TypeChecker): required_isinstance = int expected_type = "integer" class RestrictedInteger(IntegerChecker): def __init__(self, minvalue=None, maxvalue=None): self.minvalue = minvalue self.maxvalue = maxvalue def check(self, context, value): if self.minvalue is not None and value < self.minvalue: return "must be at least %d" % self.minvalue if self.maxvalue is not None and value > self.maxvalue: return "can be at most %d" % self.maxvalue return super(RestrictedInteger, self).check(context, value) class NonNegativeInteger(RestrictedInteger): def __init__(self): super(NonNegativeInteger, self).__init__(minvalue=0) class PositiveInteger(RestrictedInteger): def __init__(self): super(PositiveInteger, self).__init__(minvalue=1) class StringChecker(TypeChecker): required_isinstance = basestring expected_type = "string" class RestrictedString(StringChecker): def __init__(self, minlength=None, maxlength=None, regexp=None): self.minlength = minlength self.maxlength = maxlength self.regexp = re.compile(regexp) if regexp else None def check(self, context, value): if self.minlength is not None and len(value) < self.minlength: return "must be at least %d characters long" % self.minlength if self.maxlength is not None and len(value) < self.maxlength: return "can be at most %d characters long" % self.maxlength if self.regexp is not None and not self.regexp.match(value): return "must match '%s'" % self.regexp.pattern return super(RestrictedString, self).check(context, value) class RegularExpression(StringChecker): def check(self, 
context, value): try: re.compile(value) except re.error: return "must be a valid Python regular expression" return super(RegularExpression, self).check(context, value) class EnumerationChecker(StringChecker): def __init__(self, *values): self.values = frozenset(values) def check(self, context, value): if value not in self.values: values = sorted(self.values) return ("must be one of %s and %s" % (", ".join(values[:-1]), values[-1])) return super(EnumerationChecker, self).check(context, value) class BooleanChecker(TypeChecker): required_isinstance = bool expected_type = "boolean" class UserId(PositiveInteger): convert_exception = api.user.InvalidUserId def convert(self, context, value): return api.user.fetch(context.critic, user_id=value) class UserName(StringChecker): convert_exception = api.user.InvalidUserName def convert(self, context, value): return api.user.fetch(context.critic, name=value) class User(VariantChecker): types = (UserId, UserName) class RepositoryId(PositiveInteger): convert_exception = api.repository.InvalidRepositoryId def convert(self, context, value): return api.repository.fetch(context.critic, repository_id=value) class RepositoryName(StringChecker): convert_exception = api.repository.InvalidRepositoryName def convert(self, context, value): return api.repository.fetch(context.critic, name=value) class Repository(VariantChecker): types = (RepositoryId, RepositoryName) def process(self, context, repository): context.repository = repository class Required(TypeChecker): def check(self, context, value): if context.repository is None: return "no repository set in context" return super(Repository.Required, self).check(context, value) class Review(PositiveInteger): convert_exception = api.review.InvalidReviewId def convert(self, context, value): return api.review.fetch(context.critic, review_id=value) def process(self, context, review): context.review = review class Comment(PositiveInteger): convert_exception = api.comment.InvalidCommentId def 
convert(self, context, value): return api.comment.fetch(context.critic, comment_id=value) def process(self, context, comment): context.review = comment.review class Reply(PositiveInteger): convert_exception = api.reply.InvalidReplyId def convert(self, context, value): return api.reply.fetch(context.critic, reply_id=value) class CommitId(PositiveInteger): convert_exception = api.commit.InvalidCommitId def convert(self, context, value): return api.commit.fetch(context.repository, commit_id=value) class CommitReference(StringChecker): convert_exception = api.repository.InvalidRef def convert(self, context, value): return api.commit.fetch(context.repository, ref=value) class Commit(VariantChecker, Repository.Required): types = (CommitId, CommitReference) class FileId(PositiveInteger): convert_exception = api.file.InvalidFileId def convert(self, context, value): return api.file.fetch(context.critic, file_id=value) class FilePath(StringChecker): convert_exception = api.file.InvalidPath def convert(self, context, value): return api.file.fetch(context.critic, path=value) class File(VariantChecker): types = (FileId, FilePath) class Changeset(PositiveInteger, Repository.Required): convert_exception = api.changeset.InvalidChangesetId def convert(self, context, value): assert context.repository return api.changeset.fetch( context.critic, context.repository, changeset_id=value) class ExtensionId(PositiveInteger): convert_exception = api.extension.InvalidExtensionId def convert(self, context, value): return api.extension.fetch(context.critic, extension_id=value) class ExtensionKey(StringChecker): convert_exception = api.extension.InvalidExtensionKey def convert(self, context, value): return api.extension.fetch(context.critic, key=value) class Extension(VariantChecker): types = (ExtensionId, ExtensionKey) class AccessControlProfile(PositiveInteger): convert_exception = api.accesscontrolprofile.InvalidAccessControlProfileId def convert(self, context, value): return 
api.accesscontrolprofile.fetch(context.critic, profile_id=value) CHECKER_MAP = { int: IntegerChecker(), str: StringChecker(), bool: BooleanChecker(), api.user.User: User, api.repository.Repository: Repository, api.review.Review: Review, api.comment.Comment: Comment, api.reply.Reply: Reply, api.commit.Commit: Commit, api.file.File: File, api.changeset.Changeset: Changeset, api.extension.Extension: Extension, api.accesscontrolprofile.AccessControlProfile: AccessControlProfile } def convert(parameters, checker, value): context = TypeCheckerContext(parameters) return TypeChecker.make(checker)(context, value) def ensure(data, path, ensured_value): if isinstance(path, (tuple, list)): for key in path[:-1]: data = data[key] key = path[-1] else: key = path if key not in data: data[key] = ensured_value elif data[key] != ensured_value: path_string = "data" for key in path: if isinstance(key, str): path_string += "." + key else: path_string += "[%d]" % key raise jsonapi.InputError("%s: must be %r or omitted" % (path_string, ensured_value)) ================================================ FILE: src/jsonapi/documentation.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import page.utils def describeRoot(): # Since there is only one supported API version; simply redirect # to its documentation. 
raise page.utils.MovedTemporarily("/api/v1") ================================================ FILE: src/jsonapi/v1/README.txt ================================================ API design ========== Primary vs secondary resources ------------------------------ Accessed resources (objects) are classified as either "primary" or "secondary". Primary resources are those that are directly addressable via some path, such as users (/api/v1/users/1) or reviews (/api/v1/reviews/1), but also a users individual registered email addresses (/api/v1/users/1/emails/1). A primary resource has a resource type that is simply the path component, i.e. "users", "reviews" or "emails" for the resources mentioned in this paragraph. A secondary resource is not directly addressable via a path and is only ever returned as part of a primary resource. A primary resource referenced by another primary resource is always included as an id reference only, never expanded. Such a referenced resource may, if requested, still be included in the response, but then as a separate linked resource. Example: Users are primary resources, and are referenced by various fields in the a review resource, but only by id: /api/v1/reviews/1 => { "id": 1, "owners: [2, 3], "reviewers": [4, 5, 6], ... } The name+email+timestamp objects that represent the author and committer metadata in commits, OTOH, are secondary resources, and thus directly included in the commit resource that reference them: /api/v1/repository/1/commits/1 => { "id": 1, "author": { "name": "...", "email": "...", "timestamp": ... }, "committer": { "name": "...", "email": "...", "timestamp": ... }, } This rule exists mostly to define a consistent answer to the question: "Should resource X be included in resource Y?" Referenced primary resource are not directly included because A) they are generally convenient enough to access on their own, given an id, and B) can always be included in the response anyway, as a linked resource. 
Linked resources
----------------

Typically, any primary resource whose id is included as part of another primary
resource is recorded as a linked resource. This includes the case of the other
primary resource also being "just" a linked resource, as long as that other
primary resource was actually going to be included in the final response.

Linked resources are included in the final response if requested via the
'include' query parameter. Its value is a comma-separated list of resource
types.

Example:

  /api/v1/reviews/1?include=users =>

    {
      "id": 1,
      "owners": [2, 3],
      "reviewers": [3, 4],
      ...
      "linked": {
        "users": [{ "id": 2, "name": "...", ... },
                  { "id": 3, "name": "...", ... },
                  { "id": 4, "name": "...", ... }]
      },
    }

Some references to primary resources do not cause the referenced resource to be
recorded as a linked resource. One notable such exception is the references to
a commit's parent commits, since the recursive processing of linked resources
could easily cause a repository's entire history to be included.

Example:

  /api/v1/repositories/1/commits/1?include=commits =>

    {
      "id": 1,
      "parents": [2, 3],
      ...
      "linked": {}
    }

Note that the exception here is the commit resource's 'parents' field, not
commits in general. Other resources that include lists of commit references do
record them as linked resources.

Example:

  /api/v1/reviews/1?include=commits =>

    {
      "id": 1,
      "commits": [1, 2, 3],
      ...
      "linked": {
        "commits": [{ "id": 1, "parents": [2], ... },
                    { "id": 2, "parents": [3], ... },
                    { "id": 3, "parents": [4], ... }]
      }
    }

Note: In this scenario, commits 1-3 are included since they are directly
referenced from the review resource, but commit 4, which is referenced from
commit 3's 'parents' field, is not included.

Collections
-----------

Primary resources can also typically be accessed as collections, normally via a
path that doesn't include the final component that identifies the specific
resource.
A collection of primary resources is returned as an object with a single key,
the resource type, mapped to an array of resources.

Example:

  /api/v1/users =>

    {
      "users": [{ "id": 1, "name": "...", ... },
                { "id": 2, "name": "...", ... },
                ...]
    }

The top-level { resource_type: collection } structure is there to make it
possible to also include linked resources as part of the top-level structure.
This would not be possible if the top-level structure were an array, for
instance.

Implementation
==============

A primary resource is implemented by decorating a class with the decorator
|jsonapi.PrimaryResource|, as such:

  class User(object):
      """Internal representation"""
      def __init__(self, name):
          self.name = name

  @jsonapi.PrimaryResource
  class Users(object):
      name = "users"
      value_class = User

      @staticmethod
      def json(value, parameters, linked):
          return { "name": value.name }

      @staticmethod
      def single(parameters, argument):
          return User(argument)

      @staticmethod
      def multiple(parameters):
          return [User("alice"), User("bob")]

A resource class is never instantiated; it is only expected to have class
attributes and static (or class) methods.

Two attributes are required: |name| and |value_class|. In addition, these
attributes are used if present: |contexts| and |exceptions|.

name
----

The resource name (typically plural) as it appears in the path. This defines
the paths that this resource class handles. If the |name| attribute is
"users", the resource class handles the path /api/v1/users/, unless the
|contexts| attribute is present and overrides this (see below).

value_class
-----------

The internal type of the values being "wrapped".

contexts
--------

The optional |contexts| attribute should be a tuple containing strings or the
special value None. If it contains None, the resource can appear without
context, meaning at the beginning of a path.
If it contains strings, those strings should match the name of other primary
resources, and the meaning is that this resource can occur following that
other resource on a path.

exceptions
----------

The optional |exceptions| attribute should be a tuple containing exception
types that the resource class's methods can raise and have converted into
PathError exceptions.

json()
------

The json() method is called to convert an instance of the resource class's
internal value class to a simple data structure (typically a dictionary) that
can then be converted into a JSON string. It must be implemented.

The |value| parameter is the value being converted. It is guaranteed to be an
instance of the resource class's internal value class.

The |parameters| parameter gives access to query string parameters, and to
context objects introduced by earlier path segments (all but the last).

The |linked| parameter holds an object that can be used to register other
primary resources referenced by this resource.

single()
--------

The single() method is called when processing a path .../<name>/<argument>,
where <name> is the resource class's |name| attribute and <argument> is the
|argument| parameter to the method. If the single() method is not implemented,
this type of path is invalid.

The |critic| parameter is an api.critic.Critic instance.

The |argument| parameter is the next path component, as described above.

The |parameters| parameter is the same as to json().

The return value must be an instance of the resource class's internal value
class.

multiple()
----------

The multiple() method is called when processing a path .../<name> and would
normally return "all resources of this type." It can also filter its return
value using query parameters. If the multiple() method is not implemented,
this type of path is invalid.

The |critic| and |parameters| parameters are the same as to single().

The return value must be an iterable of the resource class's internal value
class, or an instance of it.
The return value can be an iterator or generator. ================================================ FILE: src/jsonapi/v1/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import configuration import datetime EPOCH = datetime.datetime.utcfromtimestamp(0) def timestamp(timestamp): if timestamp is None: return None return (timestamp - EPOCH).total_seconds() import users import sessions import repositories import commits import branches import reviews import reviewsummaries import rebases import changesets import filechanges import files import comments import replies import batches import reviewablefilechanges import filediffs import filecontents if configuration.auth.ENABLE_ACCESS_TOKENS: import accesstokens import accesscontrolprofiles import labeledaccesscontrolprofiles if configuration.extensions.ENABLED: import extensions import documentation ================================================ FILE: src/jsonapi/v1/accesscontrolprofiles.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import itertools import api import jsonapi RULE = api.accesscontrolprofile.AccessControlProfile.RULE_VALUES CATEGORIES = frozenset(["http", "repositories", "extensions"]) REQUEST_METHOD = api.accesscontrolprofile \ .AccessControlProfile.HTTPException.REQUEST_METHODS REPOSITORY_ACCESS_TYPE = api.accesscontrolprofile \ .AccessControlProfile.RepositoryException.ACCESS_TYPES EXTENSION_ACCESS_TYPE = api.accesscontrolprofile \ .AccessControlProfile.ExtensionException.ACCESS_TYPES HTTP_EXCEPTION = { "request_method=null": REQUEST_METHOD, "path_pattern=null": jsonapi.check.RegularExpression } REPOSITORIES_EXCEPTION = { "access_type=null": REPOSITORY_ACCESS_TYPE, "repository=null": api.repository.Repository } EXTENSION_EXCEPTION = { "access_type=null": EXTENSION_ACCESS_TYPE, "extension=null": api.extension.Extension } PROFILE = { "http?": { "rule": RULE, "exceptions?": [HTTP_EXCEPTION] }, "repositories?": { "rule": RULE, "exceptions?": [REPOSITORIES_EXCEPTION] }, "extensions?": { "rule": RULE, "exceptions?": [EXTENSION_EXCEPTION] } } PROFILE_WITH_TITLE = PROFILE.copy() PROFILE_WITH_TITLE["title?"] = str def updateProfile(profile_modifier, converted): def updateExceptions(exceptions_modifier, exceptions): exceptions_modifier.deleteAll() for exception in exceptions: exceptions_modifier.add(**exception) def updateCategory(category): if category not in converted: return if "rule" in converted[category]: profile_modifier.setRule(category, converted[category]["rule"]) if "exceptions" in converted[category]: updateExceptions(profile_modifier.modifyExceptions(category), 
converted[category]["exceptions"]) if "title" in converted: profile_modifier.setTitle(converted["title"]) updateCategory("http") updateCategory("repositories") updateCategory("extensions") @jsonapi.PrimaryResource class AccessControlProfiles(object): """The access control profiles of this system.""" name = "accesscontrolprofiles" value_class = api.accesscontrolprofile.AccessControlProfile exceptions = (api.accesscontrolprofile.AccessControlProfileError,) objects = ("http", "repositories", "extensions") lists = ("http/exceptions", "repositories/exceptions", "extensions/exceptions") @staticmethod def json(value, parameters, linked): """AccessControlProfile { "id": integer, "title": string or null, "http": { "rule": "allow" or "deny", "exceptions: [{ "id": integer, "request_method": string or null, "path_pattern": string or null }] }, "repositories": { "rule": "allow" or "deny", "exceptions: [{ "id": integer, "access_type": "read" or "modify", "repository": integer }] }, "extensions": { "rule": "allow" or "deny", "exceptions: [{ "id": integer, "access_type": "install" or "execute", "extension": string, }] } }""" # Make sure that only administrator users can access profiles that are # not connected to access tokens, and that only administrator users and # the user that owns the access token can access other profiles. 
if not value.access_token \ or value.access_token.access_type != "user" \ or parameters.critic.actual_user != value.access_token.user: api.PermissionDenied.raiseUnlessAdministrator(parameters.critic) for exception in value.repositories.exceptions: if exception.repository: linked.add("v1/repositories", exception.repository) for exception in value.extensions.exceptions: if exception.extension: linked.add("v1/extensions", exception.extension) return parameters.filtered("accesscontrolprofiles", { "id": value.id, "title": value.title, "http": { "rule": value.http.rule, "exceptions": [{ "id": exception.id, "request_method": exception.request_method, "path_pattern": exception.path_pattern } for exception in value.http.exceptions] }, "repositories": { "rule": value.repositories.rule, "exceptions": [{ "id": exception.id, "access_type": exception.access_type, "repository": (exception.repository.id if exception.repository else None) } for exception in value.repositories.exceptions] }, "extensions": { "rule": value.extensions.rule, "exceptions": [{ "id": exception.id, "access_type": exception.access_type, "extension": (exception.extension.id if exception.extension else None) } for exception in value.extensions.exceptions] }, }) @staticmethod def single(parameters, argument): """Retrieve one (or more) access control profiles. PROFILE_ID : integer Retrieve an access control profile identified by the profile's unique numeric id.""" return api.accesscontrolprofile.fetch( parameters.critic, profile_id=jsonapi.numeric_id(argument)) @staticmethod def multiple(parameters): """Retrieve all primary access control profiles in the system. 
title : TITLE : string Retrieve only access control profiles with a matching title.""" title_parameter = parameters.getQueryParameter("title") return api.accesscontrolprofile.fetchAll( parameters.critic, title=title_parameter) @staticmethod def deduce(parameters): profile = parameters.context.get("accesscontrolprofiles") profile_parameter = parameters.getQueryParameter("profile") if profile_parameter is not None: if profile is not None: raise jsonapi.UsageError( "Redundant query parameter: profile=%s" % profile_parameter) profile_id = jsonapi.numeric_id(profile_parameter) profile = api.accesscontrolprofile.fetch( parameters.critic, profile_id=profile_id) return profile @staticmethod def create(parameters, value, values, data): critic = parameters.critic user = parameters.context.get("users", critic.actual_user) profiles = [value] if value else values path = parameters.subresource_path if 0 < len(path) < 2: raise jsonapi.UsageError("Invalid POST request") if len(path) == 2 \ and path[0] in CATEGORIES \ and path[1] == "exceptions": # Create an rule exception. if path[0] == "http": exception_type = HTTP_EXCEPTION elif path[0] == "repositories": exception_type = REPOSITORIES_EXCEPTION else: exception_type = EXTENSION_EXCEPTION converted = jsonapi.convert( parameters, exception_type, data) with api.transaction.Transaction(critic) as transaction: for profile in profiles: modifier = transaction.modifyAccessControlProfile(profile) \ .modifyExceptions(path[0]) \ .add(**converted) return value, values # Create an access control profile. 
assert not profiles converted = jsonapi.convert(parameters, PROFILE_WITH_TITLE, data) result = [] def collectAccessControlProfile(profile): assert isinstance( profile, api.accesscontrolprofile.AccessControlProfile) result.append(profile) with api.transaction.Transaction(critic) as transaction: modifier = transaction.createAccessControlProfile( callback=collectAccessControlProfile) updateProfile(modifier, converted) assert len(result) == 1 return result[0], None @staticmethod def update(parameters, value, values, data): critic = parameters.critic if value: profiles = [value] else: profiles = values path = parameters.subresource_path if len(path) == 1 and path[0] in CATEGORIES: if path[0] == "http": exception_type = HTTP_EXCEPTION elif path[0] == "repositories": exception_type = REPOSITORIES_EXCEPTION else: exception_type = EXTENSION_EXCEPTION converted = jsonapi.convert( parameters, { "rule?": RULE, "exceptions?": [exception_type] }, data) with api.transaction.Transaction(critic) as transaction: for profile in profiles: modifier = transaction.modifyAccessControlProfile(profile) updateProfile(modifier, { path[0]: converted }) return converted = jsonapi.convert(parameters, PROFILE_WITH_TITLE, data) with api.transaction.Transaction(critic) as transaction: for profile in profiles: modifier = transaction.modifyAccessControlProfile(profile) updateProfile(modifier, converted) @staticmethod def delete(parameters, value, values): critic = parameters.critic path = parameters.subresource_path if value: profiles = [value] else: profiles = values if len(path) in (2, 3) \ and path[0] in CATEGORIES \ and path[1] == "exceptions": exception_id = path[2] if len(path) == 3 else None with api.transaction.Transaction(critic) as transaction: for profile in profiles: modifier = transaction.modifyAccessControlProfile(profile) \ .modifyProfile() \ .modifyExceptions(path[0]) if exception_id is None: modifier.deleteAll() else: modifier.delete(exception_id) return value, values if path: raise 
jsonapi.UsageError("Invalid DELETE request") with api.transaction.Transaction(critic) as transaction: for profile in profiles: transaction.modifyAccessControlProfile(profile) \ .delete() ================================================ FILE: src/jsonapi/v1/accesstokens.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import api import jsonapi ACCESS_TYPE = frozenset(["user", "anonymous", "system"]) from accesscontrolprofiles import (RULE, CATEGORIES, HTTP_EXCEPTION, REPOSITORIES_EXCEPTION, EXTENSION_EXCEPTION, PROFILE, updateProfile) def modifyAccessToken(transaction, access_token): if access_token.user: return transaction \ .modifyUser(access_token.user) \ .modifyAccessToken(access_token) return transaction \ .modifyAccessToken(access_token) @jsonapi.PrimaryResource class AccessTokens(object): """Access tokens.""" name = "accesstokens" contexts = (None, "users") value_class = api.accesstoken.AccessToken exceptions = (api.accesstoken.AccessTokenError,) objects = ("profile", "profile/http", "profile/repositories", "profile/extensions") lists = ("profile/http/exceptions", "profile/repositories/exceptions", "profile/extensions/exceptions") @staticmethod def json(value, parameters): """AccessToken { "id": integer, "access_type": "user", "anonymous" or "system", "user": integer or null, "part1": string, "part2": string, "title": 
string or null, "profile": null or AccessControlProfile } AccessControlProfile { "http": { "rule": "allow" or "deny", "exceptions: [{ "id": integer, "request_method": string or null, "path_pattern": string or null }] }, "repositories": { "rule": "allow" or "deny", "exceptions: [{ "id": integer, "access_type": "read" or "modify", "repository": integer }] }, "extensions": { "rule": "allow" or "deny", "exceptions: [{ "id": integer, "access_type": "install" or "execute", "extension": string, }] } }""" # Make sure that only administrator users can access other user's access # tokens or access tokens that do not belong to any user. if value.access_type != "user" \ or parameters.critic.actual_user != value.user: api.PermissionDenied.raiseUnlessAdministrator(parameters.critic) data = { "id": value.id, "access_type": value.access_type, "user": value.user, "part1": value.part1, "part2": value.part2, "title": value.title } if value.profile: data["profile"] = { "http": { "rule": value.profile.http.rule, "exceptions": [{ "id": exception.id, "request_method": exception.request_method, "path_pattern": exception.path_pattern } for exception in value.profile.http.exceptions] }, "repositories": { "rule": value.profile.repositories.rule, "exceptions": [{ "id": exception.id, "access_type": exception.access_type, "repository": exception.repository } for exception in value.profile.repositories.exceptions] }, "extensions": { "rule": value.profile.extensions.rule, "exceptions": [{ "id": exception.id, "access_type": exception.access_type, "extension": exception.extension } for exception in value.profile.extensions.exceptions] }, } else: data["profile"] = None return parameters.filtered("accesstokens", data) @staticmethod def single(parameters, argument): """Retrieve one (or more) access tokens. 
TOKEN_ID : integer Retrieve an access token identified by its unique numeric id.""" value = api.accesstoken.fetch(parameters.critic, jsonapi.numeric_id(argument)) if "users" in parameters.context: if value.user != parameters.context["users"]: raise InvalidAccessTokenId(jsonapi.numeric_id(argument)) return value @staticmethod def multiple(parameters): """All access tokens.""" user = jsonapi.deduce("v1/users", parameters) # Only administrators are allowed to access all access tokens in the # system. if user is None: api.PermissionDenied.raiseUnlessAdministrator(parameters.critic) return api.accesstoken.fetchAll(parameters.critic, user=user) @staticmethod def create(parameters, value, values, data): critic = parameters.critic user = parameters.context.get("users", critic.actual_user) access_tokens = [value] if value else values path = parameters.subresource_path if 0 < len(path) < 3: raise jsonapi.UsageError("Invalid POST request") if len(path) == 3 \ and path[0] == "profile" \ and path[1] in CATEGORIES \ and path[2] == "exceptions": # Create an rule exception. if path[1] == "http": exception_type = HTTP_EXCEPTION elif path[1] == "repositories": exception_type = REPOSITORIES_EXCEPTION else: exception_type = EXTENSION_EXCEPTION converted = jsonapi.convert( parameters, exception_type, data) with api.transaction.Transaction(critic) as transaction: for access_token in access_tokens: modifier = modifyAccessToken(transaction, access_token) \ .modifyProfile() \ .modifyExceptions(path[1]) \ .add(**converted) return value, values # Create an access token. 
assert not access_tokens converted = jsonapi.convert( parameters, { "access_type?": ACCESS_TYPE, "title?": str, "profile?": PROFILE }, data) result = [] def collectAccessToken(token): assert isinstance(token, api.accesstoken.AccessToken) result.append(token) with api.transaction.Transaction(critic) as transaction: modifier = transaction \ .modifyUser(user) access_type = converted.get("access_type", "user") access_token = modifier.createAccessToken( access_type=access_type, title=converted.get("title"), callback=collectAccessToken) if "profile" in converted: modifier = transaction if access_type == "user": modifier = modifier.modifyUser(user) modifier = modifier \ .modifyAccessToken(access_token) \ .modifyProfile() updateProfile(modifier, converted["profile"]) assert len(result) == 1 return result[0], None @staticmethod def update(parameters, value, values, data): critic = parameters.critic if value: access_tokens = [value] else: access_tokens = values path = parameters.subresource_path if path == ["profile"]: converted = jsonapi.convert(parameters, PROFILE, data) with api.transaction.Transaction(critic) as transaction: for access_token in access_tokens: modifier = modifyAccessToken(transaction, access_token) \ .modifyProfile() updateProfile(modifier, converted) return if len(path) == 2 \ and path[0] == "profile" \ and path[1] in CATEGORIES: if path[1] == "http": exception_type = HTTP_EXCEPTION elif path[1] == "repositories": exception_type = REPOSITORIES_EXCEPTION else: exception_type = EXTENSION_EXCEPTION converted = jsonapi.convert( parameters, { "rule?": RULE, "exceptions?": [exception_type] }, data) with api.transaction.Transaction(critic) as transaction: for access_token in access_tokens: modifier = modifyAccessToken(transaction, access_token) \ .modifyProfile() updateProfile(modifier, { path[1]: converted }) return converted = jsonapi.check.convert( parameters, { "title?": str, "profile?": PROFILE, }, data) with api.transaction.Transaction(critic) as 
transaction: for access_token in access_tokens: modifier = modifyAccessToken(transaction, access_token) if "title" in converted: modifier.setTitle(converted["title"]) if "profile" in converted: updateProfile(modifier.modifyProfile(), converted["profile"]) @staticmethod def delete(parameters, value, values): critic = parameters.critic path = parameters.subresource_path if value: access_tokens = [value] else: access_tokens = values if 3 <= len(path) <= 4 \ and path[0] == "profile" \ and path[1] in CATEGORIES \ and path[2] == "exceptions": exception_id = None if len(path) == 4: exception_id = path[3] with api.transaction.Transaction(critic) as transaction: for access_token in access_tokens: modifier = modifyAccessToken(transaction, access_token) \ .modifyProfile() \ .modifyExceptions(path[1]) if exception_id is None: modifier.deleteAll() else: modifier.delete(exception_id) return value, values if path: raise jsonapi.UsageError("Invalid DELETE request") with api.transaction.Transaction(critic) as transaction: for access_token in access_tokens: modifyAccessToken(transaction, access_token) \ .delete() ================================================ FILE: src/jsonapi/v1/batches.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api import jsonapi @jsonapi.PrimaryResource class Batches(object): """Batches of changes in reviews.""" name = "batches" contexts = (None, "reviews") value_class = api.batch.Batch exceptions = api.batch.BatchError @staticmethod def json(value, parameters): """{ "id": integer or null, "is_empty": boolean, "review": integer, "author": integer, "comment": integer or null, "timestamp": float or null, "created_comments": integer[], "written_replies": integer[], "resolved_issues": integer[], "reopened_issues": integer[], "morphed_comments": MorphedComment[], "reviewed_changes": integer[], "unreviewed_changes": integer[], } MorphedComment { "comment": integer, "new_type": "issue" or "note", }""" morphed_comments = sorted([ { "comment": comment, "new_type": new_type } for comment, new_type in value.morphed_comments.items() ], key=lambda morphed_comment: morphed_comment["comment"].id) timestamp = jsonapi.v1.timestamp(value.timestamp) return parameters.filtered( "batches", { "id": value.id, "is_empty": value.is_empty, "review": value.review, "author": value.author, "timestamp": timestamp, "comment": value.comment, "created_comments": jsonapi.sorted_by_id( value.created_comments), "written_replies": jsonapi.sorted_by_id( value.written_replies), "resolved_issues": jsonapi.sorted_by_id( value.resolved_issues), "reopened_issues": jsonapi.sorted_by_id( value.reopened_issues), "morphed_comments": morphed_comments, "reviewed_changes": jsonapi.sorted_by_id( value.reviewed_file_changes), "unreviewed_changes": jsonapi.sorted_by_id( value.unreviewed_file_changes) }) @staticmethod def single(parameters, argument): """Retrieve one (or more) batches in reviews. 
BATCH_ID : integer Retrieve a batch identified by its unique numeric id.""" batch = api.batch.fetch( parameters.critic, batch_id=jsonapi.numeric_id(argument)) review = jsonapi.deduce("v1/reviews", parameters) if review and review != batch.review: raise jsonapi.PathError( "Batch does not belong to specified review") return Batches.setAsContext(parameters, batch) @staticmethod def multiple(parameters): """Retrieve all batches in the system (or review.) review : REVIEW_ID : integer Retrieve only batches in the specified review. Can only be used if a review is not specified in the resource path. author : AUTHOR : integer or string Retrieve only batches authored by the specified user, identified by the user's unique numeric id or user name. unpublished : UNPUBLISHED : 'yes' Retrieve a single batch representing the current user's unpublished changes to a review. Must be combined with `review` and cannot be combined with `author`.""" critic = parameters.critic review = jsonapi.deduce("v1/reviews", parameters) author = jsonapi.from_parameter("v1/users", "author", parameters) unpublished_parameter = parameters.getQueryParameter("unpublished") if unpublished_parameter is not None: if unpublished_parameter == "yes": if author is not None: raise jsonapi.UsageError( "Parameters 'author' and 'unpublished' cannot be " "combined") return api.batch.fetchUnpublished(critic, review) else: raise jsonapi.UsageError( "Invalid 'unpublished' parameter: %r (must be 'yes')" % unpublished_parameter) return api.batch.fetchAll(critic, review=review, author=author) @staticmethod def create(parameters, value, values, data): critic = parameters.critic user = parameters.context.get("users", critic.actual_user) if value or values: raise jsonapi.UsageError("Invalid POST request") converted = jsonapi.convert( parameters, { "review?": api.review.Review, "comment?": str, }, data) review = jsonapi.deduce("v1/reviews", parameters) if not review: if "review" not in converted: raise jsonapi.UsageError("No 
review specified") review = converted["review"] elif "review" in converted and review != converted["review"]: raise jsonapi.UsageError("Conflicting reviews specified") if "comment" in converted: comment_text = converted["comment"].strip() if not comment_text: raise jsonapi.UsageError("Empty comment specified") else: comment_text = None result = [] def collectBatch(batch): assert isinstance(batch, api.batch.Batch) result.append(batch) with api.transaction.Transaction(critic) as transaction: modifier = transaction.modifyReview(review) if comment_text: note = modifier.createComment(comment_type="note", author=critic.actual_user, text=comment_text) else: note = None modifier.submitChanges(note, callback=collectBatch) assert len(result) == 1 return result[0], None @staticmethod def deduce(parameters): batch = parameters.context.get("batches") batch_parameter = parameters.getQueryParameter("batch") if batch_parameter is not None: if batch is not None: raise jsonapi.UsageError( "Redundant query parameter: batch=%s" % batch_parameter) batch = api.batch.fetch( parameters.critic, jsonapi.numeric_id(batch_parameter)) return batch @staticmethod def setAsContext(parameters, batch): parameters.setContext(Batches.name, batch) return batch ================================================ FILE: src/jsonapi/v1/branches.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations under # the License. import api import jsonapi @jsonapi.PrimaryResource class Branches(object): """Branches in the Git repositories.""" name = "branches" contexts = (None, "repositories") value_class = api.branch.Branch exceptions = (api.branch.BranchError, api.repository.RepositoryError) @staticmethod def json(value, parameters): """Branch { "id": integer, // the branch's id "name": string, // the branch's name "repository": integer, // the branch's repository's id "head": integer, // the branch's head commit's id }""" return parameters.filtered( "branches", { "id": value.id, "name": value.name, "repository": value.repository, "head": value.head }) @staticmethod def single(parameters, argument): """Retrieve one (or more) branches in the Git repositories. BRANCH_ID : integer Retrieve a branch identified by its unique numeric id.""" return Branches.setAsContext(parameters, api.branch.fetch( parameters.critic, branch_id=jsonapi.numeric_id(argument))) @staticmethod def multiple(parameters): """Retrieve all branches in the Git repositories. repository : REPOSITORY : - Include only branches in one repository, identified by the repository's unique numeric id or short-name. name : NAME : string Retrieve only the branch with the specified name. The name should not include the "refs/heads/" prefix. 
When this parameter is specified a repository must be specified as well, either in the resource path or using the repository parameter.""" repository = jsonapi.deduce("v1/repositories", parameters) name_parameter = parameters.getQueryParameter("name") if name_parameter: if repository is None: raise jsonapi.UsageError( "Named branch access must have repository specified.") return api.branch.fetch( parameters.critic, repository=repository, name=name_parameter) return api.branch.fetchAll(parameters.critic, repository=repository) @staticmethod def setAsContext(parameters, branch): parameters.setContext(Branches.name, branch) return branch import commits @jsonapi.PrimaryResource class BranchCommits(object): """Commits associated with a branch. This is typically not all commits reachable from a branch. When a branch is first pushed to a repository, all commits reachable only from the branch are associated with it. After that, as the branch is updated, all new commits are also associated with the branch.""" name = "commits" contexts = ("branches", "reviews") value_class = api.commit.Commit exceptions = (api.commit.CommitError,) json = staticmethod(commits.Commits.json) @staticmethod def multiple(parameters): """Retrieve all commits associated with the branch. sort : SORT_KEY : - Sort the commits in topological or date order. In either case, a child commit is always sorted before all of its parent commits, but whenever more than one commit could be emitted without violating this rule, topological order prefers the first parent and its ancestors, while date order prefers the commit with the most recent commit date. 
Topological order is the default.""" branch = parameters.context["branches"] sort_parameter = parameters.getQueryParameter("sort") if sort_parameter is None or sort_parameter == "topological": return branch.commits.topo_ordered elif sort_parameter != "date": raise jsonapi.UsageError("Invalid commits sort parameter: %r" % sort_parameter) return branch.commits.date_ordered ================================================ FILE: src/jsonapi/v1/changesets.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import re import api import jsonapi @jsonapi.PrimaryResource class Changesets(object): """Changesets in the git repositories""" name = "changesets" contexts = (None, "repositories", "reviews") value_class = api.changeset.Changeset exceptions = (api.changeset.ChangesetError,) @staticmethod def json(value, parameters): """Changeset { "id": integer, // the changeset's id "type": string, // the changeset type (direct, custom, merge, conflict) "from_commit": integer, // commit id for changesets from_commit "to_commit": integer, // commit id for changesets to_commit "files": integer[], // a list of all files changed in this changeset "review_state": ReviewState or null, } ReviewState { "review": integer, "comments": integer[], }""" def review_state(review): if not review: return None comments = api.comment.fetchAll( parameters.critic, review=review, changeset=value) try: reviewablefilechanges = api.reviewablefilechange.fetchAll( parameters.critic, review=review, changeset=value) except api.reviewablefilechange.InvalidChangeset: reviewablefilechanges = None return { "review": review, "commments": comments, "reviewablefilechanges": reviewablefilechanges, } review = jsonapi.deduce("v1/reviews", parameters) contributing_commits = value.contributing_commits if contributing_commits: contributing_commits = list(contributing_commits.topo_ordered) return parameters.filtered( "changesets", { "id": value.id, "type": value.type, "from_commit": value.from_commit, "to_commit": value.to_commit, "files": value.files, "contributing_commits": contributing_commits, "review_state": review_state(review) }) @staticmethod def single(parameters, argument): """Retrieve one (or more) changesets. CHANGESET_ID : integer Retrieve a changeset identified by its unique numeric id. repository : REPOSITORY : - Specify repository to access, identified by its unique numeric id or short-name. 
Required unless a repository is specified in the resource path.""" repository = jsonapi.deduce("v1/repositories", parameters) if repository is None: raise jsonapi.UsageError( "repository needs to be specified, ex. &repository=") return Changesets.setAsContext(parameters, api.changeset.fetch( parameters.critic, repository, jsonapi.numeric_id(argument))) @staticmethod def multiple(parameters): """Retrieve (and create if it doesn't exist) a changeset identified by a single commit (changeset type: direct) or any two commits in the same repository (changeset type: custom). from : COMMIT_SHA1 : string Retrieve a changeset with a commit (identified by its SHA-1 sum) as its from_commit. The SHA-1 sum can be abbreviated, but must be at least 4 characters long, and must be unambiguous in the repository. Must be used together with parameter 'to'. to : COMMIT_SHA1 : string Retrieve a changeset with a commit (identified by its SHA-1 sum) as its to_commit. The SHA-1 sum can be abbreviated, but must be at least 4 characters long, and must be unambiguous in the repository. Must be used together with parameter 'from'. commit : COMMIT_SHA1 : string Retrieve a changeset with a commit (identified by its SHA-1 sum) as its to_commit, and the commit's parent as its from_commit. The SHA-1 sum can be abbreviated, but must be at least 4 characters long, and must be unambiguous in the repository. Cannot be combined with 'from' or 'to'. Currently does not support merge commits. repository : REPOSITORY : - Specify repository to access, identified by its unique numeric id or short-name. Required unless a repository is specified in the resource path. review : REVIEW_ID : - Specify a review to calculate an "automatic" changeset for. 
automatic : MODE : string Calculate the changeset commit range automatically based on a review and a mode, which must be "everything", "reviewable" (changes assigned to current user), "relevant" (changes assigned to or files watched by current user) or "pending" (unreviewed changes assigned to current user.) A review must be specified in this case, and none of the 'from', 'to' or 'commit' parameters can be used.""" repository = jsonapi.deduce("v1/repositories", parameters) if repository is None: raise jsonapi.UsageError( "repository needs to be specified, ex. &repository=") def get_commit(name): return jsonapi.from_parameter("v1/commits", name, parameters) from_commit = get_commit("from") to_commit = get_commit("to") single_commit = get_commit("commit") review = jsonapi.deduce("v1/reviews", parameters) automatic = parameters.getQueryParameter("automatic") if automatic is not None: if automatic not in api.changeset.Changeset.AUTOMATIC_MODES: raise jsonapi.UsageError("Invalid automatic mode: %r (must be " "one of 'everything', 'reviewable', " "'relevant' or 'pending'." 
% automatic) if review is None: raise jsonapi.UsageError("A review must be specified when " "an automatic mode is used") if from_commit or to_commit or single_commit: raise jsonapi.UsageError("Explicit commit range cannot be " "specified when an automatic mode is " "used") else: if not (from_commit or to_commit or single_commit): raise jsonapi.UsageError( "Missing required parameters from and to, or commit") if (from_commit is None) != (to_commit is None): raise jsonapi.UsageError("Missing required parameters from and " "to, only one supplied") if from_commit == to_commit and from_commit is not None: raise jsonapi.UsageError("from and to can't be the same commit") return Changesets.setAsContext( parameters, api.changeset.fetch( parameters.critic, repository, from_commit=from_commit, to_commit=to_commit, single_commit=single_commit, review=review, automatic=automatic)) @staticmethod def deduce(parameters): repository = jsonapi.deduce("v1/repositories", parameters) changeset = parameters.context.get(Changesets.name) changeset_parameter = parameters.getQueryParameter("changeset") if changeset_parameter is not None: if changeset is not None: raise jsonapi.UsageError( "Redundant query parameter: changeset=%s" % changeset_parameter) if repository is None: raise jsonapi.UsageError( "repository needs to be specified, ex. 
&repository=") changeset_id = jsonapi.numeric_id(changeset_parameter) changeset = api.changeset.fetch( parameters.critic, repository, changeset_id=changeset_id) return changeset @staticmethod def setAsContext(parameters, changeset): parameters.setContext(Changesets.name, changeset) return changeset ================================================ FILE: src/jsonapi/v1/comments.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api
import jsonapi

@jsonapi.PrimaryResource
class Comments(object):
    """Issues and notes in reviews."""

    name = "comments"
    contexts = (None, "reviews")
    # Values of three classes are served: the common base class plus the two
    # concrete comment kinds.
    value_class = (api.comment.Comment, api.comment.Issue, api.comment.Note)
    exceptions = (api.comment.CommentError, api.reply.ReplyError)

    @staticmethod
    def json(value, parameters):
        """{
             "id": integer,
             "type": "issue" or "note",
             "is_draft": boolean,
             "state": "open", "addressed" or "resolved" (null for notes),
             "review": integer,
             "author": integer,
             "location": Location or null,
             "resolved_by": integer, // user that resolved the issue
             "addressed_by": integer, // commit that addressed the issue
             "timestamp": float,
             "text": string,
             "replies": integer[],
             "draft_changes": DraftChanges or null,
           }

           Location {
             "type": "commit-message" or "file-version",
             "first_line": integer, // first commented line (one-based, inclusive)
             "last_line": integer, // last commented line (one-based, inclusive)
           }

           CommitMessageLocation : Location {
             "commit": integer // commented commit
           }

           FileVersionLocation : Location {
             "file": integer, // commented file
             "changeset": integer or null, // commented changeset
             "commit": integer, // commented commit
           }

           DraftChanges {
             "author": integer, // author of these draft changes
             "is_draft": boolean, // true if comment itself is unpublished
             "reply": integer or null, // unpublished reply
             "new_type": "issue" or "note", // unpublished comment type change
             "new_state": "open", "addressed" or "resolved" (null for notes)
             "new_location": FileVersionLocation or null,
           }"""

        # Issues carry state; plain notes do not, so those keys become null.
        if isinstance(value, api.comment.Issue):
            state = value.state
            resolved_by = value.resolved_by
            addressed_by = value.addressed_by
        else:
            state = None
            resolved_by = None
            addressed_by = None

        def location_json(location):
            # Serialize a comment location, translating file-version locations
            # into the changeset/commit the request is being made against.
            if not location:
                return None
            if location.type == "file-version":
                changeset = jsonapi.deduce("v1/changesets", parameters)
                if not changeset:
                    # FileVersionLocation.translateTo() only allows one, so let
                    # a deduced changeset win over a deduced commit.
                    commit = jsonapi.deduce("v1/commits", parameters)
                else:
                    commit = None
                if changeset or commit:
                    location = location.translateTo(changeset=changeset,
                                                    commit=commit)
                    if not location:
                        # The comment isn't visible in the requested
                        # changeset/commit: omit it from the response.
                        raise jsonapi.ResourceSkipped(
                            "Comment not present in changeset/commit")
            result = {
                "type": location.type,
                "first_line": location.first_line,
                "last_line": location.last_line
            }
            if location.type == "commit-message":
                result.update({
                    "commit": location.commit
                })
            else:
                result.update({
                    "file": location.file,
                    "changeset": location.changeset,
                    "side": location.side,
                    "commit": location.commit,
                    "is_translated": location.is_translated
                })
            return result

        draft_changes = value.draft_changes
        if draft_changes:
            draft_changes_json = {
                "author": draft_changes.author,
                "is_draft": draft_changes.is_draft,
                "reply": draft_changes.reply,
                "new_type": draft_changes.new_type,
                "new_state": None,
                "new_location": None,
            }
            # Only issue draft changes carry state/location transitions.
            if isinstance(draft_changes, api.comment.Issue.DraftChanges):
                draft_changes_json.update({
                    "new_state": draft_changes.new_state,
                    "new_location": location_json(draft_changes.new_location),
                })
        else:
            draft_changes_json = None

        timestamp = jsonapi.v1.timestamp(value.timestamp)

        return parameters.filtered(
            "comments", {
                "id": value.id,
                "type": value.type,
                "is_draft": value.is_draft,
                "state": state,
                "review": value.review,
                "author": value.author,
                "location": location_json(value.location),
                "resolved_by": resolved_by,
                "addressed_by": addressed_by,
                "timestamp": timestamp,
                "text": value.text,
                "replies": value.replies,
                "draft_changes": draft_changes_json,
            })
COMMENT_ID : integer Retrieve a comment identified by its unique numeric id.""" comment = api.comment.fetch( parameters.critic, comment_id=jsonapi.numeric_id(argument)) review = jsonapi.deduce("v1/reviews", parameters) if review and review != comment.review: raise jsonapi.PathError( "Comment does not belong to specified review") return Comments.setAsContext(parameters, comment) @staticmethod def multiple(parameters): """Retrieve all comments in the system (or review.) with_reply : REPLY_ID : integer Retrieve only the comment to which the specified reply is a reply. This is equivalent to accessing /api/v1/comments/COMMENT_ID with that comment's numeric id. When used, any other parameters are ignored. review : REVIEW_ID : integer Retrieve only comments in the specified review. Can only be used if a review is not specified in the resource path. author : AUTHOR : integer or string Retrieve only comments authored by the specified user, identified by the user's unique numeric id or user name. comment_type : TYPE : - Retrieve only comments of the specified type. Valid values are: issue and note. state : STATE : - Retrieve only issues in the specified state. Valid values are: open, addressed and resolved. location_type : LOCATION : - Retrieve only comments in the specified type of location. Valid values are: general, commit-message and file-version. changeset : CHANGESET_ID : integer Retrieve only comments visible in the specified changeset. Can not be combined with the commit parameter. commit : COMMIT : integer or string Retrieve only comments visible in the specified commit, either in its commit message or in the commit's version of a file. Combine with the location_type parameter to select only one of those possibilities. 
Can not be combined with the changeset parameter.""" critic = parameters.critic reply = jsonapi.from_parameter("v1/replies", "with_reply", parameters) if reply: return reply.comment review = jsonapi.deduce("v1/reviews", parameters) author = jsonapi.from_parameter("v1/users", "author", parameters) comment_type_parameter = parameters.getQueryParameter("comment_type") if comment_type_parameter: if comment_type_parameter not in api.comment.Comment.TYPE_VALUES: raise jsonapi.UsageError("Invalid comment-type parameter: %r" % comment_type_parameter) comment_type = comment_type_parameter else: comment_type = None state_parameter = parameters.getQueryParameter("state") if state_parameter: if state_parameter not in api.comment.Issue.STATE_VALUES: raise jsonapi.UsageError( "Invalid state parameter: %r" % state_parameter) state = state_parameter else: state = None location_type_parameter = parameters.getQueryParameter("location_type") if location_type_parameter: if location_type_parameter not in api.comment.Location.TYPE_VALUES: raise jsonapi.UsageError("Invalid location-type parameter: %r" % location_type_parameter) location_type = location_type_parameter else: location_type = None changeset = jsonapi.deduce("v1/changesets", parameters) commit = jsonapi.deduce("v1/commits", parameters) if changeset and commit: raise jsonapi.UsageError( "Incompatible parameters: changeset and commit") return api.comment.fetchAll(critic, review=review, author=author, comment_type=comment_type, state=state, location_type=location_type, changeset=changeset, commit=commit) @staticmethod def create(parameters, value, values, data): critic = parameters.critic user = parameters.context.get("users", critic.actual_user) path = parameters.subresource_path if value and path == ["replies"]: assert isinstance(value, api.comment.Comment) Comments.setAsContext(parameters, value) raise jsonapi.InternalRedirect("v1/replies") if value or values or path: raise jsonapi.UsageError("Invalid POST request") converted 
    @staticmethod
    def create(parameters, value, values, data):
        # Create a new comment (issue or note) in a review, or redirect a
        # POST to .../comments/COMMENT_ID/replies to the replies resource.
        critic = parameters.critic
        # NOTE(review): |user| is assigned but never used below — presumably
        # left over from an earlier revision; confirm before removing.
        user = parameters.context.get("users", critic.actual_user)
        path = parameters.subresource_path

        # POST .../comments/COMMENT_ID/replies: hand over to the replies
        # resource with this comment as context.
        if value and path == ["replies"]:
            assert isinstance(value, api.comment.Comment)
            Comments.setAsContext(parameters, value)
            raise jsonapi.InternalRedirect("v1/replies")

        if value or values or path:
            raise jsonapi.UsageError("Invalid POST request")

        # Validate and convert the incoming JSON payload. "!?": exclusive
        # optional; "?": optional (per jsonapi.convert's convention).
        converted = jsonapi.convert(
            parameters,
            {
                "type": api.comment.Comment.TYPE_VALUES,
                "review!?": api.review.Review,
                "author?": api.user.User,
                "location?": {
                    # Note: "general" not included here; |location| should be
                    # omitted instead.
                    "type": frozenset(["commit-message", "file-version"]),
                    "first_line": int,
                    "last_line": int,
                    "commit?": api.commit.Commit,
                    "file?": api.file.File,
                    "changeset?": api.changeset.Changeset,
                    "side?": frozenset(["old", "new"]),
                },
                "text": str
            },
            data)

        # The review may come from the resource path or from the payload;
        # they must agree if both are present.
        review = jsonapi.deduce("v1/reviews", parameters)

        if not review:
            if "review" not in converted:
                raise jsonapi.UsageError("No review specified")
            review = converted["review"]
        elif "review" in converted and review != converted["review"]:
            raise jsonapi.UsageError("Conflicting reviews specified")

        if "author" in converted:
            author = converted["author"]
        else:
            author = critic.actual_user

        # Used by collectComment() below to sanity-check the created object.
        if converted["type"] == "issue":
            expected_class = api.comment.Issue
        else:
            expected_class = api.comment.Note

        converted_location = converted.get("location")
        if converted_location:
            location_type = converted_location.pop("type")
            # Each location type accepts a different set of attributes.
            if location_type == "commit-message":
                required_fields = set(("first_line", "last_line", "commit"))
                optional_fields = set()
            else:
                required_fields = set(("first_line", "last_line", "file"))
                optional_fields = set(("commit", "changeset", "side"))
            accepted_fields = required_fields | optional_fields
            for required_field in required_fields:
                if required_field not in converted_location:
                    raise jsonapi.InputError(
                        "data.location.%s: missing attribute"
                        % required_field)
            for actual_field in converted_location.keys():
                if actual_field not in accepted_fields:
                    raise jsonapi.InputError(
                        "data.location.%s: unexpected attribute"
                        % actual_field)
            if location_type == "commit-message":
                # NOTE(review): |max_line| is computed but never used —
                # probably meant to validate first_line/last_line against the
                # commit message length; confirm intent.
                max_line = len(
                    converted_location["commit"].message.splitlines())
            else:
                # A file-version location is anchored either to a commit or
                # to a changeset (with a side), never both.
                if "commit" in converted_location:
                    if "changeset" in converted_location:
                        raise jsonapi.InputError(
                            "data.location: only one of commit and changeset "
                            "can be specified")
                    changeset = None
                    side = None
                    commit = converted_location["commit"]
                elif "changeset" not in converted_location:
                    raise jsonapi.InputError(
                        "data.location: one of commit and changeset must be "
                        "specified")
                elif "side" not in converted_location:
                    raise jsonapi.InputError(
                        "data.location.side: missing attribute (required when "
                        "changeset is specified)")
                else:
                    changeset = converted_location["changeset"]
                    side = converted_location["side"]
                    commit = None
            first_line = converted_location["first_line"]
            last_line = converted_location["last_line"]
            if location_type == "commit-message":
                location = api.comment.CommitMessageLocation.make(
                    critic, first_line, last_line,
                    converted_location["commit"])
            else:
                location = api.comment.FileVersionLocation.make(
                    critic, first_line, last_line, converted_location["file"],
                    changeset, side, commit)
        else:
            location = None

        result = []

        def collectComment(comment):
            # Transaction callback: capture the created comment object.
            assert isinstance(comment, expected_class), repr(comment)
            result.append(comment)

        with api.transaction.Transaction(critic) as transaction:
            transaction \
                .modifyReview(review) \
                .createComment(
                    comment_type=converted["type"],
                    author=author,
                    text=converted["text"],
                    location=location,
                    callback=collectComment)

        assert len(result) == 1, repr(result)
        return result[0], None
draft_changes.get("new_state") == "open": modifier.reopenIssue() return value, values @staticmethod def delete(parameters, value, values): critic = parameters.critic path = parameters.subresource_path if value: comments = [value] else: comments = values if path: raise jsonapi.UsageError("Invalid DELETE request") with api.transaction.Transaction(critic) as transaction: for comment in comments: transaction \ .modifyReview(comment.review) \ .modifyComment(comment) \ .delete() @staticmethod def deduce(parameters): comment = parameters.context.get("comments") comment_parameter = parameters.getQueryParameter("comment") if comment_parameter is not None: if comment is not None: raise jsonapi.UsageError( "Redundant query parameter: comment=%s" % comment_parameter) comment = api.comment.fetch( parameters.critic, comment_id=jsonapi.numeric_id(comment_parameter)) return comment @staticmethod def setAsContext(parameters, comment): parameters.setContext(Comments.name, comment) return comment ================================================ FILE: src/jsonapi/v1/commits.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import datetime
import re

import api
import jsonapi

@jsonapi.PrimaryResource
class Commits(object):
    """Commits in the Git repositories."""

    name = "commits"
    contexts = (None, "repositories", "changesets")
    value_class = api.commit.Commit
    exceptions = (api.commit.CommitError, api.repository.InvalidRef)

    @staticmethod
    def json(value, parameters):
        """Commit {
             "id": integer, // the commit's id
             "sha1": string, // the commit's SHA-1 sum
             "summary": string, // (processed) commit summary
             "message": string, // full / raw commit message
             "parents": [integer], // list of commit ids
             "author": {
               "name": string, // author (full)name
               "email": string, // author email
               "timestamp": float, // seconds since epoch
             },
             "committer": {
               "name": string, // committer (full)name
               "email": string, // committer email
               "timestamp": float, // seconds since epoch
             },
           }"""

        parents_ids = [parent.id for parent in value.parents]

        # Important:
        #
        # We're returning parents as integers instead of as api.commit.Commit
        # objects here, to disable expansion of them as linked objects. Not
        # doing this would typically lead to recursively dumping all commits in
        # a repository a lot of the time, which wouldn't generally be useful.
        #
        # Limited sets of commits are returned as api.commit.Commit objects
        # from other resources, like reviews, which does enable expansion of
        # them as linked objects, just not recursively.

        def userAndTimestamp(user_and_timestamp):
            # Serialize a Git author/committer triple.
            timestamp = jsonapi.v1.timestamp(user_and_timestamp.timestamp)
            return {
                "name": user_and_timestamp.name,
                "email": user_and_timestamp.email,
                "timestamp": timestamp
            }

        # BUGFIX: the resource name passed to parameters.filtered() was
        # "branches" (copy-paste from the branches resource), which made
        # field filtering apply the wrong resource's field selection. It
        # must match this resource's name, "commits".
        return parameters.filtered(
            "commits", {
                "id": value.id,
                "sha1": value.sha1,
                "summary": value.summary,
                "message": value.message,
                "parents": parents_ids,
                "author": userAndTimestamp(value.author),
                "committer": userAndTimestamp(value.committer)
            })

    @staticmethod
    def single(parameters, argument):
        """Retrieve one (or more) commits from a Git repository.

           COMMIT_ID : integer
              Retrieve a commit identified by its unique numeric id.

           repository : REPOSITORY : -
              Specify repository to access, identified by its unique numeric
              id or short-name. Required unless a repository is specified in
              the resource path."""
        repository = jsonapi.deduce("v1/repositories", parameters)
        if repository is None:
            raise jsonapi.UsageError(
                "Commit reference must have repository specified.")
        return Commits.setAsContext(parameters, api.commit.fetch(
            repository, commit_id=jsonapi.numeric_id(argument)))

    @staticmethod
    def multiple(parameters):
        """Retrieve a single commit identified by its SHA-1 sum.

           sha1 : COMMIT_SHA1 : string
              Retrieve a commit identified by its SHA-1 sum. The SHA-1 sum can
              be abbreviated, but must be at least 4 characters long, and must
              be unambiguous in the repository.

           repository : REPOSITORY : -
              Specify repository to access, identified by its unique numeric
              id or short-name. Required unless a repository is specified in
              the resource path."""
        sha1_parameter = parameters.getQueryParameter("sha1")
        if sha1_parameter is None:
            raise jsonapi.UsageError("Missing required SHA-1 parameter.")
        # Accept abbreviated SHA-1 sums of 4-40 hex digits; the trailing "$"
        # anchors the match so trailing junk is rejected.
        if not re.match("[0-9A-Fa-f]{4,40}$", sha1_parameter):
            raise jsonapi.UsageError(
                "Invalid SHA-1 parameter: %r" % sha1_parameter)

        repository = jsonapi.deduce("v1/repositories", parameters)
        if repository is None:
            raise jsonapi.UsageError(
                "Commit reference must have repository specified.")
        return api.commit.fetch(repository, sha1=sha1_parameter)

    @staticmethod
    def deduce(parameters):
        """Determine the commit (if any) this request refers to, from the
           resource path or the |commit| query parameter."""
        commit = parameters.context.get(Commits.name)
        commit_parameter = parameters.getQueryParameter("commit")
        if commit_parameter is not None:
            if commit is not None:
                raise jsonapi.UsageError(
                    "Redundant query parameter: commit=%s" % commit_parameter)
            commit = Commits.fromParameter(commit_parameter, parameters)
        return commit

    @staticmethod
    def fromParameter(value, parameters):
        """Resolve a commit from a query parameter value, which may be a
           numeric id or a Git ref."""
        repository = jsonapi.deduce("v1/repositories", parameters)
        commit_id, ref = jsonapi.id_or_name(value)
        return api.commit.fetch(repository, commit_id, ref=ref)

    @staticmethod
    def setAsContext(parameters, commit):
        """Record |commit| in the request context and return it."""
        parameters.setContext(Commits.name, commit)
        return commit
def splitAndDeindentDocstring(item, level, default=None):
    """Return item's docstring as a list of lines with the uniform leading
       indentation stripped.

       |level| is the nesting depth of the docstring (1 for classes, 2 for
       methods); continuation lines are expected to be indented by
       level * 4 + 3 spaces. Returns [default] if there is no docstring and a
       default was given, and None if there is no docstring and no default."""
    docstring = item.__doc__
    if docstring is None:
        return [default] if default else None
    raw_lines = docstring.splitlines()
    indentation_level = level * 4 + 3
    prefix = " " * indentation_level
    deindented = [raw_lines[0]]
    for raw_line in raw_lines[1:]:
        assert not raw_line or raw_line.startswith(prefix)
        deindented.append(raw_line[indentation_level:])
    return deindented

def extractResourceSummary(resource_class):
    """Return the first paragraph of a resource class's docstring joined
       into a single string ("Undocumented" if there is none)."""
    lines = list(splitAndDeindentDocstring(
        resource_class, level=1, default="Undocumented"))
    if "" in lines:
        lines = lines[:lines.index("")]
    return " ".join(lines)

def popParagraph(lines, include_empty=False):
    """Remove and return the first paragraph (lines up to the first empty
       line) from |lines|, mutating it in place.

       A single-line paragraph containing " : " is a parameter definition,
       not prose; it is left in place and None is returned. When
       |include_empty| is true, a trailing "" is appended to the returned
       paragraph."""
    if "" in lines:
        end = lines.index("")
    else:
        end = len(lines)
    paragraph = lines[:end]
    if len(paragraph) == 1 and " : " in paragraph[0]:
        return None
    if include_empty:
        paragraph.append("")
    del lines[:end + 1]
    return paragraph
def copyParagraph(destination, source, as_definition=False,
                  include_empty=None):
    # Pop the first paragraph off |source| and append it to |destination|,
    # optionally formatted as a definition ("= ..." first line). By default
    # a trailing empty line is added unless formatting as a definition.
    if include_empty is None:
        include_empty = not as_definition
    paragraph = popParagraph(source, include_empty=False)
    if paragraph is None:
        paragraph = ["Undocumented!"]
    if as_definition:
        paragraph = (["= " + paragraph[0]]
                     + [" " + line for line in paragraph[1:]])
    if include_empty:
        paragraph.append("")
    destination.extend(paragraph)
    return True

def describeVersion():
    # Render the top-level /api/v1 index page: one entry per registered
    # v1/ resource, with links to per-resource description pages.
    lines = ["Critic JSON API: Version 1",
             "==========================",
             "",
             "Path",
             "----",
             "/api/v1",
             "",
             "Top-level resources",
             "-------------------"]
    supported_resources = []
    for path, resource_class in jsonapi.HANDLERS.items():
        if path.startswith("v1/"):
            supported_resources.append(
                (path[3:], extractResourceSummary(resource_class)))
    supported_resources.sort()
    for path, summary in supported_resources:
        lines.extend(["? /api/v1/[%s][%s]" % (path, path),
                      "= " + summary])
    lines.append("")
    for path, _ in supported_resources:
        lines.append("[%s]: /api/v1?describe=%s" % (path, path))
    # Rendering is done by raising; the page machinery catches this.
    raise page.utils.DisplayFormattedText(lines)

def listAlternativePaths(resource_class):
    # Yield every resource path this class is reachable under, recursing
    # through its declared contexts (e.g. "reviews/comments").
    for context in getattr(resource_class, "contexts", (None,)):
        if context is None:
            yield resource_class.name
            continue
        for context_class in jsonapi.find(context):
            for context_path in listAlternativePaths(context_class):
                yield "%s/%s" % (context_path, resource_class.name)

def describeResource(resource_path):
    # Render the description page for a single resource class, generated
    # from its docstrings (class, single(), multiple() and json()).
    resource_class = jsonapi.lookup("v1/" + resource_path)

    # foo/bar/fie -> foo/A/bar/B/fie
    path_with_arguments = resource_path.split("/")
    placeholders = ("A", "B", "C", "D", "E", "F")
    for index in reversed(range(1, len(path_with_arguments))):
        path_with_arguments.insert(
            index, "%s" % placeholders[index - 1])
    path_with_arguments = "/".join(path_with_arguments)

    lines = [resource_class.name.capitalize(),
             "=" * len(resource_class.name),
             ""]

    lines.extend(["Description",
                  "-----------",
                  ""]
                 + splitAndDeindentDocstring(resource_class, level=1)
                 + [""])

    def search_and_filter_parameters(description, path=path_with_arguments):
        # Consume leading "key : NAME : TYPE" definitions from |description|
        # and emit them as a "Search/filter parameters" section. Returns True
        # if at least one parameter was emitted.
        nparameters = 0
        while description:
            try:
                key, name, expected_type = description[0].split(" : ")
            except ValueError:
                break
            else:
                del description[0]
            if not nparameters:
                lines.extend(["Search/filter parameters",
                              "------------------------"])
            nparameters += 1
            if expected_type == "-":
                expected_type = ""
            else:
                expected_type = " (%s)" % expected_type
            lines.append("? api/v1/%s?%s=%s%s"
                         % (path, key, name, expected_type))
            # The definition line is followed by an empty separator line.
            assert not description.pop(0)
            copyParagraph(lines, description, as_definition=True)
        if nparameters > 1:
            lines.extend(
                ["",
                 "Note: Unless noted otherwise, search/filter parameters",
                 " can be combined."])
        if nparameters > 0:
            lines.append("")
            return True
        return False

    if resource_class.single:
        lines.extend(["Single-resource access",
                      "----------------------",
                      ""])
        description = splitAndDeindentDocstring(resource_class.single, 2)
        if description is None:
            lines.append("Undocumented!")
        else:
            lines.append("? api/v1/%s/ARGUMENT[,ARGUMENT,...]"
                         % path_with_arguments)
            copyParagraph(
                lines, description, as_definition=True, include_empty=True)
            # "NAME : TYPE" definitions describe the accepted argument forms.
            first_variant = True
            while description:
                try:
                    name, expected_type = description[0].split(" : ")
                except ValueError:
                    break
                else:
                    del description[0]
                if first_variant:
                    lines.extend(["Resource argument",
                                  "-----------------"])
                    first_variant = False
                lines.append("? api/v1/%s/%s (%s)"
                             % (path_with_arguments, name, expected_type))
                assert not description.pop(0)
                copyParagraph(lines, description, as_definition=True)
            lines.append("")
            search_and_filter_parameters(
                description, path="%s/ARGUMENT" % path_with_arguments)
            lines.extend(description)
        lines.append("")

    if resource_class.multiple:
        lines.extend(["Multiple-resource access",
                      "------------------------",
                      ""])
        description = splitAndDeindentDocstring(resource_class.multiple, 2)
        if description is None:
            lines.extend(["Undocumented!", ""])
        else:
            lines.append("? api/v1/%s" % path_with_arguments)
            copyParagraph(lines, description, as_definition=True)
            lines.append("")
            while description \
                    and not search_and_filter_parameters(description):
                copyParagraph(lines, description)

    lines.extend(["Resource structure",
                  "------------------",
                  ""])

    structure_lines = splitAndDeindentDocstring(resource_class.json, level=2)
    if structure_lines:
        def massage_line(line):
            # Re-align "// ..." comments in the JSON structure docstring.
            if line.strip().startswith("// "):
                # NOTE(review): the trailing + "" is a no-op; presumably a
                # leftover from an earlier formatting tweak.
                return " // " + line.strip()[3:] + ""
            line, _, description = line.partition(" // ")
            if description:
                line += " %s" % description
            return line
        lines.extend("|| " + massage_line(line)
                     for line in structure_lines)
        lines.append("")
    else:
        lines.extend(["Undocumented!", ""])

    alternative_paths = sorted(
        set(listAlternativePaths(resource_class)) - set([resource_path]))
    if alternative_paths:
        lines.extend(["Alternative paths",
                      "-----------------",
                      "",
                      "This class of resources is also accessible as:",
                      ""])
        for alternative_path in alternative_paths:
            lines.extend(["* api/v1/[%s][%s]"
                          % (alternative_path, alternative_path),
                          ""])
        for alternative_path in alternative_paths:
            lines.append("[%s]: /api/v1?describe=%s"
                         % (alternative_path, alternative_path))
        lines.append("")

    subresources = []
    prefix = ".../" + resource_class.name + "/"
    for path, subresource_class in jsonapi.HANDLERS.items():
        if path.startswith(prefix):
            subresources.append(
                (path[len(prefix):],
                 extractResourceSummary(subresource_class)))
    if subresources:
        lines.extend(["Sub-resources",
                      "-------------",
                      ""])
        for path, summary in sorted(subresources):
            lines.extend(["? /api/v1/%s/[%s][%s]"
                          % (path_with_arguments, path, path),
                          "= " + summary])
        lines.append("")
        for path, _ in subresources:
            lines.append("[%s]: /api/v1?describe=%s/%s"
                         % (path, resource_path, path))

    # Rendering is done by raising; the page machinery catches this.
    raise page.utils.DisplayFormattedText(lines)
EXTENSION_ID : integer Retrieve an extension identified by its unique numeric id.""" value = api.extension.fetch(parameters.critic, jsonapi.numeric_id(argument)) if "users" in parameters.context: if value.publisher != parameters.context["users"]: raise InvalidExtensionId(jsonapi.numeric_id(argument)) return value @staticmethod def multiple(parameters): """Retrieve a single extension by key or all extensions. key : KEY : string Retrieve only the extension with the given key. This is equivalent to accessing /api/v1/extensions/EXTENSION_ID with that extension's numeric id. When used, other parameters are ignored. installed_by : INSTALLED_BY : integer or string Retrieve only extensions installed by the specified user. The user can be identified by numeric id or username.""" key_parameter = parameters.getQueryParameter("key") if key_parameter: return api.extension.fetch(parameters.critic, key=key_parameter) installed_by = jsonapi.from_parameter( "v1/users", "installed_by", parameters) return api.extension.fetchAll( parameters.critic, publisher=jsonapi.deduce("v1/users", parameters), installed_by=installed_by) ================================================ FILE: src/jsonapi/v1/filechanges.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import re

import api
import jsonapi

@jsonapi.PrimaryResource
class FileChanges(object):
    """File changes for a changeset"""

    name = "filechanges"
    contexts = (None, "repositories", "changesets")
    value_class = api.filechange.FileChange
    exceptions = (api.filechange.FileChangeError,)

    @staticmethod
    def json(value, parameters):
        """{
             "file": integer, // the changed file's id
             "changeset": integer, // the changeset's id
             "old_sha1": string, // the sha1 identifying the file's old blob
             "old_mode": string, // the old file permissions
             "new_sha1": string, // the sha1 identifying the file's new blob
             "new_mode": string, // the new file permissions
           }"""
        return parameters.filtered(
            "filechanges", {
                "file": value.file,
                "changeset": value.changeset,
                "old_sha1": value.old_sha1,
                "old_mode": value.old_mode,
                "new_sha1": value.new_sha1,
                "new_mode": value.new_mode
            })

    @staticmethod
    def single(parameters, argument):
        """Retrieve one (or more) filechanges (changed files).

           FILE_ID : integer
              Retrieve the changes to a file identified by its unique numeric
              id.

           changeset : CHANGESET : -
              Retrieve the changes from a changeset identified by its unique
              numeric id.

           repository : REPOSITORY : -
              The repository in which the files exist."""
        changeset = jsonapi.deduce("v1/changesets", parameters)
        file = api.file.fetch(parameters.critic, jsonapi.numeric_id(argument))
        return FileChanges.setAsContext(
            parameters, api.filechange.fetch(
                parameters.critic, changeset, file))

    @staticmethod
    def multiple(parameters):
        """Retrieve all filechanges (changed files) from a changeset.

           changeset : CHANGESET : -
              Retrieve the changes from a changeset identified by its unique
              numeric id.

           repository : REPOSITORY : -
              The repository in which the files exist."""
        changeset = jsonapi.deduce("v1/changesets", parameters)
        return api.filechange.fetchAll(parameters.critic, changeset)

    @staticmethod
    def deduce(parameters):
        # Determine the filechange (if any) this request refers to, from the
        # resource path or the |filechange| query parameter. A changeset is
        # always required to resolve a filechange.
        changeset = jsonapi.deduce("v1/changesets", parameters)
        if changeset is None:
            raise jsonapi.UsageError(
                "changeset needs to be specified, ex. &changeset=")
        filechange = parameters.context.get(FileChanges.name)
        filechange_parameter = parameters.getQueryParameter("filechange")
        if filechange_parameter is not None:
            if filechange is not None:
                raise jsonapi.UsageError(
                    "Redundant query parameter: filechange=%s"
                    % filechange_parameter)
            filechange_id = jsonapi.numeric_id(filechange_parameter)
            filechange = api.filechange.fetch(
                parameters.critic, changeset, filechange_id)
        return filechange

    @staticmethod
    def setAsContext(parameters, filechange):
        # Record |filechange| in the request context and return it.
        parameters.setContext(FileChanges.name, filechange)
        return filechange

    @staticmethod
    def resource_id(value):
        # Filechanges are identified by the id of the file they change.
        return value.file.id
import api
import jsonapi

@jsonapi.PrimaryResource
class Filecontents(object):
    """Context lines for a file in a commit"""

    name = "filecontents"
    contexts = (None, "repositories")
    value_class = api.filecontent.Filecontent
    exceptions = (api.filecontent.FilecontentError,)

    @staticmethod
    def json(value, parameters):
        """{
             "lines": [
               {
                 "parts": [{ "type": ..., "content": string }],
                 "offset": integer,
               }
             ],
           }

           The optional |first| and |last| query parameters (integers) limit
           which lines are returned."""
        def serialize_part(part):
            return {"type": part.type, "content": part.content}

        def serialize_line(line):
            parts = [serialize_part(part) for part in line.parts]
            return {"parts": parts, "offset": line.offset}

        first = parameters.getQueryParameter("first", int, ValueError)
        last = parameters.getQueryParameter("last", int, ValueError)

        serialized = [serialize_line(line)
                      for line in value.getLines(first, last)]
        return parameters.filtered(
            "filecontents", {"lines": serialized})

    @staticmethod
    def multiple(parameters):
        """Retrieve the contents of a file in a given commit. Requires the
           |commit| and |file| parameters to be deducible from the request."""
        commit = jsonapi.deduce("v1/commits", parameters)
        if commit is None:
            raise jsonapi.UsageError(
                "commit must be specified, ex. &commit=")
        file_obj = jsonapi.deduce("v1/files", parameters)
        blob_sha1 = commit.getFileInformation(file_obj).sha1
        return api.filecontent.fetch(
            parameters.critic, commit.repository, blob_sha1, file_obj)
import api import jsonapi @jsonapi.PrimaryResource class Filediffs(object): """Source code for a filechange""" name = "filediffs" contexts = (None, "changesets") value_class = api.filediff.Filediff exceptions = (api.filediff.FilediffError, api.filechange.FileChangeError) @staticmethod def json(value, parameters): """TODO: add documentation""" def part_as_dict(part): if not part.type and not part.state: return part.content dict_part = { "content": part.content } if part.type: dict_part["type"] = part.type if part.state: dict_part["state"] = part.state return dict_part def line_as_dict(line): dict_line = { "type": line.type_string, "old_offset": line.old_offset, "new_offset": line.new_offset, } dict_line["content"] = [part_as_dict(part) for part in line.content] return dict_line def chunk_as_dict(chunk): return { "content": [line_as_dict(line) for line in chunk.lines], "old_offset": chunk.old_offset, "old_count": chunk.old_count, "new_offset": chunk.new_offset, "new_count": chunk.new_count } context_lines = parameters.getQueryParameter( "context_lines", int, ValueError) if context_lines is not None: if context_lines < 0: raise jsonapi.UsageError( "Negative number of context lines not supported") else: # TODO: load this from the user's config (or make it mandatory and # let the client handle config loading). 
context_lines = 3 comment = jsonapi.deduce("v1/comments", parameters) if comment is not None: comments = [comment] ignore_chunks = True else: review = jsonapi.deduce("v1/reviews", parameters) if review is not None: comments = api.comment.fetchAll( parameters.critic, review=review, changeset=value.filechange.changeset) else: comments = None ignore_chunks = False macro_chunks = value.getMacroChunks( context_lines, comments, ignore_chunks) dict_chunks = [chunk_as_dict(chunk) for chunk in macro_chunks] return parameters.filtered( "filediffs", { "file": value.filechange, "changeset": value.filechange.changeset, "macro_chunks": dict_chunks, "old_count": value.old_count, "new_count": value.new_count }) @staticmethod def single(parameters, argument): """TODO: add documentation""" changeset = jsonapi.deduce("v1/changesets", parameters) if changeset is None: raise jsonapi.UsageError( "changeset needs to be specified, ex. &changeset=") repository = jsonapi.deduce("v1/repositories", parameters) if repository is None: raise jsonapi.UsageError( "repository needs to be specified, " "ex. &repository=") file = api.file.fetch(parameters.critic, jsonapi.numeric_id(argument)) filechange = api.filechange.fetch(parameters.critic, changeset, file) return api.filediff.fetch(parameters.critic, filechange) @staticmethod def multiple(parameters): """TODO: add documentation""" changeset = jsonapi.deduce("v1/changesets", parameters) if changeset is None: raise jsonapi.UsageError( "changeset needs to be specified, ex. &changeset=") repository = jsonapi.deduce("v1/repositories", parameters) if repository is None: raise jsonapi.UsageError( "repository needs to be specified, " "ex. 
&repository=") return api.filediff.fetchAll(parameters.critic, changeset) @staticmethod def resource_id(value): return value.filechange.file.id ================================================ FILE: src/jsonapi/v1/files.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import api import jsonapi @jsonapi.PrimaryResource class Files(object): """Files (path <=> id mappings) in the system.""" name = "files" value_class = api.file.File exceptions = api.file.FileError @staticmethod def json(value, parameters): """{ "id": integer, "path": string }""" return parameters.filtered( "files", { "id": value.id, "path": value.path }) @staticmethod def single(parameters, argument): """Retrieve one (or more) files. FILE_ID : integer Retrieve a file identified by its unique numeric id.""" return api.file.fetch( parameters.critic, file_id=jsonapi.numeric_id(argument)) @staticmethod def multiple(parameters): """Retrieve a file by its path. path : PATH : string Retrieve the file with the specified path. 
(Required)""" path = parameters.getQueryParameter("path") if path is None: raise UsageError("No path parameter specified") return api.file.fetch(parameters.critic, path=path) @staticmethod def fromParameter(value, parameters): file_id, path = jsonapi.id_or_name(value) return api.file.fetch(parameters.critic, file_id, path=path) @staticmethod def deduce(parameters): file_obj = parameters.context.get((Files.name)) file_parameter = parameters.getQueryParameter("file") if file_parameter is not None: if file_obj is not None: raise jsonapi.UsageError( "Redundant query parameter: file=%s" % file_parameter) file_id = Files.fromParameter(file_parameter, parameters) file_obj = api.file.fetch(parameters.critic, file_id) return file_obj @staticmethod def setAsContext(parameters, file_obj): parameters.setContext(Files.name, file_obj) return file_obj ================================================ FILE: src/jsonapi/v1/labeledaccesscontrolprofiles.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import itertools import api import jsonapi api_module = api.labeledaccesscontrolprofile @jsonapi.PrimaryResource class LabeledAccessControlProfiles(object): """The labeled access control profile selectorss of this system.""" name = "labeledaccesscontrolprofiles" contexts = (None, "accesscontrolprofiles") value_class = api_module.LabeledAccessControlProfile exceptions = api_module.LabeledAccessControlProfileError @staticmethod def json(value, parameters, linked): """LabeledAccessControlProfile { "labels": [string], "profile": integer }""" # Make sure that only administrator users can access. api.PermissionDenied.raiseUnlessAdministrator(parameters.critic) return parameters.filtered("labeledaccesscontrolprofiles", { "labels": value.labels, "profile": value.profile.id, }) @staticmethod def single(parameters, argument): """Retrieve one (or more) access control profiles. LABELS : string Retrieve an access control profile identified by the profile selectors's set of labels. Separate labels with pipe ('|') characters.""" return api.accesscontrolprofile.fetch( parameters.critic, labels=argument.split("|")) @staticmethod def multiple(parameters): """Retrieve all labeled access control profile selectors in the system. profile : PROFILE_ID : integer Include only selectors selecting the given profile, identified by its unique numeric id.""" profile = jsonapi.deduce("v1/accesscontrolprofiles", parameters) return api.labeledaccesscontrolprofile.fetchAll( parameters.critic, profile=profile) @staticmethod def create(parameters, value, values, data): critic = parameters.critic user = parameters.context.get("users", critic.actual_user) if parameters.subresource_path: raise jsonapi.UsageError("Invalid POST request") # Create a labeled access control profile selector. 
assert not (value or values) converted = jsonapi.convert(parameters, { "labels": [str], "profile": api.accesscontrolprofile.AccessControlProfile }, data) result = [] def collectLabeledAccessControlProfile(labeled_profile): assert isinstance( labeled_profile, api.labeledaccesscontrolprofile.LabeledAccessControlProfile) result.append(labeled_profile) with api.transaction.Transaction(critic) as transaction: transaction.createLabeledAccessControlProfile( converted["labels"], converted["profile"], callback=collectLabeledAccessControlProfile) assert len(result) == 1 return result[0], None ================================================ FILE: src/jsonapi/v1/rebases.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api
import jsonapi

@jsonapi.PrimaryResource
class Rebases(object):
    """The review rebases in this system."""

    name = "rebases"
    contexts = (None, "reviews")
    # Two concrete value types share this resource; json() distinguishes them.
    value_class = (api.log.rebase.MoveRebase,
                   api.log.rebase.HistoryRewrite)
    exceptions = api.log.rebase.RebaseError

    @staticmethod
    def json(value, parameters):
        """{
             "id": integer,
             "review": integer,
             "creator": integer,
             "type": "history-rewrite" or "move"
             "old_head": integer,
             "new_head": integer,
             // if |type| is "move":
             "old_upstream": integer,
             "new_upstream": integer,
             "equivalent_merge": integer or null,
             "replayed_rebase": integer or null,
           }"""
        old_head = value.old_head
        new_head = value.new_head
        data = { "id": value.id,
                 "review": value.review,
                 "creator": value.creator,
                 "old_head": old_head,
                 "new_head": new_head }
        # HistoryRewrite carries no upstream information; only MoveRebase
        # has the extra upstream/merge/replay fields.
        if isinstance(value, api.log.rebase.HistoryRewrite):
            data.update({ "type": "history-rewrite" })
        else:
            data.update({ "type": "move",
                          "old_upstream": value.old_upstream,
                          "new_upstream": value.new_upstream,
                          "equivalent_merge": value.equivalent_merge,
                          "replayed_rebase": value.replayed_rebase })
        return parameters.filtered("rebases", data)

    @staticmethod
    def single(parameters, argument):
        """Retrieve one (or more) rebases in this system.

           REBASE_ID : integer
             Retrieve a rebase identified by its unique numeric id."""
        return Rebases.setAsContext(parameters, api.log.rebase.fetch(
            parameters.critic, rebase_id=jsonapi.numeric_id(argument)))

    @staticmethod
    def multiple(parameters):
        """Retrieve all rebases in this system.

           review : REVIEW_ID : -
             Include only rebases of one review, identified by the review's
             unique numeric id."""
        review = jsonapi.deduce("v1/reviews", parameters)
        return api.log.rebase.fetchAll(parameters.critic, review=review)

    @staticmethod
    def create(parameters, value, values, data):
        # Prepare a pending rebase of the deduced review.  Exactly one of
        # `new_upstream` (=> move rebase) or `history_rewrite` (=> history
        # rewrite) must be supplied.
        critic = parameters.critic
        user = critic.actual_user

        converted = jsonapi.convert(
            parameters,
            { "new_upstream?": str,
              "history_rewrite?": bool },
            data)

        new_upstream = converted.get("new_upstream")
        history_rewrite = converted.get("history_rewrite")

        # Both-or-neither is invalid: the two argument kinds select between
        # the two rebase types.
        if (new_upstream is None) == (history_rewrite is None):
            raise jsonapi.UsageError(
                "Exactly one of the arguments new_upstream and "
                "history_rewrite must be specified.")
        if history_rewrite == False:
            raise jsonapi.UsageError(
                "history_rewrite must be true, or omitted.")

        review = jsonapi.deduce("v1/reviews", parameters)
        if review is None:
            raise jsonapi.UsageError(
                "review must be specified when preparing a rebase")

        if history_rewrite is not None:
            expected_type = api.log.rebase.HistoryRewrite
        else:
            expected_type = api.log.rebase.MoveRebase

        result = []

        def collectRebase(rebase):
            # The transaction hands the prepared rebase back via callback.
            assert isinstance(rebase, expected_type), repr(rebase)
            result.append(rebase)

        with api.transaction.Transaction(critic) as transaction:
            transaction \
                .modifyReview(review) \
                .prepareRebase(
                    user, new_upstream, history_rewrite,
                    callback=collectRebase)

        assert len(result) == 1, repr(result)
        return result[0], None

    @staticmethod
    def delete(parameters, value, values):
        # Cancel a prepared (pending) rebase.
        critic = parameters.critic

        if value is None:
            raise jsonapi.UsageError(
                "Only one rebase can currently be deleted per request")

        rebase = value

        with api.transaction.Transaction(critic) as transaction:
            transaction \
                .modifyReview(rebase.review) \
                .cancelRebase(rebase)

    @staticmethod
    def setAsContext(parameters, rebase):
        parameters.setContext(Rebases.name, rebase)
        # Also set the rebase's review (and repository and branch) as context.
        jsonapi.v1.reviews.Reviews.setAsContext(parameters, rebase.review)
        return rebase


================================================
FILE: src/jsonapi/v1/replies.py
================================================

# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2017 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import api
import jsonapi

@jsonapi.PrimaryResource
class Replies(object):
    """Replies to comments in reviews."""

    name = "replies"
    contexts = (None, "comments")
    value_class = api.reply.Reply
    exceptions = (api.comment.CommentError, api.reply.ReplyError)

    @staticmethod
    def json(value, parameters):
        """{
             "id": integer,
             "is_draft": boolean,
             "author": integer,
             "timestamp": float,
             "text": string,
           }"""
        timestamp = jsonapi.v1.timestamp(value.timestamp)
        return parameters.filtered(
            "replies", {
                "id": value.id,
                "is_draft": value.is_draft,
                "author": value.author,
                "timestamp": timestamp,
                "text": value.text
            })

    @staticmethod
    def single(parameters, argument):
        """Retrieve one (or more) replies to comments.

           REPLY_ID : integer
             Retrieve a reply identified by its unique numeric id."""
        reply = api.reply.fetch(
            parameters.critic, reply_id=jsonapi.numeric_id(argument))
        # If a comment context is present, it must match the fetched reply.
        comment = jsonapi.deduce("v1/comments", parameters)
        if comment and comment != reply.comment:
            raise jsonapi.PathError(
                "Reply does not belong to specified comment")
        return reply

    @staticmethod
    def multiple(parameters):
        """Retrieve replies to a comment.

           comment : COMMENT_ID : integer
             Retrieve all replies to the specified comment."""
        comment = jsonapi.deduce("v1/comments", parameters)
        if not comment:
            raise jsonapi.UsageError("A comment must be identified.")
        return comment.replies

    @staticmethod
    def create(parameters, value, values, data):
        # Add a reply to a comment.  The comment comes either from context /
        # query parameters or from the request body, and the two must agree.
        critic = parameters.critic
        # NOTE(review): `user` is currently unused; kept for parity with the
        # other resources' create() implementations.
        user = parameters.context.get("users", critic.actual_user)

        if value or values:
            raise jsonapi.UsageError("Invalid POST request")

        converted = jsonapi.convert(
            parameters,
            { "comment?": api.comment.Comment,
              "author?": api.user.User,
              "text": str },
            data)

        comment = jsonapi.deduce("v1/comments", parameters)

        if not comment:
            if "comment" not in converted:
                raise jsonapi.UsageError("No comment specified")
            comment = converted["comment"]
        elif "comment" in converted and comment != converted["comment"]:
            raise jsonapi.UsageError("Conflicting comments specified")

        if "author" in converted:
            author = converted["author"]
        else:
            author = critic.actual_user

        if not converted["text"].strip():
            raise jsonapi.UsageError("Empty reply")

        result = []

        def collectReply(reply):
            # The transaction hands the created reply back via callback.
            assert isinstance(reply, api.reply.Reply)
            result.append(reply)

        with api.transaction.Transaction(critic) as transaction:
            transaction \
                .modifyReview(comment.review) \
                .modifyComment(comment) \
                .addReply(
                    author=author,
                    text=converted["text"],
                    callback=collectReply)

        assert len(result) == 1
        return result[0], None

    @staticmethod
    def update(parameters, value, values, data):
        # Replace the text of one or more replies.
        critic = parameters.critic
        path = parameters.subresource_path

        if value:
            replies = [value]
        else:
            replies = values

        if path:
            raise jsonapi.UsageError("Invalid PUT request")

        converted = jsonapi.convert(
            parameters,
            { "text": str },
            data)

        with api.transaction.Transaction(critic) as transaction:
            for reply in replies:
                transaction \
                    .modifyReview(reply.comment.review) \
                    .modifyComment(reply.comment) \
                    .modifyReply(reply) \
                    .setText(converted["text"])

        return value, values

    @staticmethod
    def delete(parameters, value, values):
        # Delete one or more replies.
        critic = parameters.critic
        path = parameters.subresource_path

        if value:
            replies = [value]
        else:
            replies = values

        if path:
            raise jsonapi.UsageError("Invalid DELETE request")

        with api.transaction.Transaction(critic) as transaction:
            for reply in replies:
                transaction \
                    .modifyReview(reply.comment.review) \
                    .modifyComment(reply.comment) \
                    .modifyReply(reply) \
                    .delete()

    @staticmethod
    def fromParameter(value, parameters):
        return api.reply.fetch(parameters.critic, jsonapi.numeric_id(value))


================================================
FILE: src/jsonapi/v1/repositories.py
================================================

# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import api
import jsonapi

def from_argument(parameters, argument):
    # The argument is either a numeric repository id or a short name.
    repository_id, name = jsonapi.id_or_name(argument)
    return api.repository.fetch(
        parameters.critic, repository_id=repository_id, name=name)

@jsonapi.PrimaryResource
class Repositories(object):
    """The Git repositories on this system."""

    name = "repositories"
    value_class = api.repository.Repository
    exceptions = (api.repository.RepositoryError,)

    @staticmethod
    def json(value, parameters):
        """Repository {
             "id": integer, // the repository's id
             "name": string, // the repository's (unique) short name
             "path": string, // absolute file-system path
             "relative_path": string, // relative file-system path
             "url": string, // the repository's URL
           }"""
        return parameters.filtered(
            "repositories", {
                "id": value.id,
                "name": value.name,
                "path": value.path,
                "relative_path": value.relative_path,
                "url": value.url
            })

    @staticmethod
    def single(parameters, argument):
        """Retrieve one (or more) repositories on this system.

           REPOSITORY_ID : integer
             Retrieve a repository identified by its unique numeric id."""
        return Repositories.setAsContext(parameters, api.repository.fetch(
            parameters.critic, repository_id=jsonapi.numeric_id(argument)))

    @staticmethod
    def multiple(parameters):
        """Retrieve a single named repository or all repositories on this
           system.

           name : SHORT_NAME : string
             Retrieve a repository identified by its unique short-name.  This
             is equivalent to accessing /api/v1/repositories/REPOSITORY_ID
             with that repository's numeric id.  When used, any other
             parameters are ignored.

           filter : highlighted : -
             If specified, retrieve only "highlighted" repositories.  These
             are repositories that are deemed of particular interest for the
             signed-in user.  (If no user is signed in, no repositories are
             highlighted.)"""
        # `name` short-circuits everything else, as documented above.
        name_parameter = parameters.getQueryParameter("name")
        if name_parameter:
            return api.repository.fetch(parameters.critic,
                                        name=name_parameter)
        filter_parameter = parameters.getQueryParameter("filter")
        if filter_parameter is not None:
            if filter_parameter == "highlighted":
                repositories = api.repository.fetchHighlighted(
                    parameters.critic)
            else:
                raise jsonapi.UsageError(
                    "Invalid repository filter parameter: %r"
                    % filter_parameter)
        else:
            repositories = api.repository.fetchAll(parameters.critic)
        return repositories

    @staticmethod
    def deduce(parameters):
        """Return the repository identified by context, the `repository`
           query parameter, or (failing both) the deduced review; None if
           nothing identifies one."""
        repository = parameters.context.get("repositories")
        repository_parameter = parameters.getQueryParameter("repository")
        if repository_parameter is not None:
            if repository is not None:
                raise jsonapi.UsageError(
                    "Redundant query parameter: repository=%s"
                    % repository_parameter)
            repository = from_argument(parameters, repository_parameter)
        if repository is not None:
            return repository
        # Fall back on the review's repository, if a review is identified.
        review = jsonapi.deduce("v1/reviews", parameters)
        if review is not None:
            return review.repository

    @staticmethod
    def fromParameter(value, parameters):
        repository_id, name = jsonapi.id_or_name(value)
        return api.repository.fetch(parameters.critic, repository_id,
                                    name=name)

    @staticmethod
    def setAsContext(parameters, repository):
        parameters.setContext(Repositories.name, repository)
        return repository


================================================
FILE: src/jsonapi/v1/reviewablefilechanges.py
================================================

# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2017 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import api import jsonapi @jsonapi.PrimaryResource class ReviewableFileChanges(object): """Reviewable file changes""" name = "reviewablefilechanges" contexts = (None, "reviews", "changesets") value_class = api.reviewablefilechange.ReviewableFileChange exceptions = api.reviewablefilechange.ReviewableFileChangeError @staticmethod def json(value, parameters): """{ "id": integer, // the object's unique id "review": integer, "changeset": integer, "file": integer, "deleted_lines": integer, "inserted_lines": integer, "is_reviewed": boolean, "reviewed_by": integer, "assigned_reviewers": integer[], "draft_changes": DraftChanges or null, } DraftChanges { "author": integer, // author of draft changes "new_is_reviewed": boolean, "new_reviewed_by": integer, }""" draft_changes = value.draft_changes if draft_changes: draft_changes_json = { "author": draft_changes.author, "new_is_reviewed": draft_changes.new_is_reviewed, "new_reviewed_by": draft_changes.new_reviewed_by, } else: draft_changes_json = None return parameters.filtered( "reviewablefilechanges", { "id": value.id, "review": value.review, "changeset": value.changeset, "file": value.file, "deleted_lines": value.deleted_lines, "inserted_lines": value.inserted_lines, "is_reviewed": value.is_reviewed, "reviewed_by": value.reviewed_by, "assigned_reviewers": jsonapi.sorted_by_id( value.assigned_reviewers), "draft_changes": draft_changes_json, }) @staticmethod def single(parameters, argument): """Retrieve one (or more) reviewable file change. 
FILECHANGE_ID : integer Retrieve the reviewable changes to a file in a commit identified by its unique numeric id.""" return api.reviewablefilechange.fetch( parameters.critic, jsonapi.numeric_id(argument)) @staticmethod def multiple(parameters): """Retrieve all reviewable file changes in a review. review : REVIEW_ID : - Retrieve the reviewable changes in the specified review. changeset : CHANGESET_ID : - Retrieve the reviewable changes in the specified changeset. file : FILE : - Retrieve the reviewable changes in the specified file only. assignee : USER : - Retrieve reviewable changes assigned to the specified user only. state : STATE : "pending" or "reviewed" Retrieve reviewable changes in the specified state only.""" review = jsonapi.deduce("v1/reviews", parameters) changeset = jsonapi.deduce("v1/changesets", parameters) if not review: raise jsonapi.UsageError("Missing required parameter: review") file = jsonapi.from_parameter("v1/files", "file", parameters) assignee = jsonapi.from_parameter("v1/users", "assignee", parameters) state_parameter = parameters.getQueryParameter("state") if state_parameter is None: is_reviewed = None else: if state_parameter not in ("pending", "reviewed"): raise jsonapi.UsageError( "Invalid parameter value: state=%r " "(value values are 'pending' and 'reviewed')" % state_parameter) is_reviewed = state_parameter == "reviewed" return api.reviewablefilechange.fetchAll( parameters.critic, review, changeset, file, assignee, is_reviewed) @staticmethod def update(parameters, value, values, data): critic = parameters.critic filechanges = [value] if value else values reviews = set(filechange.review for filechange in filechanges) if len(reviews) > 1: raise jsonapi.UsageError("Multiple reviews updated") review = reviews.pop() converted = jsonapi.convert( parameters, { "draft_changes": { "new_is_reviewed": bool, }, }, data) is_reviewed = converted["draft_changes"]["new_is_reviewed"] with api.transaction.Transaction(critic) as transaction: modifier 
= transaction \ .modifyReview(review) if is_reviewed: for filechange in filechanges: modifier.markChangeAsReviewed(filechange) else: for filechange in filechanges: modifier.markChangeAsPending(filechange) ================================================ FILE: src/jsonapi/v1/reviews.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import api import jsonapi @jsonapi.PrimaryResource class Reviews(object): """The reviews in this system.""" name = "reviews" value_class = api.review.Review exceptions = (api.review.InvalidReviewId, api.repository.RepositoryError) lists = ("issues", "notes") @staticmethod def json(value, parameters): """Review { "id": integer, "state": string, "summary": string, "description": string or null, "repository": integer, "branch": integer, "owners": integer[], "active_reviewers": integer[], "assigned_reviewers": integer[], "watchers": integer[], "partitions": Partition[], "issues": integer[], "notes": integer[], "pending_rebase": integer or null, "progress": float, "progress_per_commit": CommitChangeCount[], } Partition { "commits": integer[], "rebase": integer or null, } CommitChangeCount { "commit_id": integer, "total_changes": integer, "reviewed_changes": integer, }""" def change_counts_as_dict(change_counts): return [{ "commit_id": change_count.commit_id, "total_changes": change_count.total_changes, 
"reviewed_changes": change_count.reviewed_changes, } for change_count in change_counts] partitions = [] def add_partition(partition): if partition.following: partition_rebase = partition.following.rebase else: partition_rebase = None partitions.append({ "commits": list(partition.commits.topo_ordered), "rebase": partition_rebase }) if partition.following: add_partition(partition.following.partition) add_partition(value.first_partition) return parameters.filtered( "reviews", { "id": value.id, "state": value.state, "summary": value.summary, "description": value.description, "repository": value.repository, "branch": value.branch, "owners": jsonapi.sorted_by_id(value.owners), "active_reviewers": jsonapi.sorted_by_id( value.active_reviewers), "assigned_reviewers": jsonapi.sorted_by_id( value.assigned_reviewers), "watchers": jsonapi.sorted_by_id(value.watchers), "partitions": partitions, "issues": jsonapi.sorted_by_id(value.issues), "notes": jsonapi.sorted_by_id(value.notes), "pending_rebase": value.pending_rebase, "progress": value.total_progress, "progress_per_commit": change_counts_as_dict(value.progress_per_commit)}) @staticmethod def single(parameters, argument): """Retrieve one (or more) reviews in this system. REVIEW_ID : integer Retrieve a review identified by its unique numeric id.""" return Reviews.setAsContext(parameters, api.review.fetch( parameters.critic, review_id=jsonapi.numeric_id(argument))) @staticmethod def multiple(parameters): """Retrieve all reviews in this system. repository : REPOSITORY : - Include only reviews in one repository, identified by the repository's unique numeric id or short-name. state : STATE[,STATE,...] : - Include only reviews in the specified state. 
Valid values are: open, closed, dropped.""" repository = jsonapi.deduce("v1/repositories", parameters) state_parameter = parameters.getQueryParameter("state") if state_parameter: state = set(state_parameter.split(",")) invalid = state - api.review.Review.STATE_VALUES if invalid: raise jsonapi.UsageError( "Invalid review state values: %s" % ", ".join(map(repr, sorted(invalid)))) else: state = None return api.review.fetchAll( parameters.critic, repository=repository, state=state) @staticmethod def create(parameters, value, values, data): critic = parameters.critic path = parameters.subresource_path review = value if review: if path == ["issues"] or path == ["notes"]: Reviews.setAsContext(parameters, review) if path == ["issues"]: comment_type = "issue" else: comment_type = "note" jsonapi.ensure(data, "type", comment_type) raise jsonapi.InternalRedirect("v1/comments") raise jsonapi.UsageError("Review creation not yet supported") @staticmethod def deduce(parameters): review = parameters.context.get("reviews") review_parameter = parameters.getQueryParameter("review") if review_parameter is not None: if review is not None: raise jsonapi.UsageError( "Redundant query parameter: review=%s" % review_parameter) review = api.review.fetch( parameters.critic, review_id=jsonapi.numeric_id(review_parameter)) return review @staticmethod def setAsContext(parameters, review): parameters.setContext(Reviews.name, review) # Also set the review's repository and branch as context. 
jsonapi.v1.repositories.Repositories.setAsContext( parameters, review.repository) jsonapi.v1.branches.Branches.setAsContext(parameters, review.branch) return review ================================================ FILE: src/jsonapi/v1/reviewsummaries.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2017 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import api import jsonapi @jsonapi.PrimaryResource class ReviewSummaries(object): """Review summaries""" name = "reviewsummaries" value_class = api.reviewsummary.ReviewSummaryContainer exceptions = (api.reviewsummary.ReviewSummaryError,) @staticmethod def json(value, parameters): """ReviewSummaries { "reviews": ReviewSummary[], "more": bool // true if there are more reviews than the ones retrieved } ReviewSummary { "review": integer, "summary": string, // the review's summary (text) "latest_change": integer, // the timestamp of the latest commit or comment "progress": float, // reviewing progress as a number between 0 and 1 "issues": integer, // the number of open issues in the review }""" def review_summary_as_dict(review_summary): return { "review": review_summary.review, "summary": review_summary.review.summary, "latest_change": review_summary.latest_change, "progress": review_summary.review.total_progress, "issues": len(review_summary.review.open_issues)} return parameters.filtered( "review summaries", { "reviews": 
[review_summary_as_dict(review_summary) for review_summary in value.reviews], "more": value.more}) @staticmethod def multiple(parameters): """Retrieve review summaries.""" countParameter = parameters.getQueryParameter("count") offsetParameter = parameters.getQueryParameter("offset") count = int(countParameter) if countParameter is not None else None offset = int(offsetParameter) if offsetParameter is not None else None if count < 1 and count is not None: jsonapi.InputError("count parameter must be bigger than 0") if offset < 0 and offset is not None: jsonapi.InputError("offset can't be less than 0") search_type = parameters.getQueryParameter("type") user = parameters.critic.actual_user if user is None and search_type != "all": raise jsonapi.PermissionDenied( "You do not have the rights to access this resource") if search_type not in api.reviewsummary.ReviewSummary.TYPE_VALUES: raise jsonapi.UsageError( "Review summary type parameter must be specified and set to " "one of: " + \ ", ".join(api.reviewsummary.ReviewSummary.TYPE_VALUES)) return api.reviewsummary.fetchMany(parameters.critic, search_type, user, count, offset) ================================================ FILE: src/jsonapi/v1/sessions.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2016 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import api import jsonapi import auth class Session(object): def __init__(self, user, session_type): self.user = user self.session_type = session_type class SessionError(jsonapi.Error): http_status = 403 title = "Session error" @jsonapi.PrimaryResource class Sessions(object): """The session of the accessing client.""" name = "sessions" value_class = Session anonymous_create = True @staticmethod def json(value, parameters): """Session { "user": integer, // the signed in user's id, or null "type": "normal" or "accesstoken", or null, "fields": [ { "identifier": string, // unique field identifier "label": string, // UI label "hidden": boolean, // true for passwords "description": string or null }, ... ] }""" fields = [] for db_field in auth.DATABASE.getFields(): hidden, identifier, label = db_field[:3] if len(db_field) == 4: description = db_field[3] else: description = None fields.append({ "identifier": identifier, "label": label, "hidden": hidden, "description": description }) return parameters.filtered( "sessions", { "user": value.user, "type": value.session_type, "fields": fields }) @staticmethod def single(parameters, argument): """Retrieve the current session. 
CURRENT : "current" Retrieve the current session.""" if argument != "current": raise jsonapi.UsageError('Resource argument must be "current"') user = parameters.critic.actual_user if parameters.critic.access_token: session_type = "accesstoken" elif user: session_type = "normal" else: session_type = None return Session(user, session_type) @staticmethod def create(parameters, value, values, data): fields = auth.DATABASE.getFields() converted = jsonapi.convert(parameters, { fieldname: str for hidden, fieldname, label in fields }, data) critic = parameters.critic try: auth.DATABASE.authenticate(critic.database, converted) except auth.AuthenticationFailed as error: raise SessionError(error.message) except auth.WrongPassword: raise SessionError("Wrong password") auth.createSessionId( critic.database, parameters.req, critic.database.user) return Session(critic.actual_user, "normal"), None @staticmethod def delete(parameters, value, values): critic = parameters.critic auth.deleteSessionId( critic.database, parameters.req, critic.database.user) ================================================ FILE: src/jsonapi/v1/users.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import itertools

import api
import jsonapi

@jsonapi.PrimaryResource
class Users(object):
    """The users of this system."""

    name = "users"
    value_class = api.user.User
    exceptions = (api.user.UserError,)

    @staticmethod
    def json(value, parameters):
        """User {
             "id": integer, // the user's id
             "name": string, // the user's unique user name
             "fullname": string, // the user's full name
             "status": string, // the user's status: "current", "absent" or "retired"
             "email": string, // the user's primary email address
           }"""
        return parameters.filtered(
            "users",
            { "id": value.id,
              "name": value.name,
              "fullname": value.fullname,
              "status": value.status,
              "email": value.email })

    @staticmethod
    def single(parameters, argument):
        """Retrieve one (or more) users of this system.

           USER_ID : integer or "me"

           Retrieve a user identified by the user's unique numeric id, or the
           identifier "me" to retrieve the current user."""
        if argument == "me":
            user = parameters.critic.actual_user
            if user is None:
                raise api.user.UserError("'users/me' (not signed in)")
        else:
            user = api.user.fetch(
                parameters.critic, user_id=jsonapi.numeric_id(argument))
        return Users.setAsContext(parameters, user)

    @staticmethod
    def multiple(parameters):
        """Retrieve a single named user or all users of this system.

           name : NAME : string

           Retrieve only the user with the given name.  This is equivalent to
           accessing /api/v1/users/USER_ID with that user's numeric id.  When
           used, any other parameters are ignored.

           status : USER_STATUS[,USER_STATUS,...] : string

           Include only users whose status is one of the specified.  Valid
           values are: current, absent, retired.

           sort : SORT_KEY : string

           Sort the returned users by the specified key.  Valid values are:
           id, name, fullname, email."""
        name_parameter = parameters.getQueryParameter("name")
        if name_parameter:
            return api.user.fetch(parameters.critic, name=name_parameter)
        status_parameter = parameters.getQueryParameter("status")
        if status_parameter:
            status = set(status_parameter.split(","))
            invalid = status - api.user.User.STATUS_VALUES
            if invalid:
                raise jsonapi.UsageError(
                    "Invalid user status values: %s"
                    % ", ".join(map(repr, sorted(invalid))))
        else:
            status = None
        sort_parameter = parameters.getQueryParameter("sort")
        if sort_parameter:
            if sort_parameter not in ("id", "name", "fullname", "email"):
                raise jsonapi.UsageError("Invalid user sort parameter: %r"
                                         % sort_parameter)
            sort_key = lambda user: getattr(user, sort_parameter)
        else:
            sort_key = lambda user: user.id
        return sorted(api.user.fetchAll(parameters.critic, status=status),
                      key=sort_key)

    @staticmethod
    def update(parameters, value, values, data):
        """Update a user's editable attributes (currently only `fullname`)."""
        if values and len(values) != 1:
            # BUG FIX: the original raised the bare name `UsageError`, which is
            # not defined in this module (it lives in `jsonapi`), so this path
            # raised NameError instead of the intended error.
            raise jsonapi.UsageError("Updating multiple users not supported")

        critic = parameters.critic

        if values:
            value = values[0]

        converted = jsonapi.convert(
            parameters,
            { "fullname?": str },
            data)

        with api.transaction.Transaction(critic) as transaction:
            if "fullname" in converted:
                new_fullname = converted["fullname"].strip()
                if not new_fullname:
                    raise jsonapi.InputError("Empty new fullname")
                transaction.modifyUser(value).setFullname(new_fullname)

    @staticmethod
    def deduce(parameters):
        # Resolve the user either from the request context or from the `user`
        # query parameter; supplying both is a usage error.
        user = parameters.context.get("users")
        user_parameter = parameters.getQueryParameter("user")
        if user_parameter is not None:
            if user is not None:
                raise jsonapi.UsageError(
                    "Redundant query parameter: user=%s" % user_parameter)
            user = Users.fromParameter(user_parameter, parameters)
        return user

    @staticmethod
    def fromParameter(value, parameters):
        # Accept either a numeric id or a user name.
        user_id, name = jsonapi.id_or_name(value)
        return api.user.fetch(parameters.critic, user_id=user_id, name=name)

    @staticmethod
    def setAsContext(parameters, user):
        parameters.setContext(Users.name, user)
        return user
@jsonapi.PrimaryResource
class Emails(object):
    """A user's primary email addresses.

       A "primary" email address is one that Critic would send emails to.  A
       user can have multiple primary email addresses registered, but at most
       one of them can be selected.  Emails are only sent to a selected
       primary email address.

       A user also has a set of "Git" email addresses.  Those are only
       compared against Git commit meta-data, and are never used when sending
       emails."""

    name = "emails"
    contexts = ("users",)
    value_class = api.user.User.PrimaryEmail

    @staticmethod
    def json(value, parameters):
        """Email {
             "address": string, // the email address
             "selected": string, // true if address is selected
             "verified": string, // true if address is verified
           }"""
        return parameters.filtered(
            "emails",
            { "address": value.address,
              "selected": value.selected,
              "verified": value.verified })

    @staticmethod
    def single(parameters, argument):
        """A primary email address by index.

           INDEX : integer

           Retrieve a primary email address identified by its index."""
        emails = list(parameters.context["users"].primary_emails)
        try:
            # Indices in the API are 1-based.
            return emails[jsonapi.numeric_id(argument) - 1]
        except IndexError:
            raise jsonapi.PathError("List index out of range")

    @staticmethod
    def multiple(parameters):
        """All primary email addresses."""
        return parameters.context["users"].primary_emails

@jsonapi.PrimaryResource
class Filters(object):
    """A user's repository filters."""

    name = "filters"
    contexts = ("users",)
    value_class = api.filters.RepositoryFilter
    exceptions = (api.repository.RepositoryError, KeyError)
    # "delegates" is addressable as a sub-resource list.
    lists = frozenset(("delegates",))

    @staticmethod
    def json(value, parameters):
        """Filter {
             "id": integer, // the filter's id
             "type": string, // "reviewer", "watcher" or "ignored"
             "path": string, // the filtered path
             "repository": integer, // the filter's repository's id
             "delegates": integer[], // list of user ids
           }"""
        return parameters.filtered("filters", {
            "id": value.id,
            "type": value.type,
            "path": value.path,
            "repository": value.repository,
            "delegates": jsonapi.sorted_by_id(value.delegates)
        })

    @staticmethod
    def single(parameters, argument):
        """Retrieve one (or more) of a user's repository filters.

           FILTER_ID : integer

           Retrieve a filter identified by the filters's unique numeric id."""
        user = parameters.context["users"]
        filter_id = jsonapi.numeric_id(argument)
        # Filters are stored per repository; scan all repositories for the
        # requested id.
        for repository_filters in user.repository_filters.values():
            for repository_filter in repository_filters:
                if repository_filter.id == filter_id:
                    return repository_filter
        raise KeyError("invalid filter id: %d" % filter_id)

    @staticmethod
    def multiple(parameters):
        """All repository filters.

           repository : REPOSITORY : -

           Include only filters for the specified repository, identified by
           its unique numeric id or short-name."""
        user = parameters.context["users"]
        repository = jsonapi.deduce("v1/repositories", parameters)
        if repository:
            repository_filters = user.repository_filters.get(
                repository, [])
        else:
            # No repository specified: flatten the filters from every
            # repository into one sequence.
            repository_filters = itertools.chain(
                *user.repository_filters.values())
        return jsonapi.sorted_by_id(repository_filters)

    @staticmethod
    def create(parameters, value, values, data):
        # Create either a new filter, or (when addressed via the "delegates"
        # sub-resource) add a delegate to one or more existing filters.
        import reviewing.filters

        # Checker validating/normalizing a filter path via the reviewing
        # machinery.  check() returns an error message string on failure
        # (checker convention), convert() returns the sanitized path.
        class FilterPath(jsonapi.check.StringChecker):
            def check(self, context, value):
                path = reviewing.filters.sanitizePath(value)
                try:
                    reviewing.filters.validatePattern(path)
                except reviewing.filters.PatternError as error:
                    return error.message

            def convert(self, context, value):
                return reviewing.filters.sanitizePath(value)

        critic = parameters.critic
        subject = parameters.context["users"]

        if parameters.subresource_path:
            if value:
                repository_filters = [value]
            else:
                repository_filters = values

            assert parameters.subresource_path[0] == "delegates"
            assert len(parameters.subresource_path) == 1

            # The POST data is a single user to add as delegate.
            converted = jsonapi.convert(parameters, api.user.User, data)

            with api.transaction.Transaction(critic) as transaction:
                for repository_filter in repository_filters:
                    delegates = set(repository_filter.delegates)
                    if converted not in delegates:
                        delegates.add(converted)
                        transaction \
                            .modifyUser(subject) \
                            .modifyFilter(repository_filter) \
                            .setDelegates(delegates)

            return value, values

        # NOTE(review): the accepted type value here is "ignore" while the
        # json() docstring (and other filter code) uses "ignored" -- confirm
        # which spelling the API is meant to accept.
        converted = jsonapi.convert(
            parameters,
            { "type": set(("reviewer", "watcher", "ignore")),
              "path": FilterPath,
              "repository": api.repository.Repository,
              "delegates?": [api.user.User] },
            data)

        result = []

        def collectFilter(repository_filter):
            assert isinstance(repository_filter, api.filters.RepositoryFilter)
            result.append(repository_filter)

        with api.transaction.Transaction(critic, result) as transaction:
            transaction \
                .modifyUser(subject) \
                .createFilter(
                    filter_type=converted["type"],
                    repository=converted["repository"],
                    path=converted["path"],
                    delegates=converted.get("delegates", []),
                    callback=collectFilter)

        assert len(result) == 1
        return result[0], None

    @staticmethod
    def update(parameters, value, values, data):
        # Update the delegates of one or more filters, either via the
        # "delegates" sub-resource (data is a plain user list) or via the
        # filter resource itself (data is a {"delegates": [...]} object).
        critic = parameters.critic

        if parameters.subresource_path:
            assert parameters.subresource_path[0] == "delegates"
            if len(parameters.subresource_path) == 2:
                raise jsonapi.UsageError("can't update specific delegate")
            delegates = jsonapi.convert(
                parameters, [api.user.User], data)
        else:
            converted = jsonapi.convert(
                parameters,
                { "delegates?": [api.user.User] },
                data)
            delegates = converted.get("delegates")

        if value:
            repository_filters = [value]
        else:
            repository_filters = values

        with api.transaction.Transaction(critic) as transaction:
            for repository_filter in repository_filters:
                if delegates is not None:
                    transaction \
                        .modifyUser(repository_filter.subject) \
                        .modifyFilter(repository_filter) \
                        .setDelegates(delegates)

        return value, values

    @staticmethod
    def delete(parameters, value, values):
        # Delete filters, or (via the "delegates" sub-resource) remove one or
        # all delegates from a single filter.
        critic = parameters.critic

        if parameters.subresource_path:
            assert value and not values
            assert parameters.subresource_path[0] == "delegates"

            repository_filter = value
            delegates = jsonapi.sorted_by_id(repository_filter.delegates)

            if len(parameters.subresource_path) == 1:
                # Delete all delegates.
                delegates = []
            else:
                # Delete one delegate, addressed by 1-based index.
                del delegates[parameters.subresource_path[1] - 1]

            with api.transaction.Transaction(critic) as transaction:
                transaction \
                    .modifyUser(repository_filter.subject) \
                    .modifyFilter(repository_filter) \
                    .setDelegates(delegates)

            # Remove the last component from the sub-resource path, since
            # we've just deleted the specified sub-resource(s).
            del parameters.subresource_path[-1]

            return value, values

        if value:
            repository_filters = [value]
        else:
            repository_filters = values

        with api.transaction.Transaction(critic) as transaction:
            for repository_filter in repository_filters:
                transaction \
                    .modifyUser(repository_filter.subject) \
                    .modifyFilter(repository_filter) \
                    .delete()


================================================
FILE: src/library/js/v8/critic-batch.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

 Copyright 2013 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License.  You may obtain a copy of
 the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 License for the specific language governing permissions and limitations under
 the License.
*/

"use strict";

/* Per-batch private state, keyed by the batch object's __id__.  Kept outside
   the (frozen) batch objects themselves. */
var batch_internals = {};
var batch_id_counter = 0;

/* Batch of review changes (comments, state changes, assignments, filters).
   Constructed either from an existing batch row (data.id is a number) or as
   a new, mutable batch (data.internals === batch_internals, i.e. created by
   trusted in-module code only). */
function CriticBatch(data)
{
  var self = this;
  var review_id, review;
  var user_id, user;
  var chain_id, chain;
  var issues;
  var notes;
  var replies;

  var internal_id = batch_id_counter++;
  var internals = batch_internals[internal_id] = {};

  Object.defineProperty(this, "__id__", { value: internal_id });

  /* Lazy accessors below: each loads its value from the database on first
     use and caches it in the closure. */

  function getReview()
  {
    if (!review)
      review = new CriticReview(review_id);
    return review;
  }

  function getUser()
  {
    if (!user)
      user = new CriticUser(user_id);
    return user;
  }

  function getCommentChain()
  {
    /* NOTE(review): `chain` is initialized to `data.comment || null` below,
       so this `void 0` check seems never to trigger for existing batches --
       confirm whether a non-null chain_id should still construct a chain
       here. */
    if (chain === void 0)
    {
      if (chain_id === null)
        chain = null;
      else
        chain = new CriticCommentChain(chain_id, { batch: self, review: review });
    }
    return chain;
  }

  function getIssues()
  {
    if (!issues)
    {
      issues = [];
      var result = db.execute(("SELECT id, review, batch, uid, time, type, state, origin, file, " +
                               "first_commit, last_commit, closed_by, addressed_by " +
                               "FROM commentchains " +
                               "WHERE batch=%d " +
                               "AND type='issue' " +
                               "ORDER BY time ASC"), self.id);
      for (var index = 0; index < result.length; ++index)
        issues.push(new CriticCommentChain(result[index], { batch: self }));
      Object.freeze(issues);
    }
    return issues;
  }

  function getNotes()
  {
    if (!notes)
    {
      notes = [];
      var result = db.execute(("SELECT id, review, batch, uid, time, type, state, origin, file, " +
                               "first_commit, last_commit, closed_by, addressed_by " +
                               "FROM commentchains " +
                               "WHERE batch=%d " +
                               "AND type='note' " +
                               "ORDER BY time ASC"), self.id);
      for (var index = 0; index < result.length; ++index)
        notes.push(new CriticCommentChain(result[index], { batch: self }));
      Object.freeze(notes);
    }
    return notes;
  }

  function getReplies()
  {
    if (!replies)
    {
      replies = [];
      var result = db.execute(("SELECT id, chain, uid, time, state, comment " +
                               "FROM comments " +
                               "WHERE batch=%d"), self.id);
      for (var index = 0; index < result.length; ++index)
      {
        var row = result[index];
        replies.push(new CriticComment(row.chain, self.id, row.id, row.uid, row.time, row.state, row.comment, { batch: self }));
      }
      Object.freeze(replies);
    }
    return replies;
  }

  if (typeof data.id == "number")
  {
    /* Existing batch: load from the database. */
    this.id = data.id;

    var result = db.execute("SELECT review, uid, comment, time FROM batches WHERE id=%d", this.id)[0];
    if (!result)
      throw CriticError(format("%d: invalid batch ID", this.id));

    this.time = result.time;

    review_id = result.review;
    review = data.review || null;
    user_id = result.uid;
    user = data.user || null;
    chain_id = result.comment;
    chain = data.comment || null;

    Object.defineProperties(this, { review: { get: getReview, enumerable: true },
                                    user: { get: getUser, enumerable: true },
                                    commentChain: { get: getCommentChain, enumerable: true },
                                    issues: { get: getIssues, enumerable: true },
                                    notes: { get: getNotes, enumerable: true },
                                    replies: { get: getReplies, enumerable: true }});
  }
  else if (data.internals === batch_internals)
  {
    /* New batch being assembled: all pending work is queued in `internals`
       until the batch is submitted. */
    this.id = null;
    this.review = data.review;
    this.user = data.user;

    internals.filter_user_ids = {};
    internals.filter_operations = [];
    internals.comment_operations = [];
    internals.assignments = { fileCount: 0 };
    internals.added_filters = [];
    internals.removed_filters = [];
    internals.replied_to_chains = {};
    internals.modified_issues = {};
    internals.review_created = data.review_created;
  }
  else
    throw CriticError("invalid argument");

  Object.freeze(this);
}

/* Map a file version object to the commit it belongs to: for a changeset
   file version, the changeset's parent (old side) or child (new side). */
function commitFromFileVersion(file_version)
{
  if (file_version instanceof CriticChangesetFileVersion)
    if (file_version == file_version.file.oldVersion)
      return file_version.file.changeset.parent;
    else
      return file_version.file.changeset.child;
  else
    return file_version.commit;
}

/* Shared implementation of raiseIssue()/writeNote(): validates the input,
   runs comment propagation for file comments, and queues a deferred
   operation that inserts the chain/comment rows when the batch is
   submitted.  Must be invoked with a CriticBatch as `this`. */
function createCommentChain(text, data, type)
{
  data = data || {};
  text = text && String(text);

  if (!(this instanceof CriticBatch))
    throw CriticError("invalid this object; expected batch object");
  if (!text)
    throw CriticError("invalid use: can't add empty comment");

  var users = {};

  function addUser(user)
  {
    users[user.id] = user;
  }

  /* Associate all collected users with the new chain, and mark the comment
     unread for them unless `mark_comment_as_read` is set. */
  function insertUsers(chain_id, comment_id, mark_comment_as_read)
  {
    for (var user_id in users)
    {
      db.execute("INSERT INTO commentchainusers (chain, uid) VALUES (%d, %d)", chain_id, user_id);
      if (!mark_comment_as_read)
        db.execute("INSERT INTO commentstoread (uid, comment) VALUES (%d, %d)", user_id, comment_id);
    }
  }

  /* The author and all review owners are always associated. */
  addUser(this.user);
  this.review.owners.forEach(addUser);

  var operations = batch_internals[this.__id__].comment_operations;

  if (data.fileVersion)
  {
    /* File comment: anchored to a range of lines in a file version. */
    if (!(data.fileVersion instanceof CriticFileVersion))
      throw CriticError("data.fileVersion: invalid argument; expected file version object");
    if (typeof data.lineIndex != "number")
      throw CriticError("data.lineIndex: invalid argument; expected number");
    if (typeof data.lineCount != "number")
      throw CriticError("data.lineCount: invalid argument; expected number");

    var file_version = data.fileVersion;
    var cli_command = { name: "propagate-comment",
                        data: { review_id: this.review.id,
                                commit_id: commitFromFileVersion(file_version).id,
                                file_id: file_version.file.id,
                                first_line: data.lineIndex + 1,
                                last_line: data.lineIndex + data.lineCount } };

    var cli_result = JSON.parse(executeCLI([cli_command])[0]);

    /* A string result is an error message. */
    if (typeof cli_result == "string")
      throw CriticError(format("comment propagation failed: %s", cli_result));

    var state, addressed_by;

    if (cli_result.status == "clean")
    {
      state = "open";
      addressed_by = null;
    }
    else if (data.allowInitiallyAddressed)
    {
      state = 'addressed';
      addressed_by = cli_result.addressed_by;
    }
    else
    {
      var addressed_by_commit = this.review.repository.getCommit(cli_result.addressed_by);
      throw CriticError(format("cannot raise issue; lines are modified by a later commit: %s", addressed_by_commit.sha1));
    }

    var lines = cli_result.lines;
    var origin, parent, child;

    if (file_version instanceof CriticChangesetFileVersion)
    {
      origin = file_version == file_version.file.oldVersion ? "old" : "new";
      parent = file_version.file.changeset.parent;
      child = file_version.file.changeset.child;
    }
    else
    {
      origin = "new";
      parent = child = file_version.commit;
    }

    operations.push(function (batch_id, data)
      {
        var chain_id = db.execute("INSERT INTO commentchains (review, batch, uid, type, state, origin, file, first_commit, last_commit) VALUES (%d, %d, %d, %s, %s, %s, %d, %d, %d) RETURNING id",
                                  this.review.id, batch_id, this.user.id, type, state, origin, file_version.id, parent.id, child.id)[0].id;
        if (addressed_by)
          db.execute("UPDATE commentchains SET addressed_by=%d WHERE id=%d", addressed_by, chain_id);
        var comment_id = db.execute("INSERT INTO comments (chain, batch, uid, state, comment) VALUES (%d, %d, %d, 'current', %s) RETURNING id",
                                    chain_id, batch_id, this.user.id, text)[0].id;
        db.execute("UPDATE commentchains SET first_comment=%d WHERE id=%d", comment_id, chain_id);
        for (var index = 0; index < lines.length; ++index)
        {
          var line = lines[index];
          db.execute("INSERT INTO commentchainlines (chain, uid, state, sha1, first_line, last_line) VALUES (%d, %d, 'current', %s, %d, %d)",
                     chain_id, this.user.id, line[0], line[1], line[2]);
        }
        insertUsers(chain_id, comment_id, data.silent);
      });
  }
  else if (data.commit)
  {
    /* Commit message comment: anchored to lines of the commit message. */
    if (typeof data.lineIndex != "number")
      throw CriticError("data.lineIndex: invalid argument; expected number");
    if (typeof data.lineCount != "number")
      throw CriticError("data.lineCount: invalid argument; expected number");

    var lines = data.commit.message.trim().split("\n");

    if (data.lineIndex >= lines.length || data.lineIndex + data.lineCount > lines.length || data.lineCount == 0)
      throw CriticError("data.lineIndex/data.lineCount: out of range or invalid");

    operations.push(function (batch_id, data)
      {
        var chain_id = db.execute("INSERT INTO commentchains (review, batch, uid, type, state, first_commit, last_commit) VALUES (%d, %d, %d, %s, 'open', %d, %d) RETURNING id",
                                  this.review.id, batch_id, this.user.id, type, data.commit.id, data.commit.id)[0].id;
        var comment_id = db.execute("INSERT INTO comments (chain, batch, uid, state, comment) VALUES (%d, %d, %d, 'current', %s) RETURNING id",
                                    chain_id, batch_id, this.user.id, text)[0].id;
        db.execute("UPDATE commentchains SET first_comment=%d WHERE id=%d", comment_id, chain_id);
        db.execute("INSERT INTO commentchainlines (chain, uid, state, commit, sha1, first_line, last_line) VALUES (%d, %d, 'current', %d, %s, %d, %d)",
                   chain_id, this.user.id, data.commit.id, data.commit.sha1, data.lineIndex, data.lineIndex + data.lineCount - 1);
        insertUsers(chain_id, comment_id, data.silent);
      });
  }
  else
    /* Free-standing comment: not anchored to any file or commit. */
    operations.push(function (batch_id, data)
      {
        var chain_id = db.execute("INSERT INTO commentchains (review, batch, uid, type, state) VALUES (%d, %d, %d, %s, 'open') RETURNING id",
                                  this.review.id, batch_id, this.user.id, type)[0].id;
        var comment_id = db.execute("INSERT INTO comments (chain, batch, uid, state, comment) VALUES (%d, %d, %d, 'current', %s) RETURNING id",
                                    chain_id, batch_id, this.user.id, text)[0].id;
        db.execute("UPDATE commentchains SET first_comment=%d WHERE id=%d", comment_id, chain_id);
        insertUsers(chain_id, comment_id, data.silent);
      });
}

/* Queue raising an issue in this batch. */
CriticBatch.prototype.raiseIssue = function (text, data)
  {
    createCommentChain.call(this, text, data, "issue");
  };

/* Queue writing a note in this batch. */
CriticBatch.prototype.writeNote = function (text, data)
  {
    createCommentChain.call(this, text, data, "note");
  };

/* Queue a reply to an existing comment chain.  At most one reply per chain
   per batch. */
CriticBatch.prototype.addReply = function (chain, text)
  {
    var internals = batch_internals[this.__id__];
    var operations = internals.comment_operations;

    text = text && String(text);

    if (!(chain instanceof CriticCommentChain))
      throw CriticError("invalid chain argument; expected CommentChain object");
    if (chain.review.id != this.review.id)
      throw CriticError("invalid use; chain belongs to a different review");
    if (!text)
      throw CriticError("invalid use: can't add empty comment");
    if (chain.id in internals.replied_to_chains)
      throw CriticError("can't add two replies to a comment chain in one batch");

    internals.replied_to_chains[chain.id] = true;

    operations.push(function (batch_id, data)
      {
        var comment_id = db.execute("INSERT INTO comments (chain, batch, uid, state, comment) VALUES (%d, %d, %d, 'current', %s) RETURNING id",
                                    chain.id, batch_id, this.user.id, text)[0].id;
        if (!data.silent)
          for (var user_id in chain.users)
            if (user_id != this.user.id)
              db.execute("INSERT INTO commentstoread (uid, comment) VALUES (%d, %d)", user_id, comment_id);
        /* Make sure the replier is associated with the chain. */
        if (!db.execute("SELECT 1 FROM commentchainusers WHERE chain=%d AND uid=%d", chain.id, this.user.id)[0])
          db.execute("INSERT INTO commentchainusers (chain, uid) VALUES (%d, %d)", chain.id, this.user.id);
      });
  };

/* Queue resolving (closing) an open issue. */
CriticBatch.prototype.resolveIssue = function (chain)
  {
    var internals = batch_internals[this.__id__];
    var operations = internals.comment_operations;

    if (!(chain instanceof CriticCommentChain))
      throw CriticError("invalid chain argument; expected CommentChain object");
    if (chain.review.id != this.review.id)
      throw CriticError("invalid use; chain belongs to a different review");
    if (chain.state != CriticCommentChain.STATE_OPEN)
      throw CriticError("can't resolve issue; already addressed or resolved");
    if (chain.id in internals.modified_issues)
      throw CriticError("can't modify the state of an issue more than once in a single batch");

    internals.modified_issues[chain.id] = true;

    operations.push(function (batch_id)
      {
        db.execute("UPDATE commentchains SET state='closed', closed_by=%d WHERE id=%d", this.user.id, chain.id);
        db.execute("INSERT INTO commentchainchanges (batch, uid, chain, state, from_state, to_state) VALUES (%d, %d, %d, 'performed', %s, %s)",
                   batch_id, this.user.id, chain.id, 'open', 'closed');
        if (!db.execute("SELECT 1 FROM commentchainusers WHERE chain=%d AND uid=%d", chain.id, this.user.id)[0])
          db.execute("INSERT INTO commentchainusers (chain, uid) VALUES (%d, %d)", chain.id, this.user.id);
      });
  };

/* Queue reopening an addressed or resolved issue.  For file comments that
   were addressed, new line anchoring (data.fileVersion/lineIndex/lineCount)
   is required and is validated via comment propagation. */
CriticBatch.prototype.reopenIssue = function (chain, data)
  {
    var internals = batch_internals[this.__id__];
    var operations = internals.comment_operations;

    if (!(chain instanceof CriticCommentChain))
      throw CriticError("invalid chain argument; expected CommentChain object");
    if (chain.review.id != this.review.id)
      throw CriticError("invalid use; chain belongs to a different review");
    if (chain.type != CriticCommentChain.TYPE_ISSUE)
      throw CriticError("invalid use: cannot reopen notes");
    if (chain.id in internals.modified_issues)
      throw CriticError("can't modify the state of an issue more than once in a single batch");

    internals.modified_issues[chain.id] = true;

    var current_state;

    switch (chain.state)
    {
    case CriticCommentChain.STATE_ADDRESSED:
      current_state = "addressed";
      break;
    case CriticCommentChain.STATE_RESOLVED:
      current_state = "closed";
      break;
    default:
      writeln(chain.state);
      /* NOTE(review): "ropen" is a typo for "reopen" in this user-facing
         message (left unchanged here). */
      throw CriticError("can't ropen issue; not addressed or resolved");
    }

    var lines;

    if (chain.file)
    {
      if (chain.state == CriticCommentChain.STATE_ADDRESSED || data && data.fileVersion)
      {
        if (!data || !data.fileVersion || !(data.fileVersion instanceof CriticFileVersion))
          throw CriticError("data.fileVersion: invalid argument; expected file version object");
        if (typeof data.lineIndex != "number")
          throw CriticError("data.lineIndex: invalid argument; expected number");
        if (typeof data.lineCount != "number")
          throw CriticError("data.lineCount: invalid argument; expected number");

        var file_version = data.fileVersion;
        var cli_command = { name: "propagate-comment",
                            data: { review_id: this.review.id,
                                    chain_id: chain.id,
                                    commit_id: commitFromFileVersion(file_version).id,
                                    file_id: file_version.file.id,
                                    first_line: data.lineIndex + 1,
                                    last_line: data.lineIndex + data.lineCount } };

        var cli_result = JSON.parse(executeCLI([cli_command])[0]);

        if (typeof cli_result == "string")
          throw CriticError(format("comment propagation failed: %s", cli_result));

        if (cli_result.status == "modified")
        {
          var addressed_by_commit = this.review.repository.getCommit(cli_result.addressed_by);
          throw CriticError(format("cannot reopen issue; lines are modified by a later commit: %s", addressed_by_commit.sha1));
        }

        lines = cli_result.lines;
      }
    }
    else
      lines = null;

    operations.push(function (batch_id)
      {
        db.execute("UPDATE commentchains SET state='open', closed_by=null, addressed_by=null WHERE id=%d", chain.id);
        db.execute("INSERT INTO commentchainchanges (batch, uid, chain, state, from_state, to_state) VALUES (%d, %d, %d, 'performed', %s, %s)",
                   batch_id, this.user.id, chain.id, current_state, 'open');
        if (lines)
          for (var index = 0; index < lines.length; ++index)
          {
            var line = lines[index];
            db.execute("INSERT INTO commentchainlines (chain, uid, state, sha1, first_line, last_line) VALUES (%d, %d, 'current', %s, %d, %d)",
                       chain.id, this.user.id, line[0], line[1], line[2]);
          }
      });
  };

/* Queue marking an open issue as addressed by a commit in the review. */
CriticBatch.prototype.markIssueAddressedBy = function (chain, commit)
  {
    var internals = batch_internals[this.__id__];
    var operations = internals.comment_operations;

    if (!(chain instanceof CriticCommentChain))
      throw CriticError("invalid chain argument; expected CommentChain object");
    if (chain.review.id != this.review.id)
      throw CriticError("invalid use; chain belongs to a different review");
    if (!(commit instanceof CriticCommit))
      throw CriticError("invalid commit argument; expected Commit object");
    if (!(commit.sha1 in this.review.commits))
      throw CriticError("invalid use; issues can only be addressed by commits in the review");
    if (chain.state != CriticCommentChain.STATE_OPEN)
      throw CriticError("can't address issue; already addressed or resolved");
    if (chain.id in internals.modified_issues)
      throw CriticError("can't modify the state of an issue more than once in a single batch");

    internals.modified_issues[chain.id] = true;

    operations.push(function (batch_id)
      {
        db.execute("UPDATE commentchains SET state='addressed', closed_by=%d, addressed_by=%d WHERE id=%d", this.user.id, commit.id, chain.id);
        db.execute("INSERT INTO commentchainchanges (batch, uid, chain, state, from_state, to_state) VALUES (%d, %d, %d, 'performed', %s, %s)",
                   batch_id, this.user.id, chain.id, 'open', 'addressed');
        if (!db.execute("SELECT 1 FROM commentchainusers WHERE chain=%d AND uid=%d", chain.id, this.user.id)[0])
          db.execute("INSERT INTO commentchainusers (chain, uid) VALUES (%d, %d)", chain.id, this.user.id);
      });
  };

/*
function changeReviewFileStatus(what, new_state)
{
  if (what instanceof CriticChangeset)
  {
    if (what.review != this.review)
      throw CriticError("invalid changeset; must associated with the batch's review");

    result = db.execute("SELECT id, file, deleted, inserted FROM reviewfiles WHERE review=%d AND changeset=%d AND state!=%s", this.review.id, what.id, new_state);
  }
  else if (what instanceof CriticChangesetFile)
  {
    if (what.changeset.review != this.review)
      throw CriticError("invalid file; must be part of a changeset associated with the batch's review");

    result = db.execute("SELECT id, file, deleted, inserted FROM reviewfiles WHERE review=%d AND changeset=%d AND file=%d AND state!=%s", this.review.id, what.changeset.id, what.id, new_state);
  }
  else
  {
    result = [];

    for (var index = 0; index < what.length; ++index)
    {
      var rows = db.execute("SELECT reviewfiles.id, file, deleted, inserted FROM reviewfiles JOIN changesets ON (changeset=changesets.id) WHERE review=%d AND child=%d AND state!=%s", this.review.id, what[index].id, new_state);
      for (var row_index = 0; row_index < rows.length; ++row_index)
        result.push(rows[row_index]);
    }
  }
}
*/

/* Shared implementation of assignChanges()/unassignChanges(): resolves
   `what` (a changeset, a changeset file, or an array of commits) to
   reviewfiles rows and records the (un)assignment counts in the batch's
   pending-assignments structure.  Must be invoked with a CriticBatch as
   `this`. */
function changeAssignments(user, what, assigned)
{
  var result;

  if (what instanceof CriticChangeset)
  {
    if (what.review != this.review)
      throw CriticError("invalid changeset; must associated with the batch's review");

    result = db.execute("SELECT id, file, deleted, inserted FROM reviewfiles WHERE review=%d AND changeset=%d", this.review.id, what.id);
  }
  else if (what instanceof CriticChangesetFile)
  {
    if (what.changeset.review != this.review)
      throw CriticError("invalid file; must be part of a changeset associated with the batch's review");

    result = db.execute("SELECT id, file, deleted, inserted FROM reviewfiles WHERE review=%d AND changeset=%d AND file=%d", this.review.id, what.changeset.id, what.id);
  }
  else
  {
    result = [];

    for (var index = 0; index < what.length; ++index)
    {
      var rows = db.execute("SELECT reviewfiles.id, file, deleted, inserted FROM reviewfiles JOIN changesets ON (changeset=changesets.id) WHERE review=%d AND child=%d", this.review.id, what[index].id);
      for (var row_index = 0; row_index < rows.length; ++row_index)
        result.push(rows[row_index]);
    }
  }

  var assignments = batch_internals[this.__id__].assignments;
  var files = assignments[user.id];

  if (!files)
  {
    files = assignments[user.id] = {};
    /* fileCount is non-enumerable so plain iteration over `files` sees only
       file entries. */
    Object.defineProperty(files, "fileCount", { value: 0, writable: true });
  }

  for (var index = 0; index < result.length; ++index)
  {
    var row = result[index];
    var file = files[row.file];

    if (!file)
      file = files[row.file] = { assignedFiles: {},
                                 assignedDeleteCount: 0,
                                 assignedInsertCount: 0,
                                 unassignedFiles: {},
                                 unassignedDeleteCount: 0,
                                 unassignedInsertCount: 0 };

    /* An assignment cancels a pending unassignment of the same row (and
       vice versa) rather than counting as a new entry. */
    if (assigned)
    {
      if (row.id in file.unassignedFiles)
      {
        delete file.unassignedFiles[row.id];
        file.unassignedDeleteCount -= row.deleted;
        file.unassignedInsertCount -= row.inserted;
      }
      else
      {
        ++assignments.fileCount;
        ++files.fileCount;
      }

      file.assignedFiles[row.id] = true;
      file.assignedDeleteCount += row.deleted;
      file.assignedInsertCount += row.inserted;
    }
    else
    {
      if (row.id in file.assignedFiles)
      {
        delete file.assignedFiles[row.id];
        file.assignedDeleteCount -= row.deleted;
        file.assignedInsertCount -= row.inserted;
      }
      else
      {
        ++assignments.fileCount;
        ++files.fileCount;
      }

      file.unassignedFiles[row.id] = true;
      file.unassignedDeleteCount += row.deleted;
      file.unassignedInsertCount += row.inserted;
    }
  }
}

/* Queue assigning changes to a user for review. */
CriticBatch.prototype.assignChanges = function (user, what)
  {
    changeAssignments.call(this, user, what, true);
  };

/* Queue unassigning changes from a user. */
CriticBatch.prototype.unassignChanges = function (user, what)
  {
    changeAssignments.call(this, user, what, false);
  };

/* Queue adding a review filter for a user.  Rejects filters that duplicate
   or conflict with existing filters, or with filters added/removed earlier
   in this batch. */
CriticBatch.prototype.addReviewFilter = function (user, type, path)
  {
    if (!(this instanceof CriticBatch))
throw CriticError("invalid this object; expected batch object"); type = String(type); path = String(path); if (!(user instanceof CriticUser)) throw CriticError("invalid user argument; expected User object"); if (type != "reviewer" && type != "watcher" && type != "ignored") throw CriticError("invalid type argument; expected 'reviewer', 'watcher' or 'ignored'"); if (/[^\/]\*\*|\*\*[^\/]/.test(path)) throw CriticError("invalid wildcards in path argument"); var internals = batch_internals[this.__id__]; var user_ids = internals.filter_user_ids; var operations = internals.filter_operations; var added_filters = internals.added_filters; var removed_filters = internals.removed_filters; for (var index = 0; index < removed_filters.length; ++index) { var removed_filter = removed_filters[index]; if (removed_filter.uid == user.id && removed_filter.path == path) throw CriticError("can't add filter; identical or conflicting filter removed in this batch"); } var result = db.execute("SELECT 1 FROM reviewfilters WHERE review=%d AND uid=%d AND path=%s", this.review.id, user.id, path); if (result.length != 0) throw CriticError("can't add filter; identical or conflicting filter already exists"); for (var index = 0; index < added_filters.length; ++index) { var added_filter = added_filters[index]; if (added_filter.uid == user.id && added_filter.path == path) throw CriticError("can't add filter; identical or conflicting filter added in this batch"); } user_ids[user.id] = user; added_filters.push({ uid: user.id, path: path, type: type, delegate: null }); operations.push(function (transaction_id) { db.execute("INSERT INTO reviewfilters (review, uid, path, type, creator) VALUES (%d, %d, %s, %s, %d)", this.review.id, user.id, path, type, this.user.id); db.execute("INSERT INTO reviewfilterchanges (transaction, uid, path, type, created) VALUES (%d, %d, %s, %s, true)", transaction_id, user.id, path, type); }); }; CriticBatch.prototype.removeReviewFilter = function (user, type, path) { if (!(this 
instanceof CriticBatch)) throw CriticError("invalid this object; expected batch object"); type = String(type); path = String(path); if (!(user instanceof CriticUser)) throw CriticError("invalid user argument; expected User object"); if (type != "reviewer" && type != "watcher" && type != "ignored") throw CriticError("invalid type argument; expected 'reviewer', 'watcher' or 'ignored'"); var internals = batch_internals[this.__id__]; var user_ids = internals.filter_user_ids; var operations = internals.filter_operations; var added_filters = internals.added_filters; var removed_filters = internals.removed_filters; for (var index = 0; index < added_filters.length; ++index) { var added_filter = added_filters[index]; if (added_filter.uid == user.id && added_filter.path == path) throw CriticError("can't remove filter; identical or conflicting filter added in this batch"); } var result = db.execute("SELECT 1 FROM reviewfilters WHERE review=%d AND uid=%d AND path=%s AND type=%s", this.review.id, user.id, path, type)[0]; if (!result) throw CriticError("can't remove filter; no such filter exists"); for (var index = 0; index < removed_filters.length; ++index) { var removed_filter = removed_filters[index]; if (removed_filter.uid == user.id && removed_filter.path == path && removed_filter.type == type) /* Already being removed; ignore this call. 
*/ return; } user_ids[user.id] = user; removed_filters.push({ uid: user.id, path: path, type: type, delegate: null }); operations.push( function (transaction_id) { db.execute("DELETE FROM reviewfilters WHERE review=%d AND uid=%d AND path=%s", this.review.id, user.id, path); db.execute("INSERT INTO reviewfilterchanges (transaction, uid, type, created) VALUES (%d, %d, %s, %s, false)", transaction_id, user.id, path, type); }); }; function getReviewMessageId(review, to_user) { var result = db.execute("SELECT messageid, hostname FROM reviewmessageids WHERE review=%d AND uid=%d", review.id, to_user.id)[0]; if (result) return format("<%s@%s>", result.messageid, result.hostname.trim()); else return null; } function getCommentMessageId(comment, to_user) { var result = db.execute("SELECT messageid, hostname FROM commentmessageids WHERE comment=%d AND uid=%d", comment.id, to_user.id)[0]; if (result) return format("<%s@%s>", result.messageid, result.hostname.trim()); else return null; } CriticBatch.prototype.finish = function (data) { if (!(this instanceof CriticBatch)) throw CriticError("invalid this object; expected batch object"); /* We commit at the end of the function. To ensure we don't commit anything we didn't mean to, roll back the current transaction first. (Normally, there wouldn't be anything in the current transaction to commit, but better safe than sorry. 
*/ db.rollback(); try { data = data || {}; var text = data.text && String(data.text); var internals = batch_internals[this.__id__]; var filter_operations = internals.filter_operations; var filter_user_ids = internals.filter_user_ids; var added_filters = internals.added_filters; var removed_filters = internals.added_filters; var comment_operations = internals.comment_operations; var assignments = internals.assignments; var review_id = this.review.id; var batch_id = null; var transaction_id = null; var progress_before = this.review.progress; if (comment_operations.length) batch_id = db.execute("INSERT INTO batches (review, uid) VALUES (%d, %d) RETURNING id", this.review.id, this.user.id)[0].id; if (filter_operations.length || assignments.fileCount) transaction_id = db.execute("INSERT INTO reviewassignmentstransactions (review, assigner) VALUES (%d, %d) RETURNING ID", this.review.id, this.user.id)[0].id; if (filter_operations.length) { var filters_before = new CriticFilters({ review: this.review }); var filters_after = new CriticFilters({ review: this.review, added_review_filters: added_filters, removed_review_filters: removed_filters }); for (var index = 0; index < filter_operations.length; ++index) { var followup = filter_operations[index].call(this, transaction_id); if (followup) filter_operations.push(followup); } var commits = this.review.commits; var changesets = []; for (var cindex = 0; cindex < commits.length; ++cindex) { var changeset = this.review.getChangeset(commits[cindex]); if (changeset instanceof CriticMergeChangeset) for (var index = 0; index < changeset.changesets.length; ++index) changesets.push(changeset.changesets[index]); else changesets.push(changeset); } for (var user_id in filter_user_ids) { var user = filter_user_ids[user_id]; for (var csindex = 0; csindex < changesets.length; ++csindex) { var changeset = changesets[csindex]; var files = changeset.files; for (var findex = 0; findex < files.length; ++findex) { var file = files[findex]; var 
reviewer_before = filters_before.isReviewer(user.id, file.id); var reviewer_after = filters_after.isReviewer(user.id, file.id); if (!reviewer_before && reviewer_after) { if (!user.isAuthor(changeset.child)) this.assignChanges(user, file); } else if (reviewer_before && !reviewer_after) this.unassignChanges(user, file); } } } } if (assignments.fileCount) for (var user_id in assignments) { var files = assignments[user_id], values; for (var file_id in files) { var file = files[file_id]; var assignedFiles = file.assignedFiles; var unassignedFiles = file.unassignedFiles; values = Object.keys(assignedFiles); if (values.length) { var result = db.execute(format("SELECT id, deleted, inserted FROM reviewfiles JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id) WHERE review=%d AND uid=%d", review_id, user_id)); for (var index = 0; index < result.length; ++index) { var row = result[index]; if (assignedFiles[row.id]) { delete assignedFiles[row.id]; file.assignedDeleteCount -= row.deleted; file.assignedInsertCount -= row.inserted; } } } values = Object.keys(assignedFiles).map(function (file_id) { return format("(%d,%d)", file_id, user_id); }); if (values.length) { if (db.execute("SELECT 1 FROM reviewusers WHERE review=%d AND uid=%d", review_id, user_id).length == 0) db.execute("INSERT INTO reviewusers (review, uid) VALUES (%d, %d)", review_id, user_id); db.execute("INSERT INTO reviewuserfiles (file, uid) VALUES " + values); db.execute("INSERT INTO reviewassignmentchanges (transaction, file, uid, assigned) VALUES " + Object.keys(assignedFiles).map(function (file_id) { return format("(%d,%d,%d,true)", transaction_id, file_id, user_id); })); } file.assignedFileCount = values.length; values = Object.keys(unassignedFiles); if (values.length) { var result = db.execute(format("SELECT COUNT(*) AS count, SUM(deleted) AS deleted, SUM(inserted) AS inserted FROM reviewfiles JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id) WHERE review=%%d AND uid=%%d AND id IN (%s)", 
values), review_id, user_id)[0]; db.execute(format("DELETE FROM reviewuserfiles WHERE file IN (%s) AND uid=%d", values, user_id)); db.execute("INSERT INTO reviewassignmentchanges (transaction, file, uid, assigned) VALUES " + Object.keys(unassignedFiles).map(function (file_id) { return format("(%d,%d,%d,false)", transaction_id, file_id, user_id); })); file.unassignedFileCount = result.count; file.unassignedDeleteCount = result.deleteCount; file.unassignedInsertCount = result.insertCount; } } } if (comment_operations.length) for (var index = 0; index < comment_operations.length; ++index) comment_operations[index].call(this, batch_id, data); db.execute("UPDATE reviews SET serial=serial+1 WHERE id=%d", this.review.id); var updated_review = new CriticReview(this.review.id); var progress_after = updated_review.progress; db.commit(); if (!data.silent) { var commands = []; if (batch_id !== null) { commands.push({ name: "generate-mails-for-batch", data: { batch_id: batch_id, was_accepted: progress_before.accepted, is_accepted: progress_after.accepted } }); } if (transaction_id !== null) { commands.push({ name: "generate-mails-for-assignments-transaction", data: { transaction_id: transaction_id } }); } var lines = executeCLI(commands); if (lines) { try { for (var index = 0; index < lines.length; ++index) JSON.parse(lines[index]).forEach(sendMail); } catch (error) { throw new Error(format("%r", lines)); } var pid = parseInt(IO.File.read(maildelivery_pid_path).decode().trim()); OS.Process.kill(pid, 1); } } } finally { /* If anything fails we don't want to have done anything at all to the database, so roll the transaction back. If we did finish, we just committed the transaction, in which case we aren't in a transaction right no, and the rollback() call is a no-op. 
*/ db.rollback(); } }; ================================================ FILE: src/library/js/v8/critic-branch.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ "use strict"; function CriticBranch(options) { var repository, branch_id, name, result; if ("id" in options) { branch_id = options.id; result = db.execute("SELECT repository, name, head, base FROM branches WHERE id=%d", branch_id)[0]; if (!result) throw CriticError(format("%d: invalid branch ID", branch_id)); repository = options.repository || new CriticRepository(result.repository); name = result.name; } else if ("repository" in options && "name" in options) { repository = options.repository; name = options.name; result = db.execute("SELECT id, head, base FROM branches WHERE repository=%d AND name=%s", repository.id, name)[0]; if (!result) throw CriticError(format("%s: no such branch", name)); branch_id = result.id; } else throw CriticError("invalid argument; must specify either 'id' or 'repository'+'name'"); var self = this; var head_id = result.head, head = options.head || null; var base_id = result.base, base = options.base || null; var review = options.review || void 0; var commits; result = null; function getHead() { if (!head) head = self.repository.getCommit(head_id); return head; } function getBase() { if (!base) if (!base_id) return null; else base = new CriticBranch({ id: base_id, repository: 
self.repository }); return base; } function getReview() { if (review === void 0) { var result = db.execute("SELECT id FROM reviews WHERE branch=%d", self.id)[0]; if (result) review = new CriticReview(result.id); else review = null; } return review; } function getCommits() { if (!commits) { var result = db.execute("SELECT commit FROM reachable WHERE branch=%d LIMIT %d", branch_id, configuration.maxCommits + 1); if (result.length > configuration.maxCommits) throw CriticError(format("implementation limit; branch contains more than %d commits", configuration.maxCommits)); var all_commits = []; for (var index = 0; index < result.length; ++index) all_commits.push(repository.getCommit(result[index].commit)); commits = new CriticCommitSet(all_commits); } return commits; } this.repository = repository; this.id = branch_id; this.name = name; Object.defineProperties(this, { head: { get: getHead, enumerable: true }, base: { get: getBase, enumerable: true }, review: { get: getReview, enumerable: true }, commits: { get: getCommits, enumerable: true }}); Object.freeze(this); } Object.defineProperties(CriticBranch.prototype, { getWorkCopy: { writable: true, value: function () { return new CriticRepositoryWorkCopy(this.repository, this.name); } }, getCheckBranch: { writable: true, value: function (upstream) { return new CriticCheckBranch(this, upstream); } } }); function CriticCheckBranch(branch, upstream) { this.branch = branch; this.upstream = upstream; } Object.defineProperties(CriticCheckBranch.prototype, { addNote: { writable: true, value: function (commit, data) { var sha1, review_id, note, user; if (commit instanceof CriticCommit) sha1 = commit.sha1; else throw CriticError("invalid commit argument; expected critic.Commit object"); if (data.review) { if (data.review instanceof CriticReview) review_id = data.review.id; else throw CriticError("invalid data.review argument; expected critic.Review object"); } else review_id = null; if (data.note) note = String(note); else note = 
null; if (data.user) user = data.user; else user = global.user; db.execute("INSERT INTO checkbranchnotes (repository, branch, upstream, sha1, uid, review, text) VALUES (%d, %s, %s, %s, %d, %d, %s)", this.branch.repository.id, this.branch.name, this.upstream, sha1, user.id, review_id, note); } } }); ================================================ FILE: src/library/js/v8/critic-changeset.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ "use strict"; var CriticChangesetLineConstants = { TYPE_CONTEXT: 0, TYPE_WHITESPACE: 1, TYPE_REPLACED: 2, TYPE_MODIFIED: 3, TYPE_DELETED: 4, TYPE_INSERTED: 5, OPERATION_REPLACE: 0, OPERATION_DELETE: 1, OPERATION_INSERT: 2 }; function CriticChangesetLine(type, old_index, old_lines, new_index, new_lines, operations) { this.type = type; this.oldIndex = old_index; this.oldText = old_lines && old_lines[old_index]; this.newIndex = new_index; this.newText = new_lines && new_lines[new_index]; operations = operations || null; var self = this; function getOperations() { if (typeof operations == "string") { operations = operations.split(","); for (var index = 0; index < operations.length; ++index) { var operation = operations[index], match; if (operation == "ws") continue; if (match = /^r(\d+)-(\d+)=(\d+)-(\d+)$/.exec(operation)) operation = [CriticChangesetLineConstants.OPERATION_REPLACE, parseInt(match[1]), parseInt(match[2]), parseInt(match[3]), parseInt(match[4])]; else if (match = /^d(\d+)-(\d+)$/.exec(operation)) operation = [CriticChangesetLineConstants.OPERATION_DELETE, parseInt(match[1]), parseInt(match[2])]; else if (match = /^i(\d+)-(\d+)$/.exec(operation)) operation = [CriticChangesetLineConstants.OPERATION_INSERT, parseInt(match[1]), parseInt(match[2])]; Object.freeze(operation); operations[index] = operation; } Object.freeze(operations); } return operations; } Object.defineProperty(this, "operations", { get: getOperations, enumerable: true }); Object.freeze(this); } function CriticChangesetChunk(changeset, file, delete_offset, delete_count, insert_offset, insert_count, analysis, whitespace) { this.changeset = changeset; this.file = file; this.deleteOffset = delete_offset; this.deleteCount = delete_count; this.insertOffset = insert_offset; this.insertCount = insert_count; var self = this; var lines; function getLines() { function fillLines(old_stop, new_stop) { while (old_offset < old_stop && new_offset < new_stop) lines.push(new 
CriticChangesetLine(CriticChangesetLineConstants.TYPE_REPLACED, self.deleteOffset + old_offset++, old_lines, self.insertOffset + new_offset++, new_lines)); while (old_offset < old_stop) lines.push(new CriticChangesetLine(CriticChangesetLineConstants.TYPE_DELETED, self.deleteOffset + old_offset++, old_lines, self.insertOffset + new_offset, null)); while (new_offset < new_stop) lines.push(new CriticChangesetLine(CriticChangesetLineConstants.TYPE_INSERTED, self.deleteOffset + old_offset, null, self.insertOffset + new_offset++, new_lines)); } if (!lines) { var old_lines = file.oldVersion.lines; var new_lines = file.newVersion.lines; var old_offset = 0, new_offset = 0; lines = []; if (analysis) { var mappings = analysis.split(";"); for (var mapping_index = 0; mapping_index < mappings.length; ++mapping_index) { var match = /^(\d+)=(\d+)(?::(.*))?$/.exec(mappings[mapping_index]); var old_mapped_offset = parseInt(match[1]); var new_mapped_offset = parseInt(match[2]); var operations = match[3]; fillLines(old_mapped_offset, new_mapped_offset); lines.push(new CriticChangesetLine(CriticChangesetLineConstants.TYPE_MODIFIED, self.deleteOffset + old_offset++, old_lines, self.insertOffset + new_offset++, new_lines, operations)); } } fillLines(self.deleteCount, self.insertCount); } return lines; } Object.defineProperty(this, "lines", { get: getLines, enumerable: true }); Object.freeze(this); } CriticChangesetChunk.prototype.toString = function () { var result = format("@@ -%d,%d +%d,%d @@\n", this.deleteOffset + 1, this.deleteCount, this.insertOffset + 1, this.insertCount); for (var index = 0; index < this.lines.length; ++index) { var line = this.lines[index]; switch (line.type) { case CriticChangesetLineConstants.TYPE_REPLACED: case CriticChangesetLineConstants.TYPE_MODIFIED: case CriticChangesetLineConstants.TYPE_DELETED: result += format("-%s%s\n", line.oldText, line.operations ? 
" " + JSON.stringify(line.operations) : ""); } } for (var index = 0; index < this.lines.length; ++index) { var line = this.lines[index]; switch (line.type) { case CriticChangesetLineConstants.TYPE_REPLACED: case CriticChangesetLineConstants.TYPE_MODIFIED: case CriticChangesetLineConstants.TYPE_INSERTED: result += format("+%s%s\n", line.newText, line.operations ? " " + JSON.stringify(line.operations) : ""); } } return result; }; function CriticChangesetFile(changeset, file_id, old_sha1, new_sha1, old_mode, new_mode) { CriticFile.call(this, { id: file_id }); this.changeset = changeset; if (old_sha1 != '0000000000000000000000000000000000000000') this.oldVersion = new CriticChangesetFileVersion(changeset, this, old_mode, null, old_sha1); else this.oldVersion = null; if (new_sha1 != '0000000000000000000000000000000000000000') this.newVersion = new CriticChangesetFileVersion(changeset, this, new_mode, null, new_sha1); else this.newVersion = null; var self = this; var chunks, deleteCount, insertCount; var reviewers; function getChunks() { if (chunks === void 0) { if (self.oldVersion === null || self.newVersion === null) chunks = null; else { chunks = []; var result = db.execute("SELECT deleteOffset, deleteCount, insertOffset, insertCount, analysis, whitespace FROM chunks WHERE changeset=%d AND file=%d ORDER BY deleteOffset ASC", self.changeset.id, self.id); for (var index = 0; index < result.length; ++index) { var row = result[index]; chunks.push(new CriticChangesetChunk(self.changeset, self, row.deleteOffset - 1, row.deleteCount, row.insertOffset - 1, row.insertCount, row.analysis, !!row.whitespace)); } } } return chunks; } function fetchCounts() { var result = db.execute("SELECT SUM(deletecount) AS deleteCount, SUM(insertcount) AS insertCount FROM chunks WHERE changeset=%d AND file=%d", self.changeset.id, self.id)[0]; deleteCount = result.deleteCount; insertCount = result.insertCount; } function getDeleteCount() { if (deleteCount === void 0) fetchCounts(); return 
deleteCount; } function getInsertCount() { if (insertCount === void 0) fetchCounts(); return insertCount; } function getReviewers() { if (!reviewers) if (!self.changeset.review) return null; else { reviewers = {}; Object.defineProperties(reviewers, { pending: { value: {} }, reviewed: { value: {} }}); var result = db.execute("SELECT assignee, state, SUM(deleted) AS deleted, SUM(inserted) AS inserted" + " FROM fullreviewuserfiles" + " WHERE review=%d AND changeset=%d AND file=%d AND (state='pending' OR reviewer=assignee)" + " GROUP BY assignee, state", self.changeset.review.id, self.changeset.id, self.id); for (var index = 0; index < result.length; ++index) { var row = result[index]; var user_id = row.assignee; if (!(user_id in reviewers)) reviewers[user_id] = new CriticUser(user_id); var counts = Object.freeze(Object.create(null, { deleteCount: { value: row.deleted, enumerable: true }, insertCount: { value: row.inserted, enumerable: true }})); if (row.state == "pending") reviewers.pending[user_id] = counts; else reviewers.reviewed[user_id] = counts; } Object.freeze(reviewers.pending); Object.freeze(reviewers.reviewed); Object.freeze(reviewers); } return reviewers; } var isReviewed, reviewedBy; if (!changeset.review) isReviewed = reviewedBy = null; function fetchReviewStatus() { var result = db.execute("SELECT state, reviewer FROM reviewfiles WHERE review=%d AND changeset=%d AND file=%d", self.changeset.review.id, self.changeset.id, self.id)[0]; isReviewed = result.state == "reviewed"; reviewedBy = isReviewed ? 
new CriticUser({ id: result.reviewer }) : null; } function getIsReviewed() { if (isReviewed === void 0) fetchReviewStatus(); return isReviewed; } function getReviewedBy() { if (reviewedBy === void 0) fetchReviewStatus(); return reviewedBy; } Object.defineProperties(this, { chunks: { get: getChunks, enumerable: true }, deleteCount: { get: getDeleteCount, enumerable: true }, insertCount: { get: getInsertCount, enumerable: true }, reviewers: { get: getReviewers, enumerable: true }, isReviewed: { get: getIsReviewed, enumerable: true }, reviewedBy: { get: getReviewedBy, enumerable: true }}); Object.freeze(this); } CriticChangesetFile.prototype = Object.create(CriticFile.prototype); function CriticChangesetFileVersion(changeset, file, mode, size, sha1) { CriticFileVersion.call(this, changeset.repository, file.path, mode, size, sha1, { review: changeset.review }); this.changeset = changeset; this.file = file; Object.freeze(this); } CriticChangesetFileVersion.prototype = Object.create(CriticFileVersion.prototype); CriticChangesetFile.prototype.toString = function () { return format("CriticChangesetFile(path=%s)", this.path); }; function CriticChangeset(repository, data) { var changeset_id, parent, child; if (typeof data.id == "number") { changeset_id = data.id; var result = db.execute("SELECT parent, child FROM changesets WHERE id=%d", changeset_id)[0]; if (!result) throw CriticError(format("%d: invalid changeset ID", changeset_id)); parent = data.parent || repository.getCommit(result.parent); child = data.child || repository.getCommit(result.child); result = null; } else if (data.parent && data.child) { parent = data.parent; child = data.child; var result = db.execute("SELECT id FROM changesets WHERE parent=%d AND child=%d AND type IN ('direct', 'custom')", parent.id, child.id)[0]; if (!result) throw CriticError(format("%s..%s: changeset not cached", parent.sha1, child.sha1)); changeset_id = result.id; result = null; } else throw CriticError("invalid use: either changeset 
ID or parent/child commits must be provided"); this.repository = repository; this.review = data.review || null; this.id = changeset_id; this.parent = parent; this.child = child; this.commits = data.commits; var self = this; var files = null, filtered_files = data.files || false; var reviewers = null; var actuals = null; function getFiles() { if (!files) { files = []; var file_filter; if (filtered_files) file_filter = format(" AND file IN (%s)", filtered_files.map(parseInt).join(", ")); else file_filter = ""; var result = db.execute("SELECT file, old_sha1, new_sha1, old_mode, new_mode FROM fileversions WHERE changeset=%d" + file_filter, self.id); for (var index = 0; index < result.length; ++index) { var row = result[index]; var file = new CriticChangesetFile(self, row.file, row.old_sha1, row.new_sha1, row.old_mode, row.new_mode); files.push(file); Object.defineProperty(files, file.path, { value: file }); } Object.freeze(files); } return files; } function getReviewers() { if (!reviewers) if (!self.review) return null; else { reviewers = {}; Object.defineProperties(reviewers, { pending: { value: {} }, reviewed: { value: {} }}); var result = db.execute("SELECT assignee, state, SUM(deleted) AS deleted, SUM(inserted) AS inserted" + " FROM fullreviewuserfiles" + " WHERE review=%d AND changeset=%d AND (state='pending' OR reviewer=assignee)" + " GROUP BY assignee, state", self.review.id, self.id); for (var index = 0; index < result.length; ++index) { var row = result[index]; var user_id = row.assignee; if (!(user_id in reviewers)) reviewers[user_id] = new CriticUser(user_id); var counts = Object.freeze(Object.create(null, { deleteCount: { value: row.deleted, enumerable: true }, insertCount: { value: row.inserted, enumerable: true }})); if (row.state == "pending") reviewers.pending[user_id] = counts; else reviewers.reviewed[user_id] = counts; } Object.freeze(reviewers.pending); Object.freeze(reviewers.reviewed); Object.freeze(reviewers); } return reviewers; } function 
getActuals() { if (actuals === null) if (!self.review) return null; else { actuals = []; if (db.execute("SELECT 1 FROM reviewchangesets WHERE review=%d AND changeset=%d", self.review.id, self.id)[0]) actuals.push(self); else { var commits = self.review.commits.restrict([self.child], [self.parent]); for (var index = 0; index < commits.length; ++index) { if (commits[index].parents.length > 1) { var merge = self.review.repository.getMergeChangeset(commits[index], { review: self.review }); merge.changesets.forEach(function (changeset) { actuals.push(changeset); }); } else actuals.push(self.review.repository.getChangeset({ commit: commits[index], review: self.review })); } } Object.freeze(actuals); } return actuals; } Object.defineProperties(this, { files: { get: getFiles, enumerable: true }, reviewers: { get: getReviewers, enumerable: true }, actuals: { get: getActuals, enumerable: true }}); Object.freeze(this); } function CriticMergeChangeset(changesets) { this.repository = changesets[0].repository; this.review = changesets[0].review; this.commit = changesets[0].child; this.changesets = []; for (var index = 0; index < changesets.length; ++index) { this.changesets.push(changesets[index]); this.changesets[changesets[index].parent.sha1] = changesets[index]; } Object.freeze(this.changesets); Object.freeze(this); } ================================================ FILE: src/library/js/v8/critic-cli.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2014 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License. */ "use strict"; function executeCLI(commands) { var argv = [python_executable, "-m", "cli"], stdin = ""; if (typeof user_id === "number") argv.push("-u", user_id.toString()); authentication_labels.forEach( function (authentication_label) { argv.push("-l", authentication_label); }); commands.forEach( function (command) { argv.push(command.name); stdin += format("%r\n", command.data); }); var process = new OS.Process(python_executable, { argv: argv, environ: { PYTHONPATH: python_path }}); return process.call(stdin).trim().split("\n"); } ================================================ FILE: src/library/js/v8/critic-comment.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ "use strict"; function CriticComment(chain_id, batch_id, comment_id, user_id, time, state, text, data) { this.id = comment_id; this.user = new CriticUser(user_id); this.time = time; this.text = text; var self = this; var chain = data.chain || null; var batch = data.batch || null; function getChain() { if (!chain) chain = new CriticCommentChain(chain_id, { comments: { comment_id: self }}); return chain; } function getBatch() { if (!batch) batch = new CriticBatch(batch_id); return batch; } Object.defineProperties(this, { chain: { get: getChain, enumerable: true }, batch: { get: getBatch, enumerable: true }}); Object.freeze(this); } function CriticCommentChain(result_or_chain_id, data) { var chain_id, result; if (typeof result_or_chain_id == "number") { chain_id = result_or_chain_id; result = db.execute("SELECT id, review, batch, uid, time, type, state, origin, file, first_commit, last_commit, closed_by, addressed_by FROM commentchains WHERE id=%d", chain_id)[0]; } else { result = result_or_chain_id; chain_id = result.id; } if (!result) throw CriticError(format("%d: invalid comment chain ID", chain_id)); data = data || {}; this.id = chain_id; this.type = result.type == "issue" ? 
CriticCommentChain.TYPE_ISSUE : CriticCommentChain.TYPE_NOTE; switch (result.state) { case "open": this.state = CriticCommentChain.STATE_OPEN; break; case "closed": this.state = CriticCommentChain.STATE_RESOLVED; break; case "addressed": this.state = CriticCommentChain.STATE_ADDRESSED; break; } var self = this; var review_id = result.review, review = data.review || null; var batch_id = result.batch, batch = data.batch || null; var user_id = result.uid, user = data.user || null; var users = null; var closed_by_id = result.closed_by, closed_by = null; var file_id, file; var commit_id, first_commit_id, last_commit_id, addressed_by_id; var changeset, addressed_by, commit, lines, context; var comments; function getReview() { if (!review) review = new CriticReview(review_id); return review; } function getBatch() { if (!batch) batch = new CriticBatch(batch_id); return batch; } function getUser() { if (!user) user = new CriticUser(user_id); return user; } function getUsers() { if (!users) { users = {}; var result = db.execute("SELECT uid FROM commentchainusers WHERE chain=%d", self.id); for (var index = 0; index < result.length; ++index) users[result[index].uid] = new CriticUser(result[index].uid); Object.freeze(users); } return users; } function getClosedBy() { if (!closed_by) if (!closed_by_id) return null; else closed_by = new CriticUser(closed_by_id); return closed_by; } function getChangeset() { if (changeset === void 0) if (first_commit_id === last_commit_id) changeset = null; else { var review = getReview(); var first_commit = review.repository.getCommit(first_commit_id); var last_commit = review.repository.getCommit(last_commit_id); changeset = new CriticChangeset(review.repository, { parent: first_commit, child: last_commit, files: [file_id] }); } return changeset; } function getAddressedBy() { if (!addressed_by) if (!addressed_by_id) return null; else { var review = getReview(); addressed_by = review.repository.getCommit(addressed_by_id); } return addressed_by; } 
function getCommit() { if (!commit) { var review = getReview(); commit = review.repository.getCommit(commit_id); } return commit; } function getFile() { if (!file) if (self.changeset) file = self.changeset.files[0]; else file = review.repository.getCommit(first_commit_id).getFile(file_id); return file; } function getLines() { if (!lines) { lines = {}; var result = db.execute("SELECT sha1, first_line, last_line FROM commentchainlines WHERE chain=%d AND state='current'", chain_id); for (var index = 0; index < result.length; ++index) { var row = result[index]; lines[row.sha1] = Object.freeze({ firstLine: row.first_line - 1, lastLine: row.last_line - 1 }); } Object.freeze(lines); } return lines; } function getContext(minimized) { if (context === void 0) { var version; if (self.file instanceof CriticFileVersion) version = self.file; else if (self.origin == "old") version = self.file.oldVersion; else version = self.file.newVersion; var position = self.lines[version.sha1]; var result = db.execute("SELECT context FROM codecontexts WHERE sha1=%s AND first_line<=%d AND last_line>=%d ORDER BY first_line DESC LIMIT 1", version.sha1, position.firstLine + 1, position.lastLine + 1)[0]; if (result) context = result.context; else context = null; } if (minimized && context) return context.replace(/\(.*(?:\)|...$)/, "(...)"); else return context; } function getComments() { if (!comments) { comments = []; var result = db.execute("SELECT id, batch, uid, time, state, comment FROM comments WHERE chain=%d AND state='current' ORDER BY time ASC", chain_id); for (var index = 0; index < result.length; ++index) { var row = result[index]; comments.push(data.comments && data.comments[row.id] || new CriticComment(chain_id, row.batch, row.id, row.uid, row.time, row.state, row.comment, { chain: self })); } Object.freeze(comments); } return comments; } if (result.file) { this.origin = result.origin; file_id = result.file; file = data.file || null; first_commit_id = result.first_commit; 
last_commit_id = result.last_commit; addressed_by_id = result.addressed_by; Object.defineProperties(this, { changeset: { get: getChangeset, enumerable: true }, addressedBy: { get: getAddressedBy, enumerable: true }, file: { get: getFile, enumerable: true }, lines: { get: getLines, enumerable: true }, context: { get: getContext, enumerable: true }, minimizedContext: { get: function () { return getContext(true); }, enumerable: true }}); } else if (result.first_commit && result.last_commit) { commit_id = result.first_commit; var result2 = db.execute("SELECT first_line, last_line FROM commentchainlines WHERE chain=%d", chain_id)[0]; this.firstLine = result2.first_line; this.lastLine = result2.last_line; result2 = null; Object.defineProperty(this, "commit", { get: getCommit, enumerable: true }); } result = null; Object.defineProperties(this, { review: { get: getReview, enumerable: true }, batch: { get: getBatch, enumerable: true }, user: { get: getUser, enumerable: true }, users: { get: getUsers, enumerable: true }, closedBy: { get: getClosedBy, enumerable: true }, comments: { get: getComments, enumerable: true }}); Object.freeze(this); } Object.defineProperties(CriticCommentChain, { TYPE_ISSUE: { value: 0 }, TYPE_NOTE: { value: 1 }, STATE_OPEN: { value: 0 }, STATE_RESOLVED: { value: 1 }, STATE_ADDRESSED: { value: 2 }}); CriticCommentChain.find = function (data) { var commit; if (typeof data.commit == "object" && data.commit instanceof CriticCommit) commit = data.commit; else { var repository = data.repository; if (!repository || !(repository instanceof CriticRepository)) throw CriticError("invalid argument; data.commit must be a Commit object, or data.repository must be a Repository object"); commit = repository.getCommit(data.commit); } var review = null; if ("review" in data) { if (typeof data.review == "object" && data.review instanceof CriticReview) review = data.review; else review = new CriticReview(data.review); } var result = []; if ("file" in data) { var file; 
if (typeof data.file == "object" && data.file instanceof CriticFile) file = data.file; else file = CriticFile.find(data.file); var fileversion = commit.getFile(file.path); var args = ["SELECT DISTINCT commentchains.review, commentchainlines.chain, commentchainlines.first_line, commentchainlines.last_line " + " FROM commentchains " + " JOIN commentchainlines ON (commentchainlines.chain=commentchains.id) " + " WHERE commentchainlines.sha1=%s " + " AND commentchains.state IN ('open', 'closed', 'addressed')", fileversion.sha1]; if (review !== null) { args[0] += " AND commentchains.review=%d"; args.push(review.id); } result = db.execute.apply(db, args); } else if (review) { var preliminary = db.execute("SELECT DISTINCT commentchains.id, commentchains.file, commentchainlines.sha1, " + " commentchainlines.first_line, commentchainlines.last_line " + " FROM commentchains " + " JOIN commentchainlines ON (commentchainlines.chain=commentchains.id) " + " WHERE commentchains.review=%d " + " AND commentchains.state IN ('open', 'closed', 'addressed')", review.id); var files_in_commit = {}; preliminary.apply( function (chain_id, file_id, sha1, first_line, last_line) { if (!(file_id in files_in_commit)) try { files_in_commit[file_id] = commit.getFile(CriticFile.find(file_id).path).sha1; } catch (error) { files_in_commit[file_id] = ""; } if (sha1 == files_in_commit[file_id]) result.push({ review: review.id, chain: chain_id, first_line: first_line, last_line: last_line }); }); } var chains = []; var reviews = {}; for (var index = 0; index < result.length; ++index) { var review_id = result[index].review; var review = reviews[review_id] || (reviews[review_id] = new CriticReview(review_id)); chains.push({ chain: new CriticCommentChain(result[index].chain, { review: review }), lineIndex: result[index].first_line - 1, lineCount: result[index].last_line - result[index].first_line + 1 }); } return chains; }; CriticCommentChain.prototype.getComment = function (id) { id = ~~id; var result = 
db.execute("SELECT batch, uid, time, state, comment FROM comments WHERE chain=%d AND id=%d AND state='current'", this.id, id)[0]; return new CriticComment(this.id, result.batch, id, result.uid, result.time, result.state, result.comment, { chain: this }); }; ================================================ FILE: src/library/js/v8/critic-commitset.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/

"use strict";

/* An ordered set of commits from a single repository.

   CriticCommitSet extends Array (its prototype is created from
   Array.prototype below): the constructor pushes the member commits onto
   `this` in an order where every commit precedes its parents.  Per commit it
   also records:

     this.parents[sha1]  -- the commit's parents that are members of the set
     this.children[sha1] -- member commits that have the keyed commit as a
                            parent (keys may also be non-member parents)
     this.heads          -- member commits with no member children
     this.tails          -- non-member parents of member commits

   Throws CriticError if the commits come from different repositories or if
   the same commit occurs more than once in the source array. */
function CriticCommitSet(all) {
  Object.defineProperties(this, { parents: { value: {} },
                                  children: { value: {} },
                                  heads: { value: [] },
                                  tails: { value: [] }});

  this.repository = null;

  /* First pass: register each commit under its SHA-1 directly on `this`, and
     check that all commits belong to a single repository. */
  for (var index = 0; index < all.length; ++index) {
    var commit = all[index];
    if (!this.repository)
      this.repository = commit.repository;
    else if (this.repository.id != commit.repository.id)
      throw CriticError(format("invalid use: commits from multiple repositories in source array ('%s' and '%s')", this.repository.name, commit.repository.name));
    if (commit.sha1 in this)
      throw CriticError(format("invalid use: commit %s occurs multiple times in source array", commit.sha1));
    Object.defineProperty(this, commit.sha1, { value: commit });
    this.parents[commit.sha1] = [];
    this.children[commit.sha1] = [];
  }

  /* Second pass: build the parents/children maps. */
  for (var index = 0; index < all.length; ++index) {
    var commit = all[index];
    /* NOTE(review): `count` is incremented but never read afterwards — looks
       vestigial; left untouched here. */
    var count = 0;
    for (var pindex = 0; pindex < commit.parents.length; ++pindex) {
      var parent = commit.parents[pindex];
      if (parent.sha1 in this) {
        ++count;
        this.parents[commit.sha1].push(parent);
      }
      else if (!(parent.sha1 in this.children))
        this.children[parent.sha1] = [];
      /* Record this commit as a child of the parent, whether or not the
         parent itself is a member of the set. */
      this.children[parent.sha1].push(commit);
    }
  }

  /* Third pass: identify heads (no member children) and tails (non-member
     parents of member commits), then freeze the per-commit arrays. */
  for (var index = 0; index < all.length; ++index) {
    var commit = all[index];
    if (this.children[commit.sha1].length == 0) {
      this.heads.push(commit);
      Object.defineProperty(this.heads, commit.sha1, { value: commit });
    }
    if (this.parents[commit.sha1].length < commit.parents.length) {
      for (var index1 = 0; index1 < commit.parents.length; ++index1) {
        var parent = commit.parents[index1];
        if (!(parent.sha1 in this) && !(parent.sha1 in this.tails)) {
          this.tails.push(parent);
          Object.defineProperty(this.tails, parent.sha1, { value: parent });
        }
      }
    }
    Object.freeze(this.parents[commit.sha1]);
    Object.freeze(this.children[commit.sha1]);
  }

  /* Populate the array part of `this` via an iterative DFS from each head,
     producing a children-before-parents ordering. */
  var added = {};
  for (var index = 0; index < this.heads.length; ++index) {
    var stack = [this.heads[index]];
    var stack_offset = 0;
    while (stack_offset < stack.length) {
      var commit = stack[stack_offset++];
      if (commit.sha1 in added)
        continue;
      do {
        var parents = this.parents[commit.sha1];
        this.push(commit);
        added[commit.sha1] = true;
        commit = null;
        /* Continue directly with the first unvisited parent; queue any
           remaining unvisited parents for later. */
        for (var pindex = 0; pindex < parents.length; ++pindex) {
          var parent = parents[pindex];
          if (parent.sha1 in added)
            continue;
          else if (commit === null)
            commit = parent;
          else
            stack.push(parent);
        }
      } while (commit);
    }
  }

  var self = this;
  var upstreams = null;

  /* Lazily computed list of "upstream" commits: the tails, minus any tail
     that some head is an ancestor of, minus any tail that is an ancestor of
     another remaining tail.  Frozen once computed. */
  function getUpstreams() {
    if (!upstreams) {
      upstreams = self.tails.slice();
      if (upstreams.length > 1) {
        /* Drop tails that are descendants of a head. */
        for (var index1 = 0; index1 < upstreams.length; ++index1)
          for (var index2 = 0; index2 < self.heads.length; ++index2)
            if (self.heads[index2].isAncestorOf(upstreams[index1])) {
              upstreams[index1] = null;
              break;
            }
        /* Drop tails that are ancestors of other remaining tails. */
        for (var index1 = 0; index1 < upstreams.length; ++index1)
          if (upstreams[index1])
            for (var index2 = 0; index2 < upstreams.length; ++index2)
              if (index1 != index2 && upstreams[index2])
                if (upstreams[index1].isAncestorOf(upstreams[index2])) {
                  upstreams[index1] = null;
                  break;
                }
        upstreams = upstreams.filter(function (commit) { return commit !== null; });
      }
      Object.freeze(upstreams);
    }
    return upstreams;
  }

  Object.defineProperty(this, "upstreams", { get: getUpstreams });

  Object.freeze(this.parents);
  Object.freeze(this.children);
  Object.freeze(this.heads);
  Object.freeze(this.tails);
  Object.freeze(this);
}

/* Property descriptors for CriticCommitSet.prototype (created below from
   Array.prototype, so commit-sets behave like arrays of commits). */
var properties = {
  /* Return a new CriticCommitSet with the members reachable from `heads`
     without passing through any commit in the optional `tails` array.
     Throws CriticError if a given tail is neither a member nor a tail of
     this set. */
  restrict: {
    value: function (heads, tails) {
      var reachable = [];
      var self = this;
      var exclude = {};
      if (tails)
        for (var index = 0; index < tails.length; ++index) {
          var tail = tails[index];
          if (!(tail.sha1 in this) && !(tail.sha1 in this.tails))
            throw CriticError("CommitSet.restrict: invalid tail commits; not member or tail of commit-set");
          exclude[tail.sha1] = true;
        }
      /* Recursively collect `commit` and its member ancestors, skipping
         excluded commits.  `reachable` doubles as an array and a SHA-1 keyed
         visited map. */
      function add(commit) {
        if (!(commit.sha1 in reachable) && !(commit.sha1 in exclude)) {
          reachable.push(commit);
          reachable[commit.sha1] = commit;
          var parents = self.parents[commit.sha1];
          for (var index = 0; index < parents.length; ++index)
            add(parents[index]);
        }
      }
      for (var index = 0; index < heads.length; ++index)
        if (heads[index].sha1 in this)
          add(heads[index]);
      return new CriticCommitSet(reachable);
    },
    writable: true, configurable: true },
  /* Return a new CriticCommitSet with all members except those present (by
     SHA-1 key) in `commits`. */
  without: {
    value: function (commits) {
      var remaining = [];
      for (var index = 0; index < this.length; ++index)
        if (!(this[index].sha1 in commits))
          remaining.push(this[index]);
      return new CriticCommitSet(remaining);
    },
    writable: true, configurable: true },
  /* Return the changeset from this set's single upstream to its single head,
     or null for an empty set.  Throws CriticError when the set has multiple
     heads or multiple upstreams. */
  getChangeset: {
    value: function (data) {
      if (this.length == 0)
        return null;
      if (this.heads.length != 1)
        throw CriticError(format("commit-set has multiple heads: %d", this.heads.length));
      if (this.upstreams.length != 1)
        throw CriticError("commit-set has multiple upstreams");
      data = data || {};
      data.parent = this.upstreams[0];
      data.child = this.heads[0];
      data.commits = this;
      return this.repository.getChangeset(data);
    },
    writable: true, configurable: true }
};

CriticCommitSet.prototype = Object.create(Array.prototype, properties);

================================================
FILE: src/library/js/v8/critic-dashboard.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

 Copyright 2013 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License.  You may obtain a copy
 of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 License for the specific language governing permissions and limitations
 under the License.
*/

"use strict";

/* Per-user dashboard: lazily computed, frozen groupings of the user's
   reviews.

   Exposes (all as lazy getters backed by database queries):
     this.owned.{finished,accepted,pending,dropped} -- reviews the user owns
     this.active   -- open reviews needing the user's attention (pending
                      changes to review and/or unread comments), with lookup
                      maps attached as non-enumerable properties
     this.inactive -- open reviews the user is assigned to or watches but
                      that currently need no attention

   Throws CriticError unless `user` is a CriticUser. */
function CriticDashboard(user) {
  if (!(user instanceof CriticUser))
    throw CriticError("invalid user argument; expected User object");

  this.user = user;

  var self = this;
  var owned_finished = null;
  var owned_accepted = null;
  var owned_pending = null;
  var owned_dropped = null;
  var active = null;
  var inactive = null;

  /* Map rows with an `id` column to CriticReview objects, optionally
     filtered; returns a frozen array. */
  function createReviews(result, filter) {
    var reviews = [];
    for (var index = 0; index < result.length; ++index) {
      var review = new CriticReview(result[index].id);
      if (!filter || filter(review))
        reviews.push(review);
    }
    return Object.freeze(reviews);
  }

  /* Owned reviews in state 'closed'. */
  function getOwnedFinished() {
    if (!owned_finished)
      owned_finished = createReviews(db.execute("SELECT id FROM reviews JOIN reviewusers ON (review=id) WHERE uid=%d AND owner AND state='closed' ORDER BY id ASC", self.user.id));
    return owned_finished;
  }

  /* Owned open reviews, partitioned by acceptance; fills both
     owned_accepted and owned_pending in one go. */
  function getOwnedAccepted() {
    if (!owned_accepted) {
      var owned_open = createReviews(db.execute("SELECT id FROM reviews JOIN reviewusers ON (review=id) WHERE uid=%d AND owner AND state='open' ORDER BY id ASC", self.user.id));
      owned_accepted = [];
      owned_pending = [];
      for (var index = 0; index < owned_open.length; ++index) {
        var review = owned_open[index];
        if (review.progress.accepted)
          owned_accepted.push(review);
        else
          owned_pending.push(review);
      }
      Object.freeze(owned_accepted);
      Object.freeze(owned_pending);
    }
    return owned_accepted;
  }

  function getOwnedPending() {
    if (!owned_pending)
      /* Populates owned_pending as a side-effect. */
      getOwnedAccepted();
    return owned_pending;
  }

  /* Owned reviews in state 'dropped'. */
  function getOwnedDropped() {
    if (!owned_dropped)
      owned_dropped = createReviews(db.execute("SELECT id FROM reviews JOIN reviewusers ON (review=id) WHERE uid=%d AND owner AND state='dropped' ORDER BY id ASC", self.user.id));
    return owned_dropped;
  }

  /* Open reviews needing attention: reviews with changes pending the user's
     review and/or unread comments.  The returned (frozen) array carries
     non-enumerable review-id keyed maps as secondary indexes. */
  function getActive() {
    if (!active) {
      active = [];
      Object.defineProperties(active, { hasPendingChanges: { value: Object.create(null) },
                                        hasUnreadComments: { value: Object.create(null) },
                                        unsharedPendingChanges: { value: Object.create(null) },
                                        sharedPendingChanges: { value: Object.create(null) },
                                        unreadComments: { value: Object.create(null) },
                                        isReviewer: { value: Object.create(null) },
                                        isWatcher: { value: Object.create(null) }});

      var assignments = db.execute("SELECT DISTINCT reviews.id AS id, fullreviewuserfiles.state AS state FROM reviews JOIN fullreviewuserfiles ON (review=id) WHERE assignee=%d AND reviews.state='open'", self.user.id);
      var is_reviewer = {};

      for (var index = 0; index < assignments.length; ++index) {
        var row = assignments[index];
        var review_id = row.id;
        if (row.state == "pending") {
          var review = new CriticReview(review_id);
          active.push(review);
          active.hasPendingChanges[review_id] = review;
          active.isReviewer[review_id] = review;
        }
        is_reviewer[review_id] = true;
      }

      var before = Date.now();

      /* Per review with pending changes: aggregate pending line counts and
         age, split by whether the user is the only assigned reviewer
         (unshared) or not. */
      for (var review_id in active.hasPendingChanges) {
        var pending = db.execute("SELECT SUM(reviewfiles.deleted) AS deleted, " +
                                 " SUM(reviewfiles.inserted) AS inserted, " +
                                 " EXTRACT('epoch' FROM (NOW() - MIN(reviewuserfiles.time))) AS seconds, " +
                                 " reviewfilesharing.reviewers<2 AS unshared " +
                                 "FROM reviews " +
                                 "JOIN reviewfiles ON (reviewfiles.review=reviews.id) " +
                                 "JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id " +
                                 " AND reviewuserfiles.uid=%d) " +
                                 "JOIN reviewfilesharing ON (reviewfilesharing.review=reviews.id " +
                                 " AND reviewfilesharing.file=reviewfiles.id) " +
                                 "WHERE reviews.id=%d " +
                                 " AND reviewfiles.state='pending' " +
                                 "GROUP BY reviewfilesharing.reviewers<2", self.user.id, review_id);
        for (var index = 0; index < pending.length; ++index) {
          var row = pending[index];
          if (row.unshared)
            active.unsharedPendingChanges[review_id] = Object.freeze({ deleted: row.deleted, inserted: row.inserted, seconds: row.seconds });
          else
            active.sharedPendingChanges[review_id] = Object.freeze({ deleted: row.deleted, inserted: row.inserted, seconds: row.seconds });
        }
      }

      var after = Date.now();

      /* NOTE(review): looks like leftover timing instrumentation (query
         time in milliseconds) — confirm nothing depends on `qt` before
         removing. */
      active.qt = after - before;

      var with_unread = db.execute("SELECT reviews.id AS id, COUNT(comments.id) AS count FROM reviews JOIN commentchains ON (commentchains.review=reviews.id) JOIN comments ON (comments.chain=commentchains.id) JOIN commentstoread ON (commentstoread.comment=comments.id) WHERE commentstoread.uid=%d AND reviews.state='open' GROUP BY reviews.id", self.user.id);

      for (var index = 0; index < with_unread.length; ++index) {
        var review_id = with_unread[index].id,
            review = active.hasPendingChanges[review_id];
        /* Reviews with unread comments but no pending changes are added to
           the array here. */
        if (!review)
          active.push(review = new CriticReview(review_id));
        active.hasUnreadComments[review_id] = review;
        active.unreadComments[review_id] = with_unread[index].count;
        if (is_reviewer[review_id])
          active.isReviewer[review_id] = review;
        else
          active.isWatcher[review_id] = review;
      }

      /* Sort by ascending review id. */
      active.sort(function (a, b) { switch (true) { case a.id < b.id: return -1; case a.id > b.id: return 1; default: return 0; } });

      Object.freeze(active.hasPendingChanges);
      Object.freeze(active.hasUnreadComments);
      Object.freeze(active.unsharedPendingChanges);
      Object.freeze(active.sharedPendingChanges);
      Object.freeze(active.unreadComments);
      Object.freeze(active.isReviewer);
      Object.freeze(active.isWatcher);
      Object.freeze(active);
    }
    return active;
  }

  /* Open reviews the user is assigned to (with nothing pending) or merely
     watches; frozen array with isReviewer/isWatcher maps attached. */
  function getInactive() {
    if (!inactive) {
      inactive = [];
      Object.defineProperties(inactive, { isReviewer: { value: {} },
                                          isWatcher: { value: {} }});

      var is_reviewer = db.execute("SELECT DISTINCT reviews.id AS id, fullreviewuserfiles.state AS state FROM reviews JOIN fullreviewuserfiles ON (review=id) WHERE assignee=%d AND reviews.state='open'", self.user.id);
      var include = {}, exclude = {};

      /* A review with any pending assignment belongs to `active`, not here. */
      for (var index = 0; index < is_reviewer.length; ++index) {
        var review_id = is_reviewer[index].id;
        if (is_reviewer[index].state == 'pending')
          exclude[review_id] = true;
        else
          include[review_id] = true;
      }

      for (var review_id in include)
        if (!exclude[review_id]) {
          var review = new CriticReview(~~review_id);
          inactive.push(review);
          inactive.isReviewer[review_id] = review;
        }

      var is_watcher = db.execute("SELECT id FROM reviews JOIN reviewusers ON (review=id) WHERE uid=%d AND state='open'", self.user.id);

      for (var index = 0; index < is_watcher.length; ++index) {
        var review_id = is_watcher[index].id;
        if (!include[review_id] && !exclude[review_id]) {
          var review = new CriticReview(review_id);
          inactive.push(review);
          inactive.isWatcher[review_id] = review;
        }
      }

      /* Sort by ascending review id. */
      inactive.sort(function (a, b) { switch (true) { case a.id < b.id: return -1; case a.id > b.id: return 1; default: return 0; } });

      Object.freeze(inactive.isReviewer);
      Object.freeze(inactive.isWatcher);
      Object.freeze(inactive);
    }
    return inactive;
  }

  this.owned = Object.create(null, { finished: { get: getOwnedFinished, enumerable: true },
                                     accepted: { get: getOwnedAccepted, enumerable: true },
                                     pending: { get: getOwnedPending, enumerable: true },
                                     dropped: { get: getOwnedDropped, enumerable: true }});

  Object.defineProperties(this, { active: { get: getActive, enumerable: true },
                                  inactive: { get: getInactive, enumerable: true }});

  Object.freeze(this.owned);
  Object.freeze(this);
}

================================================
FILE: src/library/js/v8/critic-file.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

 Copyright 2013 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ "use strict"; /* Constructor for internal use. External code (and in practice most internal code as well) uses CriticFile.find() to create or find previously created CriticFile objects. That function is also exposed as the File "constructor" externally, to emulate the real constructors old behavior. */ function CriticFile(data) { var result; if ("id" in data) { result = db.execute("SELECT id, path FROM files WHERE id=%d", data.id)[0]; if (!result) throw CriticError(format("%s: invalid file ID", data.id)); } else if ("path" in data) { result = db.execute("SELECT id, path FROM files WHERE MD5(path)=MD5(%s)", data.path)[0]; if (!result) throw CriticError(format("%s: no such file", data.path)); } else throw CriticError("invalid use; expected data.id or data.path"); this.id = result.id; this.path = result.path; /* CriticFile has sub-classes whose constructors will first call this function and then extend the object further. If that's the case, we don't freeze the object (expecting the sub-class constructor to do so instead.) 
*/ var is_subclass = Object.getPrototypeOf(this) !== CriticFile.prototype; if (!is_subclass) Object.freeze(this); } Object.defineProperties( CriticFile.prototype, { toString: { value: function () { return this.path; }, writable: true, configurable: true }, valueOf: { value: function () { return this.id; }, writable: true, configurable: true } } ); var file_cache_by_id = {}; var file_cache_by_path = {}; CriticFile.find = function (path_or_id) { var cached; if (typeof path_or_id == "string") { /* Normalize forward slashes in the path so that the cache lookup always uses the same format. */ var path = path_or_id.replace(/^\/+|\/+(?=\/)|\/+$/g, ""); if (cached = file_cache_by_path[path]) return cached; else return file_cache_by_path[path] = new CriticFile({ path: path }); } else { var id = ~~path_or_id; if (cached = file_cache_by_id[id]) return cached; else return file_cache_by_path[id] = new CriticFile({ id: id }); } }; /* Externally, CriticFile.find() doubles as the File constructor, so set its prototype property to make the instanceof operator work as expected. */ CriticFile.find.prototype = CriticFile.prototype; /* It also needs to reference itself... */ CriticFile.find.find = CriticFile.find; function CriticFileVersion(repository, file, mode, size, sha1, data) { CriticFile.call(this, { path: file }); this.repository = repository; this.mode = mode; this.size = size; this.sha1 = sha1; var self = this; var review = null; var bytes, lines; var commentChains = null; if (data) if (data.review) review = data.review; function isBinary(data) { /* Check if the file appears to be binary. Using information from .gitattributes would be nice, but git doesn't seem to have a command that directly exposes information from it, and parsing it here is quite sub-optimal. And if the file really is binary then this function will find that out too typically, so using .gitattributes is mostly and optimization anyway. 
This code is essentially a copy of git's heuristics for determining if a file is binary, in convert.c::gather_stats() and convert.c::is_binary(). */ var printable = 0, nonprintable = 0; for (var index = 0; index < data.length; ++index) { var byte = data[index]; if (byte < 32) { switch (byte) { case 0: return true; case 8: case 9: case 10: case 12: case 13: case 27: ++printable; break; default: ++nonprintable; } } else if (byte == 127) ++nonprintable; else ++printable; } return (printable >> 7) < nonprintable; } function getBytes() { if (bytes === void 0) bytes = self.repository.fetch(self.sha1).data; return bytes; } function getLines() { if (lines === void 0) { /* Re-use 'bytes' if set, but if not don't set 'bytes' since that keeps the array (which might be huge) alive longer than what is probably necessary. */ var data = bytes || self.repository.fetch(self.sha1).data; if (isBinary(data)) lines = null; else { var source = data.decode(); lines = source.split(/\r\n|\n/g); /* If the file ends in a line-break (as it typically should) the last element in the array will be empty. We don't want to keep that empty element; it will only make it seem like there's an empty line at the end of the file. 
*/ if (lines.length && !lines[lines.length - 1]) lines.pop(); Object.freeze(lines); } } return lines; } function getCommentChains() { if (!commentChains) { commentChains = []; var result = db.execute("SELECT DISTINCT id, first_line, last_line FROM commentchains JOIN commentchainlines ON (chain=id) WHERE commentchains.state!='draft' AND commentchainlines.state!='draft' AND review=%d AND sha1=%s ORDER BY first_line ASC, last_line ASC", review.id, self.sha1); for (var index = 0; index < result.length; ++index) commentChains.push(new CriticCommentChain(result[index].id, { review: review })); Object.freeze(commentChains); } return commentChains; } Object.defineProperties(this, { lines: { get: getLines, enumerable: true }, bytes: { get: getBytes, enumerable: true }}); if (review) Object.defineProperty(this, "commentChains", { get: getCommentChains, enumerable: true }); else this.commentChains = null; var is_subclass = Object.getPrototypeOf(this) !== CriticFileVersion.prototype; if (!is_subclass) Object.freeze(this); } CriticFileVersion.prototype = Object.create(CriticFile.prototype); ================================================ FILE: src/library/js/v8/critic-filters.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/

"use strict";

/* Computed per-file filter assignments (who reviews/watches which file).

   The heavy lifting is delegated to the Python side: the constructor
   serializes its input as one JSON line, runs `python -m cli apply-filters`
   and parses the JSON it prints back into `this.files`, a (frozen) mapping
   of file id => user id => association data.

   `data` carries either `review` (plus optional added/removed review filter
   arrays, each entry having uid/path/type/delegate) or `repository` (plus
   `files` and optional `recursive`), and optionally `user`. */
function CriticFilters(data) {
  var cli_input = {};

  if (data.review) {
    cli_input.review_id = data.review.id;

    if (data.added_review_filters) {
      cli_input.added_review_filters = [];
      for (var index = 0; index < data.added_review_filters.length; ++index) {
        var filter = data.added_review_filters[index];
        cli_input.added_review_filters.push([filter.uid, filter.path, filter.type, filter.delegate]);
      }
    }

    if (data.removed_review_filters) {
      cli_input.removed_review_filters = [];
      for (var index = 0; index < data.removed_review_filters.length; ++index) {
        var filter = data.removed_review_filters[index];
        cli_input.removed_review_filters.push([filter.uid, filter.path, filter.type, filter.delegate]);
      }
    }
  } else if (data.repository) {
    cli_input.repository_id = data.repository.id;
    cli_input.recursive = !!data.recursive;
    /* Accept CriticFile objects as well as raw paths/ids. */
    cli_input.file_ids = data.files.map(
      function (item) {
        if (!(item instanceof CriticFile))
          item = CriticFile.find(item);
        return item.id;
      });
  }

  if (data.user)
    cli_input.user_id = data.user.id;

  cli_input = JSON.stringify(cli_input) + "\n";

  var cli_args = [python_executable, "-m", "cli", "apply-filters"];
  var cli_process = new OS.Process(python_executable, { argv: cli_args, environ: { PYTHONPATH: python_path }});
  var cli_output = cli_process.call(cli_input);

  this.files = JSON.parse(cli_output.trim());

  /* Freeze the nested per-file / per-user result structure. */
  for (var file_id in this.files) {
    Object.freeze(this.files[file_id]);
    for (var user_id in this.files[file_id])
      Object.freeze(this.files[file_id][user_id]);
  }

  Object.freeze(this);
}

/* Return the association string ("reviewer"/"watcher"/...) for a user/file
   pair, or null when there is none.  Keys are coerced to numbers since they
   arrive as strings from JSON. */
function getUserFileAssociation(filters, user_id, file_id) {
  user_id = Number(user_id);
  file_id = Number(file_id);

  var data;

  if ((data = filters.files[file_id]) && (data = data[user_id]))
    return data[0];
  else
    return null;
}

/* True if the user is assigned as reviewer of the file. */
CriticFilters.prototype.isReviewer = function (user_id, file_id) {
  return getUserFileAssociation(this, user_id, file_id) == "reviewer";
};

/* True if the user is assigned as watcher of the file. */
CriticFilters.prototype.isWatcher = function (user_id, file_id) {
  return getUserFileAssociation(this, user_id, file_id) == "watcher";
};

/* True if the user is either reviewer or watcher of the file. */
CriticFilters.prototype.isRelevant = function (user_id, file_id) {
  var association = getUserFileAssociation(this, user_id, file_id);
  return association == "reviewer" || association == "watcher";
};

/* Return the user id => association data mapping for a file (empty object
   when the file has no associated users). */
CriticFilters.prototype.listUsers = function (file_id) {
  var data = this.files[file_id];
  if (data)
    return data;
  else
    return {};
};

================================================
FILE: src/library/js/v8/critic-filterstransaction.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

 Copyright 2013 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License.  You may obtain a copy
 of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 License for the specific language governing permissions and limitations
 under the License.
*/ "use strict"; var filterstransaction_internals = {}; var filterstransaction_id_counter = 0; function CriticFiltersTransaction(repository) { var internal_id = filterstransaction_id_counter++; filterstransaction_internals[internal_id] = { transaction: this, added: {}, removed: {} }; Object.defineProperty(this, "__id__", { value: internal_id }); this.repository = repository; } function CriticFiltersTransaction_getInternals(transaction) { var internals = filterstransaction_internals[transaction.__id__]; if (!internals || transaction != internals.transaction) throw CriticError("invalid use"); return internals; } CriticFiltersTransaction.prototype.addFilter = function (filter) { var internals = CriticFiltersTransaction_getInternals(this); if (!filter || typeof filter != "object") throw CriticError("invalid 'data' argument; expected object"); var user = filter.user || {}; var what = filter.what || {}; var type = filter.type || ""; var delegates = filter.delegates || null; if (!(user instanceof CriticUser)) throw CriticError("invalid 'data.user' argument; expected User object"); if (!(what instanceof CriticDirectory || what instanceof CriticFile)) throw CriticError("invalid 'data.what' argument; expected Directory or File object"); type = String(type); if (type != "reviewer" && type != "watcher") throw CriticError("invalid 'type' argument; expected \"reviewer\" or \"watcher\""); if (delegates && !Array.prototype.every.call(delegates, function (item) { return item instanceof CriticUser; })) throw CriticError("invalid 'delegates' argument; expected array of User objects"); var existing = this.repository.filters.users[user.name]; var removed = internals.removed[user.id]; function isNonConflicting(filter) { return what.path != filter.path || type != filter.type; } if (removed && !removed.every(isNonConflicting)) throw CriticError("added filter is identical to existing filter removed in this transaction"); if (existing && !existing.every(isNonConflicting)) throw 
CriticError("added filter is identical to existing filter"); var added = internals.added[user.id]; if (!added) added = internals.added[user.id] = []; added.push({ path: what.path, directory_id: what instanceof CriticDirectory ? what.id : 0, file_id: what instanceof CriticFile ? what.id : 0, type: type, delegate: delegates.map(function (user) { return user.name; }).join(",") }); }; CriticFiltersTransaction.prototype.removeFilter = function (filter) { var internals = CriticFiltersTransaction_getInternals(this); if (!filter || typeof filter != "object") throw CriticError("invalid 'data' argument; expected object"); var user = filter.user || {}; var what = filter.what || {}; var type = filter.type || ""; if (!(user instanceof CriticUser)) throw CriticError("invalid 'user' argument; expected User object"); if (!(what instanceof CriticDirectory || what instanceof CriticFile)) throw CriticError("invalid 'what' argument; expected Directory or File object"); type = String(type); if (type != "reviewer" && type != "watcher") throw CriticError("invalid 'type' argument; expected \"reviewer\" or \"watcher\""); function isNonConflicting(filter) { return what.path != filter.path || type != filter.type; } var added = internals.added[user.id]; if (added && !added.every(isNonConflicting)) throw CriticError("removed filter is identical to existing filter added in this transaction"); }; ================================================ FILE: src/library/js/v8/critic-git.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ "use strict"; var RE_LSTREE_LINE = /^([0-9]{6}) (blob|tree|commit) ([0-9a-f]{40}) *([0-9]+|-)\t(.*)$/ var all_repositories = []; function GitObject(sha1, type, size, data) { this.sha1 = sha1; this.type = type; this.size = size; this.data = data; Object.freeze(this); } function CriticCommitFileVersion(commit, path_or_id, data) { var path; if (typeof path_or_id == "number") path = CriticFile.find(path_or_id).path; else path = path_or_id; var match = /((?:[^\/]+\/)*[^\/]+)/.exec(path); if (!match) throw CriticError(format("invalid path: %s", path)); path = match[1]; var match = /^(?:(.*)\/)?([^/]+)$/.exec(path); var dirname = match[1] || ""; var basename = match[2]; var line = commit.repository.run("ls-tree", "--long", format("%s:%s", commit.sha1, dirname), basename); try { match = RE_LSTREE_LINE.exec(line.trim()); } catch (e) { throw new TypeError(Object.prototype.toString.apply(line)); } if (!match) throw CriticError(format("path doesn't exist in commit: '%s'", path)); var mode = match[1]; var type = match[2]; var sha1 = match[3]; var size = match[4]; var name = match[5]; if (type != "blob") throw CriticError(format("path names a directory: '%s'", path)); CriticFileVersion.call(this, commit.repository, path, mode, size, sha1, data); this.commit = commit; Object.freeze(this); } CriticCommitFileVersion.prototype = Object.create(CriticFileVersion.prototype); function CriticCommitDirectory(commit, path) { if (path == "/") path = ""; else { var match = /^((?:[^\/]+\/)*[^\/]+)/.exec(path); if (!match) throw CriticError(format("invalid path: %s", path)); path = match[1]; } var data = 
commit.repository.run("ls-tree", "--long", format("%s:%s", commit.sha1, path)); var lines = data.trim().split("\n"); var low_directories = [], low_files = []; for (var index = 0; index < lines.length; ++index) { var match = RE_LSTREE_LINE.exec(lines[index]); if (!match) throw Error(format("Unexpected line: %r", lines[index])); var mode = match[1]; var type = match[2]; var sha1 = match[3]; var size = match[4]; var name = match[5]; if (type == "blob") low_files.push([mode, sha1, size, name]); else if (type == "tree") low_directories.push(name); } this.path = path || "/"; this.commit = commit; var self = this; var parent; var directories; var files; function getParent() { if (parent === void 0) { var match = /(.+)\/[^\/]+/.exec(self.path); if (!match) parent = null; else parent = new CriticCommitDirectory(self.commit, match[1]); } return parent; } function getDirectories() { if (!directories) { directories = []; for (var index = 0; index < low_directories.length; ++index) { var path = low_directories[index]; if (self.path != "/") path = format("%s/%s", self.path, path); directories.push(new CriticCommitDirectory(self.commit, path)); } } return directories; } function getFiles() { if (!files) { files = []; for (var index = 0; index < low_files.length; ++index) { var path = low_files[index][3]; if (self.path != "/") path = format("%s/%s", self.path, path); files.push(new CriticCommitFileVersion(self.commit, path)); } } return files; } Object.defineProperties(this, { parent: { get: getParent, enumerable: true }, directories: { get: getDirectories, enumerable: true }, files: { get: getFiles, enumerable: true }}); Object.freeze(this); } function runGitCommand(path, args) { var env; if (typeof args[args.length - 1] == "object") env = args.pop(); else env = {}; env["REMOTE_USER"] = global.user.name; var stdin; if ("stdin" in env) { stdin = env["stdin"]; delete env["stdin"]; } var argv = [git_executable]; [].push.apply(argv, args); var process = new OS.Process(git_executable, 
{ argv: argv, cwd: path, environ: env }); if (stdin !== void 0) process.stdin = new IO.MemoryFile(stdin, "r"); process.stdout = new IO.MemoryFile; process.stderr = new IO.MemoryFile; try { process.run(); } catch (error) { var message; if (process.exitStatus !== null) message = format("Git exited with status %d", process.exitStatus); else message = format("Git terminated by signal %d", process.terminationSignal); var stderr = process.stderr.value.decode(); if (stderr.trim()) message += format("\n%s", stderr); throw CriticError(message); } return process.stdout.value.decode(); } function CriticRepository(name_or_id) { var repository_id; if (typeof name_or_id == "number") repository_id = name_or_id; else { var result = db.execute("SELECT id FROM repositories WHERE name=%s", name_or_id); if (result.length) repository_id = result[0].id; else throw CriticError(format("%s: no such repository", name_or_id)); } for (var index = 0; index < all_repositories.length; ++index) if (all_repositories[index].id == repository_id) return all_repositories[index]; var result = db.execute("SELECT name, path, parent FROM repositories WHERE id=%d", repository_id)[0]; if (!result) throw CriticError(format("%s: invalid repository ID", repository_id)); var command = { name: "check-repository-access", data: { repository_id: repository_id } }; this.access = JSON.parse(executeCLI([command])[0]); if (!this.access.read) throw CriticError(format("access denied")); this.id = repository_id; this.name = result.name; this.path = result.path; var self = this; var catfile = null, catfile_in, catfile_out, catfile_buffer; var filters = null; this.fetch = function (sha1) { if (!catfile) { var stdin = new IO.Pipe(); var stdout = new IO.Pipe(); var argv = [git_executable, "cat-file", "--batch"]; catfile = new OS.Process(git_executable, { argv: argv, cwd: self.path }); catfile.stdin = stdin.input; catfile.stdout = stdout.output; catfile.stderr = new IO.MemoryFile(); catfile_in = stdin.output; 
catfile_in.setCloseOnExec(true); catfile_out = stdout.input; catfile_buffer = new IO.Buffered(catfile_out); catfile.start(); } try { catfile_in.write(format("%s\n", sha1)); var line = catfile_buffer.readln(); if (!line) throw CriticError(format("failed to fetch %s: empty response", sha1)); var match = /^([0-9a-f]{40}) (commit|tree|blob|tag) (\d+)$/.exec(line); if (!match || match[1] != sha1) throw CriticError(format("failed to fetch %s: invalid response: %s", sha1, JSON.stringify(line))); var type = match[2]; var size = parseInt(match[3]); var data = catfile_buffer.read(size); catfile_buffer.read(1); return new GitObject(sha1, type, size, data); } catch (error) { this.shutdown(); throw error; } }; this.shutdown = function () { if (catfile) { try { catfile.kill(9); catfile.wait(); } catch (e) {} try { catfile_in.close(); } catch (e) {} try { catfile_out.close(); } catch (e) {} catfile = catfile_in = catfile_out = catfile_buffer = null; } }; all_repositories.push(this); function getFilters() { var users = {}; function getUser(name_or_id) { var user = users[name_or_id]; if (user) return user; else user = new CriticUser(name_or_id); return users[user.id] = users[user.name] = user; } if (!filters) { filters = []; Object.defineProperties(filters, { users: { value: {} }, paths: { value: {} }}); var result = db.execute("SELECT uid, path, type, delegate FROM filters WHERE repository=%d", self.id); for (var index = 0; index < result.length; ++index) { var row = result[index]; var user = getUser(row.uid); var path = row.path; var delegates = []; if (row.delegate) row.delegate.split(",").forEach( function (name) { try { delegates.push(getUser(name)); } catch (error) { /* Ignore invalid user names in the 'delegate' column. 
*/ } }); var filter = Object.freeze({ user: user, path: path, type: row.type, delegates: delegates }); filters.push(filter); (filters.users[user.name] || (filters.users[user.name] = [])).push(filter); (filters.paths[path] || (filters.paths[path] = [])).push(filter); } for (var name in filters.users) Object.freeze(filters.users[name]); Object.freeze(filters.users); for (var path in filters.paths) Object.freeze(filters.paths[path]); Object.freeze(filters.paths); Object.freeze(filters); } return filters; } Object.defineProperties(this, { filters: { get: getFilters, enumerable: true }}); Object.freeze(this); } function GitUserTime(fullname, email, utc) { this.fullname = fullname; this.email = email; this.time = new Date(parseInt(utc) * 1000); var self = this; var user = null; function getUser() { if (!user) try { user = new CriticUser({ email: self.email }); } catch (error) { return null; } return user; } Object.defineProperty(this, "user", { get: getUser, enumerable: true }); Object.freeze(this); } GitUserTime.prototype.toString = function () { return format("%s <%s> at %s", this.fullname, this.email, this.time); }; function CriticCommit(repository, sha1) { var object = repository.fetch(sha1); while (object.type == "tag") { sha1 = object.data.decode().split("\n")[0].split(" ")[1]; object = repository.fetch(sha1); } if (object.type != "commit") throw CriticError("not a commit"); var text = object.data.decode(); var tree, author, committer, parents_sha1 = [], message; while (true) { var line_length = text.indexOf("\n"); if (line_length == 0) { message = text.substring(1); break; } else { var line = text.substring(0, line_length), match; if (match = /^tree ([0-9a-f]{40})$/i.exec(line)) tree = match[1]; else if (match = /^parent ([0-9a-f]{40})$/i.exec(line)) parents_sha1.push(match[1]); else if (match = /^author (.+?) <([^>]+)> (\d+) ([-+]\d+)$/i.exec(line)) author = new GitUserTime(match[1], match[2], match[3]); else if (match = /^committer (.+?) 
<([^>]+)> (\d+) ([-+]\d+)$/i.exec(line)) committer = new GitUserTime(match[1], match[2], match[3]); text = text.substring(line_length + 1); } } var self = this; var commit_id = null; var parents = null; function getId() { if (commit_id === null) { var result = db.execute("SELECT id FROM commits WHERE sha1=%s", sha1); if (result.length) commit_id = result[0].id; else throw CommitError(format("%s: commit not registered in the database", sha1)); } return commit_id; } function getParents() { if (parents === null) parents = parents_sha1.map(function (sha1) { return new CriticCommit(repository, sha1); }); return parents; } function getSummary() { var match = /^(fixup|squash)! [^\n]+\n+([^\n]+)/.exec(message); if (match) return format("[%s] %s", match[1], match[2]); else return /^[^\n]*/.exec(message)[0]; } function getShort() { return repository.revparse(self.sha1, true); } Object.defineProperties(this, { id: { get: getId, enumerable: true }, parents: { get: getParents, enumerable: true }, summary: { get: getSummary, enumerable: true }, short: { get: getShort, enumerable: true }}); this.repository = repository; this.sha1 = sha1; this.tree = tree; this.author = author; this.committer = committer; this.message = message; Object.freeze(this); } Object.defineProperties(CriticCommit.prototype, { toString: { value: function () { return format("%s: %s", this.sha1.substring(0, 8), this.summary); }, writable: true, configurable: true }, getDirectory: { value: function (path) { return new CriticCommitDirectory(this, path); }, writable: true, configurable: true }, getFile: { value: function (path, data) { return new CriticCommitFileVersion(this, path, data); }, writable: true, configurable: true }, isAncestorOf: { value: function (other) { if (!(this instanceof CriticCommit)) throw CriticError("invalid this object: expected Commit object"); if (!(other instanceof CriticCommit)) throw CriticError("invalid 'other' argument: expected Commit object"); if (this.repository.id != 
other.repository.id) return false; else if (this.sha1 == other.sha1) return true; else return this.repository.run("merge-base", this.sha1, other.sha1).trim() == this.sha1; }, writable: true, configurable: true } }); CriticRepository.prototype.run = function () { return runGitCommand(this.path, [].slice.apply(arguments)); }; CriticRepository.prototype.run.supportsInput = true; CriticRepository.prototype.revparse = function (ref, use_short) { if (!ref || typeof ref != "string") throw CriticError("invalid ref argument: expected non-empty string"); else { var short = "--short=40"; if (use_short) if (use_short === true) short = "--short"; else short = format("--short=%d", use_short); return this.run("rev-parse", "--verify", "--quiet", short, ref).trim(); } }; CriticRepository.prototype.revlist = function (data) { var args = ["rev-list"]; if (data.options) { [].push.apply(args, [].map.call(data.options, function (option) { if (!/-[a-z]|--[a-z-]+(?:=.*)/.test(option)) throw CriticError("invalid option: " + option); return option; })); } if (data.included) { [].push.apply(args, [].map.call(data.included, String)); if (data.excluded) [].push.apply(args, [].map.call(data.excluded, function (ref) { return "^" + String(ref); })); } else if (data.range) { var range = String(data.range); if (range.match(/\.\.\.?/g).length != 1) throw CriticError("invalid range"); args.push(range); } else throw CriticError("invalid argument: data.included or data.range must be specified"); return this.run.apply(this, args).trim().split("\n"); }; CriticRepository.prototype.getCommit = function (ref_or_id) { var sha1; if (typeof ref_or_id == "number") sha1 = db.execute("SELECT sha1 FROM commits WHERE id=%d", ref_or_id)[0].sha1; else sha1 = this.revparse(ref_or_id); return new CriticCommit(this, sha1); }; CriticRepository.prototype.getBranch = function (name) { return new CriticBranch({ repository: this, name: name }); }; function createChangeset(repository, type, parent, child) { var socket = new 
IO.Socket("unix", "stream"); var request = { repository_name: repository.name, changeset_type: type }; switch (type) { case "direct": case "merge": request.child_sha1 = child.sha1; break; case "custom": case "conflicts": request.parent_sha1 = parent.sha1; request.child_sha1 = child.sha1; } socket.connect(IO.SocketAddress.unix(changeset_address)); socket.send(JSON.stringify([request])); socket.shutdown("write"); var result = ""; while (true) { var data = socket.recv(4096); if (data === null) break; result += data.decode(); } result = JSON.parse(result); if (!(result instanceof Array) || result.length != 1 || "error" in result[0]) throw CriticError(format("failed to create changeset!")); } CriticRepository.prototype.getChangeset = function (data) { var data_argument = data; if (typeof data == "number") data = { id: data }; else if (typeof data.id == "number") { data = { id: data.id }; if (data_argument.parent) data.parent = data_argument.parent; if (data_argument.child) data.child = data_argument.child; if (data_argument.commit) data.commit = data_argument.commit; } else { var commit, parent, child, type; if (data instanceof CriticCommit) commit = data; else if (data.commit) if (data.commit instanceof CriticCommit) commit = data.commit; else commit = this.getCommit(data.commit); if (commit) { if (commit.parents.length != 1) throw CriticError("invalid use; commit is a merge, use getMergeChangeset() instead"); child = commit; parent = commit.parents[0]; type = "direct"; } else { if (data.parent instanceof CriticCommit) parent = data.parent; else parent = this.getCommit(data.parent); if (data.child instanceof CriticCommit) child = data.child; else child = this.getCommit(data.child); if (child.parents.length == 1 && child.parents[0].sha1 == parent.sha1) type = "direct"; else type = "custom"; } for (var attempt = 0; attempt < 2; ++attempt) { var result = db.execute("SELECT id FROM changesets WHERE parent=%d AND child=%d AND type IN ('direct', 'custom')", parent.id, 
child.id); if (result.length) { data = { id: result[0].id, parent: parent, child: child }; break; } else if (attempt == 0) createChangeset(this, type, parent, child); } if (attempt == 2) throw CriticError("mysterious error creating/finding cached changeset"); } if (typeof data_argument == "object") for (var name in data_argument) switch (name) { case "id": case "commit": case "child": case "parent": break; default: data[name] = data_argument[name]; } return new CriticChangeset(this, data); }; CriticRepository.prototype.getMergeChangeset = function (commit, data) { var commit; data = data || {}; if (!(commit instanceof CriticCommit)) commit = this.getCommit(commit); if (commit.parents.length < 2) throw CriticError(format("invalid use; %s is not a merge commit", commit.sha1)); for (var attempt = 0; attempt < 2; ++attempt) { var result = db.execute("SELECT id FROM changesets WHERE child=%d AND type='merge'", commit.id); if (result.length) { var changesets = []; for (var index = 0; index < result.length; ++index) { data.id = result[index].id; data.child = commit; changesets.push(new CriticChangeset(this, data)); } return new CriticMergeChangeset(changesets); } else if (attempt == 0) createChangeset(this, "merge", null, commit); } throw CriticError("mysterious error creating/finding cached changeset"); }; function CriticRepositoryWorkCopy(repository, branch) { if (!repository_work_copy_path || !IO.File.isDirectory(repository_work_copy_path) || global.user.id === null) throw CriticError("operation not available"); var name = format("%s/%d/%s", global.user.name, extension_id, repository.name); var path = repository_work_copy_path + "/" + name; this.repository = repository; this.path = path; if (IO.File.isDirectory(path)) { this.run("clean", "-d", "-x", "-f", "-f"); this.run("reset", "--hard"); if (branch) { this.run("fetch", "origin", "refs/heads/" + branch); this.run("checkout", "-q", "FETCH_HEAD"); try { this.run("branch", "-D", branch); } catch (e) {} 
this.run("checkout", "-q", "-b", branch); } else { try { var ref = repository.run("symblic-ref", "--quiet", "HEAD").trim(); this.run("fetch", "origin", ref); ref = "FETCH_HEAD"; } catch (error) { ref = repository.run("rev-parse", "HEAD").trim(); } this.run("checkout", "-q", ref); } IO.File.utimes(path); } else { var argv = [git_executable, "clone"]; if (branch) argv.push("-b", branch); argv.push(repository.path, name); var process = new OS.Process(git_executable, { argv: argv, cwd: repository_work_copy_path }); return process.call(); } } CriticRepositoryWorkCopy.prototype.run = function () { return runGitCommand(this.path, [].slice.apply(arguments)); }; CriticRepositoryWorkCopy.prototype.run.supportsInput = true; CriticRepository.prototype.getWorkCopy = function () { return new CriticRepositoryWorkCopy(this); }; CriticRepository.prototype.startFiltersTransaction = function () { return new CriticFiltersTransaction(this); }; ================================================ FILE: src/library/js/v8/critic-html.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ "use strict"; var standard_stylesheets = { "third-party/jquery-ui.css": 1, "overrides.css": 1, "basic.css": 1 }; var standard_scripts = { "third-party/jquery.js": 1, "third-party/jquery-ui.js": 1, "basic.js": 1 }; var standard_links = { home: "Home", dashboard: "Dashboard", branches: "Branches", services: "Services", repositories: "Repositories", search: "Search", manageextensions: "Extensions", config: "Config", tutorial: "Tutorial" }; var entities = { "<": "<", "&": "&", ">": ">", "'": "'", '"': """ }; function htmlify(text) { return String(text).replace(/[<&>'"]/g, function (ch) { return entities[ch]; }); } function writeStandardHeader(title, data) { if (title instanceof CriticUser) throw CriticError("API change: the user argument was moved to the HeaderData dictionary (and is now optional)"); write("%s", htmlify(title)); data = data || {}; var user = data.user || global.user; for (var stylesheet in standard_stylesheets) write("", stylesheet); if (data.stylesheets) for (var index = 0; index < data.stylesheets.length; ++index) write("", htmlify(data.stylesheets[index])); if (data.links) for (var link in data.links) { var match = /^rel=(.*)$/.exec(link); if (match) write("", match[1], data.links[link]); } write("", is_development ? 
"-dev" : ""); write("", user.getPreference("style.defaultFont")); for (var script in standard_scripts) write("", script); if (data.scripts) for (var index = 0; index < data.scripts.length; ++index) write("", htmlify(data.scripts[index])); if (typeof data.review == "object") write("", data.review.id, data.review.repository.id); var opera_class = "opera"; if (is_development) opera_class += " development"; write(""); } function writeStandardFooter(data) { var result = db.execute("SELECT extensions.name, users.fullname " + "FROM extensions " + "JOIN users ON (users.id=extensions.author) " + "WHERE extensions.id=%d", extension_id)[0]; write(""); } function PaleYellowTable(title) { this.title = title; this.rows = []; } PaleYellowTable.prototype.addHeading = function (title) { this.rows.push(format("

%s

", htmlify(title))); }; PaleYellowTable.prototype.addItem = function (data) { if (data.html) this.rows.push(format("%s", data.html)); else if (data.name) { var buttons_html; if (data.buttons) { buttons_html = "
"; for (var button_name in data.buttons) buttons_html += format("", htmlify(data.buttons[button_name]), htmlify(button_name)); buttons_html += "
"; } else buttons_html = ""; this.rows.push(format("%s:%s%s", htmlify(data.name), data.value, buttons_html)); this.rows.push(format("%s", htmlify(data.description))); } else if (data.buttons) { var buttons_html = "
"; for (var button_name in data.buttons) buttons_html += format("", htmlify(data.buttons[button_name]), htmlify(button_name)); buttons_html += "
"; this.rows.push(format("%s", buttons_html)); } else if (data.separator) this.rows.push("
"); else throw CriticError("invalid argument"); }; PaleYellowTable.prototype.write = function () { write("
"); write("", htmlify(this.title)); for (var index = 0; index < this.rows.length; ++index) write(this.rows[index]); write("

%s

"); }; var CriticHtml = { writeStandardHeader: writeStandardHeader, writeStandardFooter: writeStandardFooter, PaleYellowTable: PaleYellowTable, escape: htmlify }; Object.freeze(CriticHtml); ================================================ FILE: src/library/js/v8/critic-launcher-fork.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ var data = JSON.parse(readln()); var critic = new Module(); critic.load(data.criticjs_path); critic.close(); var server_socket = new IO.Socket("unix", "stream"); server_socket.bind(IO.SocketAddress.unix(data.address)); server_socket.listen(4); writeln("listening"); function Child(socket) { this.socket = socket; this.stdout = IO.File.pipe(); this.stderr = IO.File.pipe(); this.check = IO.File.pipe(); } Child.prototype.start = function () { this.socket.sendfd(this.stdout.input); this.stdout.input.close(); this.socket.sendfd(this.stderr.input); this.stderr.input.close(); this.process = new Process(); this.process.start(); if (this.process.isSelf) { this.check.input.close(); File.dup2(this.socket, 0); this.socket.close(); File.dup2(this.stdout.output, 1); this.stdout.output.close(); File.dup2(this.stderr.output, 2); this.stderr.output.close(); this.execute(); this.check.output.close(); Process.exit(0); } else { this.stdout.output.close(); this.stderr.output.close(); this.check.output.close(); } }; Child.prototype.execute = function () { 
try { var line = read().decode(); var data = JSON.parse(line); } catch (e) { throw JSON.stringify(line); } critic.setup(data); try { var script = new Module(); script.global.critic = critic; script.load(data.script_path); script.global[data.fn].apply(null, eval(data.argv)); } finally { critic.shutdown(); } }; Child.prototype.finish = function () { if (this.process.wait(true)) { var result = JSON.stringify({ exitStatus: this.process.exitStatus, terminationSignal: this.process.terminationSignal }); this.socket.send(result + "\n"); this.socket.close(); return true; } else return false; }; var children = {}; var poll = new IO.Poll(); poll.register(server_socket); while (true) { if (poll.poll(1000)) { poll.read.forEach(function (file) { if (file == server_socket) { var client_socket = server_socket.accept(); writeln("%.3f: client connection opened", Date.now()); var child = new Child(client_socket); child.start(); children[child.process.pid] = child; poll.register(child.check.input); } }); } for (var pid in children) { if (children[pid].finish()) { poll.unregister(child.check.input); child.check.input.close(); delete children[pid]; writeln("%.3f: child process finished", Date.now()); } } } ================================================ FILE: src/library/js/v8/critic-launcher.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/

/* Single-shot extension launcher: loads the critic module, reads setup data
   from stdin, runs one extension entry point, and exits with 0 on success
   or 1 on failure. */

var critic = new Module();
critic.load(IO.Path.absolute("critic.js"));

var line = readln();
var data = JSON.parse(line);

critic.setup(data);
critic.close();

function run()
{
  try
  {
    /* PostgreSQL access is disabled for the extension script's own module;
       it goes through the critic API instead. */
    var script = new Module({ PostgreSQL: false });
    script.global.critic = critic;
    /* Compatibility shim: lets scripts call Encodings.decode() on either
       byte objects or plain strings. */
    script.eval("var Encodings = { decode: function (bytes) { return typeof bytes == 'object' ? bytes.decode.apply(bytes, [].slice.call(arguments, 1)) : bytes; } };");

    try
    {
      script.load(data.script_path);
    }
    catch (error)
    {
      IO.File.stderr.write(format("Failed to load '%s':\n %s", data.script_path, error));
      return 1;
    }

    try
    {
      script.global[data.fn].apply(null, eval(data.argv));
    }
    catch (error)
    {
      IO.File.stderr.write(format("Failed to call '%s::%s()':\n %s\n %s", data.script_path, data.fn, error, error.stack.replace(/\n/g, "\n ")));
      return 1;
    }

    return 0;
  }
  finally
  {
    critic.shutdown();
  }
}

OS.Process.exit(run());

================================================
FILE: src/library/js/v8/critic-log.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

   Copyright 2013 Jens Lindström, Opera Software ASA

   Licensed under the Apache License, Version 2.0 (the "License"); you may not
   use this file except in compliance with the License.  You may obtain a copy
   of the License at

     http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
   WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
   License for the specific language governing permissions and limitations
   under the License. */

"use strict";

/* Per-extension log stored in the extensionlog table.  write() records
   entries; fetch()/remove() query entries for the current user and this
   extension, optionally filtered by time range and category. */
function CriticLog(user)
{
  /* Record a formatted message.  A trailing object argument supplies
     options; only options.category is used (default "default"). */
  this.write = function ()
    {
      var text, options, category = "default";
      if (typeof arguments[arguments.length - 1] == "object")
      {
        options = arguments[arguments.length - 1];
        text = format.apply(null, Array.prototype.slice.call(arguments, 0, arguments.length - 1));
      }
      else
        text = format.apply(null, arguments);
      if (options)
        category = "" + options.category;
      db.execute("INSERT INTO extensionlog (extension, uid, category, text) VALUES (%d, %d, %s, %s)",
                 extension_id, user.id, category, text);
      db.commit();
    };

  /* Build the WHERE clause + parameters shared by fetch() and remove().
     timeStart/timeEnd may be Date objects (absolute timestamps) or strings
     (SQL intervals relative to now()). */
  function getQuery(data)
  {
    var terms = ["extensionlog.extension = %d", "extensionlog.uid = %d"];
    var parameters = [extension_id, global.user.id];

    if (data && typeof data == "object")
    {
      if (data.timeStart)
      {
        if (typeof data.timeStart == "object")
        {
          terms.push("extensionlog.time >= %s::timestamp");
          parameters.push(Date.prototype.toSQLTimestamp.call(data.timeStart));
        }
        else
        {
          terms.push("extensionlog.time >= now() - %s::interval");
          parameters.push(String(data.timeStart));
        }
      }
      if (data.timeEnd)
      {
        if (typeof data.timeEnd == "object")
        {
          terms.push("extensionlog.time <= %s::timestamp");
          parameters.push(Date.prototype.toSQLTimestamp.call(data.timeEnd));
        }
        else
        {
          terms.push("extensionlog.time <= now() - %s::interval");
          parameters.push(String(data.timeEnd));
        }
      }
      if (data.category)
      {
        terms.push("extensionlog.category = %s");
        parameters.push(String(data.category));
      }
    }

    return { where: terms.join(" AND "), parameters: parameters };
  }

  /* Return matching log entries, oldest first, as frozen objects with
     user/time/category/text.  User objects are cached per uid. */
  this.fetch = function (data)
    {
      var query = getQuery(data);
      var result = db.execute.apply(db, ["SELECT uid, category, time, text FROM extensionlog WHERE " + query.where + " ORDER BY time ASC"].concat(query.parameters));
      var users = {};
      var log = [];

      for (var index = 0; index < result.length; ++index)
      {
        var row = result[index];
        var user = users[row.uid];
        if (!user)
          user = users[row.uid] = new CriticUser({ id: row.uid });
        log.push(Object.freeze({ user: user,
                                 time: row.time,
                                 category: row.category,
                                 text: row.text }));
      }

      return log;
    };

  /* Delete matching log entries. */
  this.remove = function (data)
    {
      var query = getQuery(data);
      db.execute.apply(db, ["DELETE FROM extensionlog WHERE " + query.where].concat(query.parameters));
      db.commit();
    };
}

================================================
FILE: src/library/js/v8/critic-mail.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

   Copyright 2013 Jens Lindström, Opera Software ASA

   Licensed under the Apache License, Version 2.0 (the "License"); you may not
   use this file except in compliance with the License.  You may obtain a copy
   of the License at

     http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
   WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
   License for the specific language governing permissions and limitations
   under the License. */

"use strict";

/* Hand a generated mail file over to the mail delivery daemon by stripping
   its ".pending" suffix. */
function sendMail(filename)
{
  IO.File.rename(filename, /^(.*)\.pending$/.exec(filename)[1]);
}

/* Collects custom mails and sends them all in finish(). */
function CriticMailTransaction()
{
  this.mails = [];
}

/* Queue one mail.  |data| must have subject and body, and at least one of
   'to' (user or array of users) or 'review'; 'from' defaults to the current
   user; 'headers' is an optional name→value map. */
CriticMailTransaction.prototype.add = function (data)
  {
    if (!("to" in data) && !("review" in data))
      throw CriticError("invalid argument; at least one of data.to or data.review must be specified");
    if (!("subject" in data))
      throw CriticError("invalid argument; data.subject is required");
    if (!("body" in data))
      throw CriticError("invalid argument; data.body is required");

    var mail = { subject: String(data.subject),
                 body: String(data.body) };

    if ("to" in data)
    {
      mail.recipients = [];
      if (typeof data.to == "object" && "length" in data.to)
      {
        for (var index = 0; index < data.to.length; ++index)
          mail.recipients.push((new CriticUser(data.to[index])).id);
      }
      else
        mail.recipients.push((new CriticUser(data.to)).id);
    }

    if ("from" in data)
      mail.sender = (new CriticUser(data.from)).id;
    else
      mail.sender = global.user.id;

    if ("review" in data)
      mail.review_id = (new CriticReview(data.review)).id;

    if ("headers" in data)
    {
      mail.headers = {};
      for (var name in data.headers)
        mail.headers[name] = String(data.headers[name]);
    }

    this.mails.push(mail);
  };

/* Generate all queued mails via the Python CLI helper, move the generated
   files out of the pending state, and wake the mail delivery daemon with
   SIGHUP. */
CriticMailTransaction.prototype.finish = function ()
  {
    var argv = [python_executable, "-m", "cli", "generate-custom-mails"];
    var stdin_data = format("%r\n", this.mails);
    var process = new OS.Process(python_executable,
                                 { argv: argv,
                                   environ: { PYTHONPATH: python_path }});

    /*
    process.stdout = new IO.MemoryFile;
    process.stderr = new IO.MemoryFile;
    process.start();
    process.wait();

    if (process.exitStatus !== 0)
      throw CriticError(process.stderr.value.decode());

    var stdout_data = process.stdout.value.decode().trim();
    */

    var stdout_data = process.call(stdin_data).trim();
    var response = JSON.parse(stdout_data);

    /* The helper reports errors as a plain JSON string, and success as a
       list of generated filenames. */
    if (typeof response == "string")
      throw CriticError(response);

    response.forEach(sendMail);

    var maildelivery_pid = parseInt(IO.File.read(maildelivery_pid_path).decode().trim());

    OS.Process.kill(maildelivery_pid, 1);
  };

================================================
FILE: src/library/js/v8/critic-review.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

   Copyright 2013 Jens Lindström, Opera Software ASA

   Licensed under the Apache License, Version 2.0 (the "License"); you may not
   use this file except in compliance with the License.  You may obtain a copy
   of the License at

     http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
   WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
   License for the specific language governing permissions and limitations
   under the License.
*/ "use strict"; var review_internals = {}; function CriticReview_isCreated(review) { return review_internals[review.id].created; } function CriticReviewFilter(review, user_id, path, type, creator_id) { user_id = Number(user_id); path = String(path); creator_id = creator_id || Number(creator_id); this.review = review; this.path = path; this.type = type; var user = null; var creator = null; function getUser() { if (!user) user = new CriticUser(user_id); return user; } function getCreator() { if (!creator) if (creator_id) creator = new CriticUser(creator_id); else return null; return creator; } Object.defineProperties(this, { user: { get: getUser, enumerable: true }, creator: { get: getCreator, enumerable: true }}); Object.freeze(this); } function CriticReviewRebase(review, user_id, old_head_id, new_head_id, new_upstream_id, equivalent_merge_id, replayed_rebase_id, branch_name) { this.review = review; this.user = new CriticUser(user_id); this.oldHead = review.repository.getCommit(old_head_id); this.newHead = review.repository.getCommit(new_head_id); this.newUpstream = new_upstream_id && review.repository.getCommit(new_upstream_id); this.equivalentMerge = equivalent_merge_id && review.repository.getCommit(equivalent_merge_id); this.replayedRebase = replayed_rebase_id && review.repository.getCommit(replayed_rebase_id); this.branchName = branch_name; Object.freeze(this); } function CriticReviewCreated(review_id) { this.id = review_id; } function CriticReview(arg) { var review_id, created = false; if (arg && typeof arg == "object") { if (arg instanceof CriticReview) return arg; else if (arg instanceof CriticReviewCreated) { review_id = arg.id; created = true; } else review_id = ~~arg; } else review_id = ~~arg; var result = db.execute("SELECT branch, state, closed_by, dropped_by, summary, description FROM reviews WHERE id=%d", review_id)[0]; if (!result) throw CriticError(format("%d: invalid review ID", review_id)); this.id = review_id; this.owners = []; this.state = 
result.state; this.closedBy = result.closed_by && new CriticUser(result.closed_by); this.droppedBy = result.dropped_by && new CriticUser(result.dropped_by); this.summary = result.summary || ""; this.description = result.description || ""; this.branch = new CriticBranch({ id: result.branch, review: this }); this.repository = this.branch.repository; var owners = db.execute("SELECT uid FROM reviewusers WHERE review=%d AND owner", review_id); for (var index = 0; index < owners.length; ++index) this.owners.push(new CriticUser(owners[index].uid)); var self = this; var commits = null; var comment_chains = null; var users = null; var reviewers = null; var watchers = null; var progress = null; var batches = null; var filters = null; var rebases = null; var trackedBranch; var internal = review_internals[review_id] = {}; internal.filters = filters; internal.created = created; result = null; owners = null; function getCommits() { if (!commits) { var all = []; var result = db.execute("SELECT DISTINCT child FROM changesets JOIN reviewchangesets ON (changeset=id) WHERE review=%d", self.id); for (var index = 0; index < result.length; ++index) all.push(self.repository.getCommit(result[index].child)); commits = new CriticCommitSet(all); } return commits; } function getCommentChains() { if (!comment_chains) { comment_chains = []; comment_chains.issues = []; comment_chains.notes = []; var result = db.execute("SELECT id, review, batch, uid, time, type, state, origin, file, first_commit, last_commit, closed_by, addressed_by FROM commentchains WHERE review=%d AND state NOT IN ('draft', 'empty') ORDER BY time ASC", review_id); for (var index = 0; index < result.length; ++index) { var chain = new CriticCommentChain(result[index], { review: self }); comment_chains.push(chain); if (chain.type == CriticCommentChain.TYPE_ISSUE) comment_chains.issues.push(chain); else comment_chains.notes.push(chain); } Object.freeze(comment_chains.issues); Object.freeze(comment_chains.notes); 
Object.freeze(comment_chains); } return comment_chains; } function getUsers() { if (!users) { users = []; users.type = {}; var result = db.execute("SELECT uid, type FROM reviewusers WHERE review=%d", self.id); for (var index = 0; index < result.length; ++index) { var user = new CriticUser(result[index].uid); users.push(user); users.type[user.id] = result[index].type; } Object.freeze(users.type); Object.freeze(users); } return users; } function getReviewers() { if (!reviewers) { reviewers = {}; var result = db.execute("SELECT DISTINCT assignee FROM fullreviewuserfiles WHERE review=%d", self.id); for (var index = 0; index < result.length; ++index) { var user = new CriticUser(result[index].assignee); reviewers[user.id] = user; reviewers[user.name] = user; } Object.freeze(reviewers); } return reviewers; } function getWatchers() { if (!watchers) { watchers = {}; var result = db.execute("SELECT reviewusers.uid FROM reviewusers LEFT OUTER JOIN fullreviewuserfiles ON (reviewusers.review=fullreviewuserfiles.review AND reviewusers.uid=fullreviewuserfiles.assignee) WHERE reviewusers.review=%d AND fullreviewuserfiles.assignee IS NULL", self.id); for (var index = 0; index < result.length; ++index) { var user = new CriticUser(result[index].uid); watchers[user.id] = user; watchers[user.name] = user; } Object.freeze(watchers); } return watchers; } function getBatches() { if (!batches) { batches = []; var result = db.execute("SELECT id FROM batches WHERE review=%d ORDER BY id ASC", self.id); for (var index = 0; index < result.length; ++index) batches.push(new CriticBatch({ id: result[index].id, review: self })); Object.freeze(batches); } return batches; } function getFilters() { if (!filters) { filters = []; var result = db.execute("SELECT uid, path, type, creator FROM reviewfilters WHERE review=%d", self.id); for (var index = 0; index < result.length; ++index) { var row = result[index]; filters.push(new CriticReviewFilter(self, row.uid, row.path, row.type, row.creator)); } 
Object.freeze(filters); } return filters; } function getRebases() { if (!rebases) { rebases = []; var result = db.execute("SELECT id, uid, old_head, new_head, new_upstream, equivalent_merge, replayed_rebase, branch FROM reviewrebases WHERE review=%d AND new_head IS NOT NULL ORDER BY id DESC", self.id); for (var index = 0; index < result.length; ++index) { var row = result[index]; rebases.push(new CriticReviewRebase(self, row.uid, row.old_head, row.new_head, row.new_upstream, row.equivalent_merge, row.replayed_rebase, row.branch)); } Object.freeze(rebases); } return rebases; } function getProgress() { if (!progress) { var pending_lines = 0; var pending_files = 0; var reviewed_lines = 0; var reviewed_files = 0; var issues; var result = db.execute("SELECT state, SUM(deleted) + SUM(inserted) AS count FROM reviewfiles WHERE review=%d GROUP BY state", self.id); for (var index = 0; index < result.length; ++index) if (result[index].state == "pending") pending_lines = result[index].count; else reviewed_lines = result[index].count; var result = db.execute("SELECT state, COUNT(*) AS count FROM reviewfiles WHERE review=%d GROUP BY state", self.id); for (var index = 0; index < result.length; ++index) if (result[index].state == "pending") pending_files = result[index].count; else reviewed_files = result[index].count; result = null; issues = db.execute("SELECT COUNT(id) AS count FROM commentchains WHERE review=%d AND type='issue' AND state='open'", self.id)[0].count; progress = { accepted: self.state == "open" && pending_files == 0 && issues == 0, finished: self.state == "closed", dropped: self.state == "dropped", pendingLines: pending_lines, pendingFiles: pending_files, reviewedLines: reviewed_lines, reviewedFiles: reviewed_files, openIssues: issues, toString: function () { if (this.finished) return "Finished!"; else if (this.accepted) return "Accepted!"; else if (this.dropped) return "Dropped..."; var percent, pending = this.pendingLines, reviewed = this.reviewedLines; if 
(pending_lines == 0 && reviewed_lines == 0) percent = "?? %"; else { var percent_exact = 100 * reviewed / (pending + reviewed); var percent_rounded = Math.round(percent_exact); if (percent_exact == 100) percent = "100 %"; else if (reviewed == 0) percent = "No progress"; else if (percent_rounded > 0 && percent_rounded < 100) percent = format("%d %%", percent_rounded); else { for (var precision = 1; precision < 10; ++precision) { percent = format(format("%%.%df", precision), percent_exact); if (percent.charAt(percent.length - 1) != '0') break; } percent += " %"; } } if (this.openIssues) return format("%s and %d issue%s", percent, this.openIssues, this.openIssues > 1 ? "s" : ""); else return percent; }}; Object.freeze(progress); } return progress; } function getTrackedBranch() { if (trackedBranch === void 0) { var result = db.execute("SELECT id FROM trackedbranches WHERE repository=%d AND local_name=%s", self.repository.id, self.branch.name)[0]; if (result) trackedBranch = new CriticTrackedBranch(result.id, { repository: self.repository, review: self }); else trackedBranch = null; } return trackedBranch; } Object.defineProperties(this, { commits: { get: getCommits, enumerable: true }, commentChains: { get: getCommentChains, enumerable: true }, users: { get: getUsers, enumerable: true }, reviewers: { get: getReviewers, enumerable: true }, watchers: { get: getWatchers, enumerable: true }, batches: { get: getBatches, enumerable: true }, filters: { get: getFilters, enumerable: true }, rebases: { get: getRebases, enumerable: true }, progress: { get: getProgress, enumerable: true }, trackedBranch: { get: getTrackedBranch, enumerable: true }}); Object.freeze(this); } CriticReview.prototype.getBatch = function (id) { return new CriticBatch({ id: Number(id) }); }; CriticReview.prototype.getCommentChain = function (id) { return new CriticCommentChain(id, { review: this }); }; CriticReview.prototype.getComment = function (id) { id = ~~id; var result = db.execute("SELECT chain, 
batch, uid, time, state, comment FROM comments WHERE id=%d AND state='current'", id)[0]; var chain = new CriticCommentChain(result.chain, { review: this }); return new CriticComment(chain.id, result.batch, id, result.uid, result.time, result.state, result.comment, { chain: chain }); }; CriticReview.prototype.getChangeset = function (commit) { if (commit.parents.length > 1) { var result = db.execute("SELECT id FROM changesets WHERE child=%d AND type='merge'", commit.id); var changesets = []; for (var index = 0; index < result.length; ++index) changesets.push(new CriticChangeset(this.repository, { id: result[index].id, child: commit, review: this })); return new CriticMergeChangeset(changesets); } else { var result = db.execute("SELECT id FROM changesets WHERE child=%d AND type='direct'", commit.id)[0]; return new CriticChangeset(this.repository, { id: result.id, parent: commit.parents[0], child: commit, review: this }); } }; CriticReview.prototype.startBatch = function (user) { user = user || global.user; if (!(user instanceof CriticUser)) throw CriticError("invalid argument; expected user object"); return new CriticBatch({ internals: batch_internals, review: this, user: user, review_created: CriticReview_isCreated(this) }); }; CriticReview.prototype.generateSubjectLine = function (user, preference) { var data = { id: format("r/%s", this.id), summary: this.summary, progress: String(this.progress), branch: this.branch.name }; var user_format = user.getPreference(preference); try { return format(user_format, data); } catch (e) { var default_format = db.execute("SELECT default_string FROM preferences WHERE item=%s", preference); return format(default_format, data); } }; CriticReview.prototype.getReviewableCommits = function (user) { user = user || global.user; if (!(user instanceof CriticUser)) throw CriticError("invalid argument; expected user object"); var result = db.execute("SELECT DISTINCT child FROM changesets JOIN fullreviewuserfiles ON (changeset=id) WHERE 
review=%d AND assignee=%d", this.id, user.id); var commits = []; for (var index = 0; index < result.length; ++index) commits.push(this.repository.getCommit(result[index].child)); return new CriticCommitSet(commits); }; CriticReview.prototype.getFullChangeset = function () { return this.branch.commits.getChangeset({ review: this }); }; CriticReview.prototype.getReviewableChangeset = function (user) { var commits = this.getReviewableCommits(user); if (commits.heads.length > 1 && commits.upstreams.length == 1) commits = this.commits.restrict(commits.heads, commits.upstreams); return commits.getChangeset({ review: this }); }; CriticReview.prototype.increaseSerial = function () { db.rollback(); db.execute("UPDATE reviews SET serial=serial+1 WHERE id=%d", this.id); db.commit(); }; function CriticPartition(review, commits, rebase) { this.review = review; this.commits = commits; this.rebase = rebase; Object.freeze(this); } CriticReview.prototype.getCommitPartitions = function () { var rebases = this.rebases; if (rebases.length == 0) return new CriticPartition(this, this.commits, null); var partition_commits = this.commits.restrict([this.branch.head]); var remaining_commits = this.commits.without(partition_commits); var partitions = []; for (var index = rebases.length - 1; index >= 0; --index) { var rebase = rebases[index]; partitions.push(new CriticPartition(this, partition_commits, rebase)); partition_commits = remaining_commits.restrict([rebase.oldHead]); if (partition_commits.length != 0) remaining_commits = remaining_commits.without(partition_commits); } partitions.push(new CriticPartition(this, partition_commits, null)); return Object.freeze(partitions); }; CriticReview.prototype.prepareRebase = function (data) { db.rollback(); if (db.execute("SELECT 1 FROM reviewrebases WHERE review=%d AND new_head IS NULL", this.id).length != 0) throw CriticError("review rebase already in progress"); var user = data.user || global.user; if (!!data.historyRewrite + 
!!data.singleCommit + !!data.newUpstream != 1) throw CriticError("invalid argument; exactly one of data.historyRewrite, data.singleCommit and data.newUpstream must be specified"); var old_head_id = this.branch.head.id; if (data.historyRewrite) db.execute("INSERT INTO reviewrebases (review, old_head, uid) VALUES (%d, %d, %d)", this.id, old_head_id, user.id); else { var upstreams = this.branch.commits.upstreams; if (upstreams.length > 1) throw CriticError("rebase not supported; review has multiple upstreams"); var branch = data.branch || null; var old_upstream = upstreams[0]; if (data.singleCommit) db.execute("INSERT INTO reviewrebases (review, old_head, old_upstream, uid, branch) VALUES (%d, %d, %d, %d, %s)", this.id, old_head_id, old_upstream.id, user.id, branch); else db.execute("INSERT INTO reviewrebases (review, old_head, old_upstream, new_upstream, uid, branch) VALUES (%d, %d, %d, %d, %d, %s)", this.id, old_head_id, old_upstream.id, data.newUpstream.id, user.id, branch); } db.execute("UPDATE reviews SET serial=serial + 1 WHERE id=%d", this.id); db.commit(); }; CriticReview.prototype.cancelRebase = function (data) { var result = db.execute("SELECT id FROM reviewrebases WHERE review=%d AND new_head IS NULL", this.id)[0]; if (!result) throw CriticError("no review rebase in progress"); db.execute("DELETE FROM reviewrebases WHERE id=%d", result.id); db.commit(); }; function setReviewState(review, user, new_state, verb) { user = user || global.user; if (!(user instanceof CriticUser)) throw CriticError("invalid argument; expected user object"); db.rollback(); var error = executeCLI([{ name: "set-review-state", data: { user_id: user.id, review_id: review.id, old_state: review.state, new_state: new_state }}])[0]; if (error) throw CriticError(format("error encountered while %s review: %s", verb, error)); db.commit(); } CriticReview.prototype.close = function (user) { if (this.state != "open") throw CriticError("review is not open"); if (!this.progress.accepted) throw 
CriticError("review is not accepted"); setReviewState(this, user, "closed", "closing"); }; CriticReview.prototype.drop = function (user) { if (this.state != "open") throw CriticError("review is not open"); if (this.progress.accepted) throw CriticError("review is accepted"); setReviewState(this, user, "dropped", "dropping"); }; CriticReview.prototype.reopen = function (user) { if (this.state == "open") throw CriticError("review is already open"); setReviewState(this, user, "open", "opening"); }; CriticReview.create = function (data) { if (!("upstream" in data)) throw CriticError("missing argument: upstream"); if (!("summary" in data)) throw CriticError("missing argument: summary"); if (!("branch" in data)) throw CriticError("missing argument: branch"); if (!("owner" in data)) throw CriticError("missing argument: owner"); var upstream = data.upstream; var summary = String(data.summary); var description = data.description || null; var branch = String(data.branch); var owner = data.owner; if (!(upstream instanceof CriticCommit)) throw CriticError("invalid argument: upstream"); if (branch.substring(0, 2) != "r/") throw CriticError("invalid argument: branch (doesn't have 'r/' prefix)"); if (!(owner instanceof CriticUser)) throw CriticError("invalid argument: owner"); var repository = data.repository; try { repository.revparse(branch); branch = false; } catch (exception) { } if (!branch) throw CriticError("invalid argument: branch (already exists)"); repository.run("branch", branch, upstream.sha1); var branch_id = db.execute("INSERT INTO branches (name, head, tail, repository) VALUES (%s, %d, %d, %d, %d) RETURNING id", branch, upstream.id, upstream.id, repository.id)[0].id; var review_id = db.execute("INSERT INTO reviews (type, branch, state, summary, description) VALUES (%s, %d, %s, %s, %s) RETURNING id", "official", branch_id, "open", summary, description)[0].id; db.execute("INSERT INTO reviewusers (review, uid, owner) VALUES (%d, %d, TRUE)", review_id, owner.id); 
return new CriticReview(review_id); }; CriticReview.find = function (data) { var result; if (data.repositoryURL && data.branchName) { if (data.repositoryURL.substring(0, hostname.length + 1) == hostname + ":") result = db.execute("SELECT reviews.id " + " FROM reviews " + " JOIN branches ON (branches.id=reviews.branch) " + " JOIN repositories ON (repositories.id=branches.repository) " + " WHERE branches.name=%s " + " AND repositories.path=%s", data.branchName, data.repositoryURL.substring(hostname.length + 1)); else result = db.execute("SELECT reviews.id " + " FROM reviews " + " JOIN branches ON (branches.id=reviews.branch) " + " JOIN trackedbranches ON (trackedbranches.repository=branches.repository " + " AND trackedbranches.local_name=branches.name) " + " WHERE trackedbranches.remote_name=%s " + " AND trackedbranches.remote=%s", data.branchName, data.repositoryURL); } return scoped( result, function () { return this.apply( function (review_id) { return new CriticReview(review_id); }); }); }; CriticReview.list = function (data) { data = data || {}; var tables = ["reviews"]; var conditions = ["TRUE"]; var argv = []; if (data.repository) { var repository_id, repository_name; if (data.repository instanceof CriticRepository) repository_id = data.repository.id; else if (parseInt(data.repository) === data.repository) repository_id = data.repository; else repository_name = String(data.repository); tables.push("branches ON (branches.id=reviews.branch)"); if (repository_id !== void 0) { conditions.push("branches.repository=%d"); argv.push(repository_id); } else { tables.push("repositories ON (repositories.id=branches.repository)"); conditions.push("repositories.name=%s"); argv.push(repository_name); } } if (data.state) { var valid_states = { open: true, closed: true, dropped: true }; if (!(data.state in valid_states)) throw CriticError(format("invalid argument: data.state=%r not valid", String(data.state))); conditions.push("reviews.state=%s"); argv.push(data.state); } if 
(data.owner) { var owner_id, owner_name; if (data.owner instanceof CriticUser) owner_id = data.owner.id; else if (parseInt(data.owner) === data.owner) owner_id = data.owner; else owner_name = String(data.owner); tables.push("reviewusers ON (reviewusers.review=reviews.id)"); conditions.push("reviewusers.owner"); if (owner_id !== void 0) { conditions.push("reviewusers.uid=%d"); argv.push(owner_id); } else { tables.push("users ON (users.id=reviewusers.uid)"); conditions.push("users.name=%s"); argv.push(owner_name); } } var query = format( "SELECT reviews.id FROM %(tables)s WHERE %(conditions)s ORDER BY reviews.id", { tables: tables.join(" JOIN "), conditions: conditions.join(" AND ") }); return scoped( db.execute.bind(db, query).apply(null, argv), function () { return this.apply( function (review_id) { return new CriticReview(review_id); }); }); }; ================================================ FILE: src/library/js/v8/critic-statistics.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ "use strict"; function CriticStatistics() { this.review = null; this.repository = null; this.interval_start = null; this.interval_end = null; this.user = null; this.directories = null; this.files = null; Object.seal(this); } CriticStatistics.prototype.setReview = function (review) { if (!(review instanceof CriticReview)) throw CriticError("invalid argument; expected Review object"); this.review = review; }; CriticStatistics.prototype.setRepository = function (repository) { if (!(repository instanceof CriticRepository)) throw CriticError("invalid argument; expected Repository object"); this.repository = repository; }; CriticStatistics.prototype.setInterval = function (start, end) { function mangle(item, argument) { if (Object.prototype.toString.call(item) == "[object Date]") return item; var string = String(item); if (/^\d+ (?:second|minute|hour|day|week|month|year)s?$/.test(string)) return string; else throw CriticError(format("invalid %s argument; expected Date object or string specifying an interval", argument)); } this.interval_start = mangle(start); if (end) this.interval_end = mangle(end); }; CriticStatistics.prototype.setUser = function (user) { if (!(user instanceof CriticUser)) throw CriticError("invalid argument; expected User object"); if (this.user) throw CriticError("invalid use; user already set"); this.user = user; }; CriticStatistics.prototype.addDirectory = function (directory) { if (!this.directories) this.directories = []; this.directories.push(/^(.*)\/+$/.exec(String(directory))[1]); }; CriticStatistics.prototype.addFile = function (file) { if (!this.files) this.files = []; this.files.push(String(file)); }; CriticStatistics.prototype.getReviewedLines = function (data) { var grouping = (data && data.grouping) || ["user"]; if (!("length" in grouping)) throw CriticError("invalid data.grouping argument; expected Array object"); else if (!grouping.length) throw CriticError("invalid data.grouping argument; expected non-empty array"); var 
grouping_columns = []; for (var index = 0; index < grouping.length; ++index) switch (grouping[index]) { case "user": grouping_columns.push("reviewfilechanges.uid"); break; case "file": grouping_columns.push("reviewfiles.file"); break; case "review": grouping_columns.push("reviewfiles.review"); break; default: throw CriticError(format("invalid data.grouping[%d] value; expected 'user', 'file' or 'review'", index)); } grouping_columns = grouping_columns.join(", "); var filteredfiles_join; if (this.directories || this.files) filteredfiles_join = " filteredfiles JOIN reviewfiles ON (filteredfiles.file=reviewfiles.file)"; else filteredfiles_join = " reviewfiles"; var repository_join; if (this.repository) repository_join = " JOIN reviews ON (reviews.id=reviewfiles.review) JOIN branches ON (branches.id=reviews.branch)"; else repository_join = ""; var query = "SELECT " + grouping_columns + ", SUM(deleted) AS deleted, SUM(inserted) AS inserted FROM" + filteredfiles_join + " JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id)" + repository_join + " WHERE reviewfilechanges.state='performed' AND reviewfilechanges.to='reviewed'"; var params = {}; if (this.review) { query += " AND (reviewfiles.review=%(review.id)d)"; params["review.id"] = this.review.id; } if (this.repository) { query += " AND (branches.repository=%(repository.id)d)"; params["repository.id"] = this.repository.id; } if (this.user) { query += " AND (reviewfilechanges.uid=%(user.id)d)"; params["user.id"] = this.user.id; } if (this.directories || this.files) { db.execute("CREATE TEMPORARY TABLE filteredfiles ( file INTEGER PRIMARY KEY ) ON COMMIT DROP"); if (this.directories) for (var index = 0; index < this.directories.length; ++index) db.execute("INSERT INTO filteredfiles (file) SELECT id FROM files WHERE path LIKE %s", this.directories[index] + "/%"); if (this.files) for (var index = 0; index < this.files.length; ++index) db.execute("INSERT INTO filteredfiles (file) SELECT id FROM files WHERE 
MD5(path)=MD5(%s)", this.files[index]); } if (this.interval_start) { if (typeof this.interval_start === "object") { query += " AND (reviewfilechanges.time >= %(start)s::timestamp)"; params["start"] = Date.prototype.toSQLTimestamp.call(this.interval_start); } else { query += " AND (reviewfilechanges.time >= now() - %(start)s::interval)"; params["start"] = this.interval_start; } } if (this.interval_end) { if (typeof this.interval_end === "object") { query += " AND (reviewfilechanges.time <= %(end)s::timestamp)"; params["end"] = Date.prototype.toSQLTimestamp.call(this.interval_end); } else { query += " AND (reviewfilechanges.time <= now() - %(end)s::interval)"; params["end"] = this.interval_end; } } query += " GROUP BY " + grouping_columns; var result = db.execute(query, params); var all_data = {}; for (var row_index = 0; row_index < result.length; ++row_index) { var row = result[row_index], data = all_data; for (var column_index = 0; column_index < grouping.length - 1; ++column_index) data = data[row[column_index]] || (data[row[column_index]] = {}); var counts = data[row[column_index]] || (data[row[column_index]] = Object.create(null, { deleteCount: { value: 0, writable: true }, insertCount: { value: 0, writable: true }})); counts.deleteCount += row.deleted; counts.insertCount += row.inserted; } /* This drops the temporary table created above. 
*/ db.rollback(); return all_data; }; CriticStatistics.prototype.getWrittenComments = function () { var filteredfiles_join; if (this.directories || this.files) filteredfiles_join = " filteredfiles JOIN commentchains ON (filteredfiles.file=commentchains.file)"; else filteredfiles_join = " commentchains"; var chains_query = "SELECT uid, type, COUNT(id) AS count FROM" + filteredfiles_join + " WHERE state!='draft' AND state!='empty'"; var comments_query = "SELECT comments.uid AS uid, COUNT(comments.id) AS count, SUM(CHARACTER_LENGTH(comments.comment)) AS characters FROM" + filteredfiles_join + " JOIN comments ON (comments.chain=commentchains.id) WHERE comments.state='current'"; var params = {}; if (this.review) { chains_query += " AND (review=%(review.id)d)"; comments_query += " AND (review=%(review.id)d)"; params["review.id"] = this.review.id; } if (this.user) { chains_query += " AND (uid=%(user.id)d)"; comments_query += " AND (comments.uid=%(user.id)d)"; params["user.id"] = this.user.id; } if (this.directories || this.files) { db.execute("CREATE TEMPORARY TABLE filteredfiles ( file INTEGER PRIMARY KEY ) ON COMMIT DROP"); if (this.directories) for (var index = 0; index < this.directories.length; ++index) db.execute("INSERT INTO filteredfiles (file) SELECT id FROM files WHERE path LIKE %s", this.directories[index] + "/%"); if (this.files) for (var index = 0; index < this.files.length; ++index) db.execute("INSERT INTO filteredfiles (file) SELECT id FROM files WHERE MD5(path)=MD5(%s)", this.files[index]); } if (this.interval_start) { if (typeof this.interval_start === "object") { chains_query += " AND (time >= %(start)s::timestamp)"; comments_query += " AND (comments.time >= %(start)s::timestamp)"; params["start"] = this.interval_start.toSQLTimestamp(); } else { chains_query += " AND (time >= now() - %(start)s::interval)"; comments_query += " AND (comments.time >= now() - %(start)s::interval)"; params["start"] = this.interval_start; } } if (this.interval_end) { if 
(typeof this.interval_end === "object") { chains_query += " AND (time <= %(end)s::timestamp)"; comments_query += " AND (comments.time <= %(end)s::timestamp)"; params["end"] = this.interval_end.toSQLTimestamp(); } else { chains_query += " AND (time <= now() - %(end)s::interval)"; comments_query += " AND (comments.time <= now() - %(end)s::interval)"; params["end"] = this.interval_end; } } chains_query += " GROUP BY uid, type"; comments_query += " GROUP BY comments.uid"; var data = {}; function getPerUser(user_id) { return data[user_id] || (data[user_id] = Object.create(null, { raisedIssues: { value: 0, writable: true }, writtenNotes: { value: 0, writable: true }, totalComments: { value: 0, writable: true }, totalCharacters: { value: 0, writable: true }})); } var result = db.execute(chains_query, params); for (var index = 0; index < result.length; ++index) { var row = result[index]; var per_user = getPerUser(row.uid) if (row.type == "issue") per_user.raisedIssues += row.count; else per_user.writtenNotes += row.count; } var result = db.execute(comments_query, params); for (var index = 0; index < result.length; ++index) { var row = result[index]; var per_user = getPerUser(row.uid) per_user.totalComments += row.count; per_user.totalCharacters += row.characters; } /* This drops the temporary table created above. */ db.rollback(); return data; }; ================================================ FILE: src/library/js/v8/critic-storage.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ "use strict"; function CriticStorage(user) { this.has = function (key) { if (key.length > 64) throw CriticError(format("%s: key length exceeds 64 characters", key)); var result = db.execute("SELECT 1 FROM extensionstorage WHERE extension=%d AND uid=%d AND key=%s", extension_id, user.id, key)[0]; if (result) return true; else return false; }; this.get = function (key) { if (key.length > 64) throw CriticError(format("%s: key length exceeds 64 characters", key)); var result = db.execute("SELECT text FROM extensionstorage WHERE extension=%d AND uid=%d AND key=%s", extension_id, user.id, key)[0]; if (result) return result.text; else return null; }; this.set = function (key, text) { if (key.length > 64) throw CriticError(format("%s: key length exceeds 64 characters", key)); /* Roll the current transaction back first just to make sure we don't commit anything else by mistake. The current transaction (if there is one) should be "empty," and if it isn't, we don't want to keep it. */ db.rollback(); var result = db.execute("SELECT 1 FROM extensionstorage WHERE extension=%d AND uid=%d AND key=%s", extension_id, user.id, key)[0]; if (result) db.execute("UPDATE extensionstorage SET text=%s WHERE extension=%d AND uid=%d AND key=%s", text, extension_id, user.id, key); else db.execute("INSERT INTO extensionstorage (extension, uid, key, text) VALUES (%d, %d, %s, %s)", extension_id, user.id, key, text); /* This code has a possible race-condition: someone else might insert a row for this extension-user-key triple between the SELECT and the INSERT above. 
It would be excessively unlikely, though, and it would simply cause the transaction commit to fail with a constraint violation error (the extension-user-key triple is the table's primary key.) */ db.commit(); }; this.remove = function (key) { /* Roll the current transaction back first just to make sure we don't commit anything else by mistake. The current transaction (if there is one) should be "empty," and if it isn't, we don't want to keep it. */ db.rollback(); db.execute("DELETE FROM extensionstorage WHERE extension=%d AND uid=%d AND key=%s", extension_id, user.id, key); db.commit(); }; this.list = function (data) { var condition, value; if (!data) { condition = "%s"; value = true; } else if (data.like) { condition = "key LIKE %s"; value = data.like; } else if (data.regexp) { condition = "key ~ %s"; value = data.regexp; } else throw new CriticError("invalid arguments"); var result = db.execute("SELECT key FROM extensionstorage WHERE extension=%d AND uid=%d AND " + condition + " ORDER BY key ASC", extension_id, user.id, value); return result.apply(function (key) { return key; }); }; } ================================================ FILE: src/library/js/v8/critic-text.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/

"use strict";

/* Re-wrap |text| so that no line exceeds |line_length| characters, with
   each output line prefixed by |indent|.  Text is split into paragraphs
   at blank lines; a paragraph is left unreflowed (only indented) if any
   of its lines starts with whitespace/list markers or is suspiciously
   short, since it then does not look like plain prose. */
function reflow(text, line_length, indent) {
  /* NOTE(review): a string pattern replaces only the FIRST occurrence of
     "\r"; if normalizing all carriage returns was intended, this would
     need /\r/g -- confirm against upstream before changing. */
  text = text.replace("\r", "\n");

  indent = indent || "";

  /* Zero line-length means reflowing is disabled (by user configuration.) */
  if (line_length == 0)
    return text;

  var paragraphs = text.split(/\n\n+/g);

  paragraph_loop: for (var pindex = 0; pindex < paragraphs.length; ++pindex) {
    var lines = paragraphs[pindex].split("\n");

    for (var lindex = 0; lindex < lines.length; ++lindex) {
      var line = lines[lindex];

      if (/^[ \t\-*]/.test(line) || lindex != lines.length - 1 && line.length < line_length * .5) {
        /* Paragraph seems to be something other than plain text; don't
           reflow. */
        if (indent)
          paragraphs[pindex] = lines.map(function (line) { return indent + line; }).join("\n");
        continue paragraph_loop;
      }
    }

    var new_lines = [];
    var new_line = indent;
    /* Capturing split keeps the whitespace runs in |words|, interleaved
       with the actual words. */
    var words = paragraphs[pindex].split(/(\s+)/g);
    var ws = "";

    for (var windex = 0; windex < words.length; ++windex) {
      var word = words[windex];

      if (!word.trim())
        /* Whitespace token: remember it; original line breaks collapse to
           a single space. */
        if (word.indexOf("\n") != -1)
          ws = " ";
        else
          ws = word;
      else {
        /* Word token: start a new line if appending it (plus the pending
           whitespace) would exceed the limit. */
        if (new_line != indent)
          if (new_line.length + ws.length + word.length > line_length) {
            new_lines.push(new_line);
            new_line = indent;
          } else
            new_line += ws;

        new_line += word;
      }
    }

    if (new_line)
      new_lines.push(new_line);

    paragraphs[pindex] = new_lines.join("\n");
  }

  return paragraphs.join(format("\n%s\n", indent.trim()));
}

/* Return |s| repeated |n| times. */
function repeat(s, n) {
  return Array(n + 1).join(s);
}

/* Return a string of |n| spaces. */
function spaces(n) {
  return repeat(" ", n);
}

/* items = [(path, deleted, inserted), ...]

   Render one line per item, sorted by path, with the paths left-aligned
   in a column and "-deleted +inserted" counts right-aligned after it.
   A path prefix shared with the previous line is elided as "...". */
function renderFilesLines(items, indent) {
  items = items.slice().sort(function (x, y) { x = x[0]; y = y[0]; switch (true) { case x < y: return -1; case x > y: return 1; default: return 0; } });

  var path_width = Math.max.apply(null, items.map(function (item) { return item[0].length; }));
  var delete_max = Math.max.apply(null, items.map(function (item) { return item[1]; }));
  var delete_width = delete_max ? String(delete_max).length : 0;
  var insert_max = Math.max.apply(null, items.map(function (item) { return item[2]; }));
  var insert_width = insert_max ? String(insert_max).length : 0;

  /* Render the right-aligned "-D +I" counters for one item. */
  function renderLines(item) {
    var result;

    if (item[1])
      result = format("%s-%d", spaces(delete_width - String(item[1]).length), item[1]);
    else if (item[2])
      result = spaces(delete_width + 1);

    if (item[2])
      result += format(" %s+%d", spaces(insert_width - String(item[2]).length), item[2]);

    return result;
  }

  var item = items[0];
  var path = item[0];
  var result = format("%s%s%s %s\n", indent, path, spaces(path_width - path.length), renderLines(item));
  var previous = path.split("/");

  for (var iindex = 1; iindex < items.length; ++iindex) {
    item = items[iindex];
    path = item[0];

    var components = path.split("/");
    var common_prefix_length = 0;

    /* Measure (in characters, counting separators) the path component
       prefix shared with the previous line. */
    for (var cindex = 0, ccount = Math.min(previous.length, components.length); cindex < ccount; ++cindex)
      if (previous[cindex] == components[cindex])
        common_prefix_length += 1 + components[cindex].length;
      else
        break;

    /* Only elide when the ".../" marker actually saves space. */
    if (common_prefix_length > 4)
      path = format("%s.../%s", spaces(common_prefix_length - 4), path.substring(common_prefix_length));

    result += format("%s%s%s %s\n", indent, path, spaces(path_width - path.length), renderLines(item));

    previous = components;
  }

  return result;
}

================================================
FILE: src/library/js/v8/critic-trackedbranch.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

 Copyright 2014 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License.

 You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License. */ "use strict"; function signalBranchTracker() { if (branchtracker_pid_path) { var pid = parseInt(IO.File.read(branchtracker_pid_path)); var SIGHUP = 1; OS.Process.kill(pid, SIGHUP); } } function CriticTrackedBranch(id, data) { data = data || {}; var self = this; var branch_id, branch = data.branch || void 0; var review_id, review = data.review || void 0; var remote_name, disabled, pending; this.id = id; scoped( db.execute(("SELECT branches.id, remote, remote_name, disabled, updating," + " next IS NULL AS pending" + " FROM trackedbranches" + " JOIN branches ON (branches.repository=trackedbranches.repository" + " AND branches.name=trackedbranches.local_name)" + " WHERE trackedbranches.id=%d"), id), function () { var row = this[0]; if (!row) throw CriticError(format("invalid tracked branch id: %d", id)); branch_id = row.id; remote_name = row.remote_name; disabled = row.disabled; pending = row.pending; self.remote = row.remote; self.forced = row.forced; self.updating = row.updating; }); if (!review) { scoped( db.execute(("SELECT id" + " FROM reviews" + " WHERE branch=%d"), branch_id), function () { var row = this[0]; if (row) review_id = row.id; }); } function getRemoteName() { return remote_name; } function getDisabled() { return disabled; } function getPending() { return pending; } function getBranch() { if (branch === void 0) branch = new CriticBranch({ id: branch_id }); return branch; } function getReview() { if (review === void 0) { if (review_id !== void 0) review = new CriticReview(review_id); else review = null; } return review; } this.enable = function (new_name) { disabled = false; if (new_name) remote_name = new_name; else new_name = remote_name; db.execute(("UPDATE trackedbranches" + " SET remote_name=%s," + " disabled=FALSE" + " WHERE id=%d"), new_name, this.id); this.triggerUpdate(); }; this.disable = function () { disabled = true; db.execute(("UPDATE 
trackedbranches" + " SET disabled=TRUE" + " WHERE id=%d"), this.id); db.commit(); }; this.triggerUpdate = function () { pending = true; db.execute(("UPDATE trackedbranches" + " SET next=NULL" + " WHERE id=%d"), this.id); db.commit(); signalBranchTracker(); }; Object.defineProperties(this, { name: { get: getRemoteName, enumerable: true }, disabled: { get: getDisabled, enumerable: true }, pending: { get: getPending, enumerable: true }, branch: { get: getBranch, enumerable: true }, review: { get: getReview, enumerable: true } }); Object.freeze(this); } CriticTrackedBranch.prototype.getLogEntry = function (value) { if (value === void 0) value = this.branch.head.sha1; var result = db.execute((" SELECT time, from_sha1, to_sha1, hook_output, " + " successful" + " FROM trackedbranchlog" + " WHERE branch=%d" + " AND to_sha1=%s" + "ORDER BY time DESC" + " LIMIT 1"), this.id, value); var row = result[0]; if (!row) return null; var from_commit = null, to_commit = null; if (row.from_sha1 && !/0{40}/.test(row.from_sha1)) from_commit = this.branch.repository.getCommit(row.from_sha1); if (row.to_sha1 && !/0{40}/.test(row.to_sha1)) to_commit = this.branch.repository.getCommit(row.to_sha1); return Object.freeze({ time: row.time, oldValue: from_commit, newValue: to_commit, hookOutput: row.hook_output, successful: row.successful }); }; CriticTrackedBranch.find = function (data) { var result; if (data.branch) { result = db.execute(("SELECT id" + " FROM trackedbranches" + " WHERE repository=%d" + " AND local_name=%s"), data.branch.repository.id, data.branch.name); } else if (data.remote && data.name) { result = db.execute(("SELECT id" + " FROM trackedbranches" + " WHERE remote=%s" + " AND remote_name=%s"), data.remote, data.name); } else { throw CriticError("invalid input"); } var row = result[0]; if (row) return new CriticTrackedBranch(row.id, data); else return null; }; ================================================ FILE: src/library/js/v8/critic-user.js 
================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ "use strict"; function CriticFilter(user, repository, data) { this.id = data.id; this.user = user; this.repository = repository; this.path = data.path; this.type = data.type; if (this.type == "reviewer") if (data.delegates) { this.delegates = data.delegates.split(/\s*,\s*|\s+/g).map( function (name) { return new CriticUser({ name: name }); }); Object.freeze(this.delegates); } else this.delegates = []; else this.delegates = null; Object.freeze(this); } function CriticUser(data) { var user_id; if (data && typeof data == "object" && data instanceof CriticUser) return data; else if (typeof data == "number") user_id = data; else if (data.id) user_id = data.id; else { var result, name; if (typeof data == "string") name = data; if (name || data.name) { result = db.execute("SELECT id FROM users WHERE name=%s", name || data.name)[0]; if (!result) throw CriticError(format("%s: no such user", name || data.name)); } else throw CriticError("invalid argument; dictionary must specify one of id and name"); user_id = result.id; } var result = db.execute("SELECT name, useremails.email, verified, fullname " + "FROM users " + " LEFT OUTER JOIN useremails ON (useremails.id=users.email) " + "WHERE users.id=%d", user_id)[0]; if (!result) throw CriticError(format("%d: invalid user ID", user_id)); this.id = user_id; this.name = 
result.name; this.email = result.verified !== false ? result.email : null; this.fullname = result.fullname; this.isAnonymous = false; if (data.name && this.name != data.name) throw CriticError("invalid argument; name in dictionary doesn't match id in dictionary"); result = null; Object.freeze(this); } Object.defineProperties(CriticUser.prototype, { toString: { value: function () { return format("%(fullname)s <%(email)s>", this); } }, valueOf: { value: function () { return this.id; } }, getPreference: { value: function (item) { var result = db.execute("SELECT type FROM preferences WHERE item=%s", item)[0]; if (!result) throw CriticError(format("%s: no such preference", item)); var value_column, value_filter = function (value) { return value; }; switch (result.type) { case "boolean": value_filter = Boolean; case "number": value_column = "integer"; break; case "string": value_column = "string"; } var result = db.execute(" SELECT " + value_column + " AS value" + " FROM userpreferences" + " WHERE item=%s" + " AND (uid=%d OR uid IS NULL)" + " AND repository IS NULL" + " AND filter IS NULL " + "ORDER BY uid NULLS LAST", item, this.id)[0]; return value_filter(result.value); } }, isAuthor: { value: function (commit) { if (this.isAnonymous) return false; return this.email == commit.author.email; } }, hasRole: { value: function (role) { if (this.isAnonymous) return false; return Boolean(db.execute("SELECT 1 FROM userroles WHERE uid=%d AND role=%s", this.id, role)[0]); } }, getFilters: { value: function (repository) { if (this.isAnonymous) throw CriticError("not supported; user is anonymous"); if (!(repository instanceof CriticRepository)) throw CriticError("invalid argument: expected Repository object"); var result = db.execute("SELECT id, " + " path, " + " type, " + " delegate AS delegates " + " FROM filters " + " WHERE uid=%d " + " AND repository=%d", this.id, repository.id); var filters = []; for (var index = 0; index < result.length; ++index) filters.push(new 
CriticFilter(this, repository, result[index])); Object.freeze(filters); return filters; } }, addFilter: { value: function (repository, path, filter_type, delegates) { if (this.isAnonymous) throw CriticError("not supported; user is anonymous"); if (!(repository instanceof CriticRepository)) throw CriticError("invalid argument: expected Repository object"); path = String(path); filter_type = String(filter_type); if (filter_type != "reviewer" && filter_type != "watcher" && filter_type != "ignored") throw CriticError("invalid argument: filter type must be 'reviewer', 'watcher' or 'ignored'"); if (/[^\/]\*\*|\*\*[^\/]/.test(path)) throw CriticError("invalid wildcards in path argument"); if (delegates) { delegates = String(delegates).split(/\s*,\s*|\s+/g); for (var index = 0; index < delegates.length; ++index) { if (db.execute("SELECT 1 FROM users WHERE name=%s", delegates[index]).length == 0) throw CriticError(format("invalid delegate '%s': no such user", delegates[index])); } delegates = delegates.join(","); } if (!delegates) delegates = null; if (filter_type != "reviewers" && delegates) throw CriticError(format("'%s' filter should not have delegates", filter_type)); var filter_id = db.execute("INSERT INTO filters (repository, uid, path, type, delegate) " + " VALUES (%d, %d, %s, %s, %s) " + " RETURNING id", repository.id, this.id, path, filter_type, delegates); db.commit(); return new CriticFilter(this, repository, { id: filter_id, path: path, type: filter_type, delegates: delegates }); } } }); function CriticAnonymousUser() { this.id = null; this.name = null; this.email = null; this.fullname = null; this.isAnonymous = true; Object.freeze(this); } CriticAnonymousUser.prototype = Object.create(CriticUser.prototype); Object.defineProperty(CriticUser, "current", { get: function () { return global.user; }, enumerable: true }); ================================================ FILE: src/library/js/v8/critic.js ================================================ /* -*- mode: js; 
 indent-tabs-mode: nil -*-

 Copyright 2013 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License.

 You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 License for the specific language governing permissions and limitations under
 the License.

 */

"use strict";

/* Format as "YYYY-MM-DD HH:MM" in local time. */
Date.prototype.format = function () {
  return format("%04d-%02d-%02d %02d:%02d", this.getFullYear(), this.getMonth() + 1, this.getDate(), this.getHours(), this.getMinutes());
};

/* Format as a full "YYYY-MM-DD HH:MM:SS" timestamp for use in SQL. */
Date.prototype.toSQLTimestamp = function () {
  return format("%04d-%02d-%02d %02d:%02d:%02d", this.getFullYear(), this.getMonth() + 1, this.getDate(), this.getHours(), this.getMinutes(), this.getSeconds());
};

/* Error type used throughout the extension library.  Callable with or
   without |new|.  When given a wrapped |exception|, its message is
   appended and its stack trace reused; otherwise a stack trace is
   captured by deliberately triggering a native exception. */
function CriticError(message, exception) {
  if (!this)
    return new CriticError(message, exception);

  this.name = "CriticError";
  this.message = message;

  if (exception) {
    this.message += " (" + exception.message + ")";
    this.stack = exception.stack;
  } else {
    try {
      /* Trigger a native exception to copy a stack trace from. */
      "foo"();
    } catch (exception) {
      /* Strip the first two lines of the captured trace. */
      this.stack = exception.stack.replace(/^([^\n]+\n){2}/, "");
    }
  }
}

CriticError.prototype = Object.create(Error.prototype);

/* Polyfill for pre-ES6 engines. */
if (!String.prototype.startsWith) {
  String.prototype.startsWith = function (prefix) {
    return this.length >= prefix.length && this.substring(0, prefix.length) == prefix;
  };
}

/* Custom stack trace formatting for V8 (one "what where" line per call
   site). */
Error.prepareStackTrace = function (error, callsites) {
  /* Calling methods on CallSite objects sometimes throws an exception
     saying "illegal access."  Unknown whether this is a bug in V8 or in
     v8-jsshell, but the latter seems more likely.  This has in particular
     been observed when calling the "isConstructor" method. */

  /* Call obj[name]() defensively; on failure, return a placeholder string
     instead of propagating the exception. */
  function checked(obj, name) {
    try {
      return obj[name]();
    } catch (error) {
      return format("<%s: %s>", name, String(error));
    }
  }

  /* Render a single CallSite as human-readable text. */
  function describeCallSite(callsite) {
    if (!callsite || typeof callsite == "string")
      return "";

    if (callsite.isEval())
      return format("eval at %s", describeCallSite(callsite.getEvalOrigin()));

    var filename = checked(callsite, "getFileName");
    var where;

    if (filename) {
      /* Present library files relative to the library root. */
      if (filename.startsWith(library_path + "/"))
        filename = "/" + filename.substring(library_path.length + 1);
      else
        filename = "/" + filename;
      where = format("at %s:%d", filename, checked(callsite, "getLineNumber"));
    } else if (checked(callsite, "isNative"))
      where = "in native code";
    else
      where = "at unknown location";

    var fn = checked(callsite, "getFunction");
    var fnname = checked(callsite, "getFunctionName");
    var what;

    if (fnname) {
      if (checked(callsite, "isConstructor") === true)
        what = format("new %s()", fnname);
      else
        what = format("%s()", fnname);
    }
    /* Oddly enough, top-level program code also has a function, whose
       .toString() returns "function ". */
    /* NOTE(review): both branches below assign the empty string; the
       original description texts (likely angle-bracketed placeholders)
       appear to have been lost in extraction -- confirm against
       upstream. */
    else if (fn && String(fn).startsWith("function ("))
      what = "";
    else
      what = "";

    return format("%s %s", what, where);
  }

  return callsites.map(describeCallSite).join("\n");
};

/* Library-wide tunables. */
var configuration = { maxCommits: 1024 };

Module.load("critic-user.js");
Module.load("critic-file.js");
Module.load("critic-git.js");
Module.load("critic-commitset.js");
Module.load("critic-changeset.js");
Module.load("critic-branch.js");
Module.load("critic-dashboard.js");
Module.load("critic-review.js");
Module.load("critic-comment.js");
Module.load("critic-batch.js");
Module.load("critic-filters.js");
Module.load("critic-mail.js");
Module.load("critic-text.js");
Module.load("critic-html.js");
Module.load("critic-storage.js");
Module.load("critic-log.js");
Module.load("critic-statistics.js");
Module.load("critic-trackedbranch.js");
Module.load("critic-cli.js");

/* Public API surface of the "critic" module. */
Module.assign("CriticError", CriticError);
Module.assign("Error", CriticError);
Module.assign("User", CriticUser);
Module.assign("AnonymousUser", CriticAnonymousUser);
Module.assign("File", CriticFile.find);
Module.assign("Repository", CriticRepository);
Module.assign("Branch", CriticBranch);
Module.assign("Dashboard", CriticDashboard);
Module.assign("Review", CriticReview);
Module.assign("OldBatch", CriticBatch);
Module.assign("CommentChain", CriticCommentChain);
Module.assign("CommitSet", CriticCommitSet);
Module.assign("Filters", CriticFilters);
Module.assign("Statistics", CriticStatistics);
Module.assign("Storage", CriticStorage);
Module.assign("MailTransaction", CriticMailTransaction);
Module.assign("TrackedBranch", CriticTrackedBranch);
Module.assign("Log", CriticLog);
Module.assign("html", CriticHtml);
Module.assign("text", Object.freeze({ reflow: reflow }));

var extras_dir = format("%s/extras", Module.path);
var extra_modules = [];

/* Load optional extra modules: every sub-directory of extras/ containing
   a main.js is loaded and exposed under its directory name (or under the
   name the module itself declares). */
if (IO.File.isDirectory(extras_dir)) {
  IO.File.listDirectory(extras_dir).forEach(
    function (module_name) {
      var module_dir = format("%s/%s", extras_dir, module_name);
      var module_main_js = format("%s/main.js", module_dir);

      if (IO.File.isDirectory(module_dir) && IO.File.isRegularFile(module_main_js)) {
        var module = new Module();
        module.load(module_main_js);
        if (typeof module.name == "string")
          module_name = module.name;
        Module.assign(module_name, module);
        extra_modules.push(module);
      }
    });
}

/* Expose Changeset/MergeChangeset as bare functions carrying the real
   prototypes, enabling instanceof checks without exposing constructors. */
Module.assign("Changeset", Object.freeze(Object.defineProperty(function () {}, "prototype", { value: CriticChangeset.prototype })));
Module.assign("MergeChangeset", Object.freeze(Object.defineProperty(function () {}, "prototype", { value: CriticMergeChangeset.prototype })));
Module.assign("ChangesetLine", Object.freeze(CriticChangesetLineConstants));
Module.assign("printProfilingData", function () {});

/* Per-session state, populated by setup()/connect(). */
var global = {};
var db = null, dbparams = null;
var library_path;
var hostname;
var extension_id;
var user_id;
var authentication_labels;
var role;
var git_executable;
var python_executable;
var python_path;
var repository_work_copy_path;
var changeset_address;
var branchtracker_pid_path;
var maildelivery_pid_path;
var is_development;

/* Initialize the library from the host-provided |data|: open the database
   connection, construct the current user, record paths/settings, apply
   resource limits and let each extra module initialize itself. */
function setup(data) {
  if (!db) {
    /* NOTE(review): |var| makes this a local shadowing the outer
       |dbparams|, so reconnect() cannot work after setup() alone (only
       after connect(), which assigns the outer variable).  Possibly a
       bug -- confirm against upstream before changing. */
    var dbparams = {};
    if (data.dbname) dbparams.dbname = data.dbname;
    if (data.dbuser) dbparams.user = data.dbuser;
    if (data.dbpass) dbparams.password = data.dbpass;
    db = new PostgreSQL.Connection(dbparams);
  }

  if (data.user_id) {
    global.user = new CriticUser(data.user_id);
    if (data.extension_id) {
      Module.assign("storage", global.storage = new CriticStorage(global.user));
      Module.assign("log", global.log = new CriticLog(global.user));
    }
  } else {
    global.user = new CriticAnonymousUser();
  }

  library_path = data.library_path;
  hostname = data.hostname;
  extension_id = data.extension_id;
  user_id = data.user_id;
  authentication_labels = data.authentication_labels;
  role = data.role;
  git_executable = data.git;
  python_executable = data.python;
  python_path = data.python_path;
  repository_work_copy_path = data.repository_work_copy_path;
  changeset_address = data.changeset_address;
  branchtracker_pid_path = data.branchtracker_pid_path;
  maildelivery_pid_path = data.maildelivery_pid_path;
  is_development = data.is_development;

  IO.File.chdir(data.extension_path);

  /* The RSS limit is given in MB; setrlimit() takes pages. */
  var pagesize = parseInt((new OS.Process("getconf PAGESIZE", { shell: true })).call());

  if (data.rlimit.cpu)
    OS.Process.setrlimit("cpu", data.rlimit.cpu);
  if (data.rlimit.rss)
    OS.Process.setrlimit("rss", data.rlimit.rss * (1024 * 1024) / pagesize);

  var critic = this;

  /* Let each extra module run its own setup hook, then close it. */
  extra_modules.forEach(
    function (module) {
      if (typeof module.global.setup == "function")
        module.global.setup(critic, data);
      module.close();
    });
}

/* Close the database connection and shut down all repository objects. */
function shutdown() {
  if (db)
    db.close();
  for (var index = 0; index < all_repositories.length; ++index)
    all_repositories[index].shutdown();
}

/* (Re-)open the database connection from explicit parameters, remembering
   them for reconnect(). */
function connect(data) {
  dbparams = {};
  if (data.dbname) dbparams.dbname = data.dbname;
  if (data.dbuser) dbparams.user = data.dbuser;
  if (data.dbpass) dbparams.password = data.dbpass;
  db = new PostgreSQL.Connection(dbparams);
}

/* Drop and re-establish the current database connection. */
function reconnect() {
  db.reset();
  db = new PostgreSQL.Connection(dbparams);
}

Module.assign("setup", setup);
Module.assign("shutdown", shutdown);
Module.assign("connect", connect);
Module.assign("reconnect", reconnect);

================================================
FILE: src/linkify.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import re ALL_LINKTYPES = [] class Context(object): def __init__(self, db=None, request=None, repository=None, review=None, **kwargs): self.db = db self.request = request self.repository = repository or (review.repository if review else None) self.review = review self.extra = kwargs class LinkType(object): """ A link type object is responsible for providing a regexp fragment that matches the words (or substrings) that the link type produces hyper-links from, and for constructing actual URLs from such words. """ def __init__(self, fragment): """ LinkType(regexp) -> link type object Create a link type object and add it to the global list of link type objects. The 'fragment' argument should be a string containing a regexp fragment without captures suitable to insert into the complete regexp (?:^|\b)(wordA|wordB|...)(?:\b|$) which is then used to split text into "words" which are individually turned into links or left as-is. """ self.fragment = fragment self.fragment_regexp = re.compile("%s$" % fragment) ALL_LINKTYPES.append(self) def match(self, word): return bool(self.fragment_regexp.match(word)) def linkify(self, word): """ linkify(word) -> None or a string. If the whole word matches what this link type handles, constructs a URL to which this word should be made a link, otherwise returns None. Implementations should expect to be called with words that don't match what they handle. Sub-classes must override this method. """ pass class SimpleLinkType(LinkType): """ Base class for link type when the word contains the URL. """ def __init__(self, fragment, regexp=None): super(SimpleLinkType, self).__init__(fragment) if isinstance(regexp, basestring): self.regexp = re.compile(regexp) else: self.regexp = regexp def linkify(self, word, context): if self.regexp: return self.regexp.match(word).group(1) else: return word class HTTP(SimpleLinkType): """ Link type "plain URL string". 
""" def __init__(self): super(HTTP, self).__init__("https?://\\S+[^\\s.,:;!?)]") class URL(SimpleLinkType): """ Link type . """ def __init__(self): super(URL, self).__init__("]+>", "]+)>$") class SHA1(LinkType): """ SHA-1 link type. Converts SHA-1 sums in text (either full or abbreviated) into links to the diff of the referenced commit. When processed in the context of a repository, a matching commit in that repository is preferred (assuming it exists.) When processed in the context of a review, a 'review=' parameter is appended to the URL, which links to the diff of the referenced commit in the context of the review (which includes comments and allows reviewing.) """ def __init__(self): super(SHA1, self).__init__("[0-9A-Fa-f]{8,40}") def linkify(self, word, context): sha1 = word if context.repository \ and context.repository.iscommit(word): sha1 = context.repository.revparse(sha1) if context.review \ and context.review.containsCommit(context.db, sha1): return "/%s/%s?review=%d" % (context.repository.name, sha1, context.review.id) else: return "/%s/%s" % (context.repository.name, sha1) else: return "/%s" % sha1 class Diff(LinkType): """ Diff link type. Like the SHA-1 link type, but with two sums separated by '..', and links to the diff between the two referenced commits. 
""" def __init__(self): super(Diff, self).__init__("[0-9A-Fa-f]{8,40}\\.\\.[0-9A-Fa-f]{8,40}") def linkify(self, word, context): from_sha1, _, to_sha1 = word.partition("..") if context.repository \ and context.repository.iscommit(from_sha1) \ and context.repository.iscommit(to_sha1): from_sha1 = context.repository.revparse(from_sha1) to_sha1 = context.repository.revparse(to_sha1) if context.review \ and context.review.containsCommit(context.db, from_sha1) \ and context.review.containsCommit(context.db, to_sha1): return "/%s/%s..%s?review=%d" % (context.repository.name, from_sha1, to_sha1, context.review.id) else: return "/%s/%s..%s" % (context.repository.name, from_sha1, to_sha1) else: return "/%s..%s" % (from_sha1, to_sha1) class Review(LinkType): """ Review link type. Converts 'r/' in text into a link to the front-page of the corresponding review. """ def __init__(self): super(Review, self).__init__("r/\\d+") def linkify(self, word, context): return "/" + word HTTP() URL() Diff() SHA1() Review() try: import customization.linktypes except ImportError: pass ================================================ FILE: src/log/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
================================================ FILE: src/log/commitset.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import gitutils class CommitSet: def __init__(self, commits): self.__commits = dict([(str(commit), commit) for commit in commits]) self.__merges = set() self.__children = {} parents = set() for commit in self.__commits.values(): for parent in commit.parents: parents.add(parent) self.__children.setdefault(parent, set()).add(commit) if len(commit.parents) > 1: self.__merges.add(commit) commit_set = set(self.__commits.values()) # Heads: commits that aren't the parent of a commit in the set. self.__heads = commit_set - parents # Tails: parent commits not included in the set. 
self.__tails = parents - commit_set def __contains__(self, commit): return str(commit) in self.__commits def __getitem__(self, key): return self.__commits[str(key)] def __len__(self): return len(self.__commits) def __iter__(self): return iter(self.__commits.values()) def __repr__(self): return repr(self.__commits) def get(self, key): return self.__commits.get(str(key)) def getHeads(self): return self.__heads.copy() def getTails(self): return self.__tails.copy() def getMerges(self): return self.__merges.copy() def getChildren(self, commit): children = self.__children.get(commit) if children: return children.copy() else: return set() def getParents(self, commit): return set([self.__commits[sha1] for sha1 in commit.parents if sha1 in self.__commits]) def getFilteredTails(self, repository): """Return a set containing each tail commit of the set of commits that isn't an ancestor of another tail commit of the set. If the tail commits of the set are all different commits on an upstream branch, then this will return only the latest one.""" candidates = self.getTails() result = set() while candidates: tail = candidates.pop() eliminated = set() for other in candidates: base = repository.mergebase([tail, other]) if base == tail: # Tail is an ancestor of other: tail should not be included # in the returned set. break elif base == other: # Other is an ancestor of tail: other should not be included # in the returned set. eliminated.add(other) else: result.add(tail) candidates -= eliminated return result def getTailsFrom(self, commit): """ Return a set containing the each tail commit of the set of commits that are ancestors of 'commit' and that are members of this commit set. A tail commit of a set is a commit that is not a member of the set but that is a parent of a commit that is a member of the set. 
""" assert commit in self.__commits stack = set([commit.sha1]) processed = set() tails = set() while stack: commit = self.__commits[stack.pop()] if commit not in processed: processed.add(commit) for sha1 in commit.parents: parent = self.__commits.get(sha1) if parent: stack.add(parent) else: tails.add(sha1) return tails def getCommonAncestors(self, commit): """Return a set of each commit in this set that is an ancestor of each parent of 'commit' (which must be a member of the set) or None if the parents of 'commit' have no common ancestor within this set.""" common_ancestors = set() branches = [] for sha1 in commit.parents: if sha1 not in self.__commits: return common_ancestors branches.append(set()) for index, sha1 in enumerate(commit.parents): stack = set([sha1]) branch = branches[index] while stack: commit = self.__commits.get(stack.pop()) if commit and commit not in branch: branch.add(commit) for other_index, other_branch in enumerate(branches): if commit not in other_branch: break else: common_ancestors.add(commit) continue stack.update(set(commit.parents)) return common_ancestors def filtered(self, commits): filtered = set() commits = set(commits) while commits: commit = commits.pop() if commit not in filtered: filtered.add(commit) commits.update(self.getParents(commit)) return CommitSet(filtered) def without(self, commits): """ Return a copy of this commit set without 'commit' and any ancestors of 'commit' that don't have other descendants in the commit set. 
""" pending = set(filter(None, (self.__commits.get(str(commit)) for commit in commits))) commits = self.__commits.copy() children = self.__children.copy() while pending: commit = pending.pop() del commits[commit] if commit in children: del children[commit] for parent_sha1 in commit.parents: if parent_sha1 in commits: children0 = children.get(parent_sha1, set()) children0 -= set([commit]) if not children0: pending.add(commits[parent_sha1]) return CommitSet(commits.values()) def isAncestorOf(self, ancestor, commit): if ancestor == commit: return False else: descendants = self.__children.get(ancestor, set()).copy() pending = descendants.copy() while pending and not commit in descendants: descendant = pending.pop() children = self.__children.get(descendant, set()) - descendants descendants.update(children) pending.update(children) return commit in descendants @staticmethod def fromRange(db, from_commit, to_commit, commits=None): repository = to_commit.repository commits = set() class NotPossible(Exception): pass if commits: def getCommit(sha1): return commits[sha1] else: def getCommit(sha1): return gitutils.Commit.fromSHA1(db, repository, sha1) def process(iter_commit): while iter_commit != from_commit and iter_commit not in commits: commits.add(iter_commit) if len(iter_commit.parents) > 1: # A merge commit. Check if 'from_commit' is an ancestor of # all its parents. If not, we don't support constructing a # commit-set from this range of commits (not because it is # particularly difficult, but because such a commit-set # would contain "unexpected" merged-in commits.) 
if from_commit.isAncestorOf(repository.mergebase(iter_commit)): map(process, [getCommit(sha1) for sha1 in iter_commit.parents]) return else: raise NotPossible elif iter_commit.parents: iter_commit = getCommit(iter_commit.parents[0]) else: return if from_commit == to_commit: return CommitSet([to_commit]) try: process(to_commit) return CommitSet(commits) except NotPossible: return None ================================================ FILE: src/log/html.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
from gitutils import Commit
from htmlutils import htmlify, jsify
from profiling import Profiler
from time import time, mktime, strftime, localtime

import re
import log.commitset

def formatWhen(when):
    """Format the struct_time 'when' as a human-friendly relative age.

    Recent times become "N seconds/minutes/hours/days ago"; anything older
    than ~30 days becomes an absolute YYYY-MM-DD date.  Spaces are replaced
    with non-breaking spaces since the result is injected via innerHTML.
    NOTE: uses Python 2 integer division for the unit counts.
    """
    def relative_time(delta, time_unit_singular):
        # Pluralize the unit for counts above one.
        time_unit = time_unit_singular
        if delta > 1:
            time_unit += "s"
        return "%d %s ago" % (delta, time_unit)
    def inner(when):
        delta = int(time() - mktime(when))
        if delta < 60:
            return relative_time(delta, "second")
        elif delta < 60 * 60:
            return relative_time(delta / 60, "minute")
        elif delta < 60 * 60 * 24:
            return relative_time(delta / (60 * 60), "hour")
        elif delta < 60 * 60 * 24 * 30:
            return relative_time(delta / (60 * 60 * 24), "day")
        else:
            return strftime("%Y-%m-%d", localtime(mktime(when)))
    # The replacement character is a non-breaking space (U+00A0).
    return inner(when).replace(" ", " ")

def renderWhen(target, when):
    # innerHTML (not text) so the non-breaking spaces survive.
    target.innerHTML(formatWhen(when))

def linkToCommit(commit, overrides={}):
    """Return the URL for 'commit'.

    With a "review" override the link is review-relative; with an additional
    "replayed_rebase" override it is a conflicts diff between the replayed
    rebase commit and 'commit'.  Otherwise it is a plain repository path.
    'overrides' is only read, so the mutable default is safe here.
    """
    if "review" in overrides:
        review = overrides["review"]
        if "replayed_rebase" in overrides:
            return "%s..%s?review=%d&conflicts=yes" % (overrides["replayed_rebase"].sha1[:8], commit.sha1[:8], review.id)
        return "%s?review=%d" % (commit.sha1[:8], review.id)
    return "%s/%s" % (commit.repository.name, commit.sha1)

# Merge-commit subject-line classifiers used when deciding how to lay out
# sub-logs for merges (see render() below in this file).
re_remote_into_local = re.compile("^Merge (?:branch|commit) '([^']+)' of [^ ]+ into \\1$")
re_side_into_main = re.compile("^Merge (?:remote )?(?:branch|commit) '[^']+' into .+$")
re_octopus = re.compile("^Merge ((?:(?:branches|,| and) '[^']+')+) into [^ ]+$")

class WhenColumn:
    # Log-table column showing the commit's (committer) age.
    def className(self, db, commit): return "when"
    def heading(self, target): target.text("When")
    def render(self, db, commit, target, overrides={}):
        renderWhen(target, commit.committer.time)

class TypeColumn:
    # Log-table column flagging merge commits (and override-supplied types).
    def className(self, db, commit): return "type"
    def heading(self, target): target.text()
    def render(self, db, commit, target, overrides={}):
        if "type" in overrides:
            target.text(overrides["type"])
        # NOTE(review): not an elif -- an overridden type on a merge commit
        # appears to emit both texts; confirm whether that is intended.
        if len(commit.parents) > 1:
            target.text("Merge")
        else:
            target.text()

class SummaryColumn:
    # (signature continues on the following source line)
    def __init__(self,
linkToCommit=linkToCommit): self.linkToCommit = linkToCommit self.isFixupOrSquash = None def className(self, db, commit): return "summary clickable" def heading(self, target): target.text("Summary") def render(self, db, commit, target, overrides={}): summary = overrides.get("summary", commit.summary()) classnames = (["commit", "clickable-target"] + overrides.get("summary_classnames", [])) if self.isFixupOrSquash is not None: data = self.isFixupOrSquash(commit) if data: what, ref = data target.span(what, critic_ref=ref).text("[%s] " % what) lines = commit.message.splitlines()[1:] while lines and not lines[0].strip(): lines.pop(0) if lines: summary = lines[0] else: summary = None if not summary: classnames.append("nocomment") summary = "(no comment)" url = self.linkToCommit(commit, overrides) if summary: target.a(" ".join(classnames), href=url).text(summary) class AuthorColumn: def __init__(self): self.cache = {} def className(self, db, commit): return "author" def heading(self, target): target.text("Author") def render(self, db, commit, target, overrides={}): if "author" in overrides: fullname = overrides["author"].fullname else: fullname = commit.author.getFullname(db) target.text(fullname) DEFAULT_COLUMNS = [(10, WhenColumn()), (5, TypeColumn()), (65, SummaryColumn()), (20, AuthorColumn())] def render(db, target, title, branch=None, commits=None, columns=DEFAULT_COLUMNS, title_right=None, listed_commits=None, rebases=None, branch_name=None, bottom_right=None, review=None, highlight=None, profiler=None, collapsable=False, user=None, extra_commits=None): addResources(target) if not profiler: profiler = Profiler() profiler.check("log: start") if branch is not None: repository = branch.repository commits = branch.getCommits(db)[:] commit_set = log.commitset.CommitSet(commits) else: assert commits is not None repository = commits[0].repository if len(commits) else None commit_set = log.commitset.CommitSet(commits) profiler.check("log: commits") heads = 
commit_set.getHeads() tails = commit_set.getTails() rebase_old_heads = set() if rebases: class Rebase(object): def __init__(self, rebase_id, old_head, new_head, user, new_upstream, equivalent_merge, replayed_rebase, target_branch_name): self.id = rebase_id self.old_head = equivalent_merge or old_head self.new_head = new_head self.user = user self.new_upstream = new_upstream self.equivalent_merge = equivalent_merge self.replayed_rebase = replayed_rebase self.target_branch_name = target_branch_name # The first element in the tuples in 'rebases' is the rebase id, which # is an ever-increasing serial number that we can use as an indication # of the order in which the rebases were made. rebases = [Rebase(*rebase) for rebase in sorted(rebases)] rebase_old_heads = set(rebase.old_head for rebase in rebases) heads -= rebase_old_heads assert 0 <= len(heads) <= 1 if not heads: heads = set([rebases[-1].new_head]) if repository: target.addInternalScript(repository.getJS()) processed = set() summaries = {} for commit in commits: summary = commit.summary().strip() summaries[summary] = commit summaries[commit.sha1] = commit if extra_commits: for commit in extra_commits: summary = commit.summary().strip() summaries[summary] = commit summaries[commit.sha1] = commit def isFixupOrSquash(commit): key, _, summary = commit.summary().partition(" ") if key in ("fixup!", "squash!"): what = key[:-1] else: return None summary = summary.strip() commit = summaries.get(summary) if not commit and re.match("[0-9A-Fa-f]{40}$", summary): commit = summaries.get(summary) if not commit: try: sha1 = repository.revparse(summary) commit = Commit.fromSHA1(db, repository, sha1) except Exception: pass if commit: summary = commit.summary() return what, summary for width, column in columns: if isinstance(column, SummaryColumn): column.isFixupOrSquash = isFixupOrSquash break def output(table, commit, overrides={}): if commit not in processed: classes = ["commit"] row_id = None if len(commit.parents) > 1: 
classes.append("merge") if highlight == commit: classes.append("highlight") row_id = commit.sha1 if review: overrides["review"] = review row = table.tr(" ".join(classes), id=row_id) profiler.check("log: rendering: row") for index, (width, column) in enumerate(columns): column.render(db, commit, row.td(column.className(db, commit)), overrides=overrides) profiler.check("log: rendering: column %d" % (index + 1)) processed.add(commit) return row else: return None cursor = db.cursor() def emptyChangeset(child, parent=None): if parent is None: cursor.execute("""SELECT 1 FROM fileversions JOIN changesets ON (changesets.id=fileversions.changeset) JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) WHERE changesets.child=%s AND reviewchangesets.review=%s""", (child.getId(db), review.id)) else: cursor.execute("""SELECT 1 FROM fileversions JOIN changesets ON (changesets.id=fileversions.changeset) JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) WHERE changesets.parent=%s AND changesets.child=%s AND reviewchangesets.review=%s""", (parent.getId(db), child.getId(db), review.id)) return not cursor.fetchone() def inner(target, head, tails, align='right', title=None, table=None, silent_if_empty=set(), upstream=None): if not table: table = target.table('log', align=align, cellspacing=0) for width, column in columns: table.col(width=('%d%%' % width)) if title: thead = table.thead() row = thead.tr("title") header = row.td("h1", colspan=len(columns)).h1() header.text(title) if callable(title_right): title_right(db, header.span("right")) row = thead.tr('headings') for width, column in columns: column.heading(row.td(column.className(db, None))) elif head is None or head in tails: if upstream: tag = upstream.findInterestingTag(db) if tag: what = tag else: what = upstream.sha1[:8] message = "Merged with base branch (%s)." % what else: message = "Merged with base branch." 
thead = table.thead() row = thead.tr('basemerge') row.td(colspan=len(columns), align='center').text(message) return (None, None, None, False) tbody = table.tbody() commit = head last_commit = None skipped = True while commit and commit not in tails: suppress = False optional_merge = False listed = listed_commits is None or commit.getId(db) in listed_commits if commit in silent_if_empty and emptyChangeset(commit): # This is a clean automatically generated merge commit; pretend it isn't here at all. suppress = True if not suppress and not listed: suppress = len(commit.parents) == 1 optional_merge = not suppress if not suppress: commit_tr = output(tbody, commit) else: commit_tr = None if listed: skipped = False last_commit = commit if len(commit.parents) == 0: break elif len(commit.parents) == 1: commit = commit_set.get(commit.parents[0]) elif len(commit.parents) > 1: if len(commit.parents) > 2: common_ancestors = commit_set.getCommonAncestors(commit) match = re_octopus.match(commit.message.split("\n", 1)[0]) if match: titles = re.findall("'([^']+)'", match.group(1)) if len(titles) != len(commit.parents): titles = None else: titles = None for index, sha1 in enumerate(commit.parents): if sha1 in commit_set: sublog = tbody.tr('sublog') inner_last_commit, inner_table, inner_tail, inner_skipped = inner(sublog.td(colspan=len(columns)), commit_set[sha1], common_ancestors, title=titles and titles[index] or None) if inner_skipped: sublog.remove() if optional_merge and commit_tr: commit_tr.remove() if not common_ancestors: return (None, None, None, False) commit = common_ancestors.pop() continue parent1_sha1 = commit.parents[0] parent2_sha1 = commit.parents[1] parent1 = commit_set.get(parent1_sha1) parent2 = commit_set.get(parent2_sha1) # TODO: Try to remember what this code actually does, and why... 
if parent1_sha1 in rebase_old_heads: if parent2: sublog = tbody.tr('sublog') inner_last_commit, inner_table, inner_tail, inner_skipped = \ inner(sublog.td(colspan=len(columns)), parent2, tails) if inner_skipped: sublog.remove() if optional_merge and commit_tr: commit_tr.remove() return (commit, table, parent1_sha1, False) elif parent2_sha1 in rebase_old_heads: if parent1: sublog = tbody.tr('sublog') inner_last_commit, inner_table, inner_tail, inner_skipped = \ inner(sublog.td(colspan=len(columns)), parent1, tails) if inner_skipped: sublog.remove() if optional_merge and commit_tr: commit_tr.remove() return (commit, table, parent2_sha1, False) if parent1 and parent2: common_ancestors = commit_set.getCommonAncestors(commit) merged_remote_into_local = re_remote_into_local.match(commit.summary()) or re_side_into_main.match(commit.summary()) def rankPaths(commit, tails): shortest = None shortest_length = len(commit_set) longest = None longest_length = 0 for sha1 in commit.parents: parent = commit_set[sha1] counted = set() pending = set([parent]) while pending: candidate = pending.pop() if candidate in counted: continue if candidate in tails: continue counted.add(candidate) pending.update(commit_set.getParents(candidate)) length = len(counted) if length < shortest_length: shortest = parent shortest_length = length if length >= longest_length: longest = parent longest_length = length return shortest, shortest_length, longest, longest_length show_merged, shortest_length, show_normal, longest_length = rankPaths(commit, common_ancestors | tails) display_parallel = False if merged_remote_into_local and shortest_length * 2 > longest_length: if len(common_ancestors) == 1 and len(commit_set.filtered([commit]).getTails()) == 1: display_parallel = True else: show_merged = parent2 show_normal = parent1 if display_parallel: all_empty = True for sha1 in commit.parents: sublog = tbody.tr('sublog') inner_last_commit, inner_table, inner_tail, inner_skipped = 
inner(sublog.td(colspan=len(columns)), commit_set[sha1], common_ancestors | tails) if inner_skipped: sublog.remove() else: all_empty = False if all_empty and optional_merge and commit_tr: commit_tr.remove() commit = common_ancestors.pop() else: sublog = tbody.tr('sublog') inner_last_commit, inner_table, inner_tail, inner_skipped = inner(sublog.td(colspan=len(columns)), show_merged, common_ancestors | tails) if inner_skipped: sublog.remove() if optional_merge and commit_tr: commit_tr.remove() commit = show_normal else: if parent1: upstream_sha1 = parent2_sha1 else: upstream_sha1 = parent1_sha1 if not commit in silent_if_empty: # Merge with the base branch. inner(tbody.tr('sublog').td(colspan=len(columns)), None, None, upstream=Commit.fromSHA1(db, repository, upstream_sha1)) if parent1: commit = parent1 else: commit = parent2 return (last_commit, table, last_commit.parents[0] if last_commit and last_commit.parents else None, skipped) class_name = "paleyellow log" if collapsable: class_name += " collapsable" table = target.table(class_name, align='center', cellspacing=0) for width, column in columns: table.col(width=('%d%%' % width)) thead = table.thead("title") row = thead.tr("title") header = row.td("h1", colspan=len(columns)).h1() error_message = None if len(commit_set) == 0: thead = table.thead() row = thead.tr('error') cell = row.td(colspan=len(columns), align='center') cell.text("No commits. ") if review: cell.a(href="showtree?sha1=%s&review=%d" % (review.branch.head_sha1, review.id)).text("[Browse tree]") return elif len(heads) > 1: error_message = "Invalid commit set: Multiple heads." elif len(heads) == 0: error_message = "Invalid commit set: No heads." 
if error_message is not None: thead = table.thead() row = thead.tr('error') cell = row.td(colspan=len(columns), align='center') cell.text(error_message) return head = heads.pop() if heads else None row = thead.tr('headings') for width, column in columns: column.heading(row.td(column.className(db, None))) first_rebase = True silent_if_empty = set() if rebases: for rebase in rebases: if rebase.equivalent_merge: silent_if_empty.add(rebase.equivalent_merge) top_rebases = [] while rebases and head == rebases[-1].new_head: rebase = rebases.pop() top_rebases.append((head, rebase)) head = rebase.old_head for rebase_head, rebase in top_rebases: thead = table.thead("rebase") row = thead.tr('rebase') cell = row.td(colspan=len(columns), align='center') if rebase.new_upstream is None and not rebase.target_branch_name: cell.text("History rewritten") else: cell.text("Branch rebased onto ") if rebase.target_branch_name: anchor = cell.a(href=("/checkbranch?repository=%d&commit=%s" % (repository.id, rebase.target_branch_name))) anchor.text(rebase.target_branch_name) else: upstream_description = repository.describe(db, rebase.new_upstream.sha1) if upstream_description is None: upstream_description = rebase.new_upstream.sha1[:8] anchor = cell.a(href="/%s/%s" % (repository.name, rebase.new_upstream.sha1)) anchor.text(upstream_description) cell.text(" by %s" % rebase.user.fullname) if first_rebase: cell.text(": ") review_param = "&review=%d" % review.id if review else "" cell.a(href="log?repository=%d&branch=%s%s" % (repository.id, branch_name, review_param)).text("[actual log]") if user and user == rebase.user: cell.text(" ") cell.a(href="javascript:revertRebase(%d)" % rebase.id).text("[revert]") first_rebase = False else: cell.text(".") if rebase.replayed_rebase and not emptyChangeset(parent=rebase.replayed_rebase, child=rebase.new_head): output(table, rebase.new_head, overrides={ "type": "Rebase", "summary": "Changes introduced by rebase", "summary_classnames": ["rebase"], "author": 
rebase.user, "replayed_rebase": rebase.replayed_rebase }) while True: # 'local_tails' is the set of commits that, when reached, should make # inner() stop outputting commits and instead return. This set of # commits contains all the "tails" of the whole commit-set we're # rendering (in the 'tails' set here), as well as the "new head" of the # next rebase to be output. local_tails = tails.copy() if rebases: local_tails.add(rebases[-1].new_head) last_commit, table, tail, skipped = inner( target, head, local_tails, 'center', title, table, silent_if_empty) if rebases: rebase = rebases.pop() assert tail == rebase.new_head, "tail (%s) != rebase.new_head (%s)" % (tail, rebase.new_head) while True: head = rebase.old_head thead = table.thead("rebase") row = thead.tr('rebase') cell = row.td(colspan=len(columns), align='center') if rebase.new_upstream is None and not rebase.target_branch_name: cell.text("History rewritten") else: cell.text("Branch rebased onto ") if rebase.target_branch_name: anchor = cell.a(href=("/checkbranch?repository=%d&commit=%s" % (repository.id, rebase.target_branch_name))) anchor.text(rebase.target_branch_name) else: upstream_description = repository.describe(db, rebase.new_upstream.sha1) if upstream_description is None: upstream_description = rebase.new_upstream.sha1[:8] anchor = cell.a(href="/%s/%s" % (repository.name, rebase.new_upstream.sha1)) anchor.text(upstream_description) cell.text(" by %s" % rebase.user.fullname) if first_rebase: cell.text(": ") review_param = "&review=%d" % review.id if review else "" cell.a(href="log?repository=%d&branch=%s%s" % (repository.id, branch_name, review_param)).text("[actual log]") first_rebase = False else: cell.text(".") if rebase.replayed_rebase and not emptyChangeset(parent=rebase.replayed_rebase, child=rebase.new_head): output(table, rebase.new_head, overrides={ "type": "Rebase", "summary": "Changes introduced by rebase", "summary_classnames": ["rebase"], "author": rebase.user, "replayed_rebase": 
rebase.replayed_rebase }) if rebases and rebases[-1].new_head == head: rebase = rebases.pop() else: break continue if last_commit: if len(last_commit.parents) == 1: upstream = Commit.fromSHA1(db, repository, last_commit.parents[0]) upstream_description = repository.describe(db, upstream.sha1) if not upstream_description: upstream_description = upstream.sha1[:8] row = table.thead("rebase").tr('upstream') cell = row.td(colspan=len(columns), align='center') cell.text("Based on: ") anchor = cell.a(href="/%s/%s" % (repository.name, upstream.sha1)) anchor.text(upstream_description) if callable(bottom_right): bottom_right(db, table.tfoot().tr().td(colspan=len(columns))) break profiler.check("log: rendering") if "%d" in title: header.text(title % len(processed)) else: header.text(title) if callable(title_right): title_right(db, header.span("right")) def renderList(db, target, title, commits, columns=DEFAULT_COLUMNS, title_right=None, bottom_right=None, hide_merges=False, className="log"): addResources(target) table = target.table(className, align="center", cellspacing=0) for width, column in columns: table.col(width=("%d%%" % width)) thead = table.thead() title_h1 = None if title: row = thead.tr("title") title_h1 = row.td("h1", colspan=len(columns)).h1() title_h1.text(title) row = thead.tr("headings") for width, column in columns: column.heading(row.td(column.className(db, None))) tbody = table.tbody() merges = 0 for commit in commits: classname = "commit" if hide_merges: is_merge = len(commit.parents) > 1 if is_merge: classname += " merge" merges += 1 row = tbody.tr(classname, id=commit.sha1) for width, column in columns: column.render(db, commit, row.td(column.className(db, commit))) if merges and title_h1: title_h1.a(href="javascript:void(0);", onclick="showRelevantMerges(event);").text("[Show %d merge commits]" % merges) if callable(title_right): title_right(db, title_h1.span("right")) if callable(bottom_right): bottom_right(db, 
table.tfoot().tr().td(colspan=len(columns))) def addResources(target): target.addExternalStylesheet("resource/log.css") target.addExternalScript("resource/log.js") ================================================ FILE: src/mailutils.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import time import os import signal import email.utils import configuration import dbutils def generateMessageId(index=1): now = time.time() timestamp = time.strftime("%Y%m%d%H%M%S", time.gmtime(now)) timestamp_ms = "%04d" % ((now * 10000) % 10000) return "%s.%s.%04d" % (timestamp, timestamp_ms, index) def queueMail(from_user, to_user, recipients, subject, body, message_id=None, parent_message_id=None, headers=None): if not message_id: message_id = generateMessageId() if headers is None: headers = {} else: headers = headers.copy() if parent_message_id: parent_message_id = "<%s@%s>" % (parent_message_id, configuration.base.HOSTNAME) filename = "%s/%s_%s_%s.txt.pending" % (configuration.paths.OUTBOX, from_user.name, to_user.name, message_id) with open(filename, "w") as file: print >> file, repr({ "message_id": message_id, "parent_message_id": parent_message_id, "headers": headers, "time": time.time(), "from_user": from_user, "to_user": to_user, "recipients": recipients, "subject": subject, "body": body }) return filename class User: def __init__(self, *args): if 
len(args) == 1: self.name = configuration.base.SYSTEM_USER_NAME self.fullname, self.email = email.utils.parseaddr(args[0]) else: self.name, self.email, self.fullname = args def __repr__(self): return "User(%r, %r)" % (self.email, self.fullname) def sendMessage(recipients, subject, body): from_user = User(configuration.base.SYSTEM_USER_NAME, configuration.base.SYSTEM_USER_EMAIL, "Critic System") filenames = [] for to_user in recipients: filenames.append(queueMail(from_user, to_user, recipients, subject, body)) sendPendingMails(filenames) def sendAdministratorMessage(source, summary, message): recipients = [] for recipient in configuration.base.SYSTEM_RECIPIENTS: recipients.append(User(recipient)) sendMessage(recipients, "%s: %s" % (source, summary), message) def sendAdministratorErrorReport(db, source, summary, message): if db: installed_sha1 = dbutils.getInstalledSHA1(db) else: installed_sha1 = "" sendAdministratorMessage(source, summary, """\ Critic encountered an unexpected error. If you know a series of steps that can reproduce this error it would be very useful if you submitted a bug report including the steps plus the information below (see bug reporting URL at the bottom of this e-mail). 
%(message)s Critic version: %(installed_sha1)s Critic bug reports can be filed here: https://github.com/jensl/critic/issues/new """ % { "message": message, "installed_sha1": installed_sha1 }) def sendExceptionMessage(db, source, exception): lines = exception.splitlines() sendAdministratorErrorReport(db, source, lines[-1], exception.rstrip()) def sendPendingMails(filenames): for filename in filenames: if filename.endswith(".txt.pending"): os.rename(filename, filename[:-len(".pending")]) try: pid = int(open(configuration.services.MAILDELIVERY["pidfile_path"]).read().strip()) os.kill(pid, signal.SIGHUP) except: pass ================================================ FILE: src/maintenance/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. ================================================ FILE: src/maintenance/check-branches.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import os import os.path import argparse import errno sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), ".."))) import dbutils import gitutils import log.commitset import progress parser = argparse.ArgumentParser() parser.add_argument("--exclude", action="append", help="exclude repository") parser.add_argument("--include", action="append", help="include (only) repository") parser.add_argument("--dry-run", "-n", action="store_true", help="don't touch the database or repositories") parser.add_argument("--force", "-f", action="store_true", help="update the database and/or repositories") arguments = parser.parse_args() if not arguments.dry_run and not arguments.force: print "One of --dry-run/-n and --force/-f must be specified." sys.exit(1) elif arguments.dry_run and arguments.force: print "Only one of --dry-run/-n and --force/-f can be specified." 
sys.exit(1) force = arguments.force db = dbutils.Database.forSystem() cursor = db.cursor() def getBranchCommits(repository, branch_id): cursor = db.cursor() cursor.execute("SELECT sha1 FROM commits JOIN reachable ON (commit=id) WHERE branch=%s", (branch_id,)) return log.commitset.CommitSet(gitutils.Commit.fromSHA1(db, repository, sha1) for (sha1,) in cursor) def getReview(branch_id): cursor = db.cursor() cursor.execute("SELECT id FROM reviews WHERE branch=%s", (branch_id,)) return cursor.fetchone()[0] def getReviewCommits(repository, review_id): review_id = getReview(branch_id) cursor.execute("""SELECT changesets.child FROM changesets JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) WHERE reviewchangesets.review=%s""", (review_id,)) return log.commitset.CommitSet(gitutils.Commit.fromId(db, repository, commit_id) for (commit_id,) in cursor) def getReviewHead(repository, review_id): commits = getReviewCommits(repository, review_id) heads = commits.getHeads() if len(heads) == 1: return heads.pop() cursor = db.cursor() cursor.execute("""SELECT commits.sha1 FROM commits JOIN reviewrebases ON (reviewrebases.old_head=commits.id) WHERE reviewrebases.review=%s""", (review_id,)) for (sha1,) in cursor: heads.remove(sha1) if len(heads) == 1: return heads.pop() else: return None if arguments.include: cursor.execute("SELECT id FROM repositories WHERE name=ANY (%s)", (arguments.include,)) else: cursor.execute("SELECT id FROM repositories") repository_ids = cursor.fetchall() incorrect_reviews = [] for repository_id in repository_ids: repository = gitutils.Repository.fromId(db, repository_id) if arguments.exclude and repository.name in arguments.exclude: print "Repository: %s (skipped)" % repository.name continue cursor.execute("""SELECT branches.id, branches.name, branches.type, branches.base, commits.sha1 FROM branches JOIN commits ON (commits.id=branches.head) WHERE branches.repository=%s""", (repository_id,)) branches = cursor.fetchall() refs = {} batch = [] 
try: for line in open(os.path.join(repository.path, "packed-refs")): if not line.startswith("#"): try: sha1, ref = line.split() if len(sha1) == 40 and ref.startswith("refs/heads/"): refs[ref[11:]] = sha1 except ValueError: pass except IOError as error: if error.errno == errno.ENOENT: pass else: raise progress.start(len(branches), "Repository: %s" % repository.name) heads_path = os.path.join(repository.path, "refs", "heads") branches_in_db = set() for branch_id, branch_name, branch_type, branch_base_id, branch_sha1 in branches: progress.update() branches_in_db.add(branch_name) try: try: repository_sha1 = open(os.path.join(heads_path, branch_name)).read().strip() except: repository_sha1 = refs.get(branch_name) if repository_sha1 != branch_sha1: progress.write("NOTE[%s]: %s differs (db:%s != repo:%s)" % (repository.name, branch_name, branch_sha1[:8], repository_sha1[:8])) if branch_type == "review": head = getReviewHead(repository, getReview(branch_id)) if not head: progress.write(" invalid review meta-data: r/%d" % getReview(branch_id)) continue if head.sha1 == branch_sha1: progress.write(" branches.head matches review meta-data; repository is wrong") if force: repository.run("update-ref", "refs/heads/%s" % branch_name, head.sha1, repository_sha1) progress.write(" repository updated") elif head.sha1 == repository_sha1: progress.write(" repository matches review meta-data; branches.head is wrong") if force: cursor.execute("UPDATE branches SET head=%s WHERE id=%s", (head.getId(db), branch_id)) db.commit() else: progress.write(" review meta-data matches neither branches.head nor repository") incorrect_reviews.append((getReview(branch_id), "review meta-data matches neither branches.head nor repository")) else: try: gitutils.Commit.fromSHA1(db, repository, branch_sha1) progress.write(" branches.head exists in repository") except KeyboardInterrupt: sys.exit(1) except: progress.write(" branches.head not in repository; updating branches.head") head = 
gitutils.Commit.fromSHA1(db, repository, repository_sha1) if force: cursor.execute("UPDATE branches SET head=%s WHERE id=%s", (head.getId(db), branch_id)) db.commit() continue try: commits = getBranchCommits(repository, branch_id) heads = commits.getHeads() if len(heads) > 1: progress.write(" reachable commit-set has multiple heads") continue head = heads.pop() if head.sha1 == branch_sha1: progress.write(" reachable agrees with branches.head; repository is wrong") if force: repository.run("update-ref", "refs/heads/%s" % branch_name, head.sha1, repository_sha1) progress.write(" repository updated") elif head.sha1 == repository_sha1: progress.write(" reachable agrees with repository; branches.head is wrong") if force: cursor.execute("UPDATE branches SET head=%s WHERE id=%s", (head.getId(db), branch_id)) db.commit() continue except KeyboardInterrupt: sys.exit(1) except: progress.write(" reachable contains missing commits") except KeyboardInterrupt: sys.exit(1) except: progress.write("WARNING[%s]: %s missing!" 
% (repository.name, branch_name)) if branch_type == "normal": cursor.execute("SELECT id FROM branches WHERE base=%s", (branch_id,)) sub_branches = cursor.fetchall() if sub_branches: progress.write(" branch has sub-branches") base_branch = dbutils.Branch.fromId(db, branch_base_id) for (sub_branch_id,) in sub_branches: sub_branch = dbutils.Branch.fromId(db, sub_branch_id) sub_branch.rebase(db, base_branch) progress.write(" rebased sub-branch %s" % sub_branch.name) try: if force: cursor.execute("DELETE FROM branches WHERE id=%s", (branch_id,)) db.commit() progress.write(" deleted from database") except KeyboardInterrupt: sys.exit(1) except: progress.write(" failed to delete from database") db.rollback() else: try: review_id = getReview(branch_id) except KeyboardInterrupt: sys.exit(1) except: progress.write(" review branch without review; deleting") try: if force: cursor.execute("DELETE FROM branches WHERE id=%s", (branch_id,)) db.commit() except KeyboardInterrupt: sys.exit(1) except: progress.write(" failed to delete from database") db.rollback() continue try: commits = getReviewCommits(repository, getReview(branch_id)) except KeyboardInterrupt: sys.exit(1) except: progress.write(" review meta-data references missing commits") incorrect_reviews.append((getReview(branch_id), "branches.head = %s" % branch_sha1)) continue heads = commits.getHeads() if len(heads) > 1: progress.write(" multiple heads: r/%d" % review_id) continue head = heads.pop() try: if force: repository.run("update-ref", "refs/heads/%s" % branch_name, head.sha1, "0" * 40) progress.write(" re-created review branch") except KeyboardInterrupt: sys.exit(1) except: progress.write(" failed to re-create review branch") incorrect_reviews.append((getReview(branch_id), "failed to re-create review branch")) processed = set() def exists_in_db(branch_name): return branch_name in branches_in_db def process(path, prefix=None): for entry in os.listdir(path): entry_path = os.path.join(path, entry) branch_name = 
os.path.join(prefix, entry) if prefix else entry if os.path.isdir(entry_path): process(entry_path, branch_name) elif not exists_in_db(branch_name): progress.write("WARNING[%s]: %s exists in the repository but not in the database!" % (repository.name, branch_name)) if force: repository.run("update-ref", "-d", "refs/heads/%s" % branch_name) progress.write(" deleted from repository") processed.add(branch_name) for branch_name in refs.keys(): if branch_name not in processed and not exists_in_db(branch_name): progress.write("WARNING[%s]: %s exists in the repository but not in the database!" % (repository.name, branch_name)) if force: repository.run("update-ref", "-d", "refs/heads/%s" % branch_name) progress.write(" deleted from repository") process(heads_path) progress.end(".") if incorrect_reviews: print "\nReviews that need attention:" for review_id, message in incorrect_reviews: print " %5d: %s" % (review_id, message) ================================================ FILE: src/maintenance/check-commits.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import sys import os import cPickle sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), ".."))) import dbutils import gitutils import progress db = dbutils.Database.forSystem() cursor = db.cursor() commits = {} pending_commits = set() cursor.execute("SELECT COUNT(*) FROM commits") print progress.start(cursor.fetchone()[0], prefix="Fetching commits ...") cursor.execute("SELECT id, sha1 FROM commits") for commit_id, commit_sha1 in cursor: commits[commit_id] = commit_sha1 pending_commits.add(commit_id) progress.update() progress.end(" %d commits." % len(commits)) print cursor.execute("SELECT MAX(CHARACTER_LENGTH(name)) FROM repositories") repository_name_length = cursor.fetchone()[0] cursor.execute("SELECT id FROM repositories ORDER BY id ASC") repositories = [repository_id for (repository_id,) in cursor] def processCommits(process_commits): global commits processed_commits = set() for commit_id in process_commits: try: gitobject = repository.fetch(commits[commit_id]) if gitobject.type == "commit": processed_commits.add(commit_id) except gitutils.GitError: pass except KeyboardInterrupt: sys.exit(1) except: raise progress.update() return processed_commits for repository_id in repositories: repository = gitutils.Repository.fromId(db, repository_id) repository.disableCache() cursor.execute("SELECT commit FROM reachable JOIN branches ON (branch=id) WHERE repository=%s", (repository.id,)) process_commits = set(commit_id for (commit_id,) in cursor if commit_id in pending_commits) progress.start(len(process_commits), "Scanning repository: %-*s" % (repository_name_length, repository.name)) processed_commits = processCommits(process_commits) missing = len(process_commits) - len(processed_commits) if missing: message = " %d commits found; %d commits missing!" % (len(processed_commits), missing) else: message = " %d commits found." 
% len(processed_commits) progress.end(message) pending_commits -= processed_commits if pending_commits: print print "%d commits still unaccounted for. Re-scanning all repositories." % len(pending_commits) print for repository_id in repositories: repository = gitutils.Repository.fromId(db, repository_id) repository.disableCache() progress.start(len(pending_commits), "Re-scanning repository: %-*s" % (repository_name_length, repository.name)) processed_commits = processCommits(pending_commits) pending_commits -= processed_commits progress.end(" %d commits found, %d remaining." % (len(processed_commits), len(pending_commits))) if not pending_commits: break if pending_commits: cPickle.dump(pending_commits, open("commits-to-purge.pickle", "w"), 2) print print "%d commits that were not found in any repository should be purged." % len(pending_commits) print "Run purge-commits.py to do this." ================================================ FILE: src/maintenance/configtest.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import contextlib import traceback def reflow(text, indent): try: import textutils return textutils.reflow(text, indent=indent) except Exception: # The 'textutils' module depends on 'configuration', so make our # dependency on it conditional. 
        # Fallback for reflow() (whose 'def' is above): return the text
        # unchanged when textutils/configuration are unavailable.
        return text

class ConfigurationValue(object):
    """Reference to a single configuration setting: the defining module's
    source path (normalized from .pyc/.pyo to .py) and the setting name."""
    def __init__(self, module, name):
        self.path = module.__file__
        if self.path.endswith(".pyc") or self.path.endswith(".pyo"):
            self.path = self.path[:-1]
        self.name = name

class ConfigurationIssue(object):
    """A single configuration error or warning, with the settings it
    relates to (a snapshot copy of the |values| stack at creation time)."""
    def __init__(self, issue_type, message, values):
        self.type = issue_type
        self.message = message
        # Copy: the caller's list is mutated as checks proceed.
        self.values = values[:]

    def __str__(self):
        result = self.type.upper() + "\n"
        if self.values:
            result += " Relating to settings:\n"
            for value in self.values:
                result += " %s :: %s\n" % (value.path, value.name)
        result += " Message:\n"
        result += reflow(self.message, indent=4)
        return result

def doTestConfiguration():
    """Do not call directly; call testConfiguration()"""

    # Imported here so a broken 'configuration' package is caught by
    # testConfiguration()'s catch-all instead of failing at import time.
    import configuration

    errors = []
    warnings = []
    # Stack of ConfigurationValue objects currently being inspected; issues
    # recorded while a value() context is active reference these settings.
    values = []

    def error(message):
        errors.append(ConfigurationIssue("error", message, values))

    def warn(message):
        warnings.append(ConfigurationIssue("warning", message, values))

    class MissingValue(Exception):
        # Raised to abort a group of checks when a required setting is
        # absent; each check group catches it and moves on.
        pass

    @contextlib.contextmanager
    def value(module, name):
        """Push |module.name| onto the |values| stack for the duration of
        the with-block, yielding the setting's value; records an error and
        raises MissingValue if the setting does not exist."""
        values.append(ConfigurationValue(module, name))
        if not hasattr(module, name):
            error("Configuration value missing: %s.%s" % (module.__name__, name))
            raise MissingValue
        try:
            yield getattr(module, name)
        finally:
            del values[-1]

    try:
        with value(configuration.base, "WEB_SERVER_INTEGRATION") \
                as web_server_integration:
            if web_server_integration not in ("apache", "nginx+uwsgi", "uwsgi", "none"):
                error("Invalid web server integration: must be one of "
                      "'apache', 'nginx+uwsgi', 'uwsgi' and 'none'.")
    except MissingValue:
        pass

    def checkProvider(providers, name):
        """Validate one external authentication provider's settings; only
        enabled providers are checked."""
        provider = providers[name]
        if provider.get("enabled"):
            if not provider.get("client_id"):
                error("Enabled external authentication provider %r must have "
                      "'client_id' set." % name)
            if not provider.get("client_secret"):
                error("Enabled external authentication provider %r must have "
                      "'client_secret' set." % name)
            if name == "google" and not provider.get("redirect_uri"):
                error("Enabled external authentication provider %r must have "
                      "'redirect_uri' set." % name)
            if provider.get("bypass_createuser") \
                    and provider.get("verify_email_addresses"):
                error("Enabled external authentication provider %r can't have "
                      "both 'bypass_createuser' and 'verify_email_addresses' "
                      "enabled." % name)

    try:
        with value(configuration.base, "AUTHENTICATION_MODE") \
                as authentication_mode:
            if authentication_mode == "critic":
                with value(configuration.base, "SESSION_TYPE") as session_type:
                    if session_type not in ("httpauth", "cookie"):
                        error("Invalid session type: must be one of 'httpauth' "
                              "and 'cookie'.")
            elif authentication_mode != "host":
                # Unconditional external authentication mode
                with value(configuration.base, "SESSION_TYPE") as session_type:
                    if session_type != "cookie":
                        error("Invalid session type: must be 'cookie' (with "
                              "external authentication.)")
                with value(configuration.auth, "PROVIDERS") as providers:
                    if authentication_mode not in providers:
                        error("Authentication mode must be 'host', 'critic' or "
                              "name an external authentication provider.")
                    else:
                        provider = providers[authentication_mode]
                        if not provider.get("enabled"):
                            error("External authentication provider %r must be "
                                  "enabled." % authentication_mode)
                with value(configuration.base, "REPOSITORY_URL_TYPES") \
                        as repository_url_types:
                    if "http" in repository_url_types:
                        warn("HTTP/HTTPS repository URL type is incompatible "
                             "with using an external authentication provider.")
    except MissingValue:
        pass

    try:
        with value(configuration.auth, "PROVIDERS") as providers:
            for name in providers.keys():
                checkProvider(providers, name)
    except MissingValue:
        pass

    return (errors, warnings)

def testConfiguration():
    """Test the system configuration

    Returns a tuple containing two lists of ConfigurationIssue objects.  The
    first list contains errors, the second warnings.

    If the first list is empty, the configuration should be usable."""
    try:
        return doTestConfiguration()
    except Exception:
        # Any unexpected failure (including a broken 'configuration'
        # package) is reported as a single fatal error with traceback.
        error = ConfigurationIssue(
            "error",
            "FATAL: Failed to test configuration!\n\n" + traceback.format_exc(),
            [])
        return ([error], [])



================================================
FILE: src/maintenance/criticctl.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import sys import argparse import auth import configuration import dbutils import inpututils db = dbutils.Database.forSystem() cursor = db.cursor() cursor.execute("SELECT name FROM roles") roles = [role for (role,) in cursor] def valid_user(name): try: dbutils.User.fromName(db, name) except dbutils.NoSuchUser: return "no such user" def valid_role(role): if role not in roles: return "invalid role; must be one of %s" % ", ".join(roles) def invalid_user(name): try: dbutils.User.fromName(db, name) return "user exists" except dbutils.NoSuchUser: pass def check_argument(argument, check): if argument and check: error = check(argument) if error: print >>sys.stderr, "%s: %s" % (argument, error) sys.exit(-1) def use_argument_or_ask(argument, prompt, check=None): if argument: check_argument(argument, check) return argument else: return inpututils.string(prompt, check=check) def listusers(argv): formats = { "tuples": { "pre": "# id, name, email, fullname, status\n[", "row": " (%r, %r, %r, %r, %r),", "post": "]", }, "dicts": { "pre": "[", "row": " {'id': %r, 'name': %r, 'email': %r, 'fullname': %r, 'status': %r},", "post": "]", }, "table": { "pre": " id | name | email | fullname | status\n" \ "-----+------------+--------------------------------+--------------------------------+--------", "row": "%4u | %10s | %30s | %-30s | %s", "post": "", }, } parser = argparse.ArgumentParser( description="Critic administration interface: listusers", prog="criticctl [options] listusers") parser.add_argument("--format", "-f", choices=formats.keys(), default="table", help='output format (defaults to "table")') arguments = parser.parse_args(argv) cursor.execute("""SELECT users.id, name, useremails.email, fullname, status FROM users LEFT OUTER JOIN useremails ON (useremails.id=users.email) ORDER BY users.id""") print formats[arguments.format]["pre"] for row in cursor: print formats[arguments.format]["row"] % row print formats[arguments.format]["post"] def adduser(argv): class NoEmail: pass class 
NoPassword: pass parser = argparse.ArgumentParser( description="Critic administration interface: adduser", prog="criticctl [options] adduser") parser.add_argument("--name", help="user name") parser.add_argument("--email", "-e", help="email address") parser.add_argument("--no-email", dest="email", action="store_const", const=NoEmail, help="create user without email address") parser.add_argument("--fullname", "-f", help="full name") parser.add_argument("--password", "-p", help="password") parser.add_argument("--no-password", dest="password", action="store_const", const=NoPassword, help="create user without password") arguments = parser.parse_args(argv) name = use_argument_or_ask(arguments.name, "Username:", check=invalid_user) fullname = use_argument_or_ask(arguments.fullname, "Full name:") if arguments.email is NoEmail: email = None else: email = use_argument_or_ask(arguments.email, "Email address:") if not email.strip(): email = None if arguments.password is NoPassword: hashed_password = None else: if arguments.password is None: password = inpututils.password("Password:") else: password = arguments.password hashed_password = auth.hashPassword(password) dbutils.User.create(db, name, fullname, email, email_verified=None, password=hashed_password) print "%s: user added" % name def deluser(argv): import reviewing.utils parser = argparse.ArgumentParser( description="Critic administration interface: deluser", prog="criticctl [options] deluser") parser.add_argument("--name", help="user name") arguments = parser.parse_args(argv) name = use_argument_or_ask(arguments.name, "Username:", check=valid_user) reviewing.utils.retireUser(db, dbutils.User.fromName(db, name)) db.commit() print "%s: user retired" % name def role(command, argv): parser = argparse.ArgumentParser( description="Critic administration interface: %s" % command, prog="criticctl [options] %s" % command) parser.add_argument("--name", help="user name") parser.add_argument("--role", choices=roles, help="role 
name") arguments = parser.parse_args(argv) name = use_argument_or_ask(arguments.name, "Username:", check=valid_user) role = use_argument_or_ask(arguments.role, "Role:", check=valid_role) user = dbutils.User.fromName(db, name) cursor.execute("""SELECT 1 FROM userroles WHERE uid=%s AND role=%s""", (user.id, role)) if command == "addrole": if cursor.fetchone(): print "%s: user already has role '%s'" % (name, role) else: cursor.execute("""INSERT INTO userroles (uid, role) VALUES (%s, %s)""", (user.id, role)) db.commit() print "%s: role '%s' added" % (name, role) else: if not cursor.fetchone(): print "%s: user doesn't have role '%s'" % (name, role) else: cursor.execute("""DELETE FROM userroles WHERE uid=%s AND role=%s""", (user.id, role)) db.commit() print "%s: role '%s' removed" % (name, role) def passwd(argv): parser = argparse.ArgumentParser( description="Critic administration interface: passwd", prog="criticctl [options] passwd") class NoPassword: pass parser.add_argument("--name", help="user name") parser.add_argument("--password", help="password") parser.add_argument("--no-password", dest="password", action="store_const", const=NoPassword, help="delete the user's password") arguments = parser.parse_args(argv) name = use_argument_or_ask(arguments.name, "Username:", check=valid_user) if arguments.password is NoPassword: hashed_password = None else: if arguments.password is None: password = inpututils.password("Password:") else: password = arguments.password hashed_password = auth.hashPassword(password) cursor.execute("""UPDATE users SET password=%s WHERE name=%s""", (hashed_password, name)) db.commit() if hashed_password: print "%s: password changed" % name else: print "%s: password deleted" % name def connect(command, argv): parser = argparse.ArgumentParser( description="Critic administration interface: %s" % command, prog="criticctl [options] %s" % command) providers = sorted(provider_name for provider_name, provider in configuration.auth.PROVIDERS.items() if 
command == "disconnect" or provider.get("enabled")) if len(providers) == 0: print >>sys.stderr, "No external authentication providers configured!" return 1 parser.add_argument("--name", help="user name") parser.add_argument("--provider", choices=providers, help="external authentication provider name") if command == "connect": parser.add_argument("--account", help="external account identifier") arguments = parser.parse_args(argv) def valid_provider(provider): if provider not in providers: return ("invalid authentication provider; must be one of %s" % ", ".join(providers)) name = use_argument_or_ask(arguments.name, "Username:", check=valid_user) if len(providers) == 1: check_argument(arguments.provider, check=valid_provider) provider = providers[0] else: provider = use_argument_or_ask( arguments.provider, "Authentication provider:", check=valid_provider) user = dbutils.User.fromName(db, name) provider = auth.PROVIDERS[provider] if command == "connect": cursor.execute("""SELECT 1 FROM externalusers WHERE uid=%s AND provider=%s""", (user.id, provider.name)) if cursor.fetchone(): print >>sys.stderr, ("%s: user already connected to a %s" % (user.name, provider.getTitle())) return 1 account = use_argument_or_ask( arguments.account, provider.getAccountIdDescription() + ":") cursor.execute("""SELECT id, uid FROM externalusers WHERE provider=%s AND account=%s""", (provider.name, account)) row = cursor.fetchone() if row: external_id, user_id = row if user_id is not None: user = dbutils.User.fromId(db, user_id) print >>sys.stderr, ("%s %r: already connected to local user %s" % (provider.getTitle(), account, user.name)) return 1 cursor.execute("""UPDATE externalusers SET uid=%s WHERE id=%s""", (user.id, external_id)) else: cursor.execute("""INSERT INTO externalusers (uid, provider, account) VALUES (%s, %s, %s)""", (user.id, provider.name, account)) print "%s: connected to %s %r" % (name, provider.getTitle(), account) else: cursor.execute("""SELECT account FROM externalusers 
WHERE uid=%s AND provider=%s""", (user.id, provider.name)) row = cursor.fetchone() if not row: print >>sys.stderr, ("%s: user not connected to a %s" % (name, provider.getTitle())) return 1 account, = row cursor.execute("""DELETE FROM externalusers WHERE uid=%s AND provider=%s""", (user.id, provider.name)) print ("%s: disconnected from %s %r" % (name, provider.getTitle(), account)) db.commit() return 0 def configtest(command, argv): parser = argparse.ArgumentParser( description="Critic administration interface: configtest", prog="criticctl [options] configtest") parser.add_argument("--quiet", "-q", action="store_true", help="Suppress non-error/warning output") arguments = parser.parse_args(argv) import maintenance.configtest errors, warnings = maintenance.configtest.testConfiguration() def printIssue(issue): print str(issue) print for error in errors: printIssue(error) for warning in warnings: printIssue(warning) if not errors: if not arguments.quiet: print "System configuration valid." return 0 else: return 1 def restart(command, argv): parser = argparse.ArgumentParser( description="Critic administration interface: restart", prog="criticctl [options] restart") parser.parse_args(argv) result = configtest("configtest", ["--quiet"]) if result != 0: print >>sys.stderr, "ERROR: System configuration is not valid." return result import os import subprocess system_identity = configuration.base.SYSTEM_IDENTITY try: os.seteuid(0) os.setegid(0) except OSError: print >>sys.stderr, "ERROR: 'criticctl restart' must be run as root." 
return 1 if configuration.base.WEB_SERVER_INTEGRATION == "apache": web_server_service = "apache2" elif configuration.base.WEB_SERVER_INTEGRATION in ("nginx+uwsgi", "uwsgi"): web_server_service = "uwsgi" else: web_server_service = None if web_server_service: subprocess.check_call(["service", web_server_service, "stop"]) subprocess.check_call(["service", "critic-" + system_identity, "restart"]) if web_server_service: subprocess.check_call(["service", web_server_service, "start"]) return 0 def stop(command, argv): parser = argparse.ArgumentParser( description="Critic administration interface: stop", prog="criticctl [options] stop") parser.parse_args(argv) import os import subprocess system_identity = configuration.base.SYSTEM_IDENTITY try: os.seteuid(0) os.setegid(0) except OSError: print >>sys.stderr, "ERROR: 'criticctl stop' must be run as root." return 1 if configuration.base.WEB_SERVER_INTEGRATION == "apache": web_server_service = "apache2" elif configuration.base.WEB_SERVER_INTEGRATION in ("nginx+uwsgi", "uwsgi"): web_server_service = "uwsgi" else: web_server_service = None if web_server_service: subprocess.check_call(["service", web_server_service, "stop"]) subprocess.check_call(["service", "critic-" + system_identity, "stop"]) return 0 def interactive(command, argv): try: import IPython except ImportError: print >>sys.stderr, "ERROR: IPython must be installed (import failed)" return 1 parser = argparse.ArgumentParser( description="Critic administration interface: configtest", prog="criticctl [options] configtest") parser.add_argument("--user", "-u", help="Impersonate this user") arguments = parser.parse_args(argv) import api critic = api.critic.startSession(for_user=bool(arguments.user), for_system=not bool(arguments.user)) db = critic.database if arguments.user: db.setUser(dbutils.User.fromName(db, arguments.user)) IPython.embed() return 0 def main(parser, show_help, command, argv): returncode = 0 if show_help or command is None: parser.print_help() else: if 
command == "listusers": listusers(argv) return 0 elif command == "adduser": adduser(argv) return 0 elif command == "deluser": deluser(argv) return 0 elif command in ("addrole", "delrole"): role(command, argv) return 0 elif command == "passwd": passwd(argv) return 0 elif command in ("connect", "disconnect"): return connect(command, argv) elif command == "configtest": return configtest(command, argv) elif command == "restart": return restart(command, argv) elif command == "stop": return stop(command, argv) elif command == "interactive": return interactive(command, argv) else: print >>sys.stderr, "ERROR: Invalid command: %s" % command returncode = 1 print """ Available commands are: listusers List all users. adduser Add a user. deluser Retire a user. addrole Add a role to a user. delrole Remove a role from a user. passwd Set or delete a user's password. connect Set up connection between user and external authentication provider. disconnect Remove such connection. configtest Test system configuration. restart Restart host WSGI container and Critic's background services. stop Stop host WSGI container and Critic's background services. interactive Drop into an interactive IPython shell. Use 'criticctl COMMAND --help' to see per command options.""" return returncode ================================================ FILE: src/maintenance/dumppreferences.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations under # the License. import sys import os.path sys.path.insert(0, os.path.dirname(os.path.dirname(sys.argv[0]))) from dbaccess import connect db = connect() cursor = db.cursor() cursor.execute("SELECT item, type, default_integer, default_string, description FROM preferences") preferences = cursor.fetchall() installpreferences_py = open(os.path.join(os.path.dirname(sys.argv[0]), "installpreferences.py"), "w") print >>installpreferences_py, "PREFERENCES = [ ", for index, (item, type, default_integer, default_string, description) in enumerate(preferences): if index != 0: installpreferences_py.write(""", """) installpreferences_py.write("""{ "item": %r, "type": %r,""" % (item, type)) if type == "string": installpreferences_py.write(""" "default_string": %r,""" % default_string) else: installpreferences_py.write(""" "default_integer": %r,""" % default_integer) installpreferences_py.write(""" "description": %r }""" % description) print >>installpreferences_py, " ]" print >>installpreferences_py print >>installpreferences_py, "def installPreferences(db, quiet):" print >>installpreferences_py, " cursor = db.cursor()" print >>installpreferences_py print >>installpreferences_py, " for preference in PREFERENCES:" print >>installpreferences_py, " item = preference[\"item\"]" print >>installpreferences_py, " type = preference[\"type\"]" print >>installpreferences_py, " default_integer = preference.get(\"default_integer\")" print >>installpreferences_py, " default_string = preference.get(\"default_string\")" print >>installpreferences_py, " description = preference[\"description\"]" print >>installpreferences_py print >>installpreferences_py, " cursor.execute(\"SELECT 1 FROM preferences WHERE item=%s\", (item,))" print >>installpreferences_py print >>installpreferences_py, " if cursor.fetchone():" print >>installpreferences_py, " if not quiet: print \"Updating: %s\" % item" print 
>>installpreferences_py, " cursor.execute(\"UPDATE preferences SET type=%s, default_integer=%s, default_string=%s, description=%s WHERE item=%s\", (type, default_integer, default_string, description, item))" print >>installpreferences_py, " else:" print >>installpreferences_py, " if not quiet: print \"Adding: %s\" % item" print >>installpreferences_py, " cursor.execute(\"INSERT INTO preferences (item, type, default_integer, default_string, description) VALUES (%s, %s, %s, %s, %s)\", (item, type, default_integer, default_string, description))" print >>installpreferences_py print >>installpreferences_py, "if __name__ == \"__main__\":" print >>installpreferences_py, " import sys" print >>installpreferences_py, " import os.path" print >>installpreferences_py print >>installpreferences_py, " sys.path.insert(0, os.path.dirname(os.path.dirname(sys.argv[0])))" print >>installpreferences_py print >>installpreferences_py, " import dbaccess" print >>installpreferences_py print >>installpreferences_py, " db = dbaccess.connect()" print >>installpreferences_py print >>installpreferences_py, " installPreferences(db, \"--quiet\" in sys.argv or \"-q\" in sys.argv)" print >>installpreferences_py print >>installpreferences_py, " db.commit()" ================================================ FILE: src/maintenance/progress.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the
# License for the specific language governing permissions and limitations under
# the License.

import sys

# Module-level state for the single in-flight progress display.
__prefix = ""    # text shown before the percentage
__current = 0    # units completed so far
__total = 0      # total units expected
__previous = ""  # last string written, used to pad over leftovers

def __output(string=None):
    """Redraw the progress line in place.

    With no argument, renders "<prefix> [NNN %]" from the current counters;
    with a string, renders that string.  Only writes when the text changed,
    padding with spaces to erase any longer previous text."""
    global __prefix, __current, __total, __previous

    if string is None:
        percent = int(round((100.0 * __current) / __total))
        string = "%s [%3d %%]" % (__prefix, percent)

    if string != __previous:
        # "\r" returns to the start of the line; trailing spaces blank out
        # whatever the previous (possibly longer) text left behind.
        sys.stdout.write("\r%s%s" % (string, " " * (len(__previous) - len(string))))
        sys.stdout.flush()
        __previous = string

def start(total, prefix=""):
    """Begin a new progress display expecting |total| units of work."""
    global __prefix, __current, __total, __previous

    __prefix = prefix
    __current = 0
    __total = total
    __previous = ""

    # Avoid division by zero in __output() when there is no work.
    if total:
        __output()

def update(count=1):
    """Record |count| more completed units and redraw."""
    global __current
    __current += count
    __output()

def end(message):
    """Finish the progress display, replacing it with |prefix| + |message|."""
    __output(__prefix + message)
    sys.stdout.write("\n")

def write(string):
    """Print |string| on its own line, then restore the progress line."""
    __output(string)
    sys.stdout.write("\n")
    __output()

# Manual smoke test: animates a bar to 100 % over two seconds.
if __name__ == "__main__":
    import time

    start(1000, prefix="Testing: ")

    for index in range(200):
        time.sleep(0.01)
        update(5)

    end("Finished!")



================================================
FILE: src/operation/__init__.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import traceback

import base
import dbutils
import extensions

from textutils import json_encode, json_decode
from operation.basictypes import (
    OperationResult, OperationError, OperationFailure,
    OperationFailureMustLogin)
from operation.typechecker import (
    Optional, Request, RestrictedString, SHA1, RestrictedInteger,
    NonNegativeInteger, PositiveInteger, Review, Repository, Commit, File,
    User, Extension)

class Operation(object):
    """Base class for operation implementations.

    Sub-classes must call Operation.__init__() to define the structure of
    expected input data.

    An operation accepts input in the form of a JSON object literal and
    returns a result in the form of a JSON object literal.  The object
    contains a property named "status" whose value should be "ok" or
    "error".  If it is "error", the object contains a property named "error"
    whose value is an error message.

    If the HTTP request method is POST, the input is the request body (this
    is the usual case) otherwise, if the HTTP request method is GET, the
    input is the value of the "data" URI query parameter (this is supported
    to simplify ad-hoc testing).

    Operation implementations should inherit this class and implement the
    process() method.  This method is called with two positional arguments,
    'db' and 'user', and one keyword argument per property in the input
    value.  The process() method should return an OperationResult object or
    either return or raise an OperationError object.  Any other raised
    exceptions are caught and converted to OperationError objects.
    """

    def __init__(self, parameter_types, accept_anonymous_user=False):
        """Initialize input data type checker.

        The parameter_types argument must be a dict object.  See TypeChecker
        and sub-classes for details on how it works.  A parameter types
        argument of

          { "name": str,
            "points": [{"x": int, "y": int }],
            "what": Optional(str) }

        would for instance represent an input object with two required
        properties named "name" and "points", and an optional property named
        "what".  The "name" and "what" property values should be strings.
        The "points" property value should be an array of objects, each with
        two properties named "x" and "y", whose values should be integers.

        The operation's process() method would be called with the keyword
        arguments "name", "points" and "what".
        """
        from operation.typechecker import TypeChecker
        # Only a plain dict is accepted as the top-level description;
        # anything else is a programming error, not bad request input.
        if not type(parameter_types) is dict:
            raise base.ImplementationError("invalid source type")
        self.__checker = TypeChecker.make(parameter_types)
        self.__accept_anonymous_user = accept_anonymous_user

    def __call__(self, req, db, user):
        # Request entry point: decode and type-check the JSON input, invoke
        # process(), and translate all known failure modes into objects the
        # dispatcher can serialize.
        import auth
        from operation.typechecker import TypeCheckerContext

        # Most operations require a signed-in user; anonymous access is an
        # explicit opt-in via the constructor flag.
        if user.isAnonymous() and not self.__accept_anonymous_user:
            return OperationFailureMustLogin()

        # POST: input is the request body.  GET: input is the "data" query
        # parameter (supported to simplify ad-hoc testing; see class docs).
        if req.method == "POST":
            data = req.read()
        else:
            data = req.getParameter("data")

        if not data:
            raise OperationError("no input")

        try:
            value = json_decode(data)
        except ValueError as error:
            raise OperationError("invalid input: %s" % str(error))

        try:
            self.__checker(value, TypeCheckerContext(req, db, user))
            return self.process(db, user, **value)
        except OperationError as error:
            # OperationError/OperationFailure may be raised or returned by
            # process(); either way they are the operation's result.
            return error
        except OperationFailure as failure:
            return failure
        # The following exception types represent expected failure modes,
        # converted here into user-presentable OperationFailure objects.
        except dbutils.NoSuchUser as error:
            return OperationFailure(
                code="nosuchuser",
                title="Who is '%s'?" % error.name,
                message="There is no user in Critic's database named that.")
        except dbutils.NoSuchReview as error:
            return OperationFailure(
                code="nosuchreview",
                title="Invalid review ID",
                message="The review ID r/%d is not valid." % error.id)
        except auth.AccessDenied as error:
            return OperationFailure(
                code="accessdenied",
                title="Access denied",
                message=error.message)
        except dbutils.TransactionRollbackError:
            return OperationFailure(
                code="transactionrollback",
                title="Transaction rolled back",
                message="Your database transaction rolled back, probably due to a deadlock. Please try again.")
        except extensions.extension.ExtensionError as error:
            return OperationFailure(
                code="invalidextension",
                title="Invalid extension",
                message=error.message)
        except:
            # Deliberate catch-all: this is the top-level boundary for
            # operation execution.  The traceback is mailed to the system
            # administrator(s) rather than leaked to ordinary users.
            # Decode value again since the type checkers might have modified it.
            value = json_decode(data)

            error_message = ("User: %s\nReferrer: %s\nData: %s\n\n%s"
                             % (user.name,
                                req.getReferrer(),
                                json_encode(self.sanitize(value), indent=2),
                                traceback.format_exc()))

            db.rollback()

            import mailutils
            import configuration

            if not user.hasRole(db, "developer"):
                mailutils.sendExceptionMessage(db, "wsgi[%s]" % req.path, error_message)

            # Developers (and development installs) see the full traceback.
            if configuration.debug.IS_DEVELOPMENT or user.hasRole(db, "developer"):
                return OperationError(error_message)
            else:
                return OperationError("An unexpected error occurred. " +
                                      "A message has been sent to the system administrator(s) " +
                                      "with details about the problem.")

    def process(self, *args, **kwargs):
        # Must be overridden by sub-classes.
        raise OperationError("not implemented!?!")

    def sanitize(self, value):
        """Sanitize arguments value for use in error messages or logs."""
        return value

    @staticmethod
    def requireRole(db, role, user):
        # Convenience guard used by operations restricted to a role.
        if not user.hasRole(db, role):
            raise OperationFailure(
                code="notallowed",
                title="Not allowed!",
                message="Operation not permitted, user that lacks role '%s'." % role)
import subprocess
import os
import signal

import configuration
import gitutils
import htmlutils

from operation import Operation, OperationResult, OperationFailure, Optional, RestrictedString

class AddRepository(Operation):
    """Create a new bare, shared Git repository under GIT_DIR, optionally
    set up to mirror a remote repository via the branch tracker service."""

    def __init__(self):
        Operation.__init__(self, { "name": RestrictedString(allowed=lambda ch: ch != "/",
                                                            minlength=1,
                                                            maxlength=64,
                                                            ui_name="short name"),
                                   "path": RestrictedString(minlength=1,
                                                            ui_name="path"),
                                   "mirror": Optional({ "remote_url": RestrictedString(maxlength=256,
                                                                                       ui_name="source repository"),
                                                        "remote_branch": str,
                                                        "local_branch": str }) })

    def process(self, db, user, name, path, mirror=None):
        # Creating repositories is restricted to the 'repositories' role.
        if not user.hasRole(db, "repositories"):
            raise OperationFailure(
                code="notallowed",
                title="Not allowed!",
                message="Only users with the 'repositories' role can add new repositories.")

        # ".git" is appended automatically further down.
        if name.endswith(".git"):
            raise OperationFailure(
                code="badsuffix_name",
                title="Invalid short name",
                message="The short name must not end with .git")

        # "r" would make /r/... repository URLs ambiguous with review URLs.
        if name == "r":
            raise OperationFailure(
                code="invalid_name",
                title="Invalid short name",
                message="The short name 'r' is not allowed since corresponding /REPOSHORTNAME/{SHA1|BRANCH} URLs would conflict with r/REVIEW_ID URLs.")

        # Split the path into an optional containing directory (relative to
        # GIT_DIR) and the repository's own directory name.
        path = path.strip("/").rsplit("/", 1)

        if len(path) == 2:
            base, repository_name = path
        else:
            base, repository_name = None, path[0]

        if base:
            main_base_path = os.path.join(configuration.paths.GIT_DIR, base)
        else:
            main_base_path = configuration.paths.GIT_DIR

        main_path = os.path.join(main_base_path, repository_name + ".git")

        cursor = db.cursor()

        # Both the filesystem path and the short name must be unique.
        cursor.execute("""SELECT name
                            FROM repositories
                           WHERE path=%s""",
                       (main_path,))
        row = cursor.fetchone()
        if row:
            raise OperationFailure(
                code="duplicaterepository",
                title="Duplicate repository",
                message="The specified path is already used by repository %s" % row[0])

        cursor.execute("""SELECT name
                            FROM repositories
                           WHERE name=%s""",
                       (name,))
        row = cursor.fetchone()
        if row:
            raise OperationFailure(
                code="duplicateshortname",
                title="Duplicate short name",
                message="The specified short name is already in use, please select a different short name.")

        if not os.path.isdir(main_base_path):
            # 0775 (Python 2 octal literal): group retains write/execute —
            # presumably so the system/git user can share access; confirm.
            os.makedirs(main_base_path, mode=0775)

        def git(arguments, cwd):
            # Helper: run git and raise GitError on a non-zero exit status.
            argv = [configuration.executables.GIT] + arguments
            git = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
            stdout, stderr = git.communicate()
            if git.returncode != 0:
                raise gitutils.GitError("unexpected output from '%s': %s" % (" ".join(argv), stderr))

        if mirror:
            # Verify the remote is readable before creating anything locally.
            try:
                subprocess.check_output([configuration.executables.GIT, "ls-remote", mirror["remote_url"]],
                                        stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError as e:
                # NOTE(review): this message spans several physical lines in
                # the original source; verify the exact whitespace/markup
                # against upstream before relying on it.
                raise OperationFailure(
                    code="failedreadremote",
                    title="Failed to read source repository",
                    message="Critic failed to read from the specified source repository. The error reported from git " +
                            "(when running as the system user '%s') was:\n%s\n" % (configuration.base.SYSTEM_USER_NAME, htmlutils.htmlify(e.output)),
                    is_html=True)

        git(["init", "--bare", "--shared", repository_name + ".git"], cwd=main_base_path)
        git(["config", "receive.denyNonFastforwards", "false"], cwd=main_path)
        git(["config", "critic.name", name], cwd=main_path)

        if configuration.debug.IS_QUICKSTART:
            git(["config", "critic.socket", os.path.join(configuration.paths.SOCKETS_DIR, "githook.unix")], cwd=main_path)

        # The pre-receive hook is what notifies Critic about pushes.
        os.symlink(os.path.join(configuration.paths.INSTALL_DIR, "hooks", "pre-receive"),
                   os.path.join(main_path, "hooks", "pre-receive"))

        cursor.execute("""INSERT INTO repositories (name, path)
                               VALUES (%s, %s)
                            RETURNING id""",
                       (name, main_path))
        repository_id = cursor.fetchone()[0]

        if mirror:
            # Track all refs (forced) plus the designated branch explicitly.
            cursor.execute("""INSERT INTO trackedbranches (repository, local_name, remote, remote_name, forced, delay)
                                   VALUES (%s, '*', %s, '*', true, '1 day')""",
                           (repository_id, mirror["remote_url"]))
            cursor.execute("""INSERT INTO trackedbranches (repository, local_name, remote, remote_name, forced, delay)
                                   VALUES (%s, %s, %s, %s, true, '1 day')""",
                           (repository_id, mirror["local_branch"], mirror["remote_url"], mirror["remote_branch"]))

            git(["symbolic-ref", "HEAD", "refs/heads/" + mirror["local_branch"]], cwd=main_path)

        db.commit()

        if mirror:
            # Wake the branch tracker service so mirroring starts right away.
            pid = int(open(configuration.services.BRANCHTRACKER["pidfile_path"]).read().strip())
            os.kill(pid, signal.SIGHUP)

        return OperationResult()
import dbutils
import reviewing.utils

from operation import Operation, OperationResult

def _users_as_json(db, user_ids):
    # Map a sequence of user ids to their JSON representations.
    return [dbutils.User.fromId(db, user_id).getJSON() for user_id in user_ids]

class QueryGlobalFilters(Operation):
    """Report which users global filters would add to a review."""

    def __init__(self):
        Operation.__init__(self, { "review_id": int })

    def process(self, db, user, review_id):
        review = dbutils.Review.fromId(db, review_id)
        reviewers, watchers = reviewing.utils.queryFilters(
            db, user, review, globalfilters=True)
        return OperationResult(reviewers=_users_as_json(db, reviewers),
                               watchers=_users_as_json(db, watchers))

class ApplyGlobalFilters(Operation):
    """Apply global filters to a review."""

    def __init__(self):
        Operation.__init__(self, { "review_id": int })

    def process(self, db, user, review_id):
        review = dbutils.Review.fromId(db, review_id)
        reviewing.utils.applyFilters(db, user, review, globalfilters=True)
        return OperationResult()

class QueryParentFilters(Operation):
    """Report which users parent-repository filters would add to a review."""

    def __init__(self):
        Operation.__init__(self, { "review_id": int })

    def process(self, db, user, review_id):
        review = dbutils.Review.fromId(db, review_id)
        reviewers, watchers = reviewing.utils.queryFilters(
            db, user, review, parentfilters=True)
        return OperationResult(reviewers=_users_as_json(db, reviewers),
                               watchers=_users_as_json(db, watchers))

class ApplyParentFilters(Operation):
    """Apply parent-repository filters to a review."""

    def __init__(self):
        Operation.__init__(self, { "review_id": int })

    def process(self, db, user, review_id):
        review = dbutils.Review.fromId(db, review_id)
        reviewing.utils.applyFilters(db, user, review, parentfilters=True)
        return OperationResult()
import dbutils
import gitutils
import reviewing.filters

from operation import Operation, OperationResult, OperationError, Optional

class GetAutoCompleteData(Operation):
    """Return data driving UI auto-completion: user names and/or file paths
    (with per-path change statistics for a review or a set of changesets)."""

    def __init__(self):
        Operation.__init__(self, { "values": [set(["users", "paths"])],
                                   "review_id": Optional(int),
                                   "changeset_ids": Optional([int]) })

    def process(self, db, user, values, review_id=None, changeset_ids=None):
        cursor = db.cursor()
        data = {}

        if "users" in values:
            # Map user name => full name for all non-retired users.
            cursor.execute("SELECT name, fullname FROM users WHERE status!='retired'")
            data["users"] = dict(cursor)

        if "paths" in values:
            # Per-file deleted/inserted line counts, scoped to either a
            # review or an explicit set of changesets.
            if review_id is not None:
                cursor.execute("""SELECT files.path, SUM(reviewfiles.deleted), SUM(reviewfiles.inserted)
                                    FROM files
                                    JOIN reviewfiles ON (reviewfiles.file=files.id)
                                   WHERE reviewfiles.review=%s
                                GROUP BY files.id""",
                               (review_id,))
            elif changeset_ids is not None:
                cursor.execute("""SELECT files.path, SUM(chunks.deleteCount), SUM(chunks.insertCount)
                                    FROM files
                                    JOIN chunks ON (chunks.file=files.id)
                                   WHERE chunks.changeset=ANY (%s)
                                GROUP BY files.id""",
                               (changeset_ids,))
            else:
                raise OperationError("paths requested, but neither review_id nor changeset_ids given")

            paths = {}

            # Each entry maps path => (file count, deleted, inserted).  A
            # file itself has file count zero; every enclosing directory
            # (keys with a trailing '/') accumulates one file plus the
            # file's deleted/inserted counters.
            for filename, deleted, inserted in cursor:
                paths[filename] = (0, deleted, inserted)

                components = filename.split("/")
                for index in range(len(components) - 1, 0, -1):
                    directory = "/".join(components[:index]) + "/"
                    nfiles, current_deleted, current_inserted = paths.get(directory, (0, 0, 0))
                    paths[directory] = nfiles + 1, current_deleted + deleted, current_inserted + inserted

            data["paths"] = paths

        return OperationResult(**data)

class GetRepositoryPaths(Operation):
    """Return paths in a repository's HEAD tree matching a prefix, for path
    auto-completion."""

    def __init__(self):
        Operation.__init__(self, { "prefix": str,
                                   "repository_id": Optional(int),
                                   "repository_name": Optional(str) })

    def process(self, db, user, prefix, repository_id=None, repository_name=None):
        # Wildcard prefixes get no completion data.
        if reviewing.filters.hasWildcard(prefix):
            return OperationResult(paths={})

        prefix = reviewing.filters.sanitizePath(prefix)

        if repository_id is not None:
            repository = gitutils.Repository.fromId(db, repository_id)
        else:
            repository = gitutils.Repository.fromName(db, repository_name)

        if repository.isEmpty():
            return OperationResult(paths={})

        paths = {}

        # Restrict the (recursive) ls-tree listing to the deepest complete
        # directory contained in the prefix, when there is one.
        use_prefix = prefix.rpartition("/")[0]
        if use_prefix:
            names = repository.run("ls-tree", "-r", "--name-only", "HEAD", use_prefix).splitlines()
        else:
            names = repository.run("ls-tree", "-r", "--name-only", "HEAD").splitlines()

        def add(path):
            # Directory entries (trailing '/') carry a contained-file count;
            # plain files map to an empty object.
            if path.endswith("/"):
                if path not in paths:
                    paths[path] = { "files": 0 }
                paths[path]["files"] += 1
            else:
                paths[path] = {}

        for name in names:
            if not name.startswith(prefix):
                continue

            relname = name[len(prefix):]
            use_prefix = prefix

            if prefix.endswith("/"):
                add(prefix)
            elif relname.startswith("/"):
                # The prefix names a directory exactly; complete it with a
                # trailing slash.
                add(prefix + "/")
                use_prefix = prefix + "/"
                relname = relname[1:]

            # Suggest only the next path component under the prefix.
            localname, pathsep, _ = relname.partition("/")
            add(use_prefix + localname + pathsep)

        return OperationResult(paths=paths)
import htmlutils
import textutils

class OperationResult:
    """Container for the result of a successful operation.

    Every keyword argument becomes a property of a JSON object literal,
    which is what converting the object to a string produces.  A property
    "status" with the value "ok" is added automatically unless the caller
    supplied a different "status".
    """

    def __init__(self, **fields):
        fields.setdefault("status", "ok")
        self.__value = fields
        self.__cookies = {}

    def __str__(self):
        return textutils.json_encode(self.__value)

    def set(self, key, value):
        self.__value[key] = value

class OperationError(Exception):
    """Exception class for unexpected operation errors.

    Converting the object to a string produces a JSON object literal with
    status="error" and the message under "error".
    """

    def __init__(self, message):
        self.__message = message

    def __str__(self):
        payload = { "status": "error",
                    "error": self.__message }
        return textutils.json_encode(payload)

class OperationFailure(Exception):
    """Exception class for operation failures caused by invalid input.

    Converting the object to a string produces a JSON object literal with
    status="failure" plus "code", "title" and "message" properties.  Title
    and message are HTML-escaped, except that the message is passed through
    unmodified when is_html=True.
    """

    def __init__(self, code, title, message, is_html=False):
        self.__code = code
        self.__title = htmlutils.htmlify(title)
        if is_html:
            self.__message = message
        else:
            self.__message = htmlutils.htmlify(message)

    def __str__(self):
        payload = { "status": "failure",
                    "code": self.__code,
                    "title": self.__title,
                    "message": self.__message }
        return textutils.json_encode(payload)

class OperationFailureMustLogin(OperationFailure):
    """Specialized failure telling the client that signing in is required."""

    def __init__(self):
        super(OperationFailureMustLogin, self).__init__(
            code="mustlogin",
            title="Login Required",
            message="You have to sign in to perform this operation.")
assert (convert(OperationFailure("<code>", "<title>", "<message>")) == { "status": "failure", "code": "<code>", "title": "<title>", "message": "<message>" }) # Check HTML escaping with is_html=True (title still escaped, but not the # message.) assert (convert(OperationFailure("<code>", "<title>", "<message>", True)) == { "status": "failure", "code": "<code>", "title": "<title>", "message": "<message>" }) print "basic: ok" ================================================ FILE: src/operation/blame.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import dbutils
import gitutils
import itertools
import diff  # NOTE(review): appears unused in this module; confirm before removing.

from operation import Operation, OperationResult, OperationError, Optional
from log.commitset import CommitSet
from changeset.utils import createChangeset
from changeset.load import loadChangesetsForCommits

class LineAnnotator:
    """Line-by-line "blame" over a linear range of commits.

    Walks the commits between 'parent' and 'child', attributing each line of
    a block to the commit that last touched it.  Only linear history is
    supported: merge commits (or an empty commit set) raise
    LineAnnotator.NotSupported, upon which callers fall back to 'git blame'
    (see Blame.process below).
    """

    class NotSupported:
        # Raised (an old-style class, Python 2) when the commit range cannot
        # be annotated by this implementation.
        pass

    def __init__(self, db, parent, child, file_ids=None, commits=None, changeset_cache=None):
        self.parent = parent
        self.child = child
        self.commitset = CommitSet.fromRange(db, parent, child, commits=commits)
        self.changesets = {}

        if not self.commitset:
            raise LineAnnotator.NotSupported

        commits = []

        if not changeset_cache:
            changeset_cache = {}

        # Pick up cached changesets; remember which commits still need one.
        for commit in self.commitset:
            if len(commit.parents) > 1:
                # Merge commit => linear annotation is not possible.
                raise LineAnnotator.NotSupported
            if commit in changeset_cache:
                self.changesets[commit.sha1] = changeset_cache[commit]
            else:
                commits.append(commit)

        # Load pre-computed changesets from the database where available...
        for changeset in loadChangesetsForCommits(db, parent.repository, commits, filtered_file_ids=file_ids):
            self.changesets[changeset.child.sha1] = changeset_cache[changeset.child] = changeset

        # ...and create the rest from scratch.
        # NOTE(review): this subtracts sha1-string keys from a set of commit
        # objects; it appears to rely on gitutils.Commit's equality/hash
        # semantics — confirm against gitutils.
        for commit in set(self.commitset) - set(self.changesets.keys()):
            changesets = createChangeset(db, None, commit.repository, commit=commit, filtered_file_ids=file_ids, do_highlight=False)
            assert len(changesets) == 1
            self.changesets[commit.sha1] = changeset_cache[commit] = changesets[0]

        # Index the commits so annotate() can return compact integer
        # references into self.commits.
        self.commits = [parent]
        self.commit_index = { parent.sha1: 0 }

        for commit in self.commitset:
            self.commit_index[commit.sha1] = len(self.commits)
            self.commits.append(commit)

    class Line:
        # One annotated line: 'sha1' is the commit currently blamed for it,
        # 'primary' is True for lines present in the original block (as
        # opposed to placeholders inserted for deleted lines) and
        # 'untouched' is True until some commit in the range modifies it.
        def __init__(self, sha1, primary):
            self.sha1 = sha1
            self.primary = primary
            self.untouched = True

        def touch(self, sha1):
            # Only the first touch wins: the walk goes child -> parent, so
            # the first commit to touch a line is the newest one.
            if self.untouched:
                self.sha1 = sha1
                self.untouched = False

        def __repr__(self):
            return self.sha1[:8]

    def annotate(self, file_id, first, last, check_user=None):
        """Annotate lines [first, last] of the given file.

        Returns a list of (line offset, commit index) pairs, the index
        referring into self.commits.  When 'check_user' is given, instead
        returns True/False: whether any line in the block was last touched
        by that user.
        """
        offset = first
        count = last - first + 1

        # Initially attribute every line to the range's parent commit, then
        # walk from the range's head towards the parent, re-attributing
        # lines as modifications are encountered.
        initial_lines = [LineAnnotator.Line(sha1, True) for sha1 in itertools.repeat(self.parent.sha1, count)]
        lines = initial_lines[:]

        commit = self.commitset.getHeads().pop()

        while True:
            changeset = self.changesets[commit.sha1]
            changeset_file = changeset.getFile(file_id)

            if changeset_file:
                changeset_file.loadOldLines()
                changeset_file.loadNewLines()

                offset_delta = 0
                modifications = []

                for chunk in changeset_file.chunks:
                    if chunk.insertEnd() < offset:
                        # Chunk entirely above the block: it only shifts the
                        # block's offset in the older file version.
                        offset_delta -= chunk.delta()
                        continue
                    if chunk.insert_offset < offset + count:
                        if not chunk.deleted_lines:
                            chunk.deleted_lines = changeset_file.getOldLines(chunk)
                        if not chunk.inserted_lines:
                            chunk.inserted_lines = changeset_file.getNewLines(chunk)
                        for line in chunk.getLines():
                            if line.new_offset < offset:
                                # Above the block: adjust the offset mapping.
                                if line.type == line.DELETED:
                                    offset_delta += 1
                                elif line.type == line.INSERTED:
                                    offset_delta -= 1
                            elif line.new_offset < offset + count:
                                if line.type == line.CONTEXT:
                                    pass
                                elif line.type == line.DELETED:
                                    # Present only in the older version; a
                                    # placeholder is inserted below.
                                    modifications.append((line.new_offset, -1))
                                else:
                                    if line.type == line.INSERTED:
                                        modifications.append((line.new_offset, 1))
                                    # INSERTED/MODIFIED: this commit touched
                                    # the line.
                                    line = lines[line.new_offset - offset]
                                    if check_user and line.primary and line.untouched and commit.author.email == check_user.email:
                                        return True
                                    line.touch(commit.sha1)
                    else:
                        # Chunks are ordered; the rest lie below the block.
                        break

                # Rewrite 'lines' to describe the block as it looks in the
                # older version: drop inserted lines, insert placeholders
                # for deleted ones, keeping 'count' consistent.
                modification_offset = offset
                for line_offset, delta in modifications:
                    if delta > 0:
                        del lines[line_offset - modification_offset]
                        count -= 1
                        modification_offset += 1
                    else:
                        lines.insert(line_offset - modification_offset, LineAnnotator.Line(None, False))
                        count += 1
                        modification_offset -= 1

                offset += offset_delta

            parents = self.commitset.getParents(commit)

            if len(parents) > 1:
                raise LineAnnotator.NotSupported
            if parents:
                commit = parents.pop()
            else:
                break

        if check_user:
            # Lines still untouched were last modified at or before the
            # range's parent; attribute them to its author.
            if self.parent.author.email == check_user.email:
                return any(itertools.imap(lambda line: line.untouched, initial_lines))
            else:
                return False
        else:
            return [(first + index, self.commit_index[line.sha1]) for index, line in enumerate(initial_lines)]

class Blame(Operation):
    """Annotate blocks of lines in files touched by a changeset."""

    def __init__(self):
        Operation.__init__(self, { "repository_id": int,
                                   "changeset_id": int,
                                   "files": [{ "id": int,
                                               "blocks": [{ "first": int,
                                                            "last": int }] }] })

    def process(self, db, user, repository_id, changeset_id, files):
        repository = gitutils.Repository.fromId(db, repository_id)

        cursor = db.cursor()
        cursor.execute("SELECT parent, child FROM changesets WHERE id=%s", (changeset_id,))

        parent_id, child_id = cursor.fetchone()

        parent = gitutils.Commit.fromId(db, repository, parent_id)
        child = gitutils.Commit.fromId(db, repository, child_id)

        try:
            # Fast path: pure-Python annotation of linear history.
            annotator = LineAnnotator(db, parent, child)

            for file in files:
                for block in file["blocks"]:
                    lines = annotator.annotate(file["id"], block["first"], block["last"])
                    block["lines"] = [{ "offset": offset, "commit": commit } for offset, commit in lines]

            return OperationResult(commits=[{ "sha1": commit.sha1,
                                              "author_name": commit.author.name,
                                              "author_email": commit.author.email,
                                              "summary": commit.niceSummary(),
                                              "message": commit.message,
                                              "original": commit == parent,
                                              "current": commit == child }
                                            for commit in annotator.commits],
                                   files=files)
        except LineAnnotator.NotSupported:
            # Fallback for non-linear history: delegate to git blame.
            blame = gitutils.Blame(parent, child)
            paths = {}

            for file in files:
                file_id = file["id"]
                path = paths.get(file_id)
                if not path:
                    path = paths[file_id] = dbutils.describe_file(db, file_id)
                for block in file["blocks"]:
                    block["lines"] = blame.blame(db, path, block["first"], block["last"])

            return OperationResult(commits=blame.commits, files=files)
import dbutils

from operation import Operation, OperationResult, OperationFailure, Review

# This operation has no UI entry point; it exists for use during testing.
class ArchiveBranch(Operation):
    """Archive the branch of a closed or dropped review."""

    def __init__(self):
        Operation.__init__(self, { "review": Review })

    def process(self, db, user, review):
        branch = review.branch

        # Refuse repeated archival.
        if branch.archived:
            raise OperationFailure(
                code="invalidstate",
                title="Branch already archived!",
                message="The review's branch has already been archived.")

        # Only finished reviews may have their branch archived.
        if review.state not in {"closed", "dropped"}:
            raise OperationFailure(
                code="invalidstate",
                title="Invalid review state!",
                message=("The review must be closed or dropped to archive "
                         "its branch."))

        branch.archive(db)
        review.cancelScheduledBranchArchival(db)
        db.commit()

        return OperationResult()

class ResurrectBranch(Operation):
    """Bring an archived review branch back, rescheduling its archival."""

    def __init__(self):
        Operation.__init__(self, { "review": Review })

    def process(self, db, user, review):
        branch = review.branch

        if not branch.archived:
            raise OperationFailure(
                code="invalidstate",
                title="Branch not archived!",
                message="The review's branch has not been archived.")

        branch.resurrect(db)

        # Archival is automatically rescheduled; report the delay back.
        delay = review.scheduleBranchArchival(db)
        db.commit()

        return OperationResult(delay=delay)

# This operation has no UI entry point; it exists for use during testing.
class ScheduleBranchArchival(Operation):
    """(Re-)schedule archival of a review's branch after 'delay'."""

    def __init__(self):
        Operation.__init__(self, { "review": Review, "delay": int })

    def process(self, db, user, review, delay):
        # No state checks here on purpose: dbutils.Review.scheduleBranchArchival()
        # is itself a no-op unless the review is closed or dropped and the
        # branch is not already archived.
        review.scheduleBranchArchival(db, delay=delay)
        db.commit()

        return OperationResult()
class CheckConflictsStatus(Operation):
    """Check whether a rebase/merge would (or did) produce conflicts.

    Either inspects an already created merge commit ('merge_sha1'), or
    replays a move rebase onto 'new_upstream_sha1' and diffs the replay
    against the pushed new head.
    """

    def __init__(self):
        super(CheckConflictsStatus, self).__init__({ "review_id": int,
                                                     "merge_sha1": Optional(str),
                                                     "new_head_sha1": Optional(str),
                                                     "new_upstream_sha1": Optional(str) })

    def process(self, db, user, review_id, merge_sha1=None, new_head_sha1=None,
                new_upstream_sha1=None):
        review = dbutils.Review.fromId(db, review_id)

        if merge_sha1 is not None:
            merge = gitutils.Commit.fromSHA1(db, review.repository, merge_sha1)

            changesets = changeset_utils.createChangeset(
                db, user, review.repository, merge, conflicts=True,
                do_highlight=False)

            url = "/showcommit?repository=%d&sha1=%s&conflicts=yes" % (review.repository.id, merge.sha1)
        else:
            # Move rebase: only supported with a single tail (upstream).
            upstreams = review.getCommitSet(db).getFilteredTails(review.repository)

            if len(upstreams) > 1:
                return OperationResult(rebase_supported=False)

            old_head = review.branch.getHead(db)
            old_upstream = gitutils.Commit.fromSHA1(db, review.repository, upstreams.pop())
            new_head = gitutils.Commit.fromSHA1(db, review.repository, new_head_sha1)
            new_upstream = gitutils.Commit.fromSHA1(db, review.repository, new_upstream_sha1)

            # Replay the rebase mechanically and diff the result against the
            # pushed head: differences are conflicts or manual changes.
            replay = reviewing.rebase.replayRebase(db, review, user, old_head, old_upstream, new_head, new_upstream)

            changesets = changeset_utils.createChangeset(
                db, user, review.repository, from_commit=replay,
                to_commit=new_head, conflicts=True, do_highlight=False)

            url = "/showcommit?repository=%d&from=%s&to=%s&conflicts=yes" % (review.repository.id, replay.sha1, new_head.sha1)

        has_changes = False
        has_conflicts = False

        # A file whose old ("replayed"/merged) version contains a git
        # conflict marker counts as conflicting; any other changed file
        # counts as a plain change.
        for changed_file in changesets[0].files:
            changed_file.loadOldLines()

            file_has_conflicts = False

            for chunk in changed_file.chunks:
                lines = changed_file.getOldLines(chunk)
                for line in lines:
                    if line.startswith("<<<<<<<"):
                        has_conflicts = file_has_conflicts = True
                        break
                if file_has_conflicts:
                    break

            if not file_has_conflicts:
                has_changes = True

        return OperationResult(has_conflicts=has_conflicts, has_changes=has_changes, url=url)

class CheckHistoryRewriteStatus(Operation):
    """Check that a history rewrite push is valid.

    A history rewrite must contain (possibly as an intermediate step) a
    commit whose tree is identical to the current review head's tree.
    """

    def __init__(self):
        super(CheckHistoryRewriteStatus, self).__init__({ "review_id": int,
                                                          "new_head_sha1": str })

    def process(self, db, user, review_id, new_head_sha1):
        review = dbutils.Review.fromId(db, review_id)

        old_head = review.branch.getHead(db)
        new_head = gitutils.Commit.fromSHA1(db, review.repository, new_head_sha1)

        mergebase = review.repository.mergebase([old_head, new_head])
        sha1s = review.repository.revlist([new_head], [mergebase])

        valid = True

        # for/else: 'valid' is cleared only if NO commit in the rewritten
        # history has a tree identical to the old head's tree.
        for sha1 in sha1s:
            commit = gitutils.Commit.fromSHA1(db, review.repository, sha1)
            if commit.tree == old_head.tree:
                break
        else:
            valid = False

        return OperationResult(valid=valid)
import dbutils

from operation import (Operation, OperationResult, OperationFailure, Optional,
                       NonNegativeInteger, PositiveInteger, Review, Commit, File)
from reviewing.comment import (CommentChain, validateCommentChain,
                               createCommentChain, createComment)

class ValidateCommentChain(Operation):
    """Validate a proposed location for a file comment chain."""

    def __init__(self):
        spec = { "review": Review,
                 "origin": set(["old", "new"]),
                 "parent": Optional(Commit),
                 "child": Commit,
                 "file": File,
                 "offset": PositiveInteger,
                 "count": PositiveInteger }
        Operation.__init__(self, spec)

    def process(self, db, user, review, origin, child, file, offset, count, parent=None):
        verdict, extra = validateCommentChain(
            db, review, origin, parent, child, file, offset, count)
        return OperationResult(verdict=verdict, **extra)

def checkComment(text):
    """Reject comment text that is empty or consists only of white-space."""
    if text.strip():
        return
    raise OperationFailure(
        code="emptycomment",
        title="Empty comment!",
        message="Creating empty (or white-space only) comments is not allowed.")

class CreateCommentChain(Operation):
    """Create a new comment chain (issue or note), optionally anchored to a
    commit message context or to a range of lines in a file version."""

    def __init__(self):
        commit_context_spec = { "commit": Commit,
                                "offset": NonNegativeInteger,
                                "count": PositiveInteger }
        file_context_spec = { "origin": set(["old", "new"]),
                              "parent": Optional(Commit),
                              "child": Commit,
                              "file": File,
                              "offset": PositiveInteger,
                              "count": PositiveInteger }
        Operation.__init__(self, { "review": Review,
                                   "chain_type": set(["issue", "note"]),
                                   "commit_context": Optional(commit_context_spec),
                                   "file_context": Optional(file_context_spec),
                                   "text": str })

    def process(self, db, user, review, chain_type, text,
                commit_context=None, file_context=None):
        checkComment(text)

        # Anchor the chain to whichever context was supplied; with neither,
        # the chain is attached to the review as a whole.
        context = commit_context or file_context or {}
        chain_id = createCommentChain(db, user, review, chain_type, **context)
        comment_id = createComment(db, user, chain_id, text, first=True)

        db.commit()

        return OperationResult(chain_id=chain_id,
                               comment_id=comment_id,
                               draft_status=review.getDraftStatus(db, user))

class CreateComment(Operation):
    """Add a reply to an existing comment chain."""

    def __init__(self):
        Operation.__init__(self, { "chain_id": int,
                                   "text": str })

    def process(self, db, user, chain_id, text):
        checkComment(text)

        chain = CommentChain.fromId(db, chain_id, user)
        comment_id = createComment(db, user, chain_id, text)

        db.commit()

        return OperationResult(comment_id=comment_id,
                               draft_status=chain.review.getDraftStatus(db, user))
import re
import os
import signal

import dbutils
import gitutils
import htmlutils
import configuration

from operation import (Operation, OperationResult, OperationError, Optional,
                       OperationFailure, Repository)
from reviewing.utils import (parseReviewFilters, parseRecipientFilters,
                             createReview, getReviewersAndWatchers)
from page.createreview import generateReviewersAndWatchersTable
from log.commitset import CommitSet

if configuration.extensions.ENABLED:
    import extensions.role.processcommits

from cStringIO import StringIO

class ReviewersAndWatchers(Operation):
    """Compute and render (as HTML) the reviewers/watchers table that would
    result from creating a review of the given commits with the given
    filters."""

    def __init__(self):
        Operation.__init__(self, { "repository_id": int,
                                   "commit_ids": [int],
                                   "reviewfilters": [{ "username": str,
                                                       "type": set(["reviewer", "watcher"]),
                                                       "path": str }],
                                   "applyfilters": bool,
                                   "applyparentfilters": bool })

    # NOTE(review): the first parameter (the 'self' slot) is named |req| and
    # is passed on to htmlutils.Document() below, i.e. the Operation instance
    # itself is used as the "request" — looks deliberate but confirm before
    # renaming.
    def process(req, db, user, repository_id, commit_ids, reviewfilters,
                applyfilters, applyparentfilters):
        reviewfilters = parseReviewFilters(db, reviewfilters)

        repository = gitutils.Repository.fromId(db, repository_id)
        commits = [gitutils.Commit.fromId(db, repository, commit_id) for commit_id in commit_ids]

        all_reviewers, all_watchers = getReviewersAndWatchers(
            db, repository, commits, reviewfilters=reviewfilters,
            applyfilters=applyfilters, applyparentfilters=applyparentfilters)

        document = htmlutils.Document(req)

        generateReviewersAndWatchersTable(
            db, repository, document, all_reviewers, all_watchers,
            applyfilters=applyfilters, applyparentfilters=applyparentfilters)

        return OperationResult(html=document.render(plain=True))

class SubmitReview(Operation):
    """Create a new review of a set of commits.

    Validates the review branch name, creates the review (plus its review and
    recipient filters), optionally runs extension processcommits hooks, and
    optionally sets up a tracked branch that the branch tracker service will
    keep up to date.

    Returns the new review's id; may also return extension output and the
    tracked branch id.
    """

    def __init__(self):
        Operation.__init__(self, { "repository": Repository,
                                   "branch": str,
                                   "summary": str,
                                   "commit_ids": Optional([int]),
                                   "commit_sha1s": Optional([str]),
                                   "applyfilters": Optional(bool),
                                   "applyparentfilters": Optional(bool),
                                   "reviewfilters": Optional([{ "username": str,
                                                                "type": set(["reviewer", "watcher"]),
                                                                "path": str }]),
                                   "recipientfilters": Optional({ "mode": set(["opt-in", "opt-out"]),
                                                                  "included": Optional([str]),
                                                                  "excluded": Optional([str]) }),
                                   "description": Optional(str),
                                   "frombranch": Optional(str),
                                   "trackedbranch": Optional({ "remote": str,
                                                               "name": str }) })

    def process(self, db, user, repository, branch, summary, commit_ids=None,
                commit_sha1s=None, applyfilters=True, applyparentfilters=True,
                reviewfilters=None, recipientfilters=None, description=None,
                frombranch=None, trackedbranch=None):
        # Raises auth.AccessDenied if access should not be allowed.
        repository.checkAccess(db, "modify")

        # Review branches must live under the r/ namespace.
        if not branch.startswith("r/"):
            raise OperationFailure(code="invalidbranch",
                                   title="Invalid review branch name",
                                   message="'%s' is not a valid review branch name; it must have a \"r/\" prefix." % branch)

        if reviewfilters is None:
            reviewfilters = []
        if recipientfilters is None:
            recipientfilters = {}

        # Reject the name if any proper prefix of it (split on '/') already
        # exists as a branch: Git cannot have both "r/a" and "r/a/b" refs.
        components = branch.split("/")
        for index in range(1, len(components)):
            try:
                repository.revparse("refs/heads/%s" % "/".join(components[:index]))
            except gitutils.GitReferenceError:
                continue

            message = ("Cannot create branch with name<pre>%s</pre>since there is already a branch named<pre>%s</pre>in the repository."
                       % (htmlutils.htmlify(branch), htmlutils.htmlify("/".join(components[:index]))))
            raise OperationFailure(code="invalidbranch",
                                   title="Invalid review branch name",
                                   message=message,
                                   is_html=True)

        if commit_sha1s is not None:
            commits = [gitutils.Commit.fromSHA1(db, repository, commit_sha1) for commit_sha1 in commit_sha1s]
        elif commit_ids is not None:
            commits = [gitutils.Commit.fromId(db, repository, commit_id) for commit_id in commit_ids]
        else:
            commits = []

        commitset = CommitSet(commits)

        reviewfilters = parseReviewFilters(db, reviewfilters)
        recipientfilters = parseRecipientFilters(db, recipientfilters)

        review = createReview(db, user, repository, commits, branch, summary, description,
                              from_branch_name=frombranch,
                              reviewfilters=reviewfilters,
                              recipientfilters=recipientfilters,
                              applyfilters=applyfilters,
                              applyparentfilters=applyparentfilters)

        extensions_output = StringIO()
        kwargs = {}

        if configuration.extensions.ENABLED:
            if extensions.role.processcommits.execute(db, user, review, commits, None,
                                                      commitset.getHeads().pop(),
                                                      extensions_output):
                kwargs["extensions_output"] = extensions_output.getvalue().lstrip()

        if trackedbranch:
            cursor = db.cursor()
            cursor.execute("""SELECT 1
                                FROM knownremotes
                               WHERE url=%s
                                 AND pushing""",
                           (trackedbranch["remote"],))

            # Remotes that push to us don't need to be polled often; others
            # are polled hourly.
            if cursor.fetchone():
                delay = "1 week"
            else:
                delay = "1 hour"

            cursor.execute("""INSERT INTO trackedbranches (repository, local_name,
                                                           remote, remote_name,
                                                           forced, delay)
                              VALUES (%s, %s, %s, %s, false, INTERVAL %s)
                           RETURNING id""",
                           (repository.id, branch, trackedbranch["remote"],
                            trackedbranch["name"], delay))

            trackedbranch_id = cursor.fetchone()[0]
            kwargs["trackedbranch_id"] = trackedbranch_id

            cursor.execute("""INSERT INTO trackedbranchusers (branch, uid)
                              VALUES (%s, %s)""",
                           (trackedbranch_id, user.id))

            db.commit()

            # Wake the branch tracker service so it picks up the new tracked
            # branch immediately.  Fix: close the pid file deterministically
            # instead of leaking the handle until GC.
            with open(configuration.services.BRANCHTRACKER["pidfile_path"]) as pidfile:
                pid = int(pidfile.read().strip())
            os.kill(pid, signal.SIGHUP)

        return OperationResult(review_id=review.id, **kwargs)
class FetchRemoteBranches(Operation):
    """List branches (ref name => sha1) in a remote repository, optionally
    filtered by a glob-like pattern ('*' matches anything)."""

    def __init__(self):
        Operation.__init__(self, { "remote": str,
                                   "pattern": Optional(str) },
                           accept_anonymous_user=True)

    def process(self, db, user, remote, pattern=None):
        if pattern:
            # NOTE(review): only '*' is translated; other regex
            # meta-characters in |pattern| are passed through verbatim.
            regexp = re.compile(pattern.replace("*", ".*"))
        else:
            regexp = None
        try:
            refs = gitutils.Repository.lsremote(remote, regexp=regexp)
        except gitutils.GitCommandError as error:
            if error.output.splitlines()[0].endswith("does not appear to be a git repository"):
                raise OperationFailure(
                    code="invalidremote",
                    title="Invalid remote!",
                    message=("<code>%s</code> does not appear to be a valid Git repository."
                             % htmlutils.htmlify(remote)),
                    is_html=True)
            else:
                raise
        else:
            # lsremote() yields (sha1, refname) pairs; return refname => sha1.
            branches = dict([(ref[1], ref[0]) for ref in refs])
            return OperationResult(branches=branches)

class FetchRemoteBranch(Operation):
    """Fetch a branch from a remote repository into a local repository.

    The branch head (and the resolved upstream commit) are kept alive in the
    local repository, and the ids of the commits that are on the branch but
    not reachable from the upstream are returned along with both sha1s.
    """

    def __init__(self):
        Operation.__init__(self, { "repository": Repository,
                                   "remote": str,
                                   "branch": str,
                                   "upstream": Optional(str) },
                           accept_anonymous_user=True)

    def process(self, db, user, repository, remote, branch,
                upstream="refs/heads/master"):
        # Raises auth.AccessDenied if access should not be allowed.
        repository.checkAccess(db, "modify")

        cursor = db.cursor()

        # Check if only other repositories are currently tracking branches from
        # this remote.  If that's the case, then the user most likely either
        # selected the wrong repository or entered the wrong remote.
        cursor.execute("""SELECT repositories.name
                            FROM repositories
                            JOIN trackedbranches ON (trackedbranches.repository=repositories.id)
                           WHERE trackedbranches.remote=%s""",
                       (remote,))

        repository_names = set(repository_name for repository_name, in cursor)

        if repository_names and repository.name not in repository_names:
            raise OperationFailure(
                code="badremote",
                title="Bad remote!",
                message=("The remote <code>%s</code> appears to be related to "
                         "%s on this server (<code>%s</code>). "
                         "You most likely shouldn't be importing branches from "
                         "it into the selected repository (<code>%s</code>)."
                         % (htmlutils.htmlify(remote),
                            ("another repository"
                             if len(repository_names) == 1 else
                             "other repositories"),
                            htmlutils.htmlify(", ".join(sorted(repository_names))),
                            htmlutils.htmlify(repository.name))),
                is_html=True)

        # Allow short branch names; qualify them as refs/heads/<name>.
        if not branch.startswith("refs/"):
            branch = "refs/heads/%s" % branch

        try:
            # Fetch the branch into a temporary ref, then keep the head
            # commit alive so it survives garbage collection.
            with repository.fetchTemporaryFromRemote(db, remote, branch) as sha1:
                head_sha1 = repository.keepalive(sha1)
        except gitutils.GitReferenceError as error:
            # error.repository distinguishes "ref missing in that repository"
            # from "ref name itself invalid".
            if error.repository:
                raise OperationFailure(
                    code="refnotfound",
                    title="Remote ref not found!",
                    message=("Could not find the ref <code>%s</code> in the repository <code>%s</code>."
                             % (htmlutils.htmlify(error.ref),
                                htmlutils.htmlify(error.repository))),
                    is_html=True)
            else:
                raise OperationFailure(
                    code="invalidref",
                    title="Invalid ref!",
                    message=("The specified ref is invalid: <code>%s</code>."
                             % htmlutils.htmlify(error.ref)),
                    is_html=True)
        except gitutils.GitCommandError as error:
            if error.output.splitlines()[0].endswith("does not appear to be a git repository"):
                raise OperationFailure(
                    code="invalidremote",
                    title="Invalid remote!",
                    message=("<code>%s</code> does not appear to be a valid Git repository."
                             % htmlutils.htmlify(remote)),
                    is_html=True)
            else:
                raise

        # A fully qualified upstream is fetched from the remote; anything
        # else is resolved locally.
        if upstream.startswith("refs/"):
            try:
                with repository.fetchTemporaryFromRemote(db, remote, upstream) as sha1:
                    upstream_sha1 = repository.keepalive(sha1)
            except gitutils.GitReferenceError:
                raise OperationFailure(
                    code="refnotfound",
                    title="Remote ref not found!",
                    message=("Could not find the ref <code>%s</code> in the repository <code>%s</code>."
                             % (htmlutils.htmlify(upstream),
                                htmlutils.htmlify(remote))),
                    is_html=True)
        else:
            try:
                upstream_sha1 = repository.revparse(upstream)
            except gitutils.GitReferenceError:
                raise OperationFailure(
                    code="refnotfound",
                    title="Local ref not found!",
                    message=("Could not find the ref <code>%s</code> in the repository <code>%s</code>."
                             % (htmlutils.htmlify(upstream),
                                htmlutils.htmlify(str(repository)))),
                    is_html=True)

        # The upstream may be an (annotated) tag; resolve it to the commit it
        # points at.
        try:
            resolved_upstream_sha1 = gitutils.getTaggedCommit(repository, upstream_sha1)
        except gitutils.GitReferenceError:
            resolved_upstream_sha1 = None

        if not resolved_upstream_sha1:
            raise OperationFailure(
                code="missingcommit",
                title="Upstream commit is missing!",
                message=("<p>Could not find the commit <code>%s</code> in the "
                         "repository <code>%s</code>.</p>"
                         "<p>Since it would have been fetched along with the "
                         "branch if it actually was a valid upstream commit, "
                         "this means it's not valid.</p>"
                         % (htmlutils.htmlify(upstream_sha1),
                            htmlutils.htmlify(str(repository)))),
                is_html=True)

        commit_sha1s = repository.revlist(included=[head_sha1],
                                          excluded=[resolved_upstream_sha1])

        if not commit_sha1s:
            raise OperationFailure(
                code="emptybranch",
                title="Branch contains no commits!",
                message=("All commits referenced by <code>%s</code> are reachable from <code>%s</code>."
                         % (htmlutils.htmlify(branch),
                            htmlutils.htmlify(upstream))),
                is_html=True)

        cursor.execute("SELECT id FROM commits WHERE sha1=ANY (%s)", (commit_sha1s,))

        return OperationResult(commit_ids=[commit_id for (commit_id,) in cursor],
                               head_sha1=head_sha1,
                               upstream_sha1=resolved_upstream_sha1)
import dbutils
import profiling

from operation import Operation, OperationResult, Optional
from reviewing.comment import CommentChain, createCommentChain, createComment
from reviewing.mail import sendPendingMails
from reviewing.utils import generateMailsForBatch

class ReviewStateChange(Operation):
    """Predict the review's state (accepted/open) after the user's current
    draft changes would be submitted, without modifying anything."""

    def __init__(self):
        Operation.__init__(self, { "review_id": int })

    def process(self, db, user, review_id):
        cursor = db.cursor()

        def unaccepted():
            # True if any draft change would make the review not accepted.

            # Raised issues.
            cursor.execute("""SELECT 1
                                FROM commentchains
                               WHERE commentchains.review=%s
                                 AND commentchains.uid=%s
                                 AND commentchains.type='issue'
                                 AND commentchains.state='draft'""",
                           (review_id, user.id))
            if cursor.fetchone():
                return True

            # Reopened issues.
            cursor.execute("""SELECT 1
                                FROM commentchainchanges
                                JOIN commentchains ON (commentchains.id=commentchainchanges.chain)
                               WHERE commentchains.review=%s
                                 AND commentchains.type='issue'
                                 AND commentchainchanges.uid=%s
                                 AND commentchainchanges.state='draft'
                                 AND commentchainchanges.from_state=commentchains.state
                                 AND commentchainchanges.to_state='open'
                                 AND commentchainchanges.to_type IS NULL""",
                           (review_id, user.id))
            if cursor.fetchone():
                return True

            # Note converted into issues.
            cursor.execute("""SELECT 1
                                FROM commentchainchanges
                                JOIN commentchains ON (commentchains.id=commentchainchanges.chain)
                               WHERE commentchains.review=%s
                                 AND commentchains.type='note'
                                 AND commentchainchanges.uid=%s
                                 AND commentchainchanges.state='draft'
                                 AND commentchainchanges.from_type=commentchains.type
                                 AND commentchainchanges.to_type='issue'""",
                           (review_id, user.id))
            if cursor.fetchone():
                return True

            # Unreviewed lines.
            cursor.execute("""SELECT 1
                                FROM reviewfilechanges
                                JOIN reviewfiles ON (reviewfiles.id=reviewfilechanges.file)
                               WHERE reviewfiles.review=%s
                                 AND reviewfilechanges.uid=%s
                                 AND reviewfilechanges.state='draft'
                                 AND reviewfilechanges.from_state=reviewfiles.state
                                 AND reviewfilechanges.to_state='pending'""",
                           (review_id, user.id))
            if cursor.fetchone():
                return True

            # Otherwise still accepted (if accepted before.)
            return False

        def stillOpen():
            # True if, after submission, something would still block
            # acceptance.
            if unaccepted():
                return True

            # Still open issues (i.e. open issues not closed/addressed or
            # converted to notes by the user's drafts.)
            cursor.execute("""SELECT 1
                                FROM commentchains
                     LEFT OUTER JOIN commentchainchanges ON (commentchainchanges.chain=commentchains.id
                                                         AND commentchainchanges.uid=%s
                                                         AND commentchainchanges.state='draft'
                                                         AND (commentchainchanges.to_state IN ('closed', 'addressed')
                                                           OR commentchainchanges.to_type='note'))
                               WHERE commentchains.review=%s
                                 AND commentchains.type='issue'
                                 AND commentchains.state='open'
                                 AND commentchainchanges.chain IS NULL""",
                           (user.id, review_id))
            if cursor.fetchone():
                return True

            # Still pending lines (i.e. pending files not marked as reviewed
            # by the user's drafts.)
            cursor.execute("""SELECT 1
                                FROM reviewfiles
                     LEFT OUTER JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id
                                                       AND reviewfilechanges.uid=%s
                                                       AND reviewfilechanges.state='draft'
                                                       AND reviewfilechanges.to_state='reviewed')
                               WHERE reviewfiles.review=%s
                                 AND reviewfiles.state='pending'
                                 AND reviewfilechanges.file IS NULL""",
                           (user.id, review_id))
            if cursor.fetchone():
                return True

            # Otherwise accepted now.
            return False

        if dbutils.Review.isAccepted(db, review_id):
            return OperationResult(current_state="accepted",
                                   new_state="open" if unaccepted() else "accepted")
        else:
            return OperationResult(current_state="open",
                                   new_state="open" if stillOpen() else "accepted")

class SubmitChanges(Operation):
    """Submit all of the user's draft changes in a review.

    Draft file approvals/disapprovals, comment chains, replies and chain
    state/type changes are promoted from 'draft' to their effective states,
    grouped into a single batch, and notification mails are generated.  The
    statement order below is significant: rejections of stale drafts must run
    before the remaining drafts are applied, and the 'performed' marking must
    run last.
    """

    def __init__(self):
        Operation.__init__(self, { "review_id": int,
                                   "remark": Optional(str) })

    def process(self, db, user, review_id, remark=None):
        cursor = db.cursor()

        profiler = profiling.Profiler()
        profiler.check("start")

        review = dbutils.Review.fromId(db, review_id)

        profiler.check("create review")

        was_accepted = review.state == "open" and review.accepted(db)

        profiler.check("accepted before")

        # An optional remark becomes a note chain attached to the batch.
        if remark and remark.strip():
            chain_id = createCommentChain(db, user, review, 'note')
            createComment(db, user, chain_id, remark, first=True)
        else:
            chain_id = None

        # Create a batch that groups all submitted changes together.
        cursor.execute("INSERT INTO batches (review, uid, comment) VALUES (%s, %s, %s) RETURNING id",
                       (review.id, user.id, chain_id))
        batch_id = cursor.fetchone()[0]

        profiler.check("batches++")

        # Reject all draft file approvals where the affected review file isn't in
        # the state it was in when the change was drafted.
        cursor.execute("""UPDATE reviewfilechanges
                             SET state='rejected',
                                 time=now()
                           WHERE uid=%s
                             AND state='draft'
                             AND file IN (SELECT id
                                            FROM reviewfiles
                                            JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id)
                                           WHERE reviewfiles.review=%s
                                             AND reviewfilechanges.uid=%s
                                             AND reviewfilechanges.state='draft'
                                             AND reviewfilechanges.from_state!=reviewfiles.state)""",
                       (user.id, review.id, user.id))

        profiler.check("reviewfilechanges reject state changes")

        # Then perform the remaining draft file approvals by updating the state of
        # the corresponding review file.
        cursor.execute("""UPDATE reviewfiles
                             SET state='reviewed',
                                 reviewer=%s,
                                 time=now()
                           WHERE review=%s
                             AND id IN (SELECT file
                                          FROM reviewfilechanges
                                         WHERE uid=%s
                                           AND state='draft'
                                           AND to_state='reviewed')""",
                       (user.id, review.id, user.id))

        profiler.check("reviewfiles pending=>reviewed")

        # Then perform the remaining draft file disapprovals by updating the state
        # of the corresponding review file.
        cursor.execute("""UPDATE reviewfiles
                             SET state='pending',
                                 reviewer=NULL,
                                 time=now()
                           WHERE review=%s
                             AND id IN (SELECT file
                                          FROM reviewfilechanges
                                         WHERE uid=%s
                                           AND state='draft'
                                           AND to_state='pending')""",
                       (review.id, user.id))

        profiler.check("reviewfiles reviewed=>pending")

        # Finally change the state of just performed approvals from draft to
        # 'performed'.
        cursor.execute("""UPDATE reviewfilechanges
                             SET batch=%s,
                                 state='performed',
                                 time=now()
                           WHERE uid=%s
                             AND state='draft'
                             AND file IN (SELECT id
                                            FROM reviewfiles
                                           WHERE reviewfiles.review=%s)""",
                       (batch_id, user.id, review.id))

        profiler.check("reviewfilechanges draft=>performed")

        # Find all chains with draft comments being submitted that the current user
        # isn't associated with via the commentchainusers table, and associate the
        # user with them.
        cursor.execute("""SELECT DISTINCT commentchains.id, commentchainusers.uid IS NULL
                            FROM commentchains
                            JOIN comments ON (comments.chain=commentchains.id)
                 LEFT OUTER JOIN commentchainusers ON (commentchainusers.chain=commentchains.id
                                                   AND commentchainusers.uid=comments.uid)
                           WHERE commentchains.review=%s
                             AND comments.uid=%s
                             AND comments.state='draft'""",
                       (review.id, user.id))

        for chain_id, need_associate in cursor.fetchall():
            if need_associate:
                cursor.execute("INSERT INTO commentchainusers (chain, uid) VALUES (%s, %s)",
                               (chain_id, user.id))

        profiler.check("commentchainusers++")

        # Find all chains with draft comments being submitted and add a record for
        # every user associated with the chain to read the comment.
        cursor.execute("""INSERT INTO commentstoread (uid, comment)
                               SELECT commentchainusers.uid, comments.id
                                 FROM commentchains, commentchainusers, comments
                                WHERE commentchains.review=%s
                                  AND commentchainusers.chain=commentchains.id
                                  AND commentchainusers.uid!=comments.uid
                                  AND comments.chain=commentchains.id
                                  AND comments.uid=%s
                                  AND comments.state='draft'""",
                       (review.id, user.id))

        profiler.check("commentstoread++")

        # Associate all users associated with a draft comment chain to
        # the review (if they weren't already.)
        cursor.execute("""SELECT DISTINCT commentchainusers.uid
                            FROM commentchains
                            JOIN commentchainusers ON (commentchainusers.chain=commentchains.id)
                 LEFT OUTER JOIN reviewusers ON (reviewusers.review=commentchains.review
                                             AND reviewusers.uid=commentchainusers.uid)
                           WHERE commentchains.review=%s
                             AND commentchains.uid=%s
                             AND commentchains.state='draft'
                             AND reviewusers.uid IS NULL""",
                       (review.id, user.id))

        for (user_id,) in cursor.fetchall():
            cursor.execute("INSERT INTO reviewusers (review, uid) VALUES (%s, %s)",
                           (review.id, user_id))

        # Change state on all draft commentchains by the user in the review to 'open'.
        cursor.execute("""UPDATE commentchains
                             SET batch=%s,
                                 state='open',
                                 time=now()
                           WHERE commentchains.review=%s
                             AND commentchains.uid=%s
                             AND commentchains.state='draft'""",
                       (batch_id, review.id, user.id))

        profiler.check("commentchains draft=>open")

        # Reject all draft comment chain changes where the affected comment
        # chain isn't in the state it was in when the change was drafted, or has
        # been morphed into a note since the change was drafted.
        cursor.execute("""UPDATE commentchainchanges
                             SET state='rejected',
                                 time=now()
                           WHERE uid=%s
                             AND state='draft'
                             AND from_state IS NOT NULL
                             AND chain IN (SELECT id
                                             FROM commentchains
                                             JOIN commentchainchanges ON (commentchainchanges.chain=commentchains.id
                                                                      AND (commentchainchanges.from_state!=commentchains.state
                                                                        OR commentchainchanges.from_last_commit!=commentchains.last_commit
                                                                        OR commentchains.type!='issue'))
                                            WHERE commentchains.review=%s
                                              AND commentchainchanges.uid=%s
                                              AND commentchainchanges.state='draft')""",
                       (user.id, review.id, user.id))

        profiler.check("commentchainchanges reject state changes")

        # Reject all draft comment chain changes where the affected comment chain
        # type isn't what it was in when the change was drafted.
        cursor.execute("""UPDATE commentchainchanges
                             SET state='rejected',
                                 time=now()
                           WHERE uid=%s
                             AND state='draft'
                             AND from_type IS NOT NULL
                             AND chain IN (SELECT id
                                             FROM commentchains
                                             JOIN commentchainchanges ON (commentchainchanges.chain=commentchains.id
                                                                      AND commentchainchanges.from_type!=commentchains.type)
                                            WHERE commentchains.review=%s
                                              AND commentchainchanges.uid=%s
                                              AND commentchainchanges.state='draft')""",
                       (user.id, review.id, user.id))

        profiler.check("commentchainchanges reject type changes")

        # Reject all draft comment chain changes where the affected comment chain
        # addressed_by isn't what it was in when the change was drafted.
        cursor.execute("""UPDATE commentchainchanges
                             SET state='rejected',
                                 time=now()
                           WHERE uid=%s
                             AND state='draft'
                             AND from_addressed_by IS NOT NULL
                             AND chain IN (SELECT id
                                             FROM commentchains
                                             JOIN commentchainchanges ON (commentchainchanges.chain=commentchains.id
                                                                      AND commentchainchanges.from_addressed_by!=commentchains.addressed_by)
                                            WHERE commentchains.review=%s
                                              AND commentchainchanges.uid=%s
                                              AND commentchainchanges.state='draft')""",
                       (user.id, review.id, user.id))

        profiler.check("commentchainchanges reject addressed_by changes")

        # Then perform the remaining draft comment chain changes by updating the
        # state of the corresponding comment chain.

        # Perform open->closed changes, including setting 'closed_by'.
        cursor.execute("""UPDATE commentchains
                             SET state='closed',
                                 closed_by=%s
                           WHERE review=%s
                             AND id IN (SELECT chain
                                          FROM commentchainchanges
                                         WHERE uid=%s
                                           AND state='draft'
                                           AND to_state='closed')""",
                       (user.id, review.id, user.id))

        profiler.check("commentchains closed")

        # Perform (closed|addressed)->open changes, including resetting 'closed_by' and
        # 'addressed_by' to NULL.
        cursor.execute("""SELECT commentchainchanges.to_last_commit, commentchains.id
                            FROM commentchains
                            JOIN commentchainchanges ON (commentchainchanges.chain=commentchains.id)
                           WHERE commentchains.review=%s
                             AND commentchainchanges.uid=%s
                             AND commentchainchanges.state='draft'
                             AND commentchainchanges.to_state='open'""",
                       (review.id, user.id))
        cursor.executemany("""UPDATE commentchains
                                 SET state='open',
                                     last_commit=%s,
                                     closed_by=NULL,
                                     addressed_by=NULL
                               WHERE id=%s""",
                           cursor.fetchall())

        profiler.check("commentchains reopen")

        # Perform addressed->addressed changes, i.e. updating 'addressed_by'.
        cursor.execute("""SELECT commentchainchanges.to_addressed_by, commentchains.id
                            FROM commentchains
                            JOIN commentchainchanges ON (commentchainchanges.chain=commentchains.id)
                           WHERE commentchains.review=%s
                             AND commentchainchanges.uid=%s
                             AND commentchainchanges.state='draft'
                             AND commentchainchanges.to_addressed_by IS NOT NULL""",
                       (review.id, user.id))
        cursor.executemany("""UPDATE commentchains
                                 SET addressed_by=%s
                               WHERE id=%s""",
                           cursor.fetchall())

        profiler.check("commentchains reopen (partial)")

        # Perform type changes.
        cursor.execute("""SELECT commentchainchanges.to_type, commentchains.id
                            FROM commentchains
                            JOIN commentchainchanges ON (commentchainchanges.chain=commentchains.id)
                           WHERE commentchains.review=%s
                             AND commentchainchanges.uid=%s
                             AND commentchainchanges.state='draft'
                             AND commentchainchanges.to_type IS NOT NULL""",
                       (review.id, user.id))
        cursor.executemany("""UPDATE commentchains
                                 SET type=%s
                               WHERE id=%s""",
                           cursor.fetchall())

        profiler.check("commentchains type change")

        # Finally change the state of just performed changes from draft to
        # 'performed'.
        cursor.execute("""UPDATE commentchainchanges
                             SET batch=%s,
                                 state='performed',
                                 time=now()
                           WHERE uid=%s
                             AND state='draft'
                             AND chain IN (SELECT id
                                             FROM commentchains
                                            WHERE review=%s)""",
                       (batch_id, user.id, review.id))

        profiler.check("commentchainchanges draft=>performed")

        # Change state on all draft commentchainlines by the user in the review to 'current'.
        cursor.execute("""UPDATE commentchainlines
                             SET state='current',
                                 time=now()
                           WHERE uid=%s
                             AND state='draft'
                             AND chain IN (SELECT id
                                             FROM commentchains
                                            WHERE review=%s)""",
                       (user.id, review.id))

        profiler.check("commentchainlines draft=>current")

        # Change state on all draft comments by the user in the review to 'current'.
        cursor.execute("""UPDATE comments
                             SET batch=%s,
                                 state='current',
                                 time=now()
                           WHERE comments.uid=%s
                             AND comments.state='draft'
                             AND chain IN (SELECT id
                                             FROM commentchains
                                            WHERE review=%s)""",
                       (batch_id, user.id, review.id))

        profiler.check("comments draft=>current")

        # Associate the submitting user with the review if he isn't already.
        cursor.execute("SELECT 1 FROM reviewusers WHERE review=%s AND uid=%s",
                       (review.id, user.id))
        if not cursor.fetchone():
            cursor.execute("INSERT INTO reviewusers (review, uid) VALUES (%s, %s)",
                           (review.id, user.id))

        generate_emails = profiler.start("generate emails")

        is_accepted = review.state == "open" and review.accepted(db)
        pending_mails = generateMailsForBatch(db, batch_id, was_accepted, is_accepted,
                                              profiler=profiler)

        generate_emails.stop()

        review.incrementSerial(db)
        db.commit()

        profiler.check("commit transaction")

        # Mails are only sent after the transaction is safely committed.
        sendPendingMails(pending_mails)

        profiler.check("finished")

        if user.getPreference(db, "debug.profiling.submitChanges"):
            return OperationResult(batch_id=batch_id, serial=review.serial,
                                   profiling=profiler.output())
        else:
            return OperationResult(batch_id=batch_id, serial=review.serial)

class AbortChanges(Operation):
    """Discard (delete) selected categories of the user's draft changes in a
    review: file approvals, comment chains/replies, and/or state changes on
    existing chains ("metacomments")."""

    def __init__(self):
        Operation.__init__(self, { "review_id": int,
                                   "what": { "approval": bool,
                                             "comments": bool,
                                             "metacomments": bool }})

    def process(self, db, user, review_id, what):
        cursor = db.cursor()

        profiler = profiling.Profiler()

        if what["approval"]:
            # Delete all pending review file approvals.
            cursor.execute("""DELETE FROM reviewfilechanges
                               WHERE uid=%s
                                 AND state='draft'
                                 AND file IN (SELECT id
                                                FROM reviewfiles
                                               WHERE review=%s)""",
                           (user.id, review_id))

            profiler.check("approval")

        if what["comments"]:
            # Delete all pending comments chains.  This will, via ON DELETE CASCADE,
            # also delete all related comments and commentchainlines rows.
            cursor.execute("""DELETE FROM commentchains
                               WHERE review=%s
                                 AND uid=%s
                                 AND state='draft'""",
                           (review_id, user.id))

            profiler.check("chains")

            # Delete all still existing draft comments.
            cursor.execute("""DELETE FROM comments
                               WHERE uid=%s
                                 AND state='draft'
                                 AND chain IN (SELECT id
                                                 FROM commentchains
                                                WHERE review=%s)""",
                           (user.id, review_id))

            profiler.check("replies")

        if what["metacomments"]:
            # Delete all still existing draft comment lines.
            cursor.execute("""DELETE FROM commentchainlines
                               WHERE uid=%s
                                 AND state='draft'
                                 AND chain IN (SELECT id
                                                 FROM commentchains
                                                WHERE review=%s
                                                  AND state!='draft')""",
                           (user.id, review_id))

            profiler.check("comment lines")

            # Delete all still existing draft comment state changes.
            cursor.execute("""DELETE FROM commentchainchanges
                               WHERE uid=%s
                                 AND state='draft'
                                 AND chain IN (SELECT id
                                                 FROM commentchains
                                                WHERE review=%s)""",
                           (user.id, review_id))

            profiler.check("comment state")

        db.commit()

        if user.getPreference(db, "debug.profiling.abortChanges"):
            return OperationResult(profiling=profiler.output())
        else:
            return OperationResult()
import dbutils
import gitutils

from operation import Operation, OperationResult, OperationError, Optional

class StoreResource(Operation):
    """Store new revision of user-edited resource."""

    def __init__(self):
        Operation.__init__(self, { "name": str,
                                   "source": str })

    def process(self, db, user, name, source):
        # Append a new row with revision = MAX(revision) + 1; earlier
        # revisions are kept, so edits form an append-only history.
        cursor = db.cursor()
        cursor.execute("SELECT MAX(revision) FROM userresources WHERE uid=%s AND name=%s",
                       (user.id, name))
        # MAX() is NULL when no revisions exist yet; treat that as revision 0.
        current_revision = cursor.fetchone()[0] or 0
        next_revision = current_revision + 1
        cursor.execute("INSERT INTO userresources (uid, name, revision, source) VALUES (%s, %s, %s, %s)",
                       (user.id, name, next_revision, source))
        db.commit()
        return OperationResult()

class ResetResource(Operation):
    """Reset user-edited resource back to its original."""

    def __init__(self):
        Operation.__init__(self, { "name": str })

    def process(self, db, user, name):
        cursor = db.cursor()
        cursor.execute("SELECT MAX(revision) FROM userresources WHERE uid=%s AND name=%s",
                       (user.id, name))
        current_revision = cursor.fetchone()[0] or 0
        if current_revision > 0:
            cursor.execute("SELECT source FROM userresources WHERE uid=%s AND name=%s AND revision=%s",
                           (user.id, name, current_revision))
            # Only add a reset marker if the latest revision isn't already one.
            # A "reset" is recorded as a revision whose source is NULL.
            if cursor.fetchone()[0] is not None:
                next_revision = current_revision + 1
                cursor.execute("INSERT INTO userresources (uid, name, revision) VALUES (%s, %s, %s)",
                               (user.id, name, next_revision))
                db.commit()
        return OperationResult()

class RestoreResource(Operation):
    """Restore last user-edited revision of resource after it's been reset."""

    def __init__(self):
        Operation.__init__(self, { "name": str })

    def process(self, db, user, name):
        cursor = db.cursor()
        cursor.execute("SELECT MAX(revision) FROM userresources WHERE uid=%s AND name=%s",
                       (user.id, name))
        current_revision = cursor.fetchone()[0] or 0
        # Need at least two revisions: something edited, then a reset marker.
        if current_revision > 1:
            cursor.execute("SELECT source FROM userresources WHERE uid=%s AND name=%s AND revision=%s",
                           (user.id, name, current_revision))
            # Restoring simply deletes the NULL-source reset marker, which
            # makes the previous (edited) revision current again.
            if cursor.fetchone()[0] is None:
                cursor.execute("DELETE FROM userresources WHERE uid=%s AND name=%s AND revision=%s",
                               (user.id, name, current_revision))
                db.commit()
        return OperationResult()

================================================
FILE: src/operation/extensioninstallation.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import dbutils
import gitutils
import reviewing.filters

from operation import (Operation, OperationResult, OperationFailure,
                       OperationError, Optional, typechecker)
from extensions.installation import (installExtension, uninstallExtension,
                                     reinstallExtension, InstallationError,
                                     getExtension)
from extensions.extension import Extension, ExtensionError
from extensions.manifest import FilterHookRole

class ExtensionOperation(Operation):
    """Shared driver for install/uninstall/reinstall operations.

    The concrete action is supplied as the 'perform' callable by the
    subclasses below.
    """

    def __init__(self, perform):
        Operation.__init__(self, { "extension_name": str,
                                   "author_name": Optional(str),
                                   "version": Optional(str),
                                   "universal": Optional(bool) })
        self.perform = perform

    def process(self, db, user, extension_name, author_name=None, version=None,
                universal=False):
        if universal:
            # A "universal" (all users) operation is administrator-only, and
            # is represented by passing user=None to the perform callable.
            if not user.hasRole(db, "administrator"):
                raise OperationFailure(code="notallowed",
                                       title="Not allowed!",
                                       message="Operation not permitted.")
            user = None

        if version is not None:
            if version == "live":
                # "live" means the current (unversioned) extension source.
                version = None
            elif version.startswith("version/"):
                # Strip the "version/" prefix to get the plain version name.
                version = version[8:]
            else:
                raise OperationError(
                    "invalid version, got '%s', expected 'live' or 'version/*'" % version)

        try:
            self.perform(db, user, author_name, extension_name, version)
        except InstallationError as error:
            raise OperationFailure(code="installationerror",
                                   title=error.title,
                                   message=error.message,
                                   is_html=error.is_html)

        return OperationResult()

class InstallExtension(ExtensionOperation):
    """Install an extension (for one user or universally)."""

    def __init__(self):
        ExtensionOperation.__init__(self, installExtension)

class UninstallExtension(ExtensionOperation):
    """Uninstall an extension."""

    def __init__(self):
        ExtensionOperation.__init__(self, uninstallExtension)

class ReinstallExtension(ExtensionOperation):
    """Reinstall (e.g. switch version of) an extension."""

    def __init__(self):
        ExtensionOperation.__init__(self, reinstallExtension)

class ClearExtensionStorage(Operation):
    """Delete the calling user's key/value storage for an extension."""

    def __init__(self):
        Operation.__init__(self, { "extension_name": str,
                                   "author_name": Optional(str) })

    def process(self, db, user, extension_name, author_name=None):
        extension = getExtension(author_name, extension_name)
        # create=False: if the extension has no id yet it can have no
        # storage either, so there is nothing to delete.
        extension_id = extension.getExtensionID(db, create=False)

        if extension_id is not None:
            cursor = db.cursor()
            cursor.execute("""DELETE FROM extensionstorage
                                    WHERE extension=%s
                                      AND uid=%s""",
                           (extension_id, user.id))
            db.commit()

        return OperationResult()

class AddExtensionHookFilter(Operation):
    """Create (or replace) a filter hooked to an extension's filter hook role."""

    def __init__(self):
        Operation.__init__(self, { "subject": typechecker.User,
                                   "extension": typechecker.Extension,
                                   "repository": typechecker.Repository,
                                   "filterhook_name": str,
                                   "path": str,
                                   "data": Optional(str),
                                   "replaced_filter_id": Optional(int) })

    def process(self, db, user, subject, extension, repository,
                filterhook_name, path, data=None, replaced_filter_id=None):
        # Only administrators may manipulate other users' filters.
        if user != subject:
            Operation.requireRole(db, "administrator", user)

        path = reviewing.filters.sanitizePath(path)

        if "*" in path:
            try:
                reviewing.filters.validatePattern(path)
            except reviewing.filters.PatternError as error:
                raise OperationFailure(
                    code="invalidpattern",
                    title="Invalid path pattern",
                    message="There are invalid wild-cards in the path: %s" % error.message)

        # NOTE(review): getInstalledVersion() appears to return False (not
        # None) when the extension isn't installed — confirm against its
        # definition before changing this comparison.
        installed_sha1, _ = extension.getInstalledVersion(db, subject)

        if installed_sha1 is False:
            raise OperationFailure(
                code="invalidrequest",
                title="Invalid request",
                message=("The extension \"%s\" must be installed first!"
                         % extension.getTitle(db)))

        manifest = extension.getManifest(sha1=installed_sha1)

        # for/else: 'else' runs only if no matching role broke out of the loop.
        for role in manifest.roles:
            if isinstance(role, FilterHookRole) and role.name == filterhook_name:
                break
        else:
            raise OperationFailure(
                code="invalidrequest",
                title="Invalid request",
                message=("The extension doesn't have a filter hook role named %r!"
                         % filterhook_name))

        cursor = db.cursor()

        if replaced_filter_id is not None:
            # Verify the replaced filter exists and belongs to the subject
            # before deleting it.
            cursor.execute("""SELECT 1
                                FROM extensionhookfilters
                               WHERE id=%s
                                 AND uid=%s""",
                           (replaced_filter_id, subject.id))
            if not cursor.fetchone():
                raise OperationFailure(
                    code="invalidoperation",
                    title="Invalid operation",
                    message="Filter to replace does not exist or belongs to another user!")
            cursor.execute("""DELETE FROM extensionhookfilters
                                    WHERE id=%s""",
                           (replaced_filter_id,))

        cursor.execute("""INSERT INTO extensionhookfilters (uid, extension, repository, name, path, data)
                               VALUES (%s, %s, %s, %s, %s, %s)
                            RETURNING id""",
                       (subject.id, extension.getExtensionID(db), repository.id,
                        filterhook_name, path, data))

        filter_id, = cursor.fetchone()

        db.commit()

        return OperationResult(filter_id=filter_id)

class DeleteExtensionHookFilter(Operation):
    """Delete one extension hook filter owned by 'subject'."""

    def __init__(self):
        Operation.__init__(self, { "subject": typechecker.User,
                                   "filter_id": int })

    def process(self, db, user, subject, filter_id):
        # Only administrators may manipulate other users' filters.
        if user != subject:
            Operation.requireRole(db, "administrator", user)

        cursor = db.cursor()
        cursor.execute("""SELECT 1
                            FROM extensionhookfilters
                           WHERE id=%s
                             AND uid=%s""",
                       (filter_id, subject.id))

        if not cursor.fetchone():
            raise OperationFailure(
                code="invalidoperation",
                title="Invalid operation",
                message="Filter to delete does not exist or belongs to another user!")

        cursor.execute("""DELETE FROM extensionhookfilters
                                WHERE id=%s""",
                       (filter_id,))

        db.commit()

        return OperationResult()

================================================
FILE: src/operation/fetchlines.py
================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import gitutils import htmlutils import diff from operation import Operation, OperationResult class FetchLines(Operation): def __init__(self): Operation.__init__(self, { "repository_id": int, "path": str, "sha1": str, "ranges": [{ "offset": int, "count": int, "context": bool }], "tabify": bool }, accept_anonymous_user=True) def process(self, db, user, repository_id, path, sha1, ranges, tabify): repository = gitutils.Repository.fromId(db, repository_id) cursor = db.cursor() def getContext(offset): cursor.execute("""SELECT context FROM codecontexts WHERE sha1=%s AND %s BETWEEN first_line AND last_line ORDER BY first_line DESC LIMIT 1""", (sha1, offset)) row = cursor.fetchone() if row: return row[0] else: return None file = diff.File(repository=repository, path=path, new_sha1=sha1) file.loadNewLines(highlighted=True, request_highlight=True) if tabify: tabwidth = file.getTabWidth() indenttabsmode = file.getIndentTabsMode() def processRange(offset, count, context): if context: context = getContext(offset) else: context = None # Offset is a 1-based line number. start = offset - 1 # If count is -1, fetch all lines. 
end = start + count if count > -1 else None lines = file.newLines(highlighted=True)[start:end] if tabify: lines = [htmlutils.tabify(line, tabwidth, indenttabsmode) for line in lines] return { "lines": lines, "context": context } return OperationResult(ranges=[processRange(**line_range) for line_range in ranges]) ================================================ FILE: src/operation/manipulateassignments.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import itertools

import dbutils
import mailutils
import reviewing.utils

from operation import Operation, OperationResult

class GetAssignedChanges(Operation):
    """Return the ids of files assigned to a given reviewer in a review."""

    def __init__(self):
        Operation.__init__(self, { "review_id": int,
                                   "user_name": str })

    def process(self, db, user, review_id, user_name):
        reviewer = dbutils.User.fromName(db, user_name)

        cursor = db.cursor()
        cursor.execute("SELECT file FROM fullreviewuserfiles WHERE review=%s AND assignee=%s",
                       (review_id, reviewer.id))

        return OperationResult(files=[file_id for (file_id,) in cursor])

class SetAssignedChanges(Operation):
    """Replace a reviewer's set of assigned files in a review.

    Records the delta (assignments added/removed) in a review assignments
    transaction so that notification mails can be generated from it.
    """

    def __init__(self):
        Operation.__init__(self, { "review_id": int,
                                   "user_name": str,
                                   "files": [int] })

    def process(self, db, user, review_id, user_name, files):
        reviewer = dbutils.User.fromName(db, user_name)
        new_file_ids = set(files)

        cursor = db.cursor()
        cursor.execute("SELECT 1 FROM reviewusers WHERE review=%s AND uid=%s",
                       (review_id, reviewer.id))

        if not cursor.fetchone():
            # The reviewer wasn't associated with the review at all yet, so
            # there can be no current assignments either.
            cursor.execute("INSERT INTO reviewusers (review, uid) VALUES (%s, %s)",
                           (review_id, reviewer.id))
            current_file_ids = set()
        else:
            cursor.execute("SELECT file FROM fullreviewuserfiles WHERE review=%s AND assignee=%s",
                           (review_id, reviewer.id))
            current_file_ids = set(file_id for (file_id,) in cursor)

        # Files to unassign, and (after this) files to newly assign.
        delete_file_ids = current_file_ids - new_file_ids
        new_file_ids -= current_file_ids

        if delete_file_ids or new_file_ids:
            # One transaction row groups all assignment changes made here.
            cursor.execute("INSERT INTO reviewassignmentstransactions (review, assigner) VALUES (%s, %s) RETURNING id",
                           (review_id, user.id))
            transaction_id = cursor.fetchone()[0]

            if delete_file_ids:
                # Log the removals (assigned=false) before actually deleting
                # the reviewuserfiles rows the log rows are derived from.
                cursor.executemany("""INSERT INTO reviewassignmentchanges (transaction, file, uid, assigned)
                                      SELECT %s, reviewfiles.id, reviewuserfiles.uid, false
                                        FROM reviewfiles
                                        JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                                       WHERE reviewfiles.review=%s
                                         AND reviewfiles.file=%s
                                         AND reviewuserfiles.uid=%s""",
                                   itertools.izip(itertools.repeat(transaction_id),
                                                  itertools.repeat(review_id),
                                                  delete_file_ids,
                                                  itertools.repeat(reviewer.id)))
                cursor.executemany("""DELETE FROM reviewuserfiles
                                       WHERE file IN (SELECT id
                                                        FROM reviewfiles
                                                       WHERE review=%s
                                                         AND file=%s)
                                         AND uid=%s""",
                                   itertools.izip(itertools.repeat(review_id),
                                                  delete_file_ids,
                                                  itertools.repeat(reviewer.id)))

            if new_file_ids:
                cursor.executemany("""INSERT INTO reviewuserfiles (file, uid)
                                      SELECT reviewfiles.id, %s
                                        FROM reviewfiles
                                       WHERE reviewfiles.review=%s
                                         AND reviewfiles.file=%s""",
                                   itertools.izip(itertools.repeat(reviewer.id),
                                                  itertools.repeat(review_id),
                                                  new_file_ids))
                cursor.executemany("""INSERT INTO reviewassignmentchanges (transaction, file, uid, assigned)
                                      SELECT %s, reviewfiles.id, %s, true
                                        FROM reviewfiles
                                       WHERE reviewfiles.review=%s
                                         AND reviewfiles.file=%s""",
                                   itertools.izip(itertools.repeat(transaction_id),
                                                  itertools.repeat(reviewer.id),
                                                  itertools.repeat(review_id),
                                                  new_file_ids))

        if delete_file_ids or new_file_ids:
            # Bump the review serial so clients notice the change.
            cursor.execute("UPDATE reviews SET serial=serial+1 WHERE id=%s",
                           (review_id,))

            pending_mails = reviewing.utils.generateMailsForAssignmentsTransaction(db, transaction_id)

            db.commit()

            # Send mails only after the transaction committed successfully.
            mailutils.sendPendingMails(pending_mails)

        return OperationResult()

================================================
FILE: src/operation/manipulatecomment.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import dbutils
import gitutils

from operation import Operation, OperationResult, OperationError, OperationFailure, Optional
from reviewing.comment import Comment, CommentChain, propagate

class SetCommentChainState(Operation):
    """Base class for operations that change a comment chain's state.

    Subclasses declare their parameters and call setChainState() with the
    expected old state and the desired new state.
    """

    def __init__(self, parameters):
        Operation.__init__(self, parameters)

    def setChainState(self, db, user, chain, old_state, new_state,
                      new_last_commit=None):
        review = chain.review

        # Refuse the transition unless the chain is in the expected state.
        if chain.state != old_state:
            raise OperationFailure(code="invalidoperation",
                                   title="Invalid operation",
                                   message="The comment chain's state is not '%s'; can't change state to '%s'." % (old_state, new_state))
        elif new_state == "open" and review.state != "open":
            raise OperationFailure(code="invalidoperation",
                                   title="Invalid operation",
                                   message="Can't reopen comment chain in %s review!" % review.state)

        if chain.last_commit:
            old_last_commit = chain.last_commit.id
            if new_last_commit is None:
                new_last_commit = old_last_commit
        else:
            old_last_commit = new_last_commit = None

        cursor = db.cursor()

        if chain.state_is_draft:
            # The user is reverting a draft state change; just undo the draft
            # change.
            cursor.execute("""DELETE FROM commentchainchanges
                                    WHERE chain=%s
                                      AND uid=%s
                                      AND to_state IS NOT NULL""",
                           (chain.id, user.id))
        else:
            # Otherwise insert a new row into the commentchainchanges table.
            cursor.execute("""INSERT INTO commentchainchanges (uid, chain, from_state, to_state, from_last_commit, to_last_commit)
                                   VALUES (%s, %s, %s, %s, %s, %s)""",
                           (user.id, chain.id, old_state, new_state,
                            old_last_commit, new_last_commit))

        db.commit()

        return OperationResult(old_state=old_state, new_state=new_state,
                               draft_status=review.getDraftStatus(db, user))

class ReopenResolvedCommentChain(SetCommentChainState):
    """Reopen a comment chain that was resolved (closed)."""

    def __init__(self):
        SetCommentChainState.__init__(self, { "chain_id": int })

    def process(self, db, user, chain_id):
        return self.setChainState(db, user, CommentChain.fromId(db, chain_id, user),
                                  "closed", "open")

class ReopenAddressedCommentChain(SetCommentChainState):
    """Reopen a comment chain that was addressed by a later commit.

    The chain is re-anchored at the given file lines; if those lines aren't
    already known for the chain, they are propagated through the review's
    commits first.
    """

    def __init__(self):
        SetCommentChainState.__init__(self, { "chain_id": int,
                                              "commit_id": int,
                                              "sha1": str,
                                              "offset": int,
                                              "count": int })

    def process(self, db, user, chain_id, commit_id, sha1, offset, count):
        chain = CommentChain.fromId(db, chain_id, user)
        existing = chain.lines_by_sha1.get(sha1)

        if chain.state != "addressed":
            raise OperationFailure(code="invalidoperation",
                                   title="Invalid operation",
                                   message="The comment chain is not marked as addressed!")

        if not existing:
            assert commit_id == chain.addressed_by.getId(db)

            commits = chain.review.getCommitSet(db).without(chain.addressed_by.parents)

            # Propagate the chain's lines forward through the remaining
            # commits, starting from the commit that addressed it.
            propagation = propagate.Propagation(db)
            propagation.setExisting(chain.review, chain.id, chain.addressed_by,
                                    chain.file_id, offset, offset + count - 1, True)
            propagation.calculateAdditionalLines(commits, chain.review.branch.getHead(db))

            commentchainlines_values = []

            for file_sha1, (first_line, last_line) in propagation.new_lines.items():
                commentchainlines_values.append((chain.id, user.id, file_sha1,
                                                 first_line, last_line))

            cursor = db.cursor()
            cursor.executemany("""INSERT INTO commentchainlines (chain, uid, sha1, first_line, last_line)
                                       VALUES (%s, %s, %s, %s, %s)""",
                               commentchainlines_values)

            if not propagation.active:
                # The chain was addressed again further down the history:
                # record the new addressing commit (and last commit) instead
                # of reopening the chain.
                old_addressed_by_id = chain.addressed_by.getId(db)
                new_addressed_by_id = propagation.addressed_by[0].child.getId(db)

                if chain.addressed_by_is_draft:
                    # Update the user's existing draft change in place.
                    cursor.execute("""UPDATE commentchainchanges
                                         SET to_addressed_by=%s
                                       WHERE chain=%s
                                         AND uid=%s
                                         AND state='draft'
                                         AND to_addressed_by=%s""",
                                   (new_addressed_by_id, chain.id, user.id,
                                    old_addressed_by_id))
                else:
                    cursor.execute("""INSERT INTO commentchainchanges (uid, chain, from_addressed_by, to_addressed_by)
                                           VALUES (%s, %s, %s, %s)""",
                                   (user.id, chain.id, old_addressed_by_id,
                                    new_addressed_by_id))

                old_last_commit_id = chain.last_commit.getId(db)
                new_last_commit_id = chain.addressed_by.getId(db)

                if chain.last_commit_is_draft:
                    # Update the user's existing draft change in place.
                    cursor.execute("""UPDATE commentchainchanges
                                         SET to_last_commit=%s
                                       WHERE chain=%s
                                         AND uid=%s
                                         AND state='draft'
                                         AND to_last_commit=%s""",
                                   (new_last_commit_id, chain.id, user.id,
                                    old_last_commit_id))
                else:
                    cursor.execute("""INSERT INTO commentchainchanges (uid, chain, from_last_commit, to_last_commit)
                                           VALUES (%s, %s, %s, %s)""",
                                   (user.id, chain.id, old_last_commit_id,
                                    new_last_commit_id))

                db.commit()

                return OperationResult(old_state='addressed', new_state='addressed',
                                       draft_status=chain.review.getDraftStatus(db, user))
        elif offset != existing[0] or count != existing[1]:
            raise OperationFailure(code="invalidoperation",
                                   title="Invalid operation",
                                   message="The comment chain is already present at other lines in same file version")

        return self.setChainState(db, user, chain, "addressed", "open",
                                  new_last_commit=commit_id)

class ResolveCommentChain(SetCommentChainState):
    """Resolve (close) an open comment chain."""

    def __init__(self):
        # NOTE(review): this calls Operation.__init__ directly instead of
        # SetCommentChainState.__init__ like the sibling classes; behavior is
        # the same since the base only delegates, but it is inconsistent.
        Operation.__init__(self, { "chain_id": int })

    def process(self, db, user, chain_id):
        return self.setChainState(db, user, CommentChain.fromId(db, chain_id, user),
                                  "open", "closed")

class MorphCommentChain(Operation):
    """Change a comment chain's type between 'issue' and 'note'."""

    def __init__(self):
        Operation.__init__(self, { "chain_id": int,
                                   "new_type": set(["issue", "note"]) })

    def process(self, db, user, chain_id, new_type):
        chain = CommentChain.fromId(db, chain_id, user)
        review = chain.review

        if chain.type == new_type:
            raise OperationError("the comment chain's type is already '%s'" % new_type)
        elif new_type == "note" and chain.state in ("closed", "addressed"):
            raise OperationError("can't convert resolved or addressed issue to a note")

        cursor = db.cursor()

        if chain.state == "draft":
            # The chain is still a draft; just change its type directly.
            cursor.execute("""UPDATE commentchains
                                 SET type=%s
                               WHERE id=%s""",
                           (new_type, chain.id))
        elif chain.type_is_draft:
            # The user is reverting a draft chain type change; just undo the
            # draft change.
            cursor.execute("""DELETE FROM commentchainchanges
                                    WHERE chain=%s
                                      AND uid=%s
                                      AND to_type IS NOT NULL""",
                           (chain.id, user.id))
        else:
            # Otherwise insert a new row into the commentchainchanges table.
            cursor.execute("""INSERT INTO commentchainchanges (uid, chain, from_type, to_type)
                                   VALUES (%s, %s, %s, %s)""",
                           (user.id, chain.id, chain.type, new_type))

        db.commit()

        return OperationResult(draft_status=review.getDraftStatus(db, user))

class UpdateComment(Operation):
    """Edit the text of one of the user's own draft comments."""

    def __init__(self):
        Operation.__init__(self, { "comment_id": int,
                                   "new_text": str })

    def process(self, db, user, comment_id, new_text):
        comment = Comment.fromId(db, comment_id, user)

        if user != comment.user:
            raise OperationError("can't edit comment written by another user")
        if comment.state != "draft":
            raise OperationError("can't edit comment that has been submitted")
        if not new_text.strip():
            raise OperationError("empty comment")

        cursor = db.cursor()
        cursor.execute("""UPDATE comments
                             SET comment=%s, time=now()
                           WHERE id=%s""",
                       (new_text, comment.id))

        db.commit()

        return OperationResult(draft_status=comment.chain.review.getDraftStatus(db, user))

class DeleteComment(Operation):
    """Delete one of the user's own draft comments."""

    def __init__(self):
        Operation.__init__(self, { "comment_id": int })

    def process(self, db, user, comment_id):
        comment = Comment.fromId(db, comment_id, user)

        if user != comment.user:
            raise OperationError("can't delete comment written by another user")
        if comment.state != "draft":
            raise OperationError("can't delete comment that has been submitted")

        cursor = db.cursor()
        cursor.execute("""UPDATE comments
                             SET state='deleted'
                           WHERE id=%s""",
                       (comment.id,))

        if comment.chain.state == "draft":
            # If the comment chain was a draft, then delete it as well.
            cursor.execute("""UPDATE commentchains
                                 SET state='empty'
                               WHERE id=%s""",
                           (comment.chain.id,))

        db.commit()

        return OperationResult(draft_status=comment.chain.review.getDraftStatus(db, user))

class MarkChainsAsRead(Operation):
    """Mark comments as read, per chain and/or per review."""

    def __init__(self):
        Operation.__init__(self, { "chain_ids": Optional([int]),
                                   "review_ids": Optional([int]) })

    def process(self, db, user, chain_ids=None, review_ids=None):
        cursor = db.cursor()

        if chain_ids:
            cursor.execute("""DELETE FROM commentstoread
                                    WHERE uid=%s
                                      AND comment IN (SELECT id
                                                        FROM comments
                                                       WHERE chain=ANY (%s))""",
                           (user.id, chain_ids))

        if review_ids:
            cursor.execute("""DELETE FROM commentstoread
                                    WHERE uid=%s
                                      AND comment IN (SELECT comments.id
                                                        FROM comments
                                                        JOIN commentchains ON (commentchains.id=comments.chain)
                                                       WHERE commentchains.review=ANY (%s))""",
                           (user.id, review_ids))

        db.commit()

        return OperationResult()

================================================
FILE: src/operation/manipulatefilters.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import dbutils import gitutils import mailutils import htmlutils import reviewing.utils import reviewing.filters from operation import Operation, OperationResult, OperationError, \ OperationFailure, OperationFailureMustLogin, Optional class AddFilter(Operation): def __init__(self): Operation.__init__(self, { "filter_type": set(["reviewer", "watcher", "ignored"]), "path": str, "delegates": [str], "repository_id": Optional(int), "repository_name": Optional(str), "replaced_filter_id": Optional(int) }) def process(self, db, user, filter_type, path, delegates, repository_id=None, repository_name=None, replaced_filter_id=None): path = reviewing.filters.sanitizePath(path) if "*" in path: try: reviewing.filters.validatePattern(path) except reviewing.filters.PatternError as error: raise OperationFailure(code="invalidpattern", title="Invalid path pattern", message="There are invalid wild-cards in the path: %s" % error.message) if filter_type == "reviewer": delegates = filter(None, delegates) invalid_delegates = [] for delegate in delegates: try: dbutils.User.fromName(db, delegate) except dbutils.NoSuchUser: invalid_delegates.append(delegate) if invalid_delegates: raise OperationFailure(code="invaliduser", title="Invalid delegate(s)", message="These user-names are not valid: %s" % ", ".join(invalid_delegates)) else: delegates = [] cursor = db.cursor() if repository_id is None: cursor.execute("""SELECT id FROM repositories WHERE name=%s""", (repository_name,)) repository_id = cursor.fetchone()[0] if replaced_filter_id is not None: cursor.execute("""SELECT 1 FROM filters WHERE id=%s AND uid=%s""", (replaced_filter_id, user.id)) if not cursor.fetchone(): raise OperationFailure(code="invalidoperation", title="Invalid operation", message="Filter to replace does not exist or belongs to another user!") cursor.execute("""DELETE FROM filters WHERE id=%s""", (replaced_filter_id,)) cursor.execute("""SELECT 1 FROM filters WHERE uid=%s AND repository=%s AND path=%s""", (user.id, 
repository_id, path)) if cursor.fetchone(): raise OperationFailure(code="duplicatefilter", title="Duplicate filter", message=("You already have a filter for the path <code>%s</code> in this repository." % htmlutils.htmlify(path)), is_html=True) cursor.execute("""INSERT INTO filters (uid, repository, path, type, delegate) VALUES (%s, %s, %s, %s, %s) RETURNING id""", (user.id, repository_id, path, filter_type, ",".join(delegates))) filter_id = cursor.fetchone()[0] db.commit() return OperationResult(filter_id=filter_id) class DeleteFilter(Operation): def __init__(self): Operation.__init__(self, { "filter_id": int }) def process(self, db, user, filter_id): cursor = db.cursor() cursor.execute("""SELECT uid FROM filters WHERE id=%s""", (filter_id,)) row = cursor.fetchone() if row: if user.id != row[0]: Operation.requireAdministratorRole(db, user) cursor.execute("""DELETE FROM filters WHERE id=%s""", (filter_id,)) db.commit() return OperationResult() class ReapplyFilters(Operation): def __init__(self): Operation.__init__(self, { "repository_id": Optional(int), "filter_id": Optional(int) }) def process(self, db, user, repository_id=None, filter_id=None): if user.isAnonymous(): return OperationFailureMustLogin() cursor = db.cursor() if filter_id is not None: cursor.execute("""SELECT repository, path, type, delegate FROM filters WHERE id=%s""", (filter_id,)) repository_id, filter_path, filter_type, filter_delegate = cursor.fetchone() if repository_id is None: cursor.execute("""SELECT reviews.id, applyfilters, applyparentfilters, branches.repository FROM reviews JOIN branches ON (reviews.branch=branches.id) WHERE reviews.state!='closed'""") else: cursor.execute("""SELECT reviews.id, applyfilters, applyparentfilters, branches.repository FROM reviews JOIN branches ON (reviews.branch=branches.id) WHERE reviews.state!='closed' AND branches.repository=%s""", (repository_id,)) repositories = {} # list(review_file_id) assign_changes = [] # set(review_id) assigned_reviews = set() # 
set(review_id) watched_reviews = set() for review_id, applyfilters, applyparentfilters, repository_id in cursor.fetchall(): if repository_id in repositories: repository = repositories[repository_id] else: repository = gitutils.Repository.fromId(db, repository_id) repositories[repository_id] = repository review = reviewing.filters.Filters.Review(review_id, applyfilters, applyparentfilters, repository) filters = reviewing.filters.Filters() filters.setFiles(db, review=review) if filter_id is not None: filters.addFilter(user.id, filter_path, filter_type, filter_delegate, filter_id) else: filters.load(db, review=review, user=user) cursor.execute("""SELECT commits.id, reviewfiles.file, reviewfiles.id FROM commits JOIN gitusers ON (gitusers.id=commits.author_gituser) LEFT OUTER JOIN usergitemails ON (usergitemails.email=gitusers.email AND usergitemails.uid=%s) JOIN changesets ON (changesets.child=commits.id) JOIN reviewfiles ON (reviewfiles.changeset=changesets.id) LEFT OUTER JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id AND reviewuserfiles.uid=%s) WHERE reviewfiles.review=%s AND usergitemails.uid IS NULL AND reviewuserfiles.uid IS NULL""", (user.id, user.id, review_id)) for commit_id, file_id, review_file_id in cursor.fetchall(): association = filters.getUserFileAssociation(user.id, file_id) if association == 'reviewer': assign_changes.append(review_file_id) assigned_reviews.add(review_id) elif association == 'watcher': watched_reviews.add(review_id) cursor.execute("""SELECT reviews.id FROM reviews LEFT OUTER JOIN reviewusers ON (reviewusers.review=reviews.id AND reviewusers.uid=%s) WHERE reviews.id=ANY (%s) AND reviewusers.uid IS NULL""", (user.id, list(assigned_reviews) + list(watched_reviews))) new_reviews = set(review_id for (review_id,) in cursor) cursor.executemany("""INSERT INTO reviewusers (review, uid) VALUES (%s, %s)""", [(review_id, user.id) for review_id in new_reviews]) cursor.executemany("""INSERT INTO reviewuserfiles (file, uid) VALUES (%s, 
%s)""", [(review_file_id, user.id) for review_file_id in assign_changes]) db.commit() watched_reviews &= new_reviews watched_reviews -= assigned_reviews cursor.execute("""SELECT id, summary FROM reviews WHERE id=ANY (%s)""", (list(assigned_reviews | watched_reviews),)) return OperationResult(assigned_reviews=sorted(assigned_reviews), watched_reviews=sorted(watched_reviews), summaries=dict(cursor)) class CountMatchedPaths(Operation): def __init__(self): Operation.__init__(self, { "single": Optional({ "repository_name": str, "path": str }), "multiple": Optional([int]), "user_id": Optional(int) }) def process(self, db, user, single=None, multiple=None, user_id=None): if user_id is None: user_id = user.id try: if single: repository = gitutils.Repository.fromName(db, single["repository_name"]) path = reviewing.filters.sanitizePath(single["path"]) cursor = db.cursor() cursor.execute("""SELECT path FROM filters WHERE repository=%s AND uid=%s""", (repository.id, user_id,)) paths = set(filter_path for (filter_path,) in cursor) paths.add(path) return OperationResult(count=reviewing.filters.countMatchedFiles(repository, list(paths))[path]) cursor = db.cursor() cursor.execute("""SELECT repository, id, path FROM filters WHERE id=ANY (%s) ORDER BY repository""", (multiple,)) per_repository = {} result = [] for repository_id, filter_id, filter_path in cursor: per_repository.setdefault(repository_id, []).append((filter_id, filter_path)) for repository_id, filters in per_repository.items(): repository = gitutils.Repository.fromId(db, repository_id) counts = reviewing.filters.countMatchedFiles( repository, [filter_path for (filter_id, filter_path) in filters]) for filter_id, filter_path in filters: result.append({ "id": filter_id, "count": counts[filter_path] }) return OperationResult(filters=result) except reviewing.filters.PatternError as error: return OperationFailure(code="invalidpattern", title="Invalid pattern!", message=str(error)) class GetMatchedPaths(Operation): def 
__init__(self): Operation.__init__(self, { "repository_name": str, "path": str, "user_id": Optional(int) }) def process(self, db, user, repository_name, path, user_id=None): if user_id is None: user_id = user.id repository = gitutils.Repository.fromName(db, repository_name) path = reviewing.filters.sanitizePath(path) cursor = db.cursor() cursor.execute("""SELECT path FROM filters WHERE repository=%s AND uid=%s""", (repository.id, user_id,)) paths = set(filter_path for (filter_path,) in cursor) paths.add(path) return OperationResult(paths=reviewing.filters.getMatchedFiles(repository, list(paths))[path]) class AddReviewFilters(Operation): def __init__(self): Operation.__init__(self, { "review_id": int, "filters": [{ "type": set(["reviewer", "watcher"]), "user_names": Optional([str]), "user_ids": Optional([int]), "paths": Optional([str]), "file_ids": Optional([int]) }] }) def process(self, db, creator, review_id, filters): review = dbutils.Review.fromId(db, review_id) by_user = {} for filter in filters: if "user_ids" in filter: user_ids = set(filter["user_ids"]) else: user_ids = set([]) if "user_names" in filter: for user_name in filter["user_names"]: user_ids.add(dbutils.User.fromName(db, user_name).id) if "paths" in filter: paths = set(reviewing.filters.sanitizePath(path) for path in filter["paths"]) for path in paths: try: reviewing.filters.validatePattern(path) except reviewing.filters.PatternError as error: raise OperationFailure( code="invalidpattern", title="Invalid path pattern", message="There are invalid wild-cards in the path: %s" % error.message) else: paths = set() if "file_ids" in filter: for file_id in filter["file_ids"]: paths.add(dbutils.describe_file(file_id)) for user_id in user_ids: reviewer_paths, watcher_paths = by_user.setdefault(user_id, (set(), set())) if filter["type"] == "reviewer": reviewer_paths |= paths else: watcher_paths |= paths pending_mails = [] for user_id, (reviewer_paths, watcher_paths) in by_user.items(): try: user = 
dbutils.User.fromId(db, user_id) except dbutils.InvalidUserId: raise OperationFailure( code="invaliduserid", title="Invalid user ID", message="At least one of the specified user IDs was invalid.") pending_mails.extend(reviewing.utils.addReviewFilters( db, creator, user, review, reviewer_paths, watcher_paths)) review = dbutils.Review.fromId(db, review_id) review.incrementSerial(db) db.commit() mailutils.sendPendingMails(pending_mails) return OperationResult() class RemoveReviewFilter(Operation): def __init__(self): Operation.__init__(self, { "filter_id": int }) def process(self, db, user, filter_id): cursor = db.cursor() cursor.execute("SELECT review FROM reviewfilters WHERE id=%s", (filter_id,)) review_id = cursor.fetchone() if not review_id: raise OperationFailure( code="nosuchfilter", title="No such filter!", message=("Maybe the filter has been deleted since you " "loaded this page?")) cursor.execute("DELETE FROM reviewfilters WHERE id=%s", (filter_id,)) review = dbutils.Review.fromId(db, review_id) review.incrementSerial(db) db.commit() return OperationResult() ================================================ FILE: src/operation/manipulatereview.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import dbutils
import gitutils
import reviewing.mail as review_mail
import mailutils

from operation import (Operation, OperationResult, OperationError, Optional,
                       OperationFailure, Review, User)

class CloseReview(Operation):
    """Close an open, accepted review."""

    def __init__(self):
        Operation.__init__(self, { "review_id": int })

    def process(self, db, user, review_id):
        review = dbutils.Review.fromId(db, review_id)

        # Guard: only open reviews that have been accepted can be closed.
        if review.state != "open":
            raise OperationError("review not open; can't close")
        if not review.accepted(db):
            raise OperationError("review is not accepted; can't close")

        review.close(db, user)
        # Stop following the tracked branch once the review is closed.
        review.disableTracking(db)

        db.commit()

        return OperationResult()

class DropReview(Operation):
    """Drop (abandon) an open review."""

    def __init__(self):
        Operation.__init__(self, { "review_id": int })

    def process(self, db, user, review_id):
        review = dbutils.Review.fromId(db, review_id)

        if review.state != "open":
            raise OperationError("review not open; can't drop")

        review.drop(db, user)
        # Dropped reviews should no longer track their remote branch.
        review.disableTracking(db)

        db.commit()

        return OperationResult()

class ReopenReview(Operation):
    """Reopen a previously closed or dropped review."""

    def __init__(self):
        Operation.__init__(self, { "review_id": int })

    def process(self, db, user, review_id):
        review = dbutils.Review.fromId(db, review_id)

        if review.state == "open":
            raise OperationError("review already open; can't reopen")

        review.reopen(db, user)

        db.commit()

        return OperationResult()

class PingReview(Operation):
    """Send a 'ping' mail about a review to everyone with pending changes,
    plus the review's owners and the pinging user."""

    def __init__(self):
        Operation.__init__(self, { "review_id": int,
                                   "note": str })

    def process(self, db, user, review_id, note):
        review = dbutils.Review.fromId(db, review_id)

        cursor = db.cursor()
        # Every non-retired user assigned changes that are still pending.
        cursor.execute("""SELECT DISTINCT uid
                            FROM reviewuserfiles
                            JOIN reviewfiles ON (reviewfiles.id=reviewuserfiles.file)
                            JOIN users ON (users.id=reviewuserfiles.uid)
                           WHERE reviewfiles.review=%s
                             AND reviewfiles.state='pending'
                             AND users.status!='retired'""",
                       (review.id,))

        user_ids = set([user_id for (user_id,) in cursor.fetchall()])

        # Add the pinging user and the owners (they are usually the same.)
user_ids.add(user.id) for owner in review.owners: user_ids.add(owner.id) recipients = [dbutils.User.fromId(db, user_id) for user_id in user_ids] pending_mails = [] for recipient in recipients: pending_mails.extend(review_mail.sendPing(db, user, recipient, recipients, review, note)) mailutils.sendPendingMails(pending_mails) return OperationResult() class UpdateReview(Operation): def __init__(self): Operation.__init__(self, { "review_id": int, "new_summary": Optional(str), "new_description": Optional(str), "new_owners": Optional([str]) }) def process(self, db, user, review_id, new_summary=None, new_description=None, new_owners=None): review = dbutils.Review.fromId(db, review_id) if new_summary is not None: if not new_summary: raise OperationError("invalid new summary") review.setSummary(db, new_summary) if new_description is not None: review.setDescription(db, new_description if new_description else None) if new_owners is not None: remove_owners = set(review.owners) for user_name in new_owners: owner = dbutils.User.fromName(db, user_name) if owner in remove_owners: remove_owners.remove(owner) else: review.addOwner(db, owner) for owner in remove_owners: review.removeOwner(db, owner) review = dbutils.Review.fromId(db, review_id) review.incrementSerial(db) db.commit() return OperationResult() class WatchReview(Operation): def __init__(self): super(WatchReview, self).__init__({ "review": Review, "subject": User }) def process(self, db, user, review, subject): if user != subject: Operation.requireRole(db, "administrator", user) cursor = db.readonly_cursor() cursor.execute("""SELECT 1 FROM reviewusers WHERE review=%s AND uid=%s""", (review.id, subject.id)) if cursor.fetchone(): # Already a watcher (or reviewer/owner). 
return OperationResult() cursor.execute("""SELECT uid, include FROM reviewrecipientfilters WHERE review=%s AND (uid=%s OR uid IS NULL)""", (review.id, subject.id)) default_include = True user_include = None for user_id, include in cursor: if user_id is None: default_include = include else: user_include = include with db.updating_cursor( "reviewusers", "reviewrecipientfilters") as cursor: cursor.execute("""INSERT INTO reviewusers (review, uid, type) VALUES (%s, %s, 'manual')""", (review.id, subject.id)) if not default_include and user_include is None: cursor.execute( """INSERT INTO reviewrecipientfilters (review, uid, include) VALUES (%s, %s, TRUE)""", (review.id, subject.id)) return OperationResult() class UnwatchReview(Operation): def __init__(self): super(UnwatchReview, self).__init__({ "review": Review, "subject": User }) def process(self, db, user, review, subject): if user != subject: Operation.requireRole(db, "administrator", user) cursor = db.readonly_cursor() cursor.execute("""SELECT owner FROM reviewusers WHERE review=%s AND uid=%s""", (review.id, subject.id)) row = cursor.fetchone() if not row: # Already not associated. 
return OperationResult() is_owner, = row if is_owner: raise OperationFailure( code="isowner", title="Is owner", message="Cannot unwatch review since user owns the review.") cursor.execute("""SELECT 1 FROM fullreviewuserfiles WHERE review=%s AND assignee=%s""", (review.id, subject.id)) if cursor.fetchone(): raise OperationFailure( code="isreviewer", title="Is reviewer", message=("Cannot unwatch review since user is assigned to " "review changes.")) with db.updating_cursor("reviewusers") as cursor: cursor.execute("""DELETE FROM reviewusers WHERE review=%s AND uid=%s""", (review.id, subject.id)) return OperationResult() ================================================ FILE: src/operation/manipulateuser.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import re import base64 import dbutils import gitutils import auth import mailutils import textutils import configuration from operation import (Operation, OperationResult, OperationError, OperationFailure, Optional, User) class SetFullname(Operation): def __init__(self): Operation.__init__(self, { "user_id": int, "value": str }) def process(self, db, user, user_id, value): if user.id != user_id: Operation.requireRole(db, "administrator", user) if not value.strip(): raise OperationError("empty display name is not allowed") db.cursor().execute("UPDATE users SET fullname=%s WHERE id=%s", (value.strip(), user_id)) db.commit() return OperationResult() class SetGitEmails(Operation): def __init__(self): Operation.__init__(self, { "subject": User, "value": [str] }) def process(self, db, user, subject, value): if user != subject: Operation.requireRole(db, "administrator", user) for address in value: if not address.strip(): raise OperationError("empty email address is not allowed") if address.count("@") != 1: raise OperationError("invalid email address") cursor = db.cursor() cursor.execute("SELECT email FROM usergitemails WHERE uid=%s", (subject.id,)) current_addresses = set(address for (address,) in cursor) new_addresses = set(address.strip() for address in value) for address in (current_addresses - new_addresses): cursor.execute("DELETE FROM usergitemails WHERE uid=%s AND email=%s", (subject.id, address)) for address in (new_addresses - current_addresses): cursor.execute("INSERT INTO usergitemails (uid, email) VALUES (%s, %s)", (subject.id, address)) db.commit() return OperationResult() class ChangePassword(Operation): def __init__(self): Operation.__init__(self, { "subject": Optional(User), "current_pw": Optional(str), "new_pw": str }) def process(self, db, user, new_pw, subject=None, current_pw=None): if subject is None: subject = user cursor = db.cursor() if not auth.DATABASE.supportsPasswordChange(): raise OperationFailure( code="notsupported", title="Not supported!", 
message="Changing password is not supported via this system.") if not new_pw: raise OperationFailure( code="emptypassword", title="Empty password!", message="Setting an empty password is not allowed.") if user != subject: Operation.requireRole(db, "administrator", user) if current_pw is None: # Signal that it doesn't need to be checked. current_pw = True try: auth.DATABASE.changePassword(db, subject, current_pw, new_pw) except auth.WrongPassword: raise OperationFailure( code="wrongpassword", title="Wrong password!", message="The provided current password is not correct.") return OperationResult() def sanitize(self, value): sanitized = value.copy() if "current_pw" in value: sanitized["current_pw"] = "****" sanitized["new_pw"] = "****" return sanitized def checkEmailAddressSyntax(address): return bool(re.match(r"[^@]+@[^.]+(?:\.[^.]+)*$", address)) def sendVerificationMail(db, user, email_id=None): cursor = db.cursor() if email_id is None: cursor.execute("""SELECT email FROM users WHERE id=%s""", (user.id,)) email_id, = cursor.fetchone() cursor.execute("""SELECT email, verification_token FROM useremails WHERE id=%s""", (email_id,)) email, verification_token = cursor.fetchone() if verification_token is None: verification_token = auth.getToken(encode=base64.b16encode) with db.updating_cursor("useremails") as cursor: cursor.execute("""UPDATE useremails SET verification_token=%s WHERE id=%s""", (verification_token, email_id)) if configuration.base.ACCESS_SCHEME == "http": protocol = "http" else: protocol = "https" administrators = dbutils.getAdministratorContacts(db, indent=2) if administrators: administrators = ":\n\n%s" % administrators else: administrators = "." recipients = [mailutils.User(user.name, email, user.fullname)] subject = "[Critic] Please verify your email: %s" % email body = textutils.reflow(""" This is a message from the Critic code review system at %(hostname)s. The user '%(username)s' on this system has added this email address to his/her account. 
If this is you, please confirm this by following this link: %(url_prefix)s/verifyemail?email=%(email)s&token=%(verification_token)s If this is not you, you can safely ignore this email. If you wish to report abuse, please contact the Critic system's administrators%(administrators)s """ % { "hostname": configuration.base.HOSTNAME, "username": user.name, "email": email, "url_prefix": "%s://%s" % (protocol, configuration.base.HOSTNAME), "verification_token": verification_token, "administrators": administrators }) mailutils.sendMessage(recipients, subject, body) class RequestVerificationEmail(Operation): def __init__(self): Operation.__init__(self, { "email_id": int }) def process(self, db, user, email_id): cursor = db.cursor() cursor.execute("""SELECT uid, email, verified FROM useremails WHERE id=%s""", (email_id,)) row = cursor.fetchone() if not row: raise OperationFailure( code="invalidemailid", title="No such email address", message="The address might have been deleted already.") user_id, email, verified = row if verified is True: raise OperationFailure( code="alreadyverified", title="Address already verified", message="This address has already been verified.") if user != user_id: Operation.requireRole(db, "administrator", user) user = dbutils.User.fromId(db, user_id) sendVerificationMail(db, user, email_id) db.commit() return OperationResult() class DeleteEmailAddress(Operation): def __init__(self): Operation.__init__(self, { "email_id": int }) def process(self, db, user, email_id): cursor = db.cursor() cursor.execute("""SELECT uid FROM useremails WHERE id=%s""", (email_id,)) row = cursor.fetchone() if not row: raise OperationFailure( code="invalidemailid", title="No such email address", message="The address might have been deleted already.") subject_id, = row if user != subject_id: Operation.requireRole(db, "administrator", user) cursor.execute("""SELECT useremails.id, users.email IS NOT NULL FROM useremails LEFT OUTER JOIN users ON (users.email=useremails.id) 
WHERE useremails.uid=%s""", (subject_id,)) emails = dict(cursor) # Reject if the user has more than one email address registered and is # trying to delete the selected one. The UI checks this too, but that # check is not 100 % reliable since it checks the state at the time the # page was loaded, not necessarily the current state. if len(emails) > 1 and emails[email_id]: raise OperationFailure( code="notallowed", title="Will not delete current address", message=("This email address is your current address. Please " "select one of the other addresses as your current " "address before deleting it.")) cursor.execute("""UPDATE users SET email=NULL WHERE id=%s AND email=%s""", (subject_id, email_id)) cursor.execute("""DELETE FROM useremails WHERE id=%s""", (email_id,)) db.commit() return OperationResult() class SelectEmailAddress(Operation): def __init__(self): Operation.__init__(self, { "email_id": int }) def process(self, db, user, email_id): cursor = db.cursor() cursor.execute("""SELECT uid FROM useremails WHERE id=%s""", (email_id,)) row = cursor.fetchone() if not row: raise OperationFailure( code="invalidemailid", title="No such email address", message="The address might have been deleted already.") user_id, = row if user != user_id: Operation.requireRole(db, "administrator", user) cursor.execute("""UPDATE users SET email=%s WHERE id=%s""", (email_id, user_id)) db.commit() return OperationResult() class AddEmailAddress(Operation): def __init__(self): Operation.__init__(self, { "subject": User, "email": str }) def process(self, db, user, subject, email): if not checkEmailAddressSyntax(email): raise OperationFailure( code="invalidemail", title="Invalid email address", message="Please provide an address on the form <user>@<host>!") if user != subject: Operation.requireRole(db, "administrator", user) cursor = db.cursor() cursor.execute("""SELECT 1 FROM useremails WHERE uid=%s AND email=%s""", (subject.id, email)) if cursor.fetchone(): raise OperationFailure( 
code="invalidemail", title="Duplicate email address", message="The exact same address is already registered!") if user.hasRole(db, "administrator"): verified = None elif configuration.base.VERIFY_EMAIL_ADDRESSES: verified = False else: verified = None with db.updating_cursor("users", "useremails") as cursor: cursor.execute("""INSERT INTO useremails (uid, email, verified) VALUES (%s, %s, %s) RETURNING id""", (subject.id, email, verified)) email_id, = cursor.fetchone() if subject.email is None: cursor.execute("""UPDATE users SET email=%s WHERE id=%s""", (email_id, subject.id)) if verified is False: sendVerificationMail(db, subject, email_id) return OperationResult() ================================================ FILE: src/operation/markfiles.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import dbutils from operation import Operation, OperationResult class MarkFiles(Operation): def __init__(self): Operation.__init__(self, { "review_id": int, "reviewed": bool, "changeset_ids": [int], "file_ids": [int] }) def process(req, db, user, review_id, reviewed, changeset_ids, file_ids): review = dbutils.Review.fromId(db, review_id) cursor = db.cursor() # Revert any draft changes the user has for the specified files in # the specified changesets. 
cursor.execute("""DELETE FROM reviewfilechanges WHERE uid=%s AND state='draft' AND file IN (SELECT id FROM reviewfiles WHERE review=%s AND changeset=ANY (%s) AND file=ANY (%s))""", (user.id, review.id, changeset_ids, file_ids)) if reviewed: from_state, to_state = 'pending', 'reviewed' else: from_state, to_state = 'reviewed', 'pending' # Insert draft changes for every file whose state would be updated. cursor.execute("""INSERT INTO reviewfilechanges (file, uid, from_state, to_state) SELECT reviewfiles.id, reviewuserfiles.uid, reviewfiles.state, %s FROM reviewfiles JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id AND reviewuserfiles.uid=%s) WHERE reviewfiles.review=%s AND reviewfiles.state=%s AND reviewfiles.changeset=ANY (%s) AND reviewfiles.file=ANY (%s)""", (to_state, user.id, review.id, from_state, changeset_ids, file_ids)) db.commit() return OperationResult(draft_status=review.getDraftStatus(db, user)) ================================================ FILE: src/operation/miscellaneous.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import re import dbutils import gitutils import configuration from operation import (Operation, OperationResult, OperationFailure, Review, Repository, SHA1) class CheckSerial(Operation): def __init__(self): super(CheckSerial, self).__init__({ "review_id": int, "serial": int }) def process(self, db, user, review_id, serial): cursor = db.readonly_cursor() cursor.execute("SELECT serial FROM reviews WHERE id=%s", (review_id,)) current_serial, = cursor.fetchone() if serial == current_serial: interval = user.getPreference(db, "review.updateCheckInterval") return OperationResult(stale=False, interval=interval) return OperationResult(stale=True) class RebaseBranch(Operation): def __init__(self): super(RebaseBranch, self).__init__({ "repository": Repository, "branch_name": str, "base_branch_name": str }) def process(self, db, user, repository, branch_name, base_branch_name): branch = dbutils.Branch.fromName(db, repository, branch_name) base_branch = dbutils.Branch.fromName(db, repository, base_branch_name) branch.rebase(db, base_branch) db.commit() return OperationResult() class SuggestReview(Operation): def __init__(self): super(SuggestReview, self).__init__({ "repository": Repository, "sha1": SHA1 }) def process(self, db, user, repository, sha1): try: commit = gitutils.Commit.fromSHA1(db, repository, sha1) except gitutils.GitReferenceError: raise OperationFailure( code="invalidsha1", title="Invalid SHA-1", message="No such commit: %s in %s" % (sha1, repository.path)) suggestions = {} cursor = db.readonly_cursor() cursor.execute("""SELECT reviews.id FROM reviews WHERE reviews.summary=%s""", (commit.summary(),)) for review_id, in cursor: review = dbutils.Review.fromId(db, review_id) if review.state != 'dropped': suggestions[review_id] = review.getReviewState(db) return OperationResult(summary=commit.summary(), reviews=reviews) ================================================ FILE: src/operation/news.py ================================================ # -*- mode: python; 
encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. from operation import Operation, OperationResult class AddNewsItem(Operation): """Add news item.""" def __init__(self): Operation.__init__(self, { "text": str }) def process(self, db, user, text): Operation.requireRole(db, "newswriter", user) cursor = db.cursor() cursor.execute("INSERT INTO newsitems (text) VALUES (%s) RETURNING id", (text,)) item_id = cursor.fetchone()[0] db.commit() return OperationResult(item_id=item_id) class EditNewsItem(Operation): """Add news item.""" def __init__(self): Operation.__init__(self, { "item_id": int, "text": str }) def process(self, db, user, item_id, text): Operation.requireRole(db, "newswriter", user) db.cursor().execute("UPDATE newsitems SET text=%s WHERE id=%s", (text, item_id)) db.commit() return OperationResult() ================================================ FILE: src/operation/rebasereview.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import dbutils import gitutils import log.commitset from operation import (Operation, OperationResult, OperationError, Optional, Review) def doPrepareRebase(db, user, review, new_upstream_arg=None, branch=None): commitset = log.commitset.CommitSet(review.branch.getCommits(db)) tails = commitset.getFilteredTails(review.branch.repository) cursor = db.cursor() cursor.execute("SELECT uid FROM reviewrebases WHERE review=%s AND new_head IS NULL", (review.id,)) row = cursor.fetchone() if row: rebaser = dbutils.User.fromId(db, row[0]) raise OperationError("The review is already being rebased by %s <%s>." 
% (rebaser.fullname, rebaser.email)) head = commitset.getHeads().pop() head_id = head.getId(db) if new_upstream_arg is not None: if len(tails) > 1: raise OperationError("Rebase to new upstream commit not supported.") tail = gitutils.Commit.fromSHA1(db, review.branch.repository, tails.pop()) old_upstream_id = tail.getId(db) if new_upstream_arg == "0" * 40: new_upstream_id = None else: if not gitutils.re_sha1.match(new_upstream_arg): cursor.execute("SELECT sha1 FROM tags WHERE repository=%s AND name=%s", (review.branch.repository.id, new_upstream_arg)) row = cursor.fetchone() if row: new_upstream_arg = row[0] else: raise OperationError("Specified new upstream is invalid.") try: new_upstream = gitutils.Commit.fromSHA1(db, review.branch.repository, new_upstream_arg) except: raise OperationError("The specified new upstream commit does not exist in Critic's repository.") new_upstream_id = new_upstream.getId(db) else: old_upstream_id = None new_upstream_id = None cursor.execute("""INSERT INTO reviewrebases (review, old_head, new_head, old_upstream, new_upstream, uid, branch) VALUES (%s, %s, NULL, %s, %s, %s, %s)""", (review.id, head_id, old_upstream_id, new_upstream_id, user.id, branch)) review.incrementSerial(db) db.commit() def doCancelRebase(db, user, review): review.incrementSerial(db) db.cursor().execute("DELETE FROM reviewrebases WHERE review=%s AND new_head IS NULL", (review.id,)) db.commit() class CheckRebase(Operation): def __init__(self): Operation.__init__(self, { "review_id": int }) def process(self, db, user, review_id): review = dbutils.Review.fromId(db, review_id) tails = review.getFilteredTails(db) available = "both" if len(tails) == 1 else "inplace" return OperationResult(available=available) class SuggestUpstreams(Operation): def __init__(self): Operation.__init__(self, { "review_id": int }) def process(self, db, user, review_id): review = dbutils.Review.fromId(db, review_id) tails = review.getFilteredTails(db) if len(tails) > 1: raise 
OperationError("Multiple tail commits.") try: from customization.filtertags import getUpstreamPattern except ImportError: def getUpstreamTagPattern(review): pass tail = tails.pop() tags = review.branch.repository.run("tag", "-l", "--contains", tail, getUpstreamTagPattern(review) or "*").splitlines() cursor = db.cursor() upstreams = [] for tag in tags: cursor.execute("SELECT sha1 FROM tags WHERE repository=%s AND name=%s", (review.branch.repository.id, tag)) row = cursor.fetchone() if row and row[0] != tail: upstreams.append(tag) return OperationResult(upstreams=upstreams) class PrepareRebase(Operation): def __init__(self): Operation.__init__(self, { "review": Review, "new_upstream": Optional(str), "branch": Optional(str) }) def process(self, db, user, review, new_upstream=None, branch=None): doPrepareRebase(db, user, review, new_upstream, branch) return OperationResult() class CancelRebase(Operation): def __init__(self): Operation.__init__(self, { "review_id": int }) def process(self, db, user, review_id): review = dbutils.Review.fromId(db, review_id) doCancelRebase(db, user, review) return OperationResult() class RebaseReview(Operation): def __init__(self): Operation.__init__(self, { "review_id": int, "new_head_sha1": str, "new_upstream_sha1": Optional(str), "branch": Optional(str), "new_trackedbranch": Optional(str) }) def process(self, db, user, review_id, new_head_sha1, new_upstream_sha1=None, branch=None, new_trackedbranch=None): review = dbutils.Review.fromId(db, review_id) new_head = gitutils.Commit.fromSHA1(db, review.repository, new_head_sha1) cursor = db.cursor() if review.state == 'closed': cursor.execute("SELECT closed_by FROM reviews WHERE id=%s", (review.id,)) closed_by = cursor.fetchone()[0] review.serial += 1 cursor.execute("UPDATE reviews SET state='open', serial=%s, closed_by=NULL WHERE id=%s", (review.serial, review.id)) else: closed_by = None trackedbranch = review.getTrackedBranch(db) if trackedbranch and not trackedbranch.disabled: 
cursor.execute("UPDATE trackedbranches SET disabled=TRUE WHERE id=%s", (trackedbranch.id,)) commitset = log.commitset.CommitSet(review.branch.getCommits(db)) tails = commitset.getFilteredTails(review.branch.repository) if len(tails) == 1 and tails.pop() == new_upstream_sha1: # This appears to be a history rewrite. new_upstream_sha1 = None doPrepareRebase(db, user, review, new_upstream_sha1, branch) try: with review.repository.relaycopy("RebaseReview") as relay: with review.repository.temporaryref(new_head) as ref_name: relay.run("fetch", "origin", ref_name) relay.run("push", "--force", "origin", "%s:refs/heads/%s" % (new_head.sha1, review.branch.name), env={ "REMOTE_USER": user.name }) if closed_by is not None: db.commit() state = review.getReviewState(db) if state.accepted: review.serial += 1 cursor.execute("UPDATE reviews SET state='closed', serial=%s, closed_by=%s WHERE id=%s", (review.serial, closed_by, review.id)) if trackedbranch and not trackedbranch.disabled: cursor.execute("UPDATE trackedbranches SET disabled=FALSE WHERE id=%s", (trackedbranch.id,)) if new_trackedbranch: cursor.execute("UPDATE trackedbranches SET remote_name=%s WHERE id=%s", (new_trackedbranch, trackedbranch.id)) db.commit() except: doCancelRebase(db, user, review) raise return OperationResult() class RevertRebase(Operation): def __init__(self): Operation.__init__(self, { "review": Review, "rebase_id": int }) def process(self, db, user, review, rebase_id): cursor = db.cursor() cursor.execute("""SELECT old_head, new_head, new_upstream, equivalent_merge, replayed_rebase FROM reviewrebases WHERE id=%s""", (rebase_id,)) old_head_id, new_head_id, new_upstream_id, equivalent_merge_id, replayed_rebase_id = cursor.fetchone() cursor.execute("SELECT commit FROM previousreachable WHERE rebase=%s", (rebase_id,)) reachable = [commit_id for (commit_id,) in cursor] if not reachable: # Fail if rebase was done before the 'previousreachable' table was # added, and we thus don't know what commits the branch 
contained # before the rebase. raise OperationError("Automatic revert not supported; rebase is pre-historic.") if review.branch.getHead(db).getId(db) != new_head_id: raise OperationError("Commits added to review after rebase; need to remove them first.") old_head = gitutils.Commit.fromId(db, review.repository, old_head_id) new_head = gitutils.Commit.fromId(db, review.repository, new_head_id) cursor.execute("DELETE FROM reachable WHERE branch=%s", (review.branch.id,)) cursor.executemany("INSERT INTO reachable (branch, commit) VALUES (%s, %s)", ((review.branch.id, commit_id) for commit_id in reachable)) if new_upstream_id: generated_commit_id = equivalent_merge_id or replayed_rebase_id if generated_commit_id is not None: # A generated commit (equivalent merge or replayed rebase) was # added when performing the rebase; remove it. # Reopen any issues marked as addressed by the rebase. If the # rebase was a fast-forward one, issues will have been addressed # by the equivalent merge commit. Otherwise, issues will have # been addressed by the new head commit (not the replayed rebase # commit.) addressed_by_commit_id = equivalent_merge_id or new_head_id cursor.execute("""UPDATE commentchains SET state='open', addressed_by=NULL WHERE review=%s AND state='addressed' AND addressed_by=%s""", (review.id, addressed_by_commit_id)) # Delete the review changesets (and, via cascade, all related # assignments and state changes.) 
cursor.execute( """DELETE FROM reviewchangesets WHERE review=%s AND changeset IN (SELECT id FROM changesets WHERE child=%s OR parent=%s)""", (review.id, generated_commit_id, generated_commit_id)) cursor.execute("UPDATE branches SET head=%s WHERE id=%s", (old_head_id, review.branch.id)) cursor.execute("DELETE FROM reviewrebases WHERE id=%s", (rebase_id,)) review.incrementSerial(db) db.commit() review.repository.run( "update-ref", "refs/heads/%s" % review.branch.name, old_head.sha1, new_head.sha1) return OperationResult() ================================================ FILE: src/operation/recipientfilter.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import dbutils from operation import Operation, OperationResult class AddRecipientFilter(Operation): def __init__(self): Operation.__init__(self, { "review_id": int, "user_id": int, "include": bool }) def process(self, db, user, review_id, user_id, include): cursor = db.cursor() cursor.execute("SELECT include FROM reviewrecipientfilters WHERE review=%s AND uid=%s", (review_id, user_id)) row = cursor.fetchone() if row: if row[0] != include: cursor.execute("UPDATE reviewrecipientfilters SET include=%s WHERE review=%s AND uid=%s", (include, review_id, user_id)) else: cursor.execute("INSERT INTO reviewrecipientfilters (review, uid, include) VALUES (%s, %s, %s)", (review_id, user_id, include)) review = dbutils.Review.fromId(db, review_id) review.incrementSerial(db) db.commit() return OperationResult() ================================================ FILE: src/operation/registeruser.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import dbutils
import configuration
import auth

from operation import Operation, OperationResult, Optional, Request
from operation.manipulateuser import sendVerificationMail, checkEmailAddressSyntax

class RegisterUser(Operation):
    """Create a new Critic user account.

    Handles both password-based registration and registration via an
    external authentication provider (connecting the new Critic user to an
    existing 'externalusers' row).  Accepts anonymous users, since the
    caller by definition has no account yet."""

    def __init__(self):
        super(RegisterUser, self).__init__(
            { "req": Request,
              "username": str,
              "fullname": str,
              "email": str,
              "password": Optional(str),
              "external": Optional({ "provider": set(auth.PROVIDERS.keys()),
                                     "account": str,
                                     "token": str }) },
            accept_anonymous_user=True)

    def process(self, db, user, req, username, fullname, email,
                password=None, external=None):
        cursor = db.cursor()

        # Normalize empty form fields.
        if not fullname:
            fullname = username
        if not email:
            email = None
        if not password:
            # Empty password => disabled.
            password = None

        if external:
            provider_config = configuration.auth.PROVIDERS[external["provider"]]
            provider = auth.PROVIDERS[external["provider"]]

        # Check that user registration is actually enabled.  This would also
        # disable the UI for user registration, of course, but the UI could be
        # bypassed, so we should check here as well.
        if not configuration.base.ALLOW_USER_REGISTRATION:
            if not external or not provider_config["allow_user_registration"]:
                return OperationResult(
                    message="User registration is not enabled.")

        # Check that the user name is valid.
        try:
            auth.validateUserName(username)
        except auth.InvalidUserName as error:
            return OperationResult(
                message="<u>Invalid user name</u><br>" + str(error),
                focus="#newusername")

        # Check that the user name is not already taken.
        cursor.execute("SELECT 1 FROM users WHERE name=%s", (username,))
        if cursor.fetchone():
            return OperationResult(
                message="A user named '%s' already exists!" % username,
                focus="#newusername")

        # Check that the email address has some hope of being valid.
        if email and not checkEmailAddressSyntax(email):
            return OperationResult(
                message=("<u>Invalid email address</u><br>"
                         "Please provide an address on the form user@host!"),
                focus="#email")

        # Check that we have either a password or an external authentication
        # provider.  If we have neither, the user wouldn't be able to sign in.
        if password is None and external is None:
            return OperationResult(
                message="Empty password.",
                focus="#password1")

        if password:
            password = auth.hashPassword(password)

        verify_email_address = configuration.base.VERIFY_EMAIL_ADDRESSES

        if external:
            # Check that the external authentication token is valid.
            if not provider.validateToken(db, external["account"], external["token"]):
                return OperationResult(
                    message="Invalid external authentication state.")

            cursor.execute("""SELECT id, uid, email
                                FROM externalusers
                               WHERE provider=%s
                                 AND account=%s""",
                           (external["provider"], external["account"]))

            # Note: the token validation above implicitly checks that there's a
            # matching row in the 'externalusers' table.
            external_user_id, existing_user_id, external_email = cursor.fetchone()

            # Check that we don't already have a Critic user associated with
            # this external user.
            if existing_user_id is not None:
                existing_user = dbutils.User.fromId(db, existing_user_id)
                return OperationResult(
                    message=("There is already a Critic user ('%s') connected "
                             "to the %s '%s'"
                             % (existing_user.name,
                                provider.getTitle(),
                                external["account"])))

            # If the user kept the provider-supplied address, trust the
            # provider's own verification policy instead of the global one.
            if email == external_email:
                verify_email_address = provider.configuration["verify_email_addresses"]

            # Reset 'email' column in 'externalusers': we only need it to detect
            # if the user changed the email address in the "Create user" form.
            # Also reset the 'token' column, which serves no further purpose
            # beyond this point.
            with db.updating_cursor("externalusers") as cursor:
                cursor.execute("""UPDATE externalusers
                                     SET email=NULL,
                                         token=NULL
                                   WHERE id=%s""",
                               (external_user_id,))

        # NOTE(review): 'external_user_id' is only bound inside the
        # 'if external:' branch above, yet it is passed to User.create()
        # unconditionally below; a password-only registration would raise
        # NameError here.  Confirm against upstream whether an
        # 'else: external_user_id = None' was lost in extraction.
        email_verified = False if email and verify_email_address else None

        user = dbutils.User.create(
            db, username, fullname, email, email_verified, password,
            external_user_id=external_user_id)

        # False (not None) means "has an address awaiting verification".
        if email_verified is False:
            sendVerificationMail(db, user)

        user.sendUserCreatedMail("wsgi[registeruser]", external)

        # Sign the newly created user in immediately.
        auth.createSessionId(db, req, user)

        return OperationResult()


================================================
FILE: src/operation/savesettings.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
from operation import (Operation, OperationResult, OperationFailure, OperationError, Optional, User, Repository) import dbutils import gitutils class SaveSettings(Operation): def __init__(self): super(SaveSettings, self).__init__( { "subject": Optional(User), "repository": Optional(Repository), "filter_id": Optional(int), "settings": [{ "item": str, "value": Optional(set([bool, int, str])) }], "defaults": Optional(bool) }) def process(self, db, user, settings, subject=None, repository=None, filter_id=None, defaults=False): if repository is not None and filter_id is not None: raise OperationError("invalid input: both 'repository' and 'filter_id' set") if (subject and subject != user) or defaults: Operation.requireRole(db, "administrator", user) else: subject = user cursor = db.cursor() if filter_id is not None: # Check that the filter exists and that it's one of the user's # filters (or that the user has the administrator role.) cursor.execute("SELECT uid FROM filters WHERE id=%s", (filter_id,)) row = cursor.fetchone() if not row: raise OperationFailure( code="nosuchfilter", title="No such filter!", message=("Maybe the filter has been deleted since you " "loaded this page?")) elif row[0] != subject.id: raise OperationFailure( code="invalidfilter", title="The filter belongs to someone else!", message=("What are you up to?")) saved_settings = [] for setting in settings: item = setting["item"] value = setting.get("value") if dbutils.User.storePreference(db, item, value, subject, repository, filter_id): saved_settings.append(setting["item"]) db.commit() return OperationResult(saved_settings=sorted(saved_settings)) ================================================ FILE: src/operation/searchreview.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import re
import urllib

import configuration
import dbutils
import gitutils

from operation import Operation, OperationResult, OperationFailure

def globToSQLPattern(glob):
    # Translate a shell-style glob into an SQL LIKE pattern.  A glob with no
    # wildcards at all becomes a substring match instead of an exact one.
    pattern = glob.replace("\\", "\\\\").replace("%", "\\%").replace("?", "_").replace("*", "%")
    if "?" in glob or "*" in glob:
        return pattern
    return "%" + pattern + "%"

def pathToSQLRegExp(path):
    # Translate a path glob ("**/", "*", "?") into a POSIX regexp suitable
    # for SQL's "~" operator.  A leading "/" anchors the match at the start.
    pattern = ""
    if path.startswith("/"):
        pattern += "^"
        path = path.lstrip("/")
    escaped = re.sub(r"[(){}\[\].\\+^$]", lambda match: "\\" + match.group(), path)
    replacements = { "**/": "(?:[^/]+/)*",
                     "*": "[^/]*",
                     "?": "." }
    pattern += re.sub("\*\*/|\*|\?", lambda match: replacements[match.group()], escaped)
    return pattern

class Query(object):
    """Accumulator for the tables, join conditions and arguments of the
    final search SQL query.  Sub-queries (see OrFilter) share the parent's
    tables and arguments but collect their own conditions."""

    def __init__(self, parent=None):
        if not parent:
            self.tables = { "reviews": set() }
            self.arguments = []
        else:
            self.tables = parent.tables
            self.arguments = parent.arguments
        self.conditions = []

    def addTable(self, table, *conditions):
        self.tables.setdefault(table, set()).update(conditions)

class Review(object):
    """Minimal search-result record: review id plus summary line."""

    def __init__(self, review_id, summary):
        self.review_id = review_id
        self.summary = summary

    def json(self):
        return { "id": self.review_id,
                 "summary": self.summary }

class InvalidFilter(Exception):
    def __init__(self, title, message):
        self.title = title
        self.message = message

class Filter(object):
    """Base class for search filters.  Sub-classes override check() for
    input validation, contribute() to add SQL conditions, and filter() for
    per-result post-filtering in Python."""

    def __init__(self, db, value):
        self.db = db
        self.value = value
        self.check(db)

    def check(self, db):
        pass

    def filter(self, db, review):
        return True

class SummaryFilter(Filter):
    def contribute(self, query):
        query.conditions.append("reviews.summary LIKE %s")
        query.arguments.append(globToSQLPattern(self.value))

class DescriptionFilter(Filter):
    def contribute(self, query):
        query.conditions.append("reviews.description LIKE %s")
        query.arguments.append(globToSQLPattern(self.value))

class BranchFilter(Filter):
    def contribute(self, query):
        query.addTable("branches", "branches.id=reviews.branch")
        query.conditions.append("branches.name ~ %s")
        query.arguments.append(pathToSQLRegExp(self.value))

class PathFilter(Filter):
    def contribute(self, query):
        query.addTable("reviewfiles", "reviewfiles.review=reviews.id")
        query.addTable("files", "files.id=reviewfiles.file")
        if configuration.database.DRIVER == "postgresql":
            # This is just an optimization; with PostgreSQL we have an index
            # that avoids matching the pattern against most paths.
            static_components = []
            for component in self.value.split("/"):
                if component and not ("*" in component or "?" in component):
                    static_components.append(component)
            if static_components:
                query.conditions.append("%s <@ STRING_TO_ARRAY(path, '/')")
                query.arguments.append(static_components)
        query.conditions.append("files.path ~ %s")
        query.arguments.append(pathToSQLRegExp(self.value))

class UserFilter(Filter):
    def check(self, db):
        # Raises dbutils.NoSuchUser for unknown names; translated to an
        # OperationFailure by the caller.
        self.user = dbutils.User.fromName(db, self.value)

    def contribute(self, query):
        query.addTable("reviewusers", "reviewusers.review=reviews.id")
        query.conditions.append("reviewusers.uid=%s")
        query.arguments.append(self.user.id)

class OwnerFilter(UserFilter):
    def contribute(self, query):
        super(OwnerFilter, self).contribute(query)
        query.conditions.append("reviewusers.owner")

class ReviewerFilter(UserFilter):
    def contribute(self, query):
        query.addTable("reviewfiles", "reviewfiles.review=reviews.id")
        query.addTable("reviewuserfiles", "reviewuserfiles.file=reviewfiles.id")
        query.conditions.append("reviewuserfiles.uid=%s")
        query.arguments.append(self.user.id)

class StateFilter(Filter):
    def check(self, db):
        if self.value not in ("open", "pending", "accepted", "closed", "dropped"):
            raise InvalidFilter(
                title="Invalid review state: %r" % self.value,
                message=("Supported review states are open, pending, accepted, "
                         "closed and dropped."))

    def contribute(self, query):
        # "pending" and "accepted" are both stored as "open"; they are told
        # apart by the Python-side filter() below.
        state = "open" if self.value in ("pending", "accepted") else self.value
        query.conditions.append("reviews.state=%s")
        query.arguments.append(state)

    def filter(self, db, review):
        if self.value == "pending":
            return not dbutils.Review.isAccepted(db, review.review_id)
        elif self.value == "accepted":
            return dbutils.Review.isAccepted(db, review.review_id)
        return True

class RepositoryFilter(Filter):
    def check(self, db):
        cursor = db.cursor()
        cursor.execute("SELECT id FROM repositories WHERE name=%s", (self.value,))
        row = cursor.fetchone()
        if not row:
            raise gitutils.NoSuchRepository(self.value)
        self.repository_id = row[0]

    def contribute(self, query):
        query.addTable("branches", "branches.id=reviews.branch")
        query.conditions.append("branches.repository=%s")
        query.arguments.append(self.repository_id)

class OrFilter(Filter):
    """Combine several filters disjunctively: each sub-filter's conditions
    are AND:ed together, and the groups are OR:ed."""

    def __init__(self, filters):
        self.filters = filters

    def contribute(self, query):
        conditions = []
        for search_filter in self.filters:
            subquery = Query(query)
            search_filter.contribute(subquery)
            conditions.append("(%s)" % " AND ".join(subquery.conditions))
        query.conditions.append("(%s)" % " OR ".join(conditions))

class SearchReview(Operation):
    """Quick-search for reviews.  Parses a free-text query string with
    optional "keyword:value" terms into filters, builds one SQL query from
    them, and post-filters the results in Python where needed."""

    def __init__(self):
        Operation.__init__(self, { "query": str },
                           accept_anonymous_user=True)

    # NOTE(review): the first parameter is named "req" instead of the
    # conventional "self".  Assuming the Operation framework calls process()
    # as a bound method with db/user/query passed by keyword, "req" receives
    # the operation instance and is never used below.  Confirm and rename to
    # "self" for clarity.
    def process(req, db, user, query):
        # Split into terms; quoted strings count as single terms.
        terms = re.findall("""((?:"[^"]*"|'[^']*'|[^ "']+)+)""", query)

        url_terms = []
        filters = []

        for term in terms:
            if re.match("[a-z\-]+:", term):
                # Explicit "keyword:value" term.
                keyword, _, value = term.partition(":")

                url_terms.append(("q" + keyword, value))

                if keyword == "summary":
                    filter_classes = [SummaryFilter]
                elif keyword == "description":
                    filter_classes = [DescriptionFilter]
                elif keyword == "text":
                    filter_classes = [SummaryFilter, DescriptionFilter]
                elif keyword in ("branch", "b"):
                    filter_classes = [BranchFilter]
                elif keyword in ("path", "p"):
                    filter_classes = [PathFilter]
                elif keyword in ("user", "u"):
                    filter_classes = [UserFilter]
                elif keyword in ("owner", "o"):
                    filter_classes = [OwnerFilter]
                elif keyword == "reviewer":
                    filter_classes = [ReviewerFilter]
                elif keyword == "owner-or-reviewer":
                    filter_classes = [OwnerFilter, ReviewerFilter]
                elif keyword in ("state", "s"):
                    filter_classes = [StateFilter]
                elif keyword in ("repository", "repo", "r"):
                    filter_classes = [RepositoryFilter]
                else:
                    raise OperationFailure(
                        code="invalidkeyword",
                        title="Invalid keyword: %r" % keyword,
                        message=("Supported keywords are summary, description, "
                                 "text, branch, path, user, owner and reviewer."))

                # Strip matching surrounding quotes from the value.
                if re.match("([\"']).*\\1$", value):
                    value = value[1:-1]

                try:
                    if len(filter_classes) > 1:
                        keyword_filters = [filter_class(db, value)
                                           for filter_class in filter_classes]
                        filters.append(OrFilter(keyword_filters))
                    else:
                        filters.append(filter_classes[0](db, value))
                except InvalidFilter as error:
                    raise OperationFailure(
                        code="invalidterm",
                        title=error.title,
                        message=error.message)
                except dbutils.NoSuchUser as error:
                    raise OperationFailure(
                        code="invalidterm",
                        title="No such user: %r" % error.name,
                        message=("The search term following %r must be a valid user name."
                                 % (keyword + ":")))
                except gitutils.NoSuchRepository as error:
                    raise OperationFailure(
                        code="invalidterm",
                        title="No such repository: %r" % error.name,
                        message=("The search term following %r must be a valid repository name."
                                 % (keyword + ":")))
            else:
                # Bare term: match it against summary/description, and --
                # heuristically -- against branch names and file paths.
                url_terms.append(("q", term))

                if re.match("([\"']).*\\1$", term):
                    term = term[1:-1]

                auto_filters = []
                auto_filters.append(SummaryFilter(db, term))
                auto_filters.append(DescriptionFilter(db, term))
                if not re.search(r"\s", term):
                    auto_filters.append(BranchFilter(db, term))
                if re.search(r"\w/\w|\w\.\w+$", term):
                    auto_filters.append(PathFilter(db, term))

                filters.append(OrFilter(auto_filters))

        if not filters:
            raise OperationFailure(
                code="nofilters",
                title="No search filter specified",
                message="Your search would find all reviews. Please restrict it a bit.")

        query_params = Query()

        for search_filter in filters:
            search_filter.contribute(query_params)

        query_string = """SELECT DISTINCT reviews.id, reviews.summary
                            FROM %s
                           WHERE %s
                        ORDER BY reviews.id DESC"""

        # Prepend the per-table join conditions to the filter conditions.
        for conditions in query_params.tables.values():
            query_params.conditions[0:0] = conditions

        query = query_string % (", ".join(query_params.tables.keys()),
                                " AND ".join(query_params.conditions))

        cursor = db.cursor()
        cursor.execute(query, query_params.arguments)

        reviews = [Review(review_id, summary) for review_id, summary in cursor]

        # Apply Python-side post-filtering (e.g. pending vs accepted).
        for search_filter in filters:
            reviews = filter(lambda review: search_filter.filter(db, review), reviews)

        return OperationResult(
            reviews=map(Review.json, reviews),
            query_string=urllib.urlencode(url_terms))


================================================
FILE: src/operation/servicemanager.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
from operation import Operation, OperationResult, Optional, OperationError import configuration import textutils import os import socket import signal class RestartService(Operation): def __init__(self): Operation.__init__(self, { "service_name": str }) def process(self, db, user, service_name): Operation.requireRole(db, "administrator", user) if service_name == "wsgi": for pid in os.listdir(configuration.paths.WSGI_PIDFILE_DIR): try: os.kill(int(pid), signal.SIGINT) except: pass return OperationResult() else: connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) connection.connect(configuration.services.SERVICEMANAGER["address"]) connection.send(textutils.json_encode({ "command": "restart", "service": service_name })) connection.shutdown(socket.SHUT_WR) data = "" while True: received = connection.recv(4096) if not received: break data += received result = textutils.json_decode(data) if result["status"] == "ok": return OperationResult() else: raise OperationError(result["error"]) class GetServiceLog(Operation): def __init__(self): Operation.__init__(self, { "service_name": str, "lines": Optional(int) }) def process(self, db, user, service_name, lines=40): logfile_paths = { "manager": configuration.services.SERVICEMANAGER["logfile_path"] } for service in configuration.services.SERVICEMANAGER["services"]: logfile_paths[service["name"]] = service["logfile_path"] logfile_path = logfile_paths.get(service_name) if not logfile_path: raise OperationError("unknown service: %s" % service_name) try: logfile = open(logfile_path) except OSError as error: raise OperationError("failed to open logfile: %s" % error.message) return OperationResult(lines=logfile.read().splitlines()[-lines:]) ================================================ FILE: src/operation/trackedbranch.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the 
# "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

from operation import Operation, OperationResult, OperationFailure, OperationError, Optional

import dbutils
import gitutils
import htmlutils
import configuration

import calendar
import os
import signal

def getTrackedBranchReviewState(db, branch_id):
    """Return the state of the review whose branch is updated by the given
    tracked branch, or None if the tracked branch doesn't feed a review."""
    cursor = db.cursor()
    cursor.execute("""SELECT reviews.state
                        FROM reviews
                        JOIN branches ON (branches.id=reviews.branch)
                        JOIN trackedbranches ON (trackedbranches.repository=branches.repository
                                             AND trackedbranches.local_name=branches.name)
                       WHERE trackedbranches.id=%s""",
                   (branch_id,))
    row = cursor.fetchone()
    return row[0] if row else None

class TrackedBranchLog(Operation):
    """Fetch the update log of a tracked branch, plus its previous/next
    scheduled update times and its repository."""

    def __init__(self):
        Operation.__init__(self, { "branch_id": int })

    def process(self, db, user, branch_id):
        cursor = db.cursor()
        cursor.execute("""SELECT previous, next
                            FROM trackedbranches
                           WHERE id=%s""",
                       (branch_id,))

        # NOTE: 'next' shadows the built-in of the same name; harmless here
        # but worth renaming.
        previous, next = cursor.fetchone()

        # Convert timestamps to UNIX time for the client.
        previous = calendar.timegm(previous.utctimetuple()) if previous else None
        next = calendar.timegm(next.utctimetuple()) if next else None

        cursor.execute("""SELECT time, from_sha1, to_sha1, hook_output, successful
                            FROM trackedbranchlog
                           WHERE branch=%s
                        ORDER BY time ASC""",
                       (branch_id,))

        items = []
        for update_time, from_sha1, to_sha1, hook_output, successful in cursor:
            items.append({ "time": calendar.timegm(update_time.utctimetuple()),
                           "from_sha1": from_sha1,
                           "to_sha1": to_sha1,
                           "hook_output": hook_output,
                           "successful": successful })

        cursor.execute("""SELECT repository
                            FROM trackedbranches
                           WHERE id=%s""",
                       (branch_id,))

        repository = gitutils.Repository.fromId(db, cursor.fetchone()[0])

        return OperationResult(previous=previous,
                               next=next,
                               items=items,
                               repository={ "id": repository.id,
                                            "name": repository.name })

class DisableTrackedBranch(Operation):
    """Stop automatic updates of a tracked branch."""

    def __init__(self):
        Operation.__init__(self, { "branch_id": int })

    def process(self, db, user, branch_id):
        cursor = db.cursor()

        # Only administrators or users registered for the tracked branch
        # may modify it.
        if not user.hasRole(db, "administrator"):
            cursor.execute("""SELECT 1
                                FROM trackedbranchusers
                               WHERE branch=%s
                                 AND uid=%s""",
                           (branch_id, user.id))
            if not cursor.fetchone():
                raise OperationFailure(code="notallowed",
                                       title="Not allowed!",
                                       message="Operation not permitted.")

        cursor.execute("""UPDATE trackedbranches
                             SET disabled=TRUE
                           WHERE id=%s""",
                       (branch_id,))

        db.commit()

        return OperationResult()

class TriggerTrackedBranchUpdate(Operation):
    """Request an immediate update of a tracked branch by clearing its
    scheduled update time and signalling the branch tracker service."""

    def __init__(self):
        Operation.__init__(self, { "branch_id": int })

    def process(self, db, user, branch_id):
        cursor = db.cursor()

        # Same access rule as DisableTrackedBranch.
        if not user.hasRole(db, "administrator"):
            cursor.execute("""SELECT 1
                                FROM trackedbranchusers
                               WHERE branch=%s
                                 AND uid=%s""",
                           (branch_id, user.id))
            if not cursor.fetchone():
                raise OperationFailure(code="notallowed",
                                       title="Not allowed!",
                                       message="Operation not permitted.")

        # A closed/dropped review must be reopened before its branch can
        # receive new commits.
        review_state = getTrackedBranchReviewState(db, branch_id)
        if review_state is not None and review_state != "open":
            raise OperationFailure(code="reviewnotopen",
                                   title="The review is not open!",
                                   message="You need to reopen the review before new commits can be added to it.")

        # next=NULL means "update as soon as possible".
        cursor.execute("""UPDATE trackedbranches
                             SET next=NULL
                           WHERE id=%s""",
                       (branch_id,))

        db.commit()

        # Wake the branch tracker service via SIGHUP to its pid-file pid.
        pid = int(open(configuration.services.BRANCHTRACKER["pidfile_path"]).read().strip())
        os.kill(pid, signal.SIGHUP)

        return OperationResult()

class EnableTrackedBranch(Operation):
    """Re-enable a disabled tracked branch, optionally retargeting it at a
    different remote branch name."""

    def __init__(self):
        Operation.__init__(self, { "branch_id": int,
                                   "new_remote_name": Optional(str) })

    def process(self, db, user, branch_id, new_remote_name=None):
        cursor = db.cursor()

        # Same access rule as DisableTrackedBranch.
        if not user.hasRole(db, "administrator"):
            cursor.execute("""SELECT 1
                                FROM trackedbranchusers
                               WHERE branch=%s
                                 AND uid=%s""",
                           (branch_id, user.id))
            if not cursor.fetchone():
                raise OperationFailure(code="notallowed",
                                       title="Not allowed!",
                                       message="Operation not permitted.")

        review_state = getTrackedBranchReviewState(db, branch_id)
        if review_state is not None and review_state != "open":
            raise OperationFailure(code="reviewnotopen",
                                   title="The review is not open!",
                                   message="You need to reopen the review before new commits can be added to it.")

        if new_remote_name is not None:
            # Verify that the new remote branch actually exists before
            # retargeting the tracking.
            cursor.execute("""SELECT remote
                                FROM trackedbranches
                               WHERE id=%s""",
                           (branch_id,))

            remote = cursor.fetchone()[0]

            if not gitutils.Repository.lsremote(remote, pattern="refs/heads/" + new_remote_name):
                raise OperationFailure(
                    code="refnotfound",
                    title="Remote ref not found!",
                    message=("Could not find the ref <code>%s</code> in the repository <code>%s</code>."
                             % (htmlutils.htmlify("refs/heads/" + new_remote_name),
                                htmlutils.htmlify(remote))),
                    is_html=True)

            cursor.execute("""UPDATE trackedbranches
                                 SET remote_name=%s,
                                     disabled=FALSE,
                                     next=NULL
                               WHERE id=%s""",
                           (new_remote_name, branch_id))
        else:
            cursor.execute("""UPDATE trackedbranches
                                 SET disabled=FALSE,
                                     next=NULL
                               WHERE id=%s""",
                           (branch_id,))

        db.commit()

        # Wake the branch tracker so the re-enabled branch updates promptly.
        pid = int(open(configuration.services.BRANCHTRACKER["pidfile_path"]).read().strip())
        os.kill(pid, signal.SIGHUP)

        return OperationResult()

class DeleteTrackedBranch(Operation):
    """Delete a tracked branch registration entirely."""

    def __init__(self):
        Operation.__init__(self, { "branch_id": int })

    def process(self, db, user, branch_id):
        cursor = db.cursor()

        # Same access rule as DisableTrackedBranch.
        if not user.hasRole(db, "administrator"):
            cursor.execute("""SELECT 1
                                FROM trackedbranchusers
                               WHERE branch=%s
                                 AND uid=%s""",
                           (branch_id, user.id))
            if not cursor.fetchone():
                raise OperationFailure(code="notallowed",
                                       title="Not allowed!",
                                       message="Operation not permitted.")

        cursor.execute("""DELETE FROM trackedbranches
                           WHERE id=%s""",
                       (branch_id,))

        db.commit()

        return OperationResult()

class AddTrackedBranch(Operation):
    """Register a new tracked branch: a local branch that is automatically
    updated from a branch in another (remote) repository."""

    def __init__(self):
        Operation.__init__(self, { "repository_id": int,
                                   "source_location": str,
                                   "source_name": str,
                                   "target_name": str,
                                   "users": [str],
                                   "forced": Optional(bool) })

    def process(self, db, user, repository_id, source_location, source_name,
                target_name, users, forced=None):
        cursor = db.cursor()
        cursor.execute("""SELECT 1
                            FROM trackedbranches
                           WHERE repository=%s
                             AND local_name=%s""",
                       (repository_id, target_name))
        if cursor.fetchone():
            raise OperationError("branch '%s' already tracks another branch" % target_name)

        users = [dbutils.User.fromName(db, username) for username in users]

        if target_name.startswith("r/"):
            # Review branches ("r/...") must already exist, and default to
            # forced (non-fast-forward) updates.
            cursor.execute("""SELECT 1
                                FROM reviews
                                JOIN branches ON (branches.id=reviews.branch)
                               WHERE branches.repository=%s
                                 AND branches.name=%s""",
                           (repository_id, target_name))
            if not cursor.fetchone():
                raise OperationError("non-existing review branch can't track another branch")
            if forced is None:
                forced = True
        elif forced is None:
            forced = False

        # Remotes known to push to us get a long polling delay; others are
        # polled hourly.
        cursor.execute("""SELECT 1
                            FROM knownremotes
                           WHERE url=%s
                             AND pushing""",
                       (source_location,))
        if cursor.fetchone():
            delay = "1 week"
        else:
            delay = "1 hour"

        cursor.execute("""INSERT INTO trackedbranches (repository, local_name,
                                                       remote, remote_name,
                                                       forced, delay)
                               VALUES (%s, %s, %s, %s, %s, INTERVAL %s)
                            RETURNING id""",
                       (repository_id, target_name, source_location,
                        source_name, forced, delay))

        branch_id = cursor.fetchone()[0]

        # NOTE: the loop variable shadows the 'user' parameter (the caller);
        # the parameter is not used again below, so this is harmless, but a
        # different name would be clearer.
        for user in users:
            cursor.execute("""INSERT INTO trackedbranchusers (branch, uid)
                                   VALUES (%s, %s)""",
                           (branch_id, user.id))

        db.commit()

        # Wake the branch tracker so the new branch is fetched promptly.
        pid = int(open(configuration.services.BRANCHTRACKER["pidfile_path"]).read().strip())
        os.kill(pid, signal.SIGHUP)

        return OperationResult(branch_id=branch_id)


================================================
FILE: src/operation/typechecker.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import re

import base
import dbutils

from operation.basictypes import OperationError, OperationFailure

class Optional:
    """Utility class for signaling that a dictionary member is optional."""
    def __init__(self, source):
        self.source = source

class TypeCheckerContext(object):
    """Carries the request/db/user triple plus the current input path (for
    error messages) while a value structure is being checked."""

    def __init__(self, *args):
        self.args = args
        self.req, self.db, self.user = args
        self.repository = None
        self.review = None
        self.__path = ["data"]

    def __str__(self):
        # The dotted/indexed path to the value currently being checked,
        # e.g. "data.settings[0].value".
        return "".join(self.__path)

    def push(self, name):
        self.__path.append(name)

    def pop(self):
        self.__path.pop()

    def clone(self):
        copy = TypeCheckerContext(*self.args)
        copy.copy_from(self)
        copy.__path = self.__path[:]
        return copy

    def copy_from(self, other):
        self.repository = other.repository
        self.review = other.review

class TypeChecker(object):
    """
    Interface for checking operation input type correctness.

    Sub-classes implement the method __call__(value, context) which raises
    an OperationError if the input is incorrect.

    A type checker structure is created using the static make() function.
    """

    @staticmethod
    def make(source):
        """
        Construct a structure of TypeChecker objects.

        The source argument should be a dict object, single-element list
        object, a set object containing strings, or str, int or bool (the
        actual type objects, not a string, integer or boolean value).

        If the source argument is a dict object, per-element type checkers
        are constructed by calling this function on the value of each item
        in the dictionary.  See DictionaryChecker for details.

        If the source argument is a list object, a per-element type checker
        is constructed by calling this function on the value of the single
        element in the list.

        If the source argument is a set object, all elements in it should
        be strings, and the constructed checker verifies that the value is
        a string that is a member of the set.

        Otherwise the constructed checker verifies that the value is of the
        type of the source argument (or, in the case of source=str, that
        the value's type is either str or unicode).
        """
        if isinstance(source, TypeChecker):
            return source
        elif isinstance(source, dict):
            # A one-item dict whose key IS a type (e.g. { str: int })
            # describes a generic mapping rather than fixed members.
            if (len(source) == 1
                and all(key is str for key in source.keys())
                and all(isinstance(value, type) for value in source.values())):
                return GenericDictionaryChecker(source)
            return DictionaryChecker(source)
        elif isinstance(source, list):
            return ArrayChecker(source)
        elif isinstance(source, set):
            if all(type(x) is str for x in source):
                return EnumerationChecker(source)
            return VariantChecker(source)
        elif source is str:
            return StringChecker()
        elif source is int:
            return IntegerChecker()
        elif source is bool:
            return BooleanChecker()
        # A TypeChecker sub-class (not instance) is instantiated here.
        try:
            is_type_checker = issubclass(source, TypeChecker)
        except TypeError:
            pass
        else:
            if is_type_checker:
                return source()
        raise base.ImplementationError("invalid source type: %r" % source)

    def getSuffixedCheckers(self):
        """
        Return a list of (suffix, checker) tuples.

        A suffixed checker allows a parameter to be specified with an
        optional suffix, that (optionally) restricts the acceptable input
        values.  For instance, a checker that supports either an integer id
        or a string name could return a list

          [("id", id_checker), ("name", name_checker)]

        with the effect that the input can be one of

          { "thing": id-or-name }
          { "thing_id": id }
          { "thing_name": name }

        and the resulting parameter name "thing" in either case.

        A checker can also return [("suffix", None)] to signal that a
        suffix is supported, but that the same checker (not a restricted
        one) should be applied regardless.
        """
        return []

class Implicit(object):
    """
    Mix-in class for implicit parameters.

    An implicit parameter is one we don't expect to receive from the client
    at all, but rather already have.  The use-case is for an operation to
    request additional information passed to it, for instance a reference
    to the Request object.
    """
    pass

class Prioritized(object):
    """
    Mix-in class for prioritized parameters.

    A prioritized parameter is one that stores information in the context
    that might be required for the checkers of other parameters, and thus
    need to be processed first.  DictionaryChecker uses this to control the
    order in which it processes dictionary items.
    """
    pass

class Request(TypeChecker, Implicit):
    # Implicit parameter: hands the operation the HTTP request object from
    # the context; no client-supplied value is expected.
    def __call__(self, value, context):
        assert value is None
        return context.req

class BooleanChecker(TypeChecker):
    """
    Type checker for booleans.

    Raises an OperationError if the checked value is not a boolean.
    """
    def __call__(self, value, context):
        if not isinstance(value, bool):
            raise OperationError("invalid input: %s is not a boolean" % context)

class StringChecker(TypeChecker):
    """
    Type checker for strings.

    Raises an OperationError if the checked value is not a string.
    """
    def __call__(self, value, context):
        # basestring covers both str and unicode (Python 2).
        if not isinstance(value, basestring):
            raise OperationError("invalid input: %s is not a string" % context)

class RestrictedString(StringChecker):
    """
    Type checker for restricted strings.

    A restricted string is one that may consist only of certain characters,
    and/or must be of a certain min/max length.

    Raises an OperationFailure if the checked value is not valid.
    """
    def __init__(self, allowed=None, minlength=None, maxlength=None, ui_name=None):
        # allowed: predicate taking a single character, or None for "any".
        self.allowed = allowed
        self.minlength = minlength
        self.maxlength = maxlength
        # ui_name: human-readable parameter name used in error messages.
        self.ui_name = ui_name

    def __call__(self, value, context):
        super(RestrictedString, self).__call__(value, context)
        if self.ui_name:
            ui_name = self.ui_name
        else:
            ui_name = context
        if self.minlength is not None \
                and len(value) < self.minlength:
            raise OperationFailure(
                code="paramtooshort:%s" % context,
                title="Invalid %s" % ui_name,
                message=("invalid input: %s must be at least %d characters long"
                         % (ui_name, self.minlength)))
        if self.maxlength is not None \
                and len(value) > self.maxlength:
            raise OperationFailure(
                code="paramtoolong:%s" % context,
                title="Invalid %s" % ui_name,
                message=("invalid input: %s must be at most %d characters long"
                         % (ui_name, self.maxlength)))
        if self.allowed:
            # Report every distinct disallowed character, sorted for a
            # deterministic message.
            disallowed = [ch for ch in sorted(set(value)) if not self.allowed(ch)]
            if disallowed:
                raise OperationFailure(
                    code="paramcontainsillegalchar:%s" % context,
                    title="Invalid %s" % ui_name,
                    message=("invalid input: %s may not contain the character%s %s"
                             % (ui_name,
                                "s" if len(disallowed) > 1 else "",
                                ", ".join(repr(ch) for ch in disallowed))))

class SHA1(RestrictedString):
    # Hexadecimal string of 4..40 characters (an abbreviated or full SHA-1).
    def __init__(self):
        super(SHA1, self).__init__(minlength=4,
                                   maxlength=40,
                                   allowed=re.compile("[0-9A-Fa-f]$").match)

class IntegerChecker(TypeChecker):
    """
    Type checker for integers.

    Raises an OperationError if the checked value is not an integer.
    """
    def __call__(self, value, context):
        # bool is a sub-class of int, so it must be rejected explicitly.
        if not isinstance(value, int) or isinstance(value, bool):
            raise OperationError("invalid input: %s is not an integer" % context)

class RestrictedInteger(IntegerChecker):
    # Integer restricted to an optional [minvalue, maxvalue] range.
    def __init__(self, minvalue=None, maxvalue=None, ui_name=None):
        self.minvalue = minvalue
        self.maxvalue = maxvalue
        self.ui_name = ui_name

    def __call__(self, value, context):
        super(RestrictedInteger, self).__call__(value, context)
        if self.ui_name:
            ui_name = self.ui_name
        else:
            ui_name = context
        if self.minvalue is not None \
                and value < self.minvalue:
            raise OperationFailure(
                code="valuetoolow:%s" % context,
                title="Invalid %s parameter" % ui_name,
                message=("invalid input: %s must be %d or higher"
                         % (ui_name, self.minvalue)))
        if self.maxvalue is not None \
                and value > self.maxvalue:
            raise OperationFailure(
                code="valuetoohigh:%s" % context,
                title="Invalid %s parameter" % ui_name,
                message=("invalid input: %s must be %d or lower"
                         % (ui_name, self.maxvalue)))

class NonNegativeInteger(RestrictedInteger):
    def __init__(self):
        super(NonNegativeInteger, self).__init__(minvalue=0)

class PositiveInteger(RestrictedInteger):
    def __init__(self):
        super(PositiveInteger, self).__init__(minvalue=1)

def check(checker, value, context):
    """Apply checker and return converted, or original, value."""
    # Checkers may return a converted value (e.g. Request returns the
    # request object); None means "keep the value as-is".
    converted = checker(value, context)
    if converted is None:
        return value
    return converted

class GenericDictionaryChecker(TypeChecker):
    """
    Type checker for generic dictionary objects.

    Simply checks that every key and every value is of the appropriate
    type.
    """
    def __init__(self, source):
        assert len(source) == 1
        self.__key_checker = TypeChecker.make(source.keys()[0])
        self.__value_checker = TypeChecker.make(source.values()[0])

    def __call__(self, value, context):
        if not type(value) is dict:
            raise OperationError("invalid input: %s is not a dictionary" % context)
        for key in value:
            context.push("."
class DictionaryChecker(TypeChecker):
    """
    Type checker for dictionary objects.

    Checks two sets of members: required and optional.  Raises an
    OperationError if the checked value is not a dictionary or if any
    required member is not present in it, or if it contains any unexpected
    members.  Applies per-element checkers on all required members and on
    all present optional members.
    """

    def __init__(self, source):
        self.__implicit = []
        self.__prioritized = []
        self.__required = []
        self.__optional = []
        self.__expected = set()

        for name, source_type in source.items():
            if isinstance(source_type, Optional):
                checker = TypeChecker.make(source_type.source)
                if isinstance(checker, Implicit):
                    raise base.ImplementationError(
                        "implicit parameter cannot be optional: %s" % name)
                self.__optional.append((name, checker))
            else:
                checker = TypeChecker.make(source_type)
                if isinstance(checker, Implicit):
                    self.__implicit.append((name, checker))
                elif isinstance(checker, Prioritized):
                    self.__prioritized.append((name, checker))
                else:
                    self.__required.append((name, checker))
            # A plain member name must not collide with a suffixed form
            # ("thing" vs "thing_id") of another variant member.
            for suffix, _ in checker.getSuffixedCheckers():
                if name.endswith("_" + suffix):
                    raise base.ImplementationError(
                        "invalid parameter name: %s (includes optional suffix)" % name)

    def __call__(self, value, context):
        if not type(value) is dict:
            raise OperationError("invalid input: %s is not a dictionary" % context)

        # Names still present here after processing were never consumed,
        # which is reported as an error at the end.
        specified_names = set(value.keys())

        # Sentinel distinguishing "member absent" from a None value.
        class Missing: pass

        def read_with_suffixes(name, checker):
            try:
                if name in value:
                    specified_names.remove(name)
                    context.push("." + name)
                    return check(checker, value[name], context)
                for suffix, suffixed_checker in checker.getSuffixedCheckers():
                    suffixed_name = "%s_%s" % (name, suffix)
                    if suffixed_name in value:
                        specified_names.remove(suffixed_name)
                        context.push("." + suffixed_name)
                        if suffixed_checker is not None:
                            checker = suffixed_checker
                        # pop(): the suffixed key is replaced by the plain
                        # key in the caller.
                        return check(checker, value.pop(suffixed_name), context)
                context.push("." + name)
                return Missing
            finally:
                # Every path above pushed exactly one context element.
                context.pop()

        for name, checker in self.__implicit:
            context.push("." + name)
            if name in value:
                raise OperationError(
                    "invalid input: %s should not be specified" % context)
            # Implicit members are filled in from the context, never sent
            # by the client.
            value[name] = checker(None, context)
            context.pop()

        def process_members(items, required):
            for name, checker in items:
                converted = read_with_suffixes(name, checker)
                if not converted is Missing:
                    value[name] = converted
                elif required:
                    context.push("." + name)
                    raise OperationError("invalid input: %s missing" % context)

        # Prioritized members may store context state needed by the rest.
        process_members(self.__prioritized, True)
        process_members(self.__required, True)
        process_members(self.__optional, False)

        if specified_names:
            context.push("." + specified_names.pop())
            raise OperationError("invalid input: %s was not used" % context)

class ArrayChecker(TypeChecker):
    """
    Type checker for arrays.

    Raises an OperationError if the checked value is not an array.  Applies
    the per-element checker on each element in the array.
    """

    def __init__(self, source):
        if len(source) != 1:
            raise base.ImplementationError("invalid source type")
        self.__checker = TypeChecker.make(source[0])

    def __call__(self, value, context):
        if not type(value) is list:
            raise OperationError("%s is not a list" % context)
        for index, item in enumerate(value):
            context.push("[%d]" % index)
            # Elements are converted in place.
            value[index] = check(self.__checker, item, context)
            context.pop()
""" def __init__(self, source): self.__checkers = [] self.__suffixed_checkers = [] if isinstance(source, dict): for suffix, item in source.items(): checker = TypeChecker.make(item) self.__checkers.append(checker) self.__suffixed_checkers.append((suffix, checker)) else: self.__checkers.extend(TypeChecker.make(item) for item in source) def __call__(self, value, context): for checker in self.__checkers: try: variant_context = context.clone() value = checker(value, variant_context) context.copy_from(variant_context) return value except (OperationError, OperationFailure): pass raise OperationError("%s is of invalid type" % context) def getSuffixedCheckers(self): return self.__suffixed_checkers class EnumerationChecker(TypeChecker): """ Type checker for enumerations. Raises an OperationError if the checked value is not a string or if the string value is not a member of the enumeration. """ def __init__(self, source): self.__checker = TypeChecker.make(str) for item in source: if not type(item) is str: raise base.ImplementationError("invalid source type") self.__enumeration = source def __call__(self, value, context): self.__checker(value, context) if value not in self.__enumeration: raise OperationError("invalid input: %s is not valid" % context) class Review(PositiveInteger, Prioritized): def __call__(self, value, context): super(Review, self).__call__(value, context) context.review = dbutils.Review.fromId(context.db, value) context.repository = context.review.repository return context.review def getSuffixedCheckers(self): return [("id", None)] class RepositoryId(PositiveInteger): def __call__(self, value, context): import gitutils super(RepositoryId, self).__call__(value, context) context.repository = gitutils.Repository.fromId(context.db, value) return context.repository class RepositoryName(StringChecker): def __call__(self, value, context): import gitutils super(RepositoryName, self).__call__(value, context) context.repository = 
class Repository(VariantChecker, Prioritized):
    """Repository given either by id ("..._id") or name ("..._name").

    Prioritized: RepositoryId/RepositoryName store the repository in the
    context, which Commit checkers below depend on."""
    def __init__(self):
        super(Repository, self).__init__({ "id": RepositoryId,
                                           "name": RepositoryName })

class CommitId(PositiveInteger):
    """Converts a commit id into a gitutils.Commit.

    Requires a repository to have been stored in the context first."""
    def __call__(self, value, context):
        import gitutils
        if context.repository is None:
            raise OperationError("missing repository in context")
        super(CommitId, self).__call__(value, context)
        return gitutils.Commit.fromId(context.db, context.repository, value)

class CommitSHA1(SHA1):
    """Converts a commit SHA-1 into a gitutils.Commit.

    Requires a repository to have been stored in the context first."""
    def __call__(self, value, context):
        import gitutils
        if context.repository is None:
            raise OperationError("missing repository in context")
        super(CommitSHA1, self).__call__(value, context)
        return gitutils.Commit.fromSHA1(context.db, context.repository, value)

class Commit(VariantChecker):
    """Commit given either by id ("..._id") or SHA-1 ("..._sha1")."""
    def __init__(self):
        super(Commit, self).__init__({ "id": CommitId,
                                       "sha1": CommitSHA1 })

    def __call__(self, value, context):
        # Check for the repository up front to fail with a clear error
        # instead of each variant failing individually.
        if context.repository is None:
            raise OperationError("missing repository in context")
        return super(Commit, self).__call__(value, context)

class FileId(PositiveInteger):
    """Converts a file id into a dbutils.File object."""
    def __call__(self, value, context):
        super(FileId, self).__call__(value, context)
        return dbutils.File.fromId(context.db, value)

class FilePath(StringChecker):
    """Converts a path into a dbutils.File object (no insertion)."""
    def __call__(self, value, context):
        super(FilePath, self).__call__(value, context)
        return dbutils.File.fromPath(context.db, value, insert=False)

class File(VariantChecker):
    """File given either by id ("..._id") or path ("..._path")."""
    def __init__(self):
        super(File, self).__init__({ "id": FileId,
                                     "path": FilePath })

class UserId(PositiveInteger):
    """Converts a user id into a dbutils.User object."""
    def __call__(self, value, context):
        super(UserId, self).__call__(value, context)
        return dbutils.User.fromId(context.db, value)

class UserName(StringChecker):
    """Converts a user name into a dbutils.User object."""
    def __call__(self, value, context):
        super(UserName, self).__call__(value, context)
        return dbutils.User.fromName(context.db, value)

class User(VariantChecker):
    """User given either by id ("..._id") or name ("..._name")."""
    def __init__(self):
        super(User, self).__init__({ "id": UserId,
                                     "name": UserName })
class ExtensionId(PositiveInteger):
    """Converts an extension id into an extensions Extension object."""
    def __call__(self, value, context):
        from extensions.extension import Extension
        super(ExtensionId, self).__call__(value, context)
        return Extension.fromId(context.db, value)

class ExtensionKey(StringChecker):
    """Converts an "author/name" key into an extensions Extension object."""
    def __call__(self, value, context):
        from extensions.extension import Extension
        super(ExtensionKey, self).__call__(value, context)
        # rpartition: the author part may be empty for system extensions
        # with no "/" in the key.
        author_name, _, extension_name = value.rpartition("/")
        return Extension(author_name, extension_name)

class Extension(VariantChecker):
    """Extension given either by id ("..._id") or key ("..._key").

    Note: this checker intentionally shares its name with
    extensions.extension.Extension; the checkers above import that class
    locally inside __call__ so no clash occurs."""
    def __init__(self):
        super(Extension, self).__init__({ "id": ExtensionId,
                                          "key": ExtensionKey })


================================================
FILE: src/operation/typechecker_unittest.py
================================================
import copy
import json
def basic():
    """Self-contained unit test of the operation.typechecker module.

    Exercises TypeChecker.make() and every checker class with values that
    must be accepted and values that must be rejected with exact error
    codes/titles/messages.  Prints "basic: ok" on success."""
    import htmlutils

    from operation.basictypes import OperationError, OperationFailure
    from operation.typechecker import (
        Optional, TypeChecker, TypeCheckerContext, BooleanChecker,
        StringChecker, RestrictedString, SHA1, IntegerChecker,
        RestrictedInteger, PositiveInteger, NonNegativeInteger, ArrayChecker,
        EnumerationChecker, VariantChecker, DictionaryChecker)

    # Check TypeChecker.make()'s handling of basic types.
    assert type(TypeChecker.make(bool)) is BooleanChecker
    assert type(TypeChecker.make(str)) is StringChecker
    assert type(TypeChecker.make(int)) is IntegerChecker
    assert type(TypeChecker.make([bool])) is ArrayChecker
    assert type(TypeChecker.make(set(["foo", "bar"]))) is EnumerationChecker
    assert type(TypeChecker.make(set([bool, str, int]))) is VariantChecker
    assert type(TypeChecker.make({ "foo": bool })) is DictionaryChecker

    # Check TypeChecker.make()'s handling of TypeChecker sub-classes and
    # instances thereof.
    assert isinstance(TypeChecker.make(BooleanChecker), BooleanChecker)
    boolean_checker = BooleanChecker()
    assert TypeChecker.make(boolean_checker) is boolean_checker

    # Helper: run 'checker' over each value, returning converted values
    # (or the originals when the checker only validates).
    def check(checker, *values):
        checker = TypeChecker.make(checker)
        results = []
        for value in values:
            converted = checker(value, TypeCheckerContext(None, None, None))
            results.append(value if converted is None else converted)
        return results

    def should_match(checker, *values, **kwargs):
        results = check(checker, *values)
        if "result" in kwargs:
            expected_result = kwargs["result"]
            for result in results:
                assert result == expected_result, \
                    "%r != %r" % (result, expected_result)

    # Helper: each value must be rejected; 'expected' pins attributes of
    # the resulting JSON error (a str value means "exactly this", a tuple
    # means "one of these").
    def should_not_match(checker, *values, **expected):
        for value in values:
            try:
                check(checker, copy.deepcopy(value))
            except (OperationError, OperationFailure) as error:
                error = json.loads(str(error))
                for key, value in expected.items():
                    if isinstance(value, str):
                        value = set([value])
                    assert error.get(key) in value, \
                        ("%s: %r not among %r" % (key, error.get(key), value))
            else:
                assert False, "checker allowed value incorrectly: %r" % value

    # Check some simple things that should be accepted.
    should_match(bool, True, False)
    should_match(str, "", "foo")
    should_match(int, -2**31, -1, 0, 1, 2**31)
    should_match([bool], [], [True, False])
    should_match([str], ["", "foo"])
    should_match([int], [-2**31, -1, 0, 1, 2**31])
    should_match(set(["foo", "bar"]), "foo", "bar")
    should_match(set([bool, str, int]),
                 True, False, "", "foo", -2**31, -1, 0, 1, 2**31)

    # Check some equally simple things that shouldn't be accepted.
    should_not_match(bool, 10, "foo",
                     error="invalid input: data is not a boolean")
    should_not_match(str, True, 10,
                     error="invalid input: data is not a string")
    should_not_match(int, True, "foo", 0.5,
                     error="invalid input: data is not an integer")
    should_not_match([bool], [True, 10], [False, "foo"],
                     error="invalid input: data[1] is not a boolean")
    should_not_match([str], ["", True], ["foo", 10],
                     error="invalid input: data[1] is not a string")
    should_not_match([int], [0, True], [10, "foo"],
                     error="invalid input: data[1] is not an integer")
    should_not_match(set(["foo", "bar"]), "fie",
                     error="invalid input: data is not valid")
    should_not_match(set(["foo", "bar"]), True, 10,
                     error="invalid input: data is not a string")
    should_not_match(set([bool, str, int]), [True], ["foo"], [10],
                     error="data is of invalid type")

    # Check some dictionary checkers.
    should_match({ "b": bool, "s": str, "i": int },
                 { "b": True, "s": "foo", "i": 10 })
    should_match({ "req": bool, "opt": Optional(bool) },
                 { "req": True, "opt": False },
                 { "req": False })
    should_not_match({ "b": bool },
                     { "b": "foo" }, { "b": 10 },
                     error="invalid input: data.b is not a boolean")
    should_not_match({ "b": bool },
                     { "i": 10 },
                     error="invalid input: data.b missing")
    should_not_match({ "b": bool },
                     { "b": True, "i": 10 },
                     error="invalid input: data.i was not used")
    should_not_match({ "b": Optional(bool) },
                     { "b": "foo" }, { "b": 10 },
                     error="invalid input: data.b is not a boolean")

    # Check suffixed variant checker in dictionary.
    id_or_name = VariantChecker({ "id": int, "name": str })

    should_match({ "thing": id_or_name },
                 { "thing": 10 }, { "thing_id": 10 },
                 result={ "thing": 10 })
    should_match({ "thing": id_or_name },
                 { "thing": "foo" }, { "thing_name": "foo" },
                 result={ "thing": "foo" })
    should_not_match({ "thing": id_or_name },
                     { "thing_id": "foo" },
                     error="invalid input: data.thing_id is not an integer")
    should_not_match({ "thing": id_or_name },
                     { "thing_name": 10 },
                     error="invalid input: data.thing_name is not a string")
    should_not_match({ "thing": id_or_name },
                     { "thing_id": 10, "thing_name": "foo" },
                     error=("invalid input: data.thing_id was not used",
                            "invalid input: data.thing_name was not used"))

    # Check some RestrictedString types.
    should_match(RestrictedString, "", "foo")
    should_match(RestrictedString(minlength=0), "", "foo")
    should_match(RestrictedString(minlength=3), "foo")
    should_match(RestrictedString(maxlength=0), "")
    should_match(RestrictedString(maxlength=3), "", "foo")
    should_match(RestrictedString(minlength=0, maxlength=3), "", "foo")
    should_match(RestrictedString(allowed=lambda c: False), "")
    should_match(RestrictedString(allowed=lambda c: True), "", "foo")
    should_match(RestrictedString(allowed=lambda c: c in "foo"), "", "foo")
    should_not_match(RestrictedString(), True, 10,
                     error="invalid input: data is not a string")
    should_not_match(
        RestrictedString(minlength=1), "",
        code="paramtooshort:data",
        title="Invalid data",
        message="invalid input: data must be at least 1 characters long")
    should_not_match(
        RestrictedString(maxlength=2), "foo",
        code="paramtoolong:data",
        title="Invalid data",
        message="invalid input: data must be at most 2 characters long")
    should_not_match(
        RestrictedString(allowed=lambda c: False), "foo",
        code="paramcontainsillegalchar:data",
        title="Invalid data",
        message="invalid input: data may not contain the characters 'f', 'o'")
    should_not_match(
        RestrictedString(allowed=lambda c: False, ui_name="gazonk"), "foo",
        code="paramcontainsillegalchar:data",
        title="Invalid gazonk",
        message="invalid input: gazonk may not contain the characters 'f', 'o'")

    # Check SHA1.
    sha1 = "0123456789abcdefABCDEF0123456789abcdefAB"
    should_match(SHA1, *[sha1[:length] for length in range(4, 41)])
    should_not_match(SHA1, True, 10,
                     error="invalid input: data is not a string")
    for ch in range(0, 256):
        ch = chr(ch)
        if ch in sha1:
            continue
        should_not_match(
            SHA1, "012" + ch,
            message=htmlutils.htmlify(
                "invalid input: data may not contain the character %r" % ch))
    should_not_match(
        SHA1, "012",
        message="invalid input: data must be at least 4 characters long")
    should_not_match(
        SHA1, "0" * 41,
        message="invalid input: data must be at most 40 characters long")

    # Check some RestrictedInteger types.
    should_match(RestrictedInteger, -2**31, -1, 0, 1, 2**31)
    should_match(RestrictedInteger(minvalue=-2**31), -2**31, -1, 0, 1, 2**31)
    should_match(RestrictedInteger(minvalue=0), 0, 1, 2**31)
    should_match(RestrictedInteger(maxvalue=0), -2**31, -1, 0)
    should_match(RestrictedInteger(maxvalue=2**31), -2**31, -1, 0, 1, 2**31)
    should_match(RestrictedInteger(minvalue=0, maxvalue=0), 0)
    should_not_match(RestrictedInteger(), True, "foo",
                     error="invalid input: data is not an integer")
    should_not_match(RestrictedInteger(minvalue=0), -2**31, -1,
                     code="valuetoolow:data",
                     title="Invalid data parameter",
                     message="invalid input: data must be 0 or higher")
    should_not_match(RestrictedInteger(maxvalue=0), 1, 2**31,
                     code="valuetoohigh:data",
                     title="Invalid data parameter",
                     message="invalid input: data must be 0 or lower")
    should_not_match(RestrictedInteger(minvalue=1, ui_name="gazonk"), 0,
                     code="valuetoolow:data",
                     title="Invalid gazonk parameter",
                     message="invalid input: gazonk must be 1 or higher")

    # Check NonNegativeInteger.
    should_match(NonNegativeInteger, 0, 1, 2**31)
    should_not_match(NonNegativeInteger, True, "foo",
                     error="invalid input: data is not an integer")
    should_not_match(NonNegativeInteger, -2**31, -1,
                     code="valuetoolow:data",
                     title="Invalid data parameter",
                     message="invalid input: data must be 0 or higher")

    # Check PositiveInteger.
    should_match(PositiveInteger, 1, 2**31)
    should_not_match(PositiveInteger, True, "foo",
                     error="invalid input: data is not an integer")
    should_not_match(PositiveInteger, -2**31, -1, 0,
                     code="valuetoolow:data",
                     title="Invalid data parameter",
                     message="invalid input: data must be 1 or higher")

    print "basic: ok"


================================================
FILE: src/operation/unittest.py
================================================
def independence():
    # Simply check that operation can be imported.
    import operation

    print "independence: ok"


================================================
FILE: src/operation/usersession.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
from operation import Operation, OperationResult, OperationFailure, Request

import dbutils
import configuration
import auth

class ValidateLogin(Operation):
    """Operation that signs the (currently anonymous) user in.

    Accepts the request object plus a free-form string-to-string 'fields'
    dictionary, forwarded to the configured authentication database."""

    def __init__(self):
        Operation.__init__(self, { "req": Request,
                                   "fields": { str: str }},
                           accept_anonymous_user=True)

    def process(self, db, user, req, fields):
        # Already signed in: nothing to do.
        if not user.isAnonymous():
            return OperationResult()

        try:
            auth.DATABASE.authenticate(db, fields)
        except auth.AuthenticationFailed as error:
            return OperationResult(message=error.message)
        except auth.WrongPassword:
            return OperationResult(message="Wrong password!")

        auth.createSessionId(db, req, db.user, db.authentication_labels)

        return OperationResult()

    def sanitize(self, value):
        """Return a copy of the operation input with sensitive fields
        masked, for logging.

        Bug fix: value.copy() is a shallow copy, so value["fields"] and
        sanitized["fields"] used to be the same dict, and masking below
        clobbered the caller's original field values.  Copy the nested
        dict before modifying it."""
        sanitized = value.copy()
        sanitized["fields"] = value["fields"].copy()
        for field in auth.DATABASE.getFields():
            # field[0] appears to be the "hidden/password" flag and
            # field[1] the field name -- assumption based on usage here;
            # confirm against auth.DATABASE.getFields().
            if field[0]:
                sanitized["fields"][field[1]] = "****"
        return sanitized

class EndSession(Operation):
    """Operation that signs the current user out."""

    def __init__(self):
        Operation.__init__(self, { "req": Request })

    def process(self, db, user, req):
        if not auth.deleteSessionId(db, req, user):
            raise OperationFailure(
                code="notsignedout",
                title="Not signed out",
                message="You were not signed out.")

        # Anonymous access disabled: redirect to the front page (which
        # will in turn demand a new sign-in).
        if not configuration.base.ALLOW_ANONYMOUS_USER:
            return OperationResult(target_url="/")

        return OperationResult()
import functools

import request
import htmlutils
import page.utils
import page.parameters

class Page(object):
    """A URL-addressable page: a name, a parameter specification, and a
    Handler sub-class that renders it."""

    def __init__(self, name, parameters, handler):
        self.name = name
        self.parameters = parameters
        self.handler = handler

    def __call__(self, req, db, user):
        # Resolve each declared URL parameter, then instantiate the
        # handler with the resolved values and render the page.
        parameters = {}
        for name, checker in self.parameters.items():
            if isinstance(checker, page.parameters.Optional):
                default = None
                checker = checker.actual
            else:
                # No default: req.getParameter() raises when missing.
                default = request.NoDefault
            is_list = isinstance(checker, page.parameters.ListOf)
            if is_list:
                checker = checker.actual
            if issubclass(checker, page.parameters.Stateful):
                # Stateful checkers are instantiated per request.
                checker = checker(req, db, user)
            try:
                value = req.getParameter(name, default,
                                         str if is_list else checker)
            except page.parameters.InvalidParameterValue as error:
                # Re-fetch the raw value (unchecked) for the error report.
                raise request.InvalidParameterValue(
                    name, req.getParameter(name), error.expected)
            if value is not None:
                if is_list:
                    # Comma-separated list: check each item individually.
                    values = []
                    for item in value.split(","):
                        values.append(checker(item))
                    value = values
                parameters[name] = value
        return self.handler(**parameters).generate(self, req, db, user)

class Handler(object):
    """Base class for page handlers; sub-classes override generateHeader/
    generateContent/generateFooter to produce the page body."""

    def __init__(self, review=None):
        self.review = review

    def setup(self, page, req, db, user):
        self.page = page
        self.request = req
        self.db = db
        self.user = user

    def generate(self, page, req, db, user):
        # Template method: build the document skeleton, then delegate to
        # the overridable hooks in order.
        self.setup(page, req, db, user)
        self.document = htmlutils.Document(req)
        self.html = self.document.html()
        self.head = self.html.head()
        self.body = self.html.body()
        self._generateHeader()
        self.generateContent()
        self._generateFooter()
        return self.document

    def _generateHeader(self):
        page.utils.generateHeader(self.body, self.db, self.user,
                                  current_page=self.page.name,
                                  generate_right=self.getGenerateHeaderRight(),
                                  extra_links=self.getExtraLinks())
        self.generateHeader()

    def generateHeader(self):
        pass

    def generateContent(self):
        # Default placeholder content; sub-classes override.
        self.body.div("message").h1("center").text("Not implemented!")

    def _generateFooter(self):
        self.generateFooter()
        page.utils.generateFooter(self.body, self.db, self.user,
                                  current_page=self.page.name)

    def generateFooter(self):
        pass

    def getGenerateHeaderRight(self):
        # Returns None implicitly when there is no review context.
        if self.review:
            import reviewing.utils
            return functools.partial(reviewing.utils.renderDraftItems,
                                     self.db, self.user, self.review)

    def getExtraLinks(self):
        if self.review:
            return [("r/%d" % self.review.id, "Back to Review")]
        else:
            return []


================================================
FILE: src/page/addrepository.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import page.utils
import configuration
import htmlutils
import dbutils

def renderNewRepository(req, db, user):
    """Render the "New Repository" form page.

    Only users with the 'repositories' role may access it; the actual
    creation is performed by a separate operation triggered from the page's
    script (resource/newrepository.js)."""
    if not user.hasRole(db, "repositories"):
        raise page.utils.DisplayMessage(
            title="Not allowed!",
            body="Only users with the 'repositories' role can add new repositories.")

    document = htmlutils.Document(req)

    html = document.html()
    head = html.head()
    body = html.body()

    page.utils.generateHeader(body, db, user)

    document.addExternalStylesheet("resource/newrepository.css")
    document.addExternalScript("resource/newrepository.js")

    target = body.div("main")

    basic = target.table("paleyellow")
    basic.col(width='20%')
    basic.col(width='0')
    basic.col(width='40%')
    basic.col(width='40%')

    # NOTE: 'h1' (like 'head' above) is assigned for its side effect of
    # emitting the element; the binding itself is unused.
    h1 = basic.tr().td('h1', colspan=4).h1().text("New Repository")

    # Each input row is followed by a "help" row describing it.
    row = basic.tr("name")
    row.td("heading").text("Short name:")
    row.td("prefix").text()
    row.td("value").input(name="name")
    row.td("suffix").text()

    row = basic.tr("help")
    row.td(colspan=4).text("Repository short name.")

    row = basic.tr("path")
    row.td("heading").text("Path:")
    row.td("prefix").text("%s:%s/" % (configuration.base.HOSTNAME,
                                      configuration.paths.GIT_DIR))
    row.td("value").input(name="path")
    row.td("suffix").text(".git")

    row = basic.tr("help")
    row.td(colspan=4).text("Path of repository on the Critic server.")

    row = basic.tr("remote")
    row.td("heading").text("Source repository:")
    row.td("prefix").text()
    row.td("value").input(name="remote")
    row.td("suffix").text("(optional)")

    row = basic.tr("help")
    row.td(colspan=4).text("Git URL of repository to mirror.")

    row = basic.tr("branch")
    row.td("heading").text("Source branch:")
    row.td("prefix").text()
    # Disabled until a source repository is entered (handled by the page
    # script).
    row.td("value").input(name="branch", value="master", disabled="disabled")
    row.td("suffix").text()

    row = basic.tr("help")
    row.td(colspan=4).text("This branch in the source repository is automatically mirrored in Critic's repository.")

    row = basic.tr("buttons")
    row.td(colspan=4).button("add").text("Add Repository")

    return document
================================================
FILE: src/page/basic.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

def generateHeader(target, generate_right=generateEmpty):
    """Emit the basic "OperaCritic" page banner into 'target'.

    'generate_right' is called with the right-hand table cell to fill in
    page-specific content.
    NOTE(review): the default 'generateEmpty' is not defined in the visible
    portion of this file -- confirm it exists at module level before this
    definition is evaluated."""
    row = target.table(width='100%', style='border-bottom: 3px solid black').tr()
    b = row.td(valign='bottom', align='left').b(style='font-family: sans-serif')
    b.b(style='font-size: 40px; color: #d32226; cursor: pointer',
        onclick="location.href='/';").text("Opera")
    b.b(style='font-size: 50px; color: #666666; cursor: pointer',
        onclick="location.href='/';").text("Critic")
    generate_right(row.td(valign='bottom', align='right',
                          style='padding-bottom: 10px'))


================================================
FILE: src/page/branches.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import itertools

import dbutils
import gitutils
import log.html
import htmlutils
import page.utils

def renderBranches(req, db, user):
    """Render the paginated list of normal branches in a repository.

    URL parameters: 'repository' (falls back to the user's default),
    'offset' and 'count' for pagination."""
    offset = req.getParameter("offset", 0, filter=int)
    count = req.getParameter("count", 50, filter=int)

    document = htmlutils.Document(req)

    html = document.html()
    head = html.head()
    body = html.body()

    page.utils.generateHeader(body, db, user, current_page="branches")

    document.addExternalScript("resource/branches.js")
    document.addExternalStylesheet("resource/branches.css")

    cursor = db.cursor()

    repository = req.getParameter("repository", None,
                                  gitutils.Repository.FromParameter(db))

    if not repository:
        repository = user.getDefaultRepository(db)

    if repository:
        title = "Branches in %s" % repository.name
        selected = repository.name
    else:
        title = "Branches"
        selected = None

    document.setTitle(title)

    table = body.div("main").table("paleyellow branches", align="center",
                                   cellspacing="0")
    row = table.tr("title")
    row.td("h1", colspan=2).h1().text(title)

    page.utils.generateRepositorySelect(
        db, user, row.td("repositories", colspan=2), selected=selected)

    if repository:
        document.addInternalScript(repository.getJS())

        # Inner query: page of most recent normal branches (with base
        # branch name and owning review, if any).  Outer query: per-branch
        # commit count via the 'reachable' table.
        cursor.execute("""SELECT branches.id, branches.name, branches.base,
                                 branches.review, branches.commit_time,
                                 COUNT(reachable.branch)
                            FROM (SELECT branches.id AS id,
                                         branches.name AS name,
                                         bases.name AS base,
                                         reviews.id AS review,
                                         commits.commit_time AS commit_time
                                    FROM branches
                                    JOIN commits ON (commits.id=branches.head)
                         LEFT OUTER JOIN reviews ON (reviews.origin=branches.id)
                         LEFT OUTER JOIN branches AS bases ON (branches.base=bases.id)
                                   WHERE branches.type='normal'
                                     AND branches.repository=%s
                                ORDER BY commits.commit_time DESC
                                   LIMIT %s
                                  OFFSET %s) AS branches
                 LEFT OUTER JOIN reachable ON (reachable.branch=branches.id)
                        GROUP BY branches.id, branches.name, branches.base,
                                 branches.review, branches.commit_time
                        ORDER BY branches.commit_time DESC""",
                       (repository.id, count, offset))

        branches_found = False

        # Note: the loop variable 'count' (commit count per branch)
        # shadows the pagination parameter above; harmless since the query
        # has already been executed, but mind it when editing.
        for branch_id, branch_name, base_name, review_id, commit_time, count in cursor:
            if not branches_found:
                # Emit the headings row lazily, only once there is at
                # least one branch.
                row = table.tr("headings")
                row.td("name").text("Name")
                row.td("base").text("Base")
                row.td("commits").text("Commits")
                row.td("when").text("When")
                branches_found = True

            row = table.tr("branch")

            def link_to_branch(target, repository, name):
                url = htmlutils.URL("/log", branch=name,
                                    repository=repository.id)
                target.a(href=url).text(name)

            td_name = row.td("name")
            link_to_branch(td_name, repository, branch_name)

            if review_id is not None:
                span = td_name.span("review").preformatted()
                span.a(href="r/%d" % review_id).text("r/%d" % review_id)
            elif base_name:
                # Unreviewed branch with a base: offer a "check" link.
                url = htmlutils.URL("/checkbranch", repository=repository.id,
                                    commit=branch_name, upstream=base_name,
                                    fetch="no")
                span = td_name.span("check").preformatted().a(href=url).text("check")

            td_base = row.td("base")
            if base_name:
                link_to_branch(td_base, repository, base_name)

            row.td("commits").text(count)

            log.html.renderWhen(row.td("when"), commit_time.timetuple())

        if not branches_found:
            row = table.tr("nothing")
            row.td("nothing", colspan=4).text("No branches")
    else:
        row = table.tr("nothing")
        row.td("nothing", colspan=4).text("No repository selected")

    return document


================================================
FILE: src/page/checkbranch.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import dbutils
import htmlutils
import page.utils
import gitutils
import re
import log.commitset
import request

def addNote(req, db, user):
    """Store (replacing any previous) a manual review-status note for a
    commit on the checkbranch page.

    The note body is the request body; 'repository', 'branch', 'upstream',
    'sha1' and optionally 'review' come as query parameters.  Returns "ok",
    or "rebase" when the noted review's branch appears to be a rebased
    version of the checked commit (i.e. the commit is an ancestor of the
    review branch's single filtered tail)."""
    repository_id = req.getParameter("repository", filter=int)
    branch = req.getParameter("branch")
    upstream = req.getParameter("upstream")
    sha1 = req.getParameter("sha1")
    review_id = req.getParameter("review", None)

    text = req.read().strip()

    if review_id is not None:
        review = dbutils.Review.fromId(db, review_id)
    else:
        review = None

    cursor = db.cursor()
    # Replace any existing note for this (repository, branch, upstream, sha1)
    # combination: delete then insert.
    cursor.execute("DELETE FROM checkbranchnotes WHERE repository=%s AND branch=%s AND upstream=%s AND sha1=%s",
                   (repository_id, branch, upstream, sha1))
    cursor.execute("INSERT INTO checkbranchnotes (repository, branch, upstream, sha1, uid, review, text) VALUES (%s, %s, %s, %s, %s, %s, %s)",
                   (repository_id, branch, upstream, sha1, user.id, review_id, text or None))
    db.commit()

    response = "ok"

    if review and review.repository.id == repository_id:
        repository = gitutils.Repository.fromId(db, repository_id)
        commit = gitutils.Commit.fromSHA1(db, repository, sha1)
        commitset = log.commitset.CommitSet(review.branch.getCommits(db))
        upstreams = commitset.getFilteredTails(repository)
        if len(upstreams) == 1:
            upstream = upstreams.pop()
            # If the noted commit is an ancestor of the review branch's tail,
            # the review likely contains a rebased version of it.
            if repository.mergebase([commit.sha1, upstream]) == upstream:
                response = "rebase"

    return response

def deleteNote(req, db, user):
    """Delete a manual review-status note identified by the 'repository',
    'branch', 'upstream' and 'sha1' query parameters.  Returns "ok"."""
    repository_id = req.getParameter("repository", filter=int)
    branch = req.getParameter("branch")
    upstream = req.getParameter("upstream")
    sha1 = req.getParameter("sha1")

    cursor = db.cursor()
    cursor.execute("DELETE FROM checkbranchnotes WHERE repository=%s AND branch=%s AND upstream=%s AND sha1=%s",
                   (repository_id, branch, upstream, sha1))
    db.commit()

    return "ok"

def renderCheckBranch(req, db, user):
    """Render the /checkbranch (HTML) or /checkbranchtext (plain text) page.

    Without a 'repository' parameter (HTML mode only) a form for selecting
    repository/branch/upstream is rendered.  With one, the commits on the
    named branch that are not on the upstream branch (or, if the branch was
    already merged, the commits the merge brought in) are listed, each
    annotated with its review status: found automatically via Critic's
    reviews, set manually via a checkbranch note, or unknown."""
    mode = "html" if req.path == "checkbranch" else "text"

    # In text mode a repository is mandatory; in HTML mode its absence means
    # "render the input form" below.
    repository_arg = req.getParameter("repository", None if mode == "html" else request.NoDefault)

    cursor = db.cursor()

    header_right = []

    if mode == "html":
        document = htmlutils.Document(req)

        html = document.html()
        head = html.head()
        body = html.body()

        def generateRight(target):
            # Capture the header's button container for later population.
            header_right.append(target.div("buttons"))

        page.utils.generateHeader(body, db, user, generate_right=generateRight)

        document.addExternalStylesheet("resource/checkbranch.css")
        document.addExternalScript("resource/checkbranch.js")
        document.addInternalScript(user.getJS())

        target = body.div("main")
    else:
        result = ""

    if repository_arg is not None:
        repository = gitutils.Repository.fromParameter(db, repository_arg)
        # 'commit' is mutated (revparsed) below; keep the user-supplied form
        # in branch_arg for display and note lookups.
        branch_arg = commit = req.getParameter("commit")
        fetch = req.getParameter("fetch", "no") == "yes"
        upstream_arg = req.getParameter("upstream", "master")

        if mode == "html":
            header_right[0].a("button", href="tutorial?item=checkbranch").text("Help")
            header_right[0].a("button", href="checkbranchtext?repository=%s&commit=%s&upstream=%s" % (repository_arg, branch_arg, upstream_arg)).text("Get Textual Log")
            header_right[0].span("buttonscope buttonscope-global")

            # NOTE(review): addInternalScript() is called on the <div> here,
            # not on the document — presumably supported by htmlutils;
            # confirm it is intentional.
            target.addInternalScript(repository.getJS())
            target.addInternalScript("var branch = %r, upstream = %r;" % (branch_arg, upstream_arg))

        upstream = repository.revparse(upstream_arg)

        if fetch:
            if commit.startswith("r/"):
                raise page.utils.DisplayMessage("Won't fetch review branch from remote!")
            repository.updateBranchFromRemote(db, repository.getDefaultRemote(db), commit)

        try:
            commit = repository.revparse(commit)
        except:
            raise page.utils.DisplayMessage("Unable to interpret '%s' as a commit reference." % commit)

        try:
            gitutils.Commit.fromSHA1(db, repository, commit)
        except:
            raise page.utils.DisplayMessage("'%s' doesn't exist in the repository." % commit)

        if mode == "html":
            document.setTitle("Branch review status: %s" % branch_arg)

        # Commits on the branch but not on upstream.
        commits = repository.revlist([commit], [upstream], "--topo-order")

        if not commits:
            # Branch already merged: find the merge commit on the upstream
            # side and list what it contributed instead.
            try:
                merge_sha1 = repository.revlist([upstream], [commit], "--topo-order", "--ancestry-path")[-1]
            except:
                raise page.utils.DisplayMessage("No merged or unmerged commits found.")

            merge = gitutils.Commit.fromSHA1(db, repository, merge_sha1)

            if len(merge.parents) == 1:
                # Fast-forwarded; scan recent history from the branch head
                # for an actual merge commit to determine the upstream side.
                candidate_merges = repository.revlist([commit], [], "--topo-order", "--max-count=256")
                for merge_sha1 in candidate_merges:
                    merge = gitutils.Commit.fromSHA1(db, repository, merge_sha1)
                    if len(merge.parents) > 1:
                        use_upstream = merge.parents[1]
                        break
                else:
                    raise page.utils.DisplayMessage("Merge into upstream was a fast-forward; can't figure out what was merged in.")
            else:
                assert commit in merge.parents
                use_upstream = None
                # Use the merge's other parent as the upstream side.
                for parent in merge.parents:
                    if parent != commit:
                        use_upstream = parent
                        break

            commits = repository.revlist([commit], [use_upstream], "--topo-order")
            title = "Merged Commits (%d)" % len(commits)
            display_upstream = gitutils.Commit.fromSHA1(db, repository, use_upstream).describe(db)
        else:
            title = "Unmerged Commits (%d)" % len(commits)
            display_upstream = upstream_arg

        commits = [gitutils.Commit.fromSHA1(db, repository, sha1) for sha1 in commits]

        if mode == "html":
            table = target.table("branchstatus paleyellow", align="center", cellspacing=0)
            table.col(width="10%")
            table.col(width="8%")
            table.col(width="77%")
            table.col(width="5%")
            thead = table.thead()
            h1_cell = thead.tr().td('h1', colspan=4)
            h1_cell.h1().text(title)
            p = h1_cell.p()
            p.text("Commits returned by the command: ")
            p.span("command").text("git rev-list --topo-order %s ^%s" % (branch_arg, display_upstream))
            row = thead.tr("headings")
            row.th("sha1").text("SHA-1")
            row.th("user").text("Committer")
            row.th("summary").text("Summary")
            row.th("Review").text("Review")

        # Manually-set notes for this branch/upstream combination.
        cursor.execute("""SELECT sha1, uid, review, text
                            FROM checkbranchnotes
                           WHERE repository=%s
                             AND branch=%s
                             AND upstream=%s""",
                       (repository.id, branch_arg, upstream_arg))

        notes = {}
        reds = False

        for sha1, user_id, review_id, text in cursor:
            notes[sha1] = dbutils.User.fromId(db, user_id), review_id, text

        if commits:
            merged = set(commits)
            handled = set()
            current_tbody = None
            last_tbody = None

            def nameFromEmail(email):
                # Display only the local part of the committer's email.
                offset = email.find("@")
                if offset != -1:
                    return email[:offset]
                else:
                    return email

            review = None
            reviewed_commits = []
            text_items = {}
            text_order = []

            for commit in commits:
                if commit not in handled:
                    # Reviews whose branch head is this commit.
                    cursor.execute("""SELECT reviews.id
                                        FROM reviews
                                        JOIN branches ON (branches.id=reviews.branch)
                                        JOIN commits ON (commits.id=branches.head)
                                       WHERE commits.sha1=%s
                                         AND reviews.state!='dropped'""",
                                   (commit.sha1,))

                    if commit not in reviewed_commits:
                        # Pick the candidate review covering the largest
                        # number of the listed commits.
                        review = None
                        reviewed = set()
                        best = 0

                        for (review_id,) in cursor:
                            candidate_review = dbutils.Review.fromId(db, review_id)
                            # NOTE: Python 2 — filter() returns a list here,
                            # so len() below is valid.
                            candidate_reviewed = filter(lambda commit: commit in merged, candidate_review.branch.getCommits(db))
                            if len(candidate_reviewed) > best:
                                review = candidate_review
                                reviewed = candidate_reviewed
                                best = len(reviewed)

                        reviewed_commits = filter(lambda commit: commit in reviewed, commits)

                    if mode == "html":
                        if review:
                            # Commits verified as part of a Critic review.
                            current_tbody = None
                            review_tbody = table.tbody("reviewed" if review.state == 'closed' or review.accepted(db) else "pending")
                            first = True
                            review_tbody.tr("empty").td("empty", colspan=4)
                            for reviewed_commit in reviewed_commits:
                                handled.add(reviewed_commit)
                                row = review_tbody.tr("commit")
                                # NOTE(review): the tooltip uses `commit.sha1`
                                # while the cell text uses `reviewed_commit`;
                                # looks like a copy/paste slip — confirm.
                                row.td("sha1", title=commit.sha1).div().text(reviewed_commit.sha1[:8])
                                row.td("user").text(nameFromEmail(reviewed_commit.committer.email))
                                row.td("summary").a(href="%s?review=%d" % (reviewed_commit.sha1, review.id)).text(reviewed_commit.niceSummary())
                                if first:
                                    row.td("review", rowspan=len(reviewed)).a(href="r/%d" % review.id).text("r/%d" % review.id)
                                    first = False
                            last_tbody = review_tbody
                        elif commit.sha1 in notes:
                            # Status set manually via a checkbranch note.
                            note_user, review_id, text = notes[commit.sha1]
                            try:
                                review = dbutils.Review.fromId(db, review_id) if review_id else None
                            except:
                                review = None
                            current_tbody = None
                            note_tbody = table.tbody("note" if (not review_id or (review and (review.state == 'closed' or review.accepted(db)))) else "pending")
                            note_tbody.tr("empty").td("empty", colspan=4)
                            row = note_tbody.tr("commit", id=commit.sha1)
                            row.td("sha1", title=commit.sha1).div().text(commit.sha1[:8])
                            row.td("user").text(nameFromEmail(commit.committer.email))
                            row.td("summary").a(href="%s?repository=%d" % (commit.sha1, repository.id)).text(commit.niceSummary())
                            cell = row.td("review")
                            if review_id is None:
                                cell.text()
                            else:
                                cell.a(href="r/%d" % review_id).text("r/%d" % review_id)
                            row = note_tbody.tr("note")
                            row.td("sha1").text()
                            cell = row.td("note", colspan=2)
                            cell.text("Set by ")
                            cell.span("user").text(note_user.fullname)
                            if text is not None:
                                cell.text(": ")
                                cell.span("text").text(text)
                            row.td("edit").a("edit", href="javascript:editCommit(%r, %d, true%s);" % (commit.sha1, commit.getId(db), (", %d" % review_id) if review_id is not None else "")).text("[edit]")
                            last_tbody = note_tbody
                        else:
                            # Status unknown ("red").
                            handled.add(commit)
                            if not current_tbody:
                                current_tbody = table.tbody("unknown")
                                current_tbody.tr("empty").td("empty", colspan=4)
                                last_tbody = current_tbody
                            row = current_tbody.tr("commit%s" % (" own" if commit.author.email == user.email else ""), id=commit.sha1)
                            row.td("sha1", title=commit.sha1).div().text(commit.sha1[:8])
                            row.td("user").text(nameFromEmail(commit.committer.email))
                            row.td("summary").a(href="%s/%s" % (repository.name, commit.sha1)).text(commit.niceSummary())
                            row.td("edit").a("edit", href="javascript:editCommit(%r, %d, false);" % (commit.sha1, commit.getId(db))).text("[edit]")
                            reds = True
                    else:
                        # Text mode: group commits under an issue key (e.g.
                        # "ABC-123") found in the summary, else the summary
                        # itself, and record each commit's status as an item.
                        match = re.search("[A-Z][A-Z0-9]*-[0-9]+", commit.summary())
                        if match:
                            title = match.group(0)
                        else:
                            title = commit.summary(maxlen=50)
                            if title.endswith(".") and not title.endswith("..."):
                                title = title[:-1]

                        if commit.sha1 in notes:
                            note_user, review_id, text = notes[commit.sha1]
                            if review_id:
                                review = dbutils.Review.fromId(db, review_id)
                        else:
                            text = None

                        if review:
                            item = review.getURL(db)
                            if review.state != "closed" and not review.accepted(db):
                                item += " (NOT ACCEPTED!)"
                            if text:
                                item += " (%s: %s)" % (note_user.fullname, text)
                        elif text:
                            item = "%s: %s" % (note_user.fullname, text)
                        else:
                            item = "REVIEW STATUS UNKNOWN!"

                        if title in text_items:
                            if item not in text_items[title]:
                                text_items[title].append(item)
                        else:
                            text_items[title] = [item]
                            text_order.append(title)

            if mode == "html":
                last_tbody.tr("empty").td("empty", colspan=4)
            else:
                for title in reversed(text_order):
                    result += "%s: %s\n" % (title, ", ".join(text_items[title]))

        if mode == "html":
            if reds:
                h1_cell.h2().text("There should be no red!")
                legend = target.div("legend")
                legend.text("Color legend: ")
                legend.span("red").text("Status unknown")
                legend.text(" ")
                legend.span("yellow").text("Status set manually")
                legend.text(" ")
                legend.span("green").text("Verified by Critic")
            return document
        else:
            return page.utils.ResponseBody(result, content_type="text/plain")
    else:
        # No repository selected: render the input form.  Only reachable in
        # HTML mode (text mode requires the 'repository' parameter above).
        header_right[0].a("button", href="tutorial?item=checkbranch").text("Help")

        table = page.utils.PaleYellowTable(target, "Check branch review status")

        def renderRepository(target):
            page.utils.generateRepositorySelect(db, user, target, name="repository")
        def renderBranchName(target):
            target.input(name="commit")
        def renderFetch(target):
            target.input(name="fetch", type="checkbox", value="yes")
        def renderUpstream(target):
            target.input(name="upstream", value="master")

        table.addItem("Repository", renderRepository, description="Repository.")
        table.addItem("Branch name", renderBranchName, description="Branch name, or other reference to a commit.")
        table.addItem("Fetch branch from remote", renderFetch,
                      description=("Fetch named branch from selected repository's "
                                   "primary remote (from whence its 'master' branch "
                                   "is auto-updated.)"))
        table.addItem("Upstream", renderUpstream, description="Name of upstream branch.")

        def renderCheck(target):
            target.button("check").text("Check branch")

        table.addCentered(renderCheck)

        return document


================================================
FILE: src/page/config.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import os
import fnmatch

import configuration
import dbutils
import gitutils
import htmlutils
import textutils
import page.utils

def renderConfig(req, db, user):
    """Render the /config page: the user's preferences (optionally scoped to
    a repository or one of the user's filters), or — with 'defaults=yes' —
    the system- or repository-wide defaults.

    For each preference a default and an override value are resolved (from
    the 'preferences' and 'userpreferences' tables, and preferences.json for
    factory defaults) and an appropriate input widget is rendered.  Extension
    -injected preferences are appended when editing plain user preferences.
    Returns the htmlutils.Document to serve."""
    highlight = req.getParameter("highlight", None)
    repository = req.getParameter("repository", None, gitutils.Repository.FromParameter(db))
    filter_id = req.getParameter("filter", None, int)
    defaults = req.getParameter("defaults", "no") == "yes"

    if filter_id is not None:
        # There can't be system-wide defaults for one of a single user's
        # filters.
        defaults = False

    cursor = db.cursor()

    if filter_id is not None:
        cursor.execute("""SELECT filters.path, repositories.name
                            FROM filters
                            JOIN repositories ON (repositories.id=filters.repository)
                           WHERE filters.id=%s""",
                       (filter_id,))
        row = cursor.fetchone()
        if not row:
            raise page.utils.InvalidParameterValue(
                name="filter", value=str(filter_id), expected="valid filter id")
        title = "Filter preferences: %s in %s" % row
    elif repository is not None:
        title = "Repository preferences: %s" % repository.name
    else:
        title = "User preferences"

    document = htmlutils.Document(req)
    document.setTitle(title)

    html = document.html()
    head = html.head()
    body = html.body()

    # Anonymous users see the preferences but can't change them.
    if user.isAnonymous():
        disabled = "disabled"
    else:
        disabled = None

    def generate_right(target):
        # Header button toggling between editing own preferences and (for
        # administrators) the system/repository defaults.
        if defaults:
            url = "/config"
            if repository is not None:
                url += "?repository=%d" % repository.id
            target.a("button", href=url).text("Edit Own")
        elif user.hasRole(db, "administrator"):
            url = "/config?defaults=yes"
            if repository is not None:
                url += "&repository=%d" % repository.id
                what = "Repository Defaults"
            else:
                what = "System Defaults"
            target.a("button", href=url).text("Edit " + what)

    injected = page.utils.generateHeader(body, db, user, current_page="config", generate_right=generate_right)

    document.addExternalStylesheet("resource/config.css")
    document.addExternalScript("resource/config.js")
    document.addInternalScript(user.getJS())
    document.addInternalScript(
        "var repository_id = %s, filter_id = %s, defaults = %s;"
        % (htmlutils.jsify(repository.id if repository else None),
           htmlutils.jsify(filter_id),
           htmlutils.jsify(defaults)))

    target = body.div("main")

    table = target.table('preferences paleyellow', align='center', cellspacing=0)

    h1 = table.tr().td('h1', colspan=3).h1()
    h1.text(title)

    if filter_id is None:
        page.utils.generateRepositorySelect(
            db, user, h1.span("right"), allow_selecting_none=True,
            selected=repository.name if repository else False)

    # Boolean column in 'preferences' selecting which preferences are
    # applicable in the current editing scope.
    if filter_id is not None:
        conditional = "per_filter"
    elif repository is not None:
        conditional = "per_repository"
    elif defaults:
        conditional = "per_system"
    else:
        conditional = "per_user"

    cursor = db.cursor()
    cursor.execute("""SELECT item, type, description, per_repository, per_filter
                        FROM preferences
                       WHERE %(conditional)s""" % { "conditional": conditional })

    # Per item: [type, description, default value, override value,
    #            per_repository, per_filter].
    preferences = dict(
        (item, [preference_type, description, None, None, per_repository, per_filter])
        for item, preference_type, description, per_repository, per_filter in cursor)

    def set_values(rows, is_overrides):
        # Fill slot 2 (defaults) or slot 3 (overrides) from query rows,
        # coercing according to the preference's declared type.
        index = 3 if is_overrides else 2
        for item, integer, string in rows:
            if preferences[item][0] == "boolean":
                preferences[item][index] = bool(integer)
            elif preferences[item][0] == "integer":
                preferences[item][index] = integer
            else:
                preferences[item][index] = string

    # System-wide values (no user, no repository) are always the base
    # defaults.
    cursor.execute("""SELECT item, integer, string
                        FROM userpreferences
                       WHERE item=ANY (%s)
                         AND uid IS NULL
                         AND repository IS NULL""",
                   (preferences.keys(),))
    set_values(cursor, is_overrides=False)

    if repository is not None:
        cursor.execute("""SELECT item, integer, string
                            FROM userpreferences
                           WHERE item=ANY (%s)
                             AND uid IS NULL
                             AND repository=%s""",
                       (preferences.keys(), repository.id))
        # These are overrides if we're editing the defaults for a specific
        # repository.
        set_values(cursor, is_overrides=defaults)

    if not defaults:
        cursor.execute("""SELECT item, integer, string
                            FROM userpreferences
                           WHERE item=ANY (%s)
                             AND uid=%s
                             AND repository IS NULL
                             AND filter IS NULL""",
                       (preferences.keys(), user.id))

        if filter_id is not None or repository is not None:
            # We're looking at per-filter or per-repository settings, so the
            # user's global settings are defaults, not the overrides.  If a
            # per-filter or per-repository override is deleted, the user's
            # global setting kicks in instead.
            set_values(cursor, is_overrides=False)

            if filter_id is not None:
                cursor.execute("""SELECT item, integer, string
                                    FROM userpreferences
                                   WHERE item=ANY (%s)
                                     AND uid=%s
                                     AND filter=%s""",
                               (preferences.keys(), user.id, filter_id))
            else:
                cursor.execute("""SELECT item, integer, string
                                    FROM userpreferences
                                   WHERE item=ANY (%s)
                                     AND uid=%s
                                     AND repository=%s""",
                               (preferences.keys(), user.id, repository.id))

        # Set the overrides.  This is either the user's global settings, if
        # we're not looking at per-filter or per-repository settings, or the
        # user's per-filter or per-repository settings if we are.
        set_values(cursor, is_overrides=True)
    elif repository is None:
        # When editing global defaults, use the values from preferences.json
        # used when initially installing Critic as the default values.
        defaults_path = os.path.join(configuration.paths.INSTALL_DIR, "data/preferences.json")
        with open(defaults_path) as defaults_file:
            factory_defaults = textutils.json_decode(defaults_file.read())
        for item, data in preferences.items():
            data[3] = data[2]
            if item in factory_defaults:
                data[2] = factory_defaults[item]["default"]
            if data[2] == data[3]:
                data[3] = None

    if req.getParameter("recalculate", "no") == "yes":
        # Drop stored overrides that (now) equal the default.
        for item, data in preferences.items():
            if data[2] == data[3]:
                user.setPreference(db, item, None, repository=repository, filter_id=filter_id)
                data[3] = None
        db.commit()

    debug_enabled = user.getPreference(db, "debug.enabled")

    for item, (preference_type, description, default_value, current_value,
               per_repository, per_filter) in sorted(preferences.items()):
        # Hide debug.* preferences unless debugging is enabled.
        if item.startswith("debug.") and item != "debug.enabled" and not debug_enabled:
            continue

        line_class_name = "line"
        help_class_name = "help"

        if highlight is not None and not fnmatch.fnmatch(item, highlight):
            continue

        if current_value is None:
            current_value = default_value
        else:
            line_class_name += " customized"

        row = table.tr(line_class_name)
        heading = row.td("heading")
        heading.text("%s:" % item)
        value = row.td("value", colspan=2)
        value.preformatted()

        options = None
        optgroup = None

        def addOption(value, name, selected=lambda value: value==current_value, **attributes):
            # Append an <option> to the current <select> (or <optgroup>).
            (optgroup or options).option(
                value=value,
                selected="selected" if selected(value) else None,
                **attributes).text(name)

        # Pick a widget based on the preference's type, with special cases
        # for a handful of well-known items.
        if preference_type == "boolean":
            value.input(
                "setting", type="checkbox", name=item,
                checked="checked" if current_value else None, disabled=disabled,
                critic_current=htmlutils.jsify(current_value),
                critic_default=htmlutils.jsify(default_value))
        elif preference_type == "integer":
            value.input(
                "setting", type="number", min=0, max=2**31 - 1, name=item,
                value=current_value, disabled=disabled,
                critic_current=htmlutils.jsify(current_value),
                critic_default=htmlutils.jsify(default_value))
        elif item == "defaultRepository":
            page.utils.generateRepositorySelect(
                db, user, value, allow_selecting_none=True,
                placeholder_text="No default repository",
                selected=current_value, name=item, disabled=disabled,
                critic_current=htmlutils.jsify(current_value),
                critic_default=htmlutils.jsify(default_value))
        elif item == "defaultPage":
            options = value.select(
                "setting", name=item, disabled=disabled,
                critic_current=htmlutils.jsify(current_value),
                critic_default=htmlutils.jsify(default_value))
            addOption("home", "Home")
            addOption("dashboard", "Dashboard")
            addOption("branches", "Branches")
            addOption("config", "Config")
            addOption("tutorial", "Tutorial")
        elif item == "email.urlType":
            # Multi-select over the configured system identities; the stored
            # value is a comma-separated list of identity keys.
            cursor2 = db.cursor()
            cursor2.execute("""SELECT key, description, authenticated_scheme, hostname
                                 FROM systemidentities
                             ORDER BY description ASC""")
            identities = cursor2.fetchall()
            selected = set(current_value.split(","))
            options = value.select(
                "setting", name=item, size=len(identities), multiple="multiple",
                disabled=disabled,
                critic_current=htmlutils.jsify(current_value),
                critic_default=htmlutils.jsify(default_value))
            for key, label, authenticated_scheme, hostname in identities:
                prefix = "%s://%s/" % (authenticated_scheme, hostname)
                addOption(key, label,
                          selected=lambda value: value in selected,
                          class_="url-type flex",
                          data_text=label,
                          data_html=("<span class=label>%s</span>"
                                     "<span class=prefix>%s</span>"
                                     % (htmlutils.htmlify(label),
                                        htmlutils.htmlify(prefix))))
        elif item == "email.updatedReview.quotedComments":
            options = value.select(
                "setting", name=item, disabled=disabled,
                critic_current=htmlutils.jsify(current_value),
                critic_default=htmlutils.jsify(default_value))
            addOption("all", "All")
            addOption("first", "First")
            addOption("last", "Last")
            addOption("firstlast", "First & Last")
        elif item == "timezone":
            options = value.select(
                "setting", name=item, disabled=disabled,
                critic_current=htmlutils.jsify(current_value),
                critic_default=htmlutils.jsify(default_value))
            for group, zones in dbutils.timezones.sortedTimezones(db):
                optgroup = options.optgroup(label=group)
                for name, abbrev, utc_offset in zones:
                    seconds = utc_offset.total_seconds()
                    offset = "%s%02d:%02d" % ("-" if seconds < 0 else "+",
                                              abs(seconds) / 3600,
                                              (abs(seconds) % 3600) / 60)
                    addOption("%s/%s" % (group, name),
                              "%s (%s / UTC%s)" % (name, abbrev, offset))
        elif item == "repository.urlType":
            options = value.select(
                "setting", name=item, disabled=disabled,
                critic_current=htmlutils.jsify(current_value),
                critic_default=htmlutils.jsify(default_value))
            long_path = os.path.join(configuration.paths.GIT_DIR, "<path>.git")
            if "git" in configuration.base.REPOSITORY_URL_TYPES:
                addOption("git", "git://%s/<path>.git" % configuration.base.HOSTNAME)
            if "http" in configuration.base.REPOSITORY_URL_TYPES:
                scheme = configuration.base.ACCESS_SCHEME
                if scheme == "both":
                    if user.isAnonymous():
                        scheme = "http"
                    else:
                        scheme = "https"
                addOption("http", "%s://%s/<path>.git" % (scheme, configuration.base.HOSTNAME))
            if "ssh" in configuration.base.REPOSITORY_URL_TYPES:
                addOption("ssh", "ssh://%s%s" % (configuration.base.HOSTNAME, long_path))
            if "host" in configuration.base.REPOSITORY_URL_TYPES:
                addOption("host", "%s:%s" % (configuration.base.HOSTNAME, long_path))
        else:
            if item.startswith("email.subjectLine."):
                placeholder = "Email type disabled"
            else:
                placeholder = None
            value.input(
                "setting", type="text", size=80, name=item,
                placeholder=placeholder, value=current_value, disabled=disabled,
                critic_current=htmlutils.jsify(current_value),
                critic_default=htmlutils.jsify(default_value))

        also_configurable_per = []
        if per_repository and repository is None:
            also_configurable_per.append("repository")
        if per_filter and filter_id is None:
            also_configurable_per.append("filter")
        if also_configurable_per:
            value.span("also-configurable-per").text(
                "Also configurable per: %s" % ", ".join(also_configurable_per))

        reset = value.span("reset")
        reset.a(href="javascript:saveSettings(%s);" % htmlutils.jsify(item)).text("[reset to default]")

        cell = table.tr(help_class_name).td("help", colspan=3)

        # Certain phrases in descriptions are turned into tutorial links.
        magic_description_links = {
            "format string for subject line":
                "/tutorial?item=reconfigure#subject_line_formats",
            "phony recipients":
                "/tutorial?item=reconfigure#review_association_recipients"
        }

        for link_text, link_href in magic_description_links.items():
            prefix, link_text, suffix = description.partition(link_text)
            if link_text:
                cell.text(prefix)
                cell.a(href=link_href).text(link_text)
                cell.text(suffix)
                break
        else:
            cell.text(description)

    # Extension-injected preferences; only shown when editing the user's
    # own global preferences.  NOTE: has_key() is Python 2 only.
    if injected and injected.has_key("preferences") \
            and not defaults \
            and repository is None \
            and filter_id is None:
        for extension_name, author, preferences in injected["preferences"]:
            if highlight is not None:
                prefix = "%s/%s" % (author.name, extension_name)
                preferences = [
                    preference for preference in preferences
                    if fnmatch.fnmatch("%s/%s" % (prefix, preference["name"]), highlight)]
                if not preferences:
                    continue

            h2 = table.tr("extension").td("extension", colspan=3).h2()
            h2.span("name").text(extension_name)
            h2.text(" by ")
            h2.span("author").text(author.fullname)

            for preference in preferences:
                preference_url = preference["url"]
                preference_name = preference["name"]
                preference_type = preference["type"]
                preference_value = preference["value"]
                preference_default = preference["default"]
                preference_description = preference["description"]

                line_class_name = "line"
                help_class_name = "help"

                if preference_value != preference_default:
                    line_class_name += " customized"

                row = table.tr(line_class_name)
                heading = row.td("heading")
                heading.text("%s:" % preference_name)
                value = row.td("value", colspan=2)
                value.preformatted()

                if preference_type == "boolean":
                    value.input(
                        "setting", type="checkbox", name=preference_name,
                        disabled=disabled,
                        checked="checked" if preference_value else None,
                        critic_url=preference_url,
                        critic_default=htmlutils.jsify(bool(preference_value)),
                        critic_extension=extension_name)
                elif preference_type == "integer":
                    value.input(
                        "setting", type="number", min=0, name=preference_name,
                        value=preference_value, disabled=disabled,
                        critic_url=preference_url,
                        critic_default=htmlutils.jsify(preference_default),
                        critic_extension=extension_name)
                elif preference_type == "string":
                    value.input(
                        "setting", type="text", name=preference_name,
                        value=preference_value, disabled=disabled,
                        critic_url=preference_url,
                        critic_default=htmlutils.jsify(preference_default),
                        critic_extension=extension_name)
                else:
                    # Non-scalar type: a list of { "value", "title" } choices.
                    select = value.select(
                        "setting", name=preference_name, disabled=disabled,
                        critic_url=preference_url,
                        critic_value=preference_value,
                        critic_default=htmlutils.jsify(preference_default),
                        critic_extension=extension_name)
                    for choice in preference_type:
                        select.option(
                            value=choice["value"],
                            selected="selected" if preference_value == choice["value"] else None
                            ).text(choice["title"])

                cell = table.tr(help_class_name).td("help", colspan=3)
                cell.text(preference_description)

    critic_installed_sha1 = dbutils.getInstalledSHA1(db)
    div = body.div("installed_sha1")
    div.text("Critic version: ")
    div.a(href="https://critic-review.org/critic/%s" % critic_installed_sha1).text(critic_installed_sha1)

    return document


================================================
FILE: src/page/confirmmerge.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import dbutils
import htmlutils
import gitutils
import page.utils
import log.html
import log.commitset

def renderConfirmMerge(req, db, user):
    """Render the /confirmmerge page for a pending merge confirmation
    ('id' parameter referencing reviewmergeconfirmations).

    Optional parameters: 'tail' limits the set of contributed commits to
    those not reachable from the given tail commit; 'confirm=yes' records
    the confirmation, 'cancel=yes' deletes it.  Returns the
    htmlutils.Document to serve."""
    confirmation_id = req.getParameter("id", filter=int)
    tail_sha1 = req.getParameter("tail", None)
    do_confirm = req.getParameter("confirm", "no") == "yes"
    do_cancel = req.getParameter("cancel", "no") == "yes"

    cursor = db.cursor()
    cursor.execute("SELECT review, uid, merge, confirmed, tail FROM reviewmergeconfirmations WHERE id=%s",
                   (confirmation_id,))
    row = cursor.fetchone()

    if not row:
        raise page.utils.DisplayMessage("No pending merge with that id.")

    review_id, user_id, merge_id, confirmed, tail_id = row

    review = dbutils.Review.fromId(db, review_id)
    merge = gitutils.Commit.fromId(db, review.repository, merge_id)

    if confirmed and tail_id is not None:
        # Already confirmed with a tail: display that tail.
        tail_sha1 = gitutils.Commit.fromId(db, review.repository, tail_id).sha1

    # The commits the merge would contribute to the review.
    cursor.execute("SELECT merged FROM reviewmergecontributions WHERE id=%s",
                   (confirmation_id,))
    merged = [gitutils.Commit.fromId(db, review.repository, merged_id)
              for (merged_id,) in cursor]
    merged_set = log.commitset.CommitSet(merged)

    if tail_sha1 is not None:
        # Cut the contributed set at the requested tail: drop the tail's
        # parents (and everything below them) from the set.
        tail = gitutils.Commit.fromSHA1(db, review.repository, tail_sha1)
        tail_id = tail.getId(db)
        cut = [gitutils.Commit.fromSHA1(db, review.repository, sha1)
               for sha1 in tail.parents
               if sha1 in merged_set]
        merged_set = merged_set.without(cut)
        merged = list(merged_set)
    else:
        tail_id = None

    if do_confirm:
        cursor.execute("UPDATE reviewmergeconfirmations SET confirmed=TRUE, tail=%s WHERE id=%s",
                       (tail_id, confirmation_id))
        db.commit()
    elif do_cancel:
        cursor.execute("DELETE FROM reviewmergeconfirmations WHERE id=%s",
                       (confirmation_id,))
        db.commit()

    document = htmlutils.Document(req)

    html = document.html()
    head = html.head()
    body = html.body()

    def renderButtons(target):
        # Action buttons are only meaningful while the merge is still
        # pending (neither confirmed nor cancelled in this request).
        if not do_confirm and not do_cancel:
            target.button("confirmAll").text("Confirm (merge + contributed)")
            target.button("confirmNone").text("Confirm (merge only)")
            target.button("cancel").text("Cancel")

    page.utils.generateHeader(body, db, user, renderButtons,
                              extra_links=[("r/%d" % review.id, "Back to Review")])

    document.addExternalStylesheet("resource/confirmmerge.css")
    document.addExternalScript("resource/log.js")
    document.addExternalScript("resource/confirmmerge.js")
    document.addInternalScript(user.getJS())
    document.addInternalScript(review.getJS())
    document.addInternalScript("var confirmation_id = %d;" % confirmation_id)
    document.addInternalScript("var merge_sha1 = %s;" % htmlutils.jsify(merge.sha1))

    if tail_sha1 is not None:
        document.addInternalScript("var tail_sha1 = %s;" % htmlutils.jsify(tail_sha1))

    if not do_confirm and not do_cancel:
        heads = merged_set.getHeads()
        if heads:
            document.addInternalScript("var automaticAnchorCommit = %s;" % htmlutils.jsify(heads.pop().sha1))
        else:
            document.addInternalScript("var automaticAnchorCommit = null;")

    if do_confirm:
        document.addInternalScript("var confirmed = true;")
    else:
        document.addInternalScript("var confirmed = false;")

    target = body.div("main")

    basic = target.table("paleyellow")
    basic.col(width='15%')
    basic.col(width='55%')
    basic.col(width='30%')

    h1 = basic.tr().td('h1', colspan=3).h1()

    if do_confirm:
        h1.text("CONFIRMED MERGE")
    elif do_cancel:
        h1.text("CANCELLED MERGE")
    else:
        h1.text("Confirm Merge")

    row = basic.tr("sha1")
    row.td("heading").text("SHA-1:")
    row.td("value").preformatted().text(merge.sha1)
    row.td().text()

    row = basic.tr("message")
    row.td("heading").text("Message:")
    row.td("value").preformatted().text(merge.message)
    row.td().text()

    if not do_confirm and not do_cancel:
        row = basic.tr("instructions")
        row.td("heading").text("Instructions:")
        row.td("value").preformatted().text("""\
Use the top right buttons to confirm the merge with or without the contributed
commits that it brings.

By clicking 'Confirm (merge + contributed)' you will bring the merge commit
plus all commits that it contributes into the this code review.

By clicking 'Confirm (merge only)' you will bring only the merge commit itself
into the code review and not the contributed commits."

By clicking 'Cancel' you will abort the merge.  The code review will not be
modified at all from its current state.""")
        row.td().text()

    if merged:
        columns = [(10, log.html.WhenColumn()),
                   (60, log.html.SummaryColumn()),
                   (16, log.html.AuthorColumn())]

        log.html.render(db, target, "Contributed Commits", commits=merged, columns=columns)

    return document


================================================
FILE: src/page/createreview.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import itertools import re import auth import dbutils import gitutils import reviewing.utils import log.html import htmlutils import page.utils import diff import configuration import linkify import changeset.utils as changeset_utils from textutils import json_decode, json_encode def generateReviewersAndWatchersTable(db, repository, target, all_reviewers, all_watchers, applyfilters=True, applyparentfilters=False): cursor = db.cursor() teams = reviewing.utils.collectReviewTeams(all_reviewers) reviewers = set() watchers = set() for file_id, file_reviewers in all_reviewers.items(): for user_id in file_reviewers: reviewers.add(user_id) for file_id, file_watchers in all_watchers.items(): for user_id in file_watchers: if user_id not in reviewers: watchers.add(user_id) table = target.table("filters paleyellow", align="center") table.tr().td("h1", colspan=3).h1().text("Reviewers and Watchers") row = table.tr("applyfilters") row.td("value").input("applyfilters", type="checkbox", checked=("checked" if applyfilters else None)) row.td("legend", colspan=2).text("Apply global filters. Only disable this in inofficial reviews!") table.tr("watchers").td("spacer", colspan=3) if repository.parent and applyfilters: parent = repository.parent parents = [] while parent: parents.append(parent.name) parent = parent.parent if len(parents) == 1: parents = "repository (%s)" % parents[0] else: parents = "repositories (%s)" % ", ".join(parents) row = table.tr("applyfilters") row.td("value").input("applyparentfilters", type="checkbox", checked=("checked" if applyparentfilters else None)) row.td("legend", colspan=2).text("Apply global filters from upstream %s." 
% parents) table.tr("watchers").td("spacer", colspan=3) def formatFiles(files): return diff.File.eliminateCommonPrefixes(sorted([dbutils.describe_file(db, file_id) for file_id in files])) for team in teams: if team is not None: row = table.tr("reviewers") cell = row.td("reviewers") users = sorted([dbutils.User.fromId(db, user_id).fullname for user_id in team]) for user in users: cell.text(user).br() row.td("willreview").innerHTML("will review") cell = row.td("files") for file in formatFiles(teams[team]): cell.span("file").innerHTML(file).br() if None in teams: row = table.tr("reviewers") row.td("no-one", colspan=2).text("No reviewers found for changes in") cell = row.td("files") for file in formatFiles(teams[None]): cell.span("file").innerHTML(file).br() if watchers: table.tr("watchers").td("spacer", colspan=3) row = table.tr("watchers") row.td("heading", colspan=2).text("Watchers") cell = row.td("watchers") for user_id in watchers: cell.text(dbutils.User.fromId(db, user_id).fullname).br() table.tr("buttons").td("spacer", colspan=3) buttons = table.tr("buttons").td("buttons", colspan=3) buttons.button(onclick="addReviewer();").text("Add Reviewer") buttons.button(onclick="addWatcher();").text("Add Watcher") def renderSelectSource(req, db, user): cursor = db.cursor() document = htmlutils.Document(req) html = document.html() head = html.head() body = html.body() page.utils.generateHeader(body, db, user, current_page="createreview") document.addExternalStylesheet("resource/createreview.css") document.addExternalScript("resource/createreview.js") document.addExternalScript("resource/autocomplete.js") document.addInternalScript(user.getJS(db)) document.setTitle("Create Review") target = body.div("main") table = page.utils.PaleYellowTable(target, "Create Review") table.titleRight.innerHTML("Step 1") default_repository = user.getDefaultRepository(db) default_remotes = {} default_branches = {} def renderLocalRepository(target): page.utils.generateRepositorySelect(db, user, 
target, access_type="modify") cursor.execute("""SELECT repositories.id, repositories.name, repositories.path FROM repositories ORDER BY repositories.id""") for repository_id, name, path in cursor.fetchall(): def findRemote(local_name): cursor.execute("""SELECT remote FROM trackedbranches WHERE repository=%s AND local_name=%s""", (repository_id, local_name)) row = cursor.fetchone() if row: return row[0] try: repository = gitutils.Repository.fromId(db, repository_id) except auth.AccessDenied: continue remote = branch_name = None for branch in repository.getSignificantBranches(db): remote = findRemote(branch.name) if remote: branch_name = branch.name break if not remote: remote = findRemote("*") default_remotes[name] = remote default_branches[name] = branch_name document.addInternalScript("var default_remotes = %s;" % json_encode(default_remotes)) document.addInternalScript("var default_branches = %s;" % json_encode(default_branches)) def renderRemoteRepository(target): value = default_remotes.get(default_repository.name) if default_repository else None target.input("remote", value=value) def renderWorkBranch(target): target.text("refs/heads/") target.input("workbranch") def renderUpstreamCommit(target): default_branch = default_branches.get(default_repository.name) if default_repository else None target.input("upstreamcommit", value=("refs/heads/%s" % default_branch) if default_branch else "") table.addItem("Local Repository", renderLocalRepository, "Critic repository to create review in.") table.addItem("Remote Repository", renderRemoteRepository, "Remote repository to fetch commits from.") table.addItem("Work Branch", renderWorkBranch, "Work branch (in remote repository) containing commits to create review of.") table.addItem("Upstream Commit", renderUpstreamCommit, "Upstream commit from which the work branch was branched.") def renderButtons(target): target.button("fetchbranch").text("Fetch Branch") table.addCentered(renderButtons) return document def 
renderCreateReview(req, db, user): if user.isAnonymous(): raise page.utils.NeedLogin(req) repository = req.getParameter("repository", filter=gitutils.Repository.FromParameter(db), default=None) applyparentfilters = req.getParameter("applyparentfilters", "yes" if user.getPreference(db, 'review.applyUpstreamFilters') else "no") == "yes" cursor = db.cursor() if req.method == "POST": data = json_decode(req.read()) summary = data.get("summary") description = data.get("description") review_branch_name = data.get("review_branch_name") commit_ids = data.get("commit_ids") commit_sha1s = data.get("commit_sha1s") else: summary = req.getParameter("summary", None) description = req.getParameter("description", None) review_branch_name = req.getParameter("reviewbranchname", None) commit_ids = None commit_sha1s = None commits_arg = req.getParameter("commits", None) remote = req.getParameter("remote", None) upstream = req.getParameter("upstream", "master") branch_name = req.getParameter("branch", None) if commits_arg: try: commit_ids = map(int, commits_arg.split(",")) except: commit_sha1s = [repository.revparse(ref) for ref in commits_arg.split(",")] elif branch_name: cursor.execute("""SELECT commit FROM reachable JOIN branches ON (branch=id) WHERE repository=%s AND name=%s""", (repository.id, branch_name)) commit_ids = [commit_id for (commit_id,) in cursor] if len(commit_ids) > configuration.limits.MAXIMUM_REVIEW_COMMITS: raise page.utils.DisplayMessage( "Too many commits!", (("<p>The branch <code>%s</code> contains %d commits. 
Reviews can" "be created from branches that contain at most %d commits.</p>" "<p>This limit can be adjusted by modifying the system setting" "<code>configuration.limits.MAXIMUM_REVIEW_COMMITS</code>.</p>") % (htmlutils.htmlify(branch_name), len(commit_ids), configuration.limits.MAXIMUM_REVIEW_COMMITS)), html=True) else: return renderSelectSource(req, db, user) req.content_type = "text/html; charset=utf-8" if commit_ids: commits = [gitutils.Commit.fromId(db, repository, commit_id) for commit_id in commit_ids] elif commit_sha1s: commits = [gitutils.Commit.fromSHA1(db, repository, commit_sha1) for commit_sha1 in commit_sha1s] else: commits = [] if not commit_ids: commit_ids = [commit.getId(db) for commit in commits] if not commit_sha1s: commit_sha1s = [commit.sha1 for commit in commits] if summary is None: if len(commits) == 1: summary = commits[0].summary() else: summary = "" if review_branch_name: invalid_branch_name = "false" default_branch_name = review_branch_name else: invalid_branch_name = htmlutils.jsify(user.name + "/") default_branch_name = user.name + "/" match = re.search("(?:^|[Ff]ix(?:e[ds])?(?: +for)?(?: +bug)? 
+)([A-Z][A-Z0-9]+-[0-9]+)", summary) if match: invalid_branch_name = "false" default_branch_name = htmlutils.htmlify(match.group(1)) changesets = [] changeset_utils.createChangesets(db, repository, commits) for commit in commits: changesets.extend(changeset_utils.createChangeset(db, None, repository, commit, do_highlight=False)) changeset_ids = [changeset.id for changeset in changesets] all_reviewers, all_watchers = reviewing.utils.getReviewersAndWatchers( db, repository, changesets=changesets, applyparentfilters=applyparentfilters) document = htmlutils.Document(req) html = document.html() head = html.head() document.addInternalScript(user.getJS(db)) if branch_name: document.addInternalScript("var fromBranch = %s;" % htmlutils.jsify(branch_name)) if remote: document.addInternalScript("var trackedbranch = { remote: %s, name: %s };" % (htmlutils.jsify(remote), htmlutils.jsify(branch_name))) head.title().text("Create Review") body = html.body(onload="document.getElementById('branch_name').focus()") page.utils.generateHeader(body, db, user, lambda target: target.button(onclick="submitReview();").text("Submit Review")) document.addExternalStylesheet("resource/createreview.css") document.addExternalScript("resource/createreview.js") document.addExternalScript("resource/reviewfilters.js") document.addExternalScript("resource/autocomplete.js") document.addInternalScript(""" var invalid_branch_name = %s; var review_data = { commit_ids: %r, commit_sha1s: %r, changeset_ids: %r };""" % (invalid_branch_name, commit_ids, commit_sha1s, changeset_ids)) document.addInternalScript(repository.getJS()) main = body.div("main") table = main.table("basic paleyellow", align="center") table.tr().td("h1", colspan=3).h1().text("Create Review") row = table.tr("line") row.td("heading").text("Branch Name:") row.td("value").text("r/").input("value", id="branch_name", value=default_branch_name) row.td("status") row = table.tr() if not remote: row.td("help", colspan=3).div().text("""\ This is the 
main identifier of the review. It will be created in the review repository to contain the commits below. Reviewers can fetch it from there, and additional commits can be added to the review later by pushing them to this branch in the review repository.""") else: row.td("help", colspan=3).div().text("""\ This is the main identifier of the review. It will be created in the review repository to contain the commits below, and reviewers can fetch it from there.""") if remote: row = table.tr("line") row.td("heading").text("Tracked Branch:") value = row.td("value") value.code("branch inset").text(branch_name, linkify=linkify.Context(remote=remote)) value.text(" in ") value.code("remote inset").text(remote, linkify=linkify.Context()) row.td("status") row = table.tr() row.td("help", colspan=3).div().text("""\ Rather than pushing directly to the review branch in Critic's repository to add commits to the review, you will be pushing to this branch (in a separate repository,) from which Critic will fetch commits and add them to the review automatically.""") row = table.tr("line") row.td("heading").text("Summary:") row.td("value").input("value", id="summary", value=summary) row.td("status") row = table.tr() row.td("help", colspan=3).div().text("""\ The summary should be a short summary of the changes in the review. It will appear in the subject of all emails sent about the review. """) row = table.tr("line description") row.td("heading").text("Description:") textarea = row.td("value").textarea(id="description", rows=12) textarea.preformatted() if description: textarea.text(description) row.td("status") row = table.tr() row.td("help", colspan=3).div().text("""\ The description should describe the changes to be reviewed. It is usually fine to leave the description empty, since the commit messages are also available in the review. 
""") generateReviewersAndWatchersTable(db, repository, main, all_reviewers, all_watchers, applyparentfilters=applyparentfilters) row = table.tr("line recipients") row.td("heading").text("Recipient List:") cell = row.td("value", colspan=2).preformatted() cell.span("mode").text("Everyone") cell.span("users") cell.text(".") buttons = cell.div("buttons") buttons.button(onclick="editRecipientList();").text("Edit Recipient List") row = table.tr() row.td("help", colspan=3).div().text("""\ The basic recipient list for e-mails sent about the review. """) log.html.render(db, main, "Commits", commits=commits) return document ================================================ FILE: src/page/createuser.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import htmlutils
import page
import auth
import dbutils
import configuration

from page.parameters import Optional

class CreateUserHandler(page.Page.Handler):
    """Renders the "create user" page.

    Supports both plain (password-based) registration and registration via an
    external OAuth2 provider, in which case |provider|, |account| and |token|
    identify the already-authenticated external account."""

    def __init__(self, target=None, username=None, email=None, fullname=None,
                 provider=None, account=None, token=None):
        super(CreateUserHandler, self).__init__()
        self.target = target          # URL to redirect to after creation
        self.username = username      # pre-filled user name, if any
        self.email = email
        self.fullname = fullname
        self.provider = provider      # external auth provider key, if any
        self.account = account        # external account identifier
        self.token = token            # OAuth2 token proving the account

    def generateHeader(self):
        """Add the page's stylesheet/script and expose the redirect target."""
        self.document.addExternalStylesheet("resource/createuser.css")
        self.document.addExternalScript("resource/createuser.js")
        if self.target:
            self.document.addInternalScript(
                "var target = %s;" % htmlutils.jsify(self.target))

    def generateContent(self):
        """Generate the registration form.

        Raises page.utils.DisplayMessage if the OAuth2 token is invalid or if
        user registration is not enabled (globally or for the provider)."""
        table = page.utils.PaleYellowTable(self.body, "Create user")
        if self.provider and self.token and self.provider in auth.PROVIDERS:
            provider = auth.PROVIDERS[self.provider]
            if not provider.validateToken(self.db, self.account, self.token):
                raise page.utils.DisplayMessage("Invalid OAuth2 token")
            allow_user_registration = \
                provider.configuration.get("allow_user_registration", False)
        else:
            provider = None
            allow_user_registration = configuration.base.ALLOW_USER_REGISTRATION
        if not allow_user_registration:
            administrators = dbutils.getAdministratorContacts(
                self.db, as_html=True)
            raise page.utils.DisplayMessage(
                title="User registration not enabled",
                body=(("<p>The administrator of this system has not enabled "
                       "registration of new users.</p>"
                       "<p>Contact %s if you want to use this system.</p>")
                      % administrators),
                html=True)
        def render(target):
            table = target.table("createuser", align="center")
            # Small local helpers for building table rows.
            def header(label):
                # NOTE(review): currently unused.
                row = table.tr("header")
                row.td(colspan=2).text(label)
            def item(key):
                row = table.tr("item")
                row.td("key").text("%s:" % key)
                return row.td("value")
            def button(class_name):
                row = table.tr("button")
                return row.td(colspan=2).button(class_name)
            def separator():
                table.tr("separator1").td(colspan=2)
                table.tr("separator2").td(colspan=2)
            if provider:
                # Expose the external account to the page script and show
                # which external account the new user will be linked to.
                self.document.addInternalScript(
                    "var external = { provider: %s, account: %s, token: %s };"
                    % (htmlutils.jsify(self.provider),
                       htmlutils.jsify(self.account),
                       htmlutils.jsify(self.token)))
                url = provider.getAccountURL(self.account)
                item(provider.getTitle()).a("external", href=url).text(self.account)
                separator()
            else:
                self.document.addInternalScript("var external = null;")
            message = table.tr("status disabled").td(colspan=2).div("message")
            if self.username:
                # Pre-validate the suggested user name: it must not already
                # exist and must pass the configured validation rules.
                try:
                    dbutils.User.fromName(self.db, self.username)
                except dbutils.NoSuchUser:
                    try:
                        auth.validateUserName(self.username)
                    except auth.InvalidUserName as error:
                        message.u("Invalid user name")
                        message.br()
                        message.text(str(error))
                else:
                    message.text("A user named '%s' already exists!" % self.username)
            item("New user name").input(id="newusername", value=self.username, size=40)
            item("Display name").input(id="fullname", value=self.fullname, size=40)
            item("Email").input(id="email", value=self.email, size=40)
            if not provider:
                # Only password-based accounts need a password here.
                separator()
                item("Password").input(id="password1", type="password", size=40)
                item("Password (again)").input(id="password2", type="password", size=40)
            button("create").text("Create user")
        table.addCentered(render)

class CreateUser(page.Page):
    """Page object wiring URL parameters to CreateUserHandler."""
    def __init__(self):
        super(CreateUser, self).__init__("createuser",
                                         { "target": Optional(str),
                                           "username": Optional(str),
                                           "email": Optional(str),
                                           "fullname": Optional(str),
                                           "provider": Optional(str),
                                           "account": Optional(str),
                                           "token": Optional(str) },
                                         CreateUserHandler)



================================================
FILE: src/page/dashboard.py
================================================

# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import dbutils
import gitutils
import htmlutils
import profiling
import page.utils
import auth

def renderDashboard(req, db, user):
    """Render the dashboard page.

    Generator: yields progressively rendered HTML chunks (via flush()) so the
    page appears section by section.  Which sections ("owned", "draft",
    "active", "watched", "closed", "open") are shown is controlled by the
    "show" parameter or the user's dashboard.defaultGroups preference;
    anonymous users only ever see "open" and "closed".

    Throughout, each review is represented by a tuple
    (summary, branch_id, lines, comments) keyed by review id."""
    if user.isAnonymous():
        default_show = "open"
    else:
        default_show = user.getPreference(db, "dashboard.defaultGroups")
    show = req.getParameter("show", default_show)
    # Anonymous users may only request the "open" and "closed" groups.
    if user.isAnonymous():
        def possible(group):
            return group in ("open", "closed")
    else:
        def possible(group):
            return True
    showlist = filter(possible, show.split(","))
    showset = set(showlist)
    if user.getPreference(db, "commit.diff.compactMode"):
        default_compact = "yes"
    else:
        default_compact = "no"
    repository_arg = req.getParameter("repository", None)
    repository = gitutils.Repository.fromParameter(db, repository_arg) if repository_arg else None
    compact = req.getParameter("compact", default_compact) == "yes"
    cursor = db.cursor()
    profiler = profiling.Profiler()
    document = htmlutils.Document(req)
    document.setTitle("Dashboard")
    html = document.html()
    head = html.head()
    body = html.body()
    def generateRight(target):
        # "[show ...]" links for every group not currently displayed.
        def addLink(key, title=None):
            if not title:
                title = key
            if key not in showset:
                target.text("[")
                target.a(href="dashboard?show=%s" % ",".join(showlist + [key])).text("show %s" % title)
                target.text("]")
        if user.isAnonymous():
            addLink("open", "open")
            addLink("closed")
        else:
            target.text("[")
            target.a(href="config?highlight=dashboard.defaultGroups").text("configure defaults")
            target.text("]")
            addLink("owned")
            addLink("draft")
            addLink("active")
            addLink("watched")
            addLink("open", "other open")
            addLink("closed")
    page.utils.generateHeader(body, db, user, current_page="dashboard", generate_right=generateRight, profiler=profiler)
    document.addExternalStylesheet("resource/dashboard.css")
    document.addExternalScript("resource/dashboard.js")
    document.addInternalScript(user.getJS())
    target = body.div("main")
    def flush(target):
        # Render everything generated so far (up to |target|).
        return document.render(stop=target, pretty=not compact)
    def includeReview(review_id):
        # When filtering by repository, keep only reviews in that repository.
        if repository:
            cursor = db.cursor()
            cursor.execute("SELECT branches.repository FROM branches JOIN reviews ON (reviews.branch=branches.id) WHERE reviews.id=%s", (review_id,))
            return cursor.fetchone()[0] == repository.id
        else:
            return True
    def sortedReviews(data):
        # dict {review_id: tuple} -> list of (review_id, tuple), id order.
        reviews = []
        for review_id in sorted(data.keys()):
            reviews.append((review_id, data[review_id]))
        return reviews
    def isAccepted(review_ids):
        # Map review id -> True if it has no pending files and no open issues.
        cursor.execute("""SELECT reviews.id, COUNT(reviewfiles.id)=0 AND COUNT(commentchains.id)=0
                            FROM reviews
                 LEFT OUTER JOIN reviewfiles ON (reviewfiles.review=reviews.id
                                             AND reviewfiles.state='pending')
                 LEFT OUTER JOIN commentchains ON (commentchains.review=reviews.id
                                               AND commentchains.type='issue'
                                               AND commentchains.state='open')
                           WHERE reviews.id=ANY (%s)
                        GROUP BY reviews.id""",
                       (review_ids,))
        return dict(cursor)
    # Cache of repository id -> read access allowed, to avoid repeated checks.
    checked_repositories = {}
    def accessRepository(repository_id):
        already_checked = checked_repositories.get(repository_id)
        if already_checked is not None:
            return already_checked
        is_allowed = auth.AccessControlProfile.isAllowedRepository(
            db.profiles, "read", repository_id)
        checked_repositories[repository_id] = is_allowed
        return is_allowed
    def renderReviews(target, reviews, lines_and_comments=True, links=True):
        # Render one table row per review; skips reviews in repositories the
        # user may not read.
        cursor.execute("SELECT id, repository, name FROM branches WHERE id=ANY (%s)",
                       (list(branch_id for _, (_, branch_id, _, _) in reviews),))
        branch_data = { branch_id: (repository_id, name)
                        for branch_id, repository_id, name in cursor }
        for review_id, (summary, branch_id, lines, comments) in reviews:
            repository_id, branch_name = branch_data[branch_id]
            if not accessRepository(repository_id):
                continue
            row = target.tr("review")
            row.td("name").text(branch_name)
            row.td("title").a(href="r/%d" % review_id).text(summary)
            if lines_and_comments:
                if lines:
                    if links:
                        row.td("lines").a(href="showcommit?review=%d&filter=pending" % review_id).text("%d lines" % (sum(lines)))
                    else:
                        row.td("lines").text("%d lines" % (sum(lines)))
                else:
                    row.td("lines").text()
                if comments:
                    if links:
                        row.td("comments").a(href="showcomments?review=%s&filter=toread" % review_id).text("%d comment%s" % (comments, "s" if comments > 1 else ""))
                    else:
                        row.td("comments").text("%d comment%s" % (comments, "s" if comments > 1 else ""))
                else:
                    row.td("comments").text()
    def hidden(what):
        # URL for this dashboard with group |what| removed.
        new_show = ",".join(filter(lambda item: item != what, showlist))
        if new_show:
            return "dashboard?show=%s" % new_show
        else:
            return "dashboard"
    profiler.check("generate: prologue")
    def renderOwned():
        # "Owned By You": open reviews the user owns, split accepted/pending.
        owned_accepted = []
        owned_open = []
        cursor.execute("""SELECT id, summary, branch
                            FROM reviews
                            JOIN reviewusers ON (review=id AND reviewusers.owner)
                           WHERE state='open'
                             AND uid=%s
                        ORDER BY id DESC""",
                       (user.id,))
        owned = cursor.fetchall()
        profiler.check("query: owned")
        is_accepted = isAccepted(list(review_id for review_id, _, _ in owned))
        for review_id, summary, branch_id in owned:
            if includeReview(review_id):
                if is_accepted[review_id]:
                    owned_accepted.append((review_id, (summary, branch_id, None, None)))
                else:
                    owned_open.append((review_id, (summary, branch_id, None, None)))
        profiler.check("processing: owned")
        if owned_accepted or owned_open:
            table = target.table("paleyellow reviews", id="owned", align="center", cellspacing=0)
            table.col(width="15%")
            table.col(width="55%")
            table.col(width="15%")
            table.col(width="15%")
            header = table.tr().td("h1", colspan=4).h1()
            header.text("Owned By You")
            header.span("right").a(href=hidden("owned")).text("[hide]")
            if owned_accepted:
                table.tr(id="accepted").td("h2", colspan=4).h2().text("Accepted")
                renderReviews(table, owned_accepted)
            if owned_open:
                table.tr(id="open").td("h2", colspan=4).h2().text("Pending")
                renderReviews(table, owned_open)
        profiler.check("generate: owned")
        return True
    def renderDraft():
        # "Reviews With Unsubmitted Work": draft changes and/or comments.
        draft_changes = {}
        draft_comments = {}
        draft_both = {}
        cursor.execute("""SELECT reviews.id, reviews.summary, reviews.branch,
                                 SUM(reviewfiles.deleted), SUM(reviewfiles.inserted)
                            FROM reviews
                            JOIN reviewfiles ON (reviewfiles.review=reviews.id)
                            JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id)
                           WHERE reviews.state='open'
                             AND reviewfiles.state=reviewfilechanges.from_state
                             AND reviewfilechanges.state='draft'
                             AND reviewfilechanges.uid=%s
                        GROUP BY reviews.id, reviews.summary, reviews.branch""",
                       (user.id,))
        profiler.check("query: draft lines")
        for review_id, summary, branch_id, deleted_count, inserted_count in cursor:
            if includeReview(review_id):
                draft_changes[review_id] = (summary, branch_id, (deleted_count, inserted_count), None)
        profiler.check("processing: draft lines")
        cursor.execute("""SELECT reviews.id, reviews.summary, reviews.branch, COUNT(comments.id)
                            FROM reviews
                            JOIN commentchains ON (commentchains.review=reviews.id)
                            JOIN comments ON (comments.chain=commentchains.id)
                           WHERE comments.state='draft'
                             AND comments.uid=%s
                        GROUP BY reviews.id, reviews.summary, reviews.branch""",
                       [user.id])
        profiler.check("query: draft comments")
        for review_id, summary, branch_id, comments_count in cursor:
            if includeReview(review_id):
                # Reviews with both kinds of drafts move to their own group.
                if draft_changes.has_key(review_id):
                    draft_both[review_id] = (summary, branch_id, draft_changes[review_id][2], comments_count)
                    del draft_changes[review_id]
                else:
                    draft_comments[review_id] = (summary, branch_id, None, comments_count)
        profiler.check("processing: draft comments")
        if draft_both or draft_changes or draft_comments:
            table = target.table("paleyellow reviews", id="draft", align="center", cellspacing=0)
            table.col(width="15%")
            table.col(width="55%")
            table.col(width="15%")
            table.col(width="15%")
            header = table.tr().td("h1", colspan=4).h1()
            header.text("Reviews With Unsubmitted Work")
            header.span("right").a(href=hidden("draft")).text("[hide]")
            if draft_both:
                table.tr(id="draft-changes-comments").td("h2", colspan=4).h2().text("Draft Changes And Comments")
                renderReviews(table, sortedReviews(draft_both), links=False)
            if draft_changes:
                table.tr(id="draft-changes").td("h2", colspan=4).h2().text("Draft Changes")
                renderReviews(table, sortedReviews(draft_changes), links=False)
            if draft_comments:
                table.tr(id="draft-comments").td("h2", colspan=4).h2().text("Draft Comments")
                renderReviews(table, sortedReviews(draft_comments), links=False)
        profiler.check("generate: draft")
        return True
    # Lazily populated cache shared by renderActive()/fetchWatchedAndClosed().
    active = {}
    def fetchActive():
        if not active:
            with_changes = {}
            with_comments = {}
            with_both = {}
            cursor.execute("""SELECT reviews.id, reviews.summary, reviews.branch,
                                     SUM(reviewfiles.deleted), SUM(reviewfiles.inserted)
                                FROM reviews
                                JOIN reviewusers ON (reviewusers.review=reviews.id AND reviewusers.uid=%s)
                                JOIN reviewfiles ON (reviewfiles.review=reviews.id)
                                JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id AND reviewuserfiles.uid=%s)
                               WHERE reviews.state='open'
                                 AND reviewfiles.state='pending'
                            GROUP BY reviews.id, reviews.summary, reviews.branch""",
                           (user.id, user.id))
            profiler.check("query: active lines")
            for review_id, summary, branch_id, deleted_count, inserted_count in cursor:
                if includeReview(review_id):
                    with_changes[review_id] = (summary, branch_id, (deleted_count, inserted_count), None)
            profiler.check("processing: active lines")
            cursor.execute("""SELECT reviews.id, reviews.summary, reviews.branch, unread.count
                                FROM (SELECT commentchains.review AS review, COUNT(commentstoread.comment) AS count
                                        FROM commentchains
                                        JOIN comments ON (comments.chain=commentchains.id)
                                        JOIN commentstoread ON (commentstoread.comment=comments.id AND commentstoread.uid=%s)
                                    GROUP BY commentchains.review) AS unread
                                JOIN reviews ON (reviews.id=unread.review)
                               WHERE reviews.state='open'""",
                           (user.id,))
            profiler.check("query: active comments")
            for review_id, summary, branch_id, comments_count in cursor:
                if includeReview(review_id):
                    if with_changes.has_key(review_id):
                        with_both[review_id] = (summary, branch_id, with_changes[review_id][2], comments_count)
                        del with_changes[review_id]
                    else:
                        with_comments[review_id] = (summary, branch_id, None, comments_count)
            profiler.check("processing: active comments")
            active["changes"] = with_changes
            active["comments"] = with_comments
            active["both"] = with_both
    def renderActive():
        # "Active Reviews": reviews with pending changes and/or unread
        # comments assigned to the user.
        fetchActive()
        if active["both"] or active["changes"] or active["comments"]:
            table = target.table("paleyellow reviews", id="active", align="center", cellspacing=0)
            table.col(width="15%")
            table.col(width="55%")
            table.col(width="15%")
            table.col(width="15%")
            header = table.tr().td("h1", colspan=4).h1()
            header.text("Active Reviews")
            header.span("right").a(href=hidden("active")).text("[hide]")
            if active["both"]:
                review_ids = ",".join(map(str, active["both"].keys()))
                h2 = table.tr(id="active-changes-comments").td("h2", colspan=4).h2().text("Has Changes And Comments")
                h2.a(href="javascript:void(0);", onclick="markChainsAsRead([%s]);" % review_ids).text("[mark all as read]")
                renderReviews(table, sortedReviews(active["both"]))
            if active["changes"]:
                table.tr(id="active-changes").td("h2", colspan=4).h2().text("Has Changes")
                renderReviews(table, sortedReviews(active["changes"]))
            if active["comments"]:
                review_ids = ",".join(map(str, active["comments"].keys()))
                h2 = table.tr(id="active-comments").td("h2", colspan=4).h2().text("Has Comments")
                h2.a(href="javascript:void(0);", onclick="markChainsAsRead([%s]);" % review_ids).text("[mark all as read]")
                renderReviews(table, sortedReviews(active["comments"]))
        profiler.check("generate: active")
        return True
    # Lazily populated cache shared by renderWatched()/renderClosed().
    other = {}
    def fetchWatchedAndClosed():
        if not other:
            # Only query the states actually needed by the shown groups.
            if "watched" not in showset:
                state_filter = " WHERE reviews.state='closed'"
            elif "closed" not in showset:
                state_filter = " WHERE reviews.state='open'"
            else:
                state_filter = ""
            profiler.check("query: watched/closed")
            watched = {}
            owned_closed = {}
            other_closed = {}
            if "watched" in showset:
                fetchActive()
            cursor.execute("""SELECT reviews.id, reviews.summary, reviews.branch, reviews.state,
                                     reviewusers.owner, reviewusers.uid IS NULL
                                FROM reviews
                     LEFT OUTER JOIN reviewusers ON (reviewusers.review=reviews.id
                                                 AND reviewusers.uid=%s)""" + state_filter,
                           (user.id,))
            for review_id, summary, branch_id, review_state, is_owner, not_associated in cursor:
                if includeReview(review_id):
                    if review_state == 'open':
                        # "Watched" = open, associated, not owned, not active.
                        if is_owner or not_associated:
                            continue
                        fetchActive()
                        if active["both"].has_key(review_id) or active["changes"].has_key(review_id) or active["comments"].has_key(review_id):
                            continue
                        watched[review_id] = summary, branch_id, None, None
                    elif is_owner:
                        owned_closed[review_id] = summary, branch_id, None, None
                    else:
                        other_closed[review_id] = summary, branch_id, None, None
            profiler.check("processing: watched/closed")
            other["watched"] = watched
            other["owned-closed"] = owned_closed
            other["other-closed"] = other_closed
    def renderWatched():
        fetchWatchedAndClosed()
        watched = other["watched"]
        accepted = []
        pending = []
        is_accepted = isAccepted(watched.keys())
        for review_id, (summary, branch_id, lines, comments) in sortedReviews(watched):
            if is_accepted[review_id]:
                accepted.append((review_id, (summary, branch_id, lines, comments)))
            else:
                pending.append((review_id, (summary, branch_id, lines, comments)))
        if accepted or pending:
            table = target.table("paleyellow reviews", id="watched", align="center", cellspacing=0)
            table.col(width="30%")
            table.col(width="70%")
            header = table.tr().td("h1", colspan=4).h1()
            header.text("Watched Reviews")
            header.span("right").a(href=hidden("watched")).text("[hide]")
            if accepted:
                table.tr(id="active-changes-comments").td("h2", colspan=4).h2().text("Accepted")
                renderReviews(table, accepted, False)
            if pending:
                table.tr(id="active-changes-comments").td("h2", colspan=4).h2().text("Pending")
                renderReviews(table, pending, False)
        profiler.check("generate: watched")
        return True
    def renderClosed():
        fetchWatchedAndClosed()
        owned_closed = other["owned-closed"]
        other_closed = other["other-closed"]
        if owned_closed or other_closed:
            table = target.table("paleyellow reviews", id="closed", align="center", cellspacing=0)
            table.col(width="30%")
            table.col(width="70%")
            header = table.tr().td("h1", colspan=4).h1()
            header.text("Closed Reviews")
            header.span("right").a(href=hidden("closed")).text("[hide]")
            if not user.isAnonymous():
                if owned_closed:
                    table.tr().td("h2", colspan=4).h2().text("Owned")
                    renderReviews(table, sortedReviews(owned_closed), False)
                if other_closed:
                    table.tr().td("h2", colspan=4).h2().text("Other")
                    renderReviews(table, sortedReviews(other_closed), False)
            else:
                renderReviews(table, sortedReviews(other_closed), False)
        profiler.check("generate: closed")
        return True
    def renderOpen():
        # "(Other) Open Reviews": open reviews the user is not associated with.
        other_open = {}
        cursor.execute("""SELECT reviews.id, reviews.summary, reviews.branch
                            FROM reviews
                 LEFT OUTER JOIN reviewusers ON (reviewusers.review=reviews.id
                                             AND reviewusers.uid=%s)
                           WHERE reviews.state='open'
                             AND reviewusers.uid IS NULL""",
                       [user.id])
        profiler.check("query: open")
        for review_id, summary, branch_id in cursor:
            if includeReview(review_id):
                other_open[review_id] = summary, branch_id, None, None
        profiler.check("processing: open")
        if other_open:
            accepted = []
            pending = []
            for review_id, (summary, branch_id, lines, comments) in sortedReviews(other_open):
                if dbutils.Review.isAccepted(db, review_id):
                    accepted.append((review_id, (summary, branch_id, lines, comments)))
                else:
                    pending.append((review_id, (summary, branch_id, lines, comments)))
            table = target.table("paleyellow reviews", id="open", align="center", cellspacing=0)
            table.col(width="30%")
            table.col(width="70%")
            header = table.tr().td("h1", colspan=4).h1()
            header.text("Open Reviews" if user.isAnonymous() else "Other Open Reviews")
            header.span("right").a(href=hidden("open")).text("[hide]")
            if accepted:
                table.tr().td("h2", colspan=4).h2().text("Accepted")
                renderReviews(table, accepted, False)
            if pending:
                table.tr().td("h2", colspan=4).h2().text("Pending")
                renderReviews(table, pending, False)
        profiler.check("generate: open")
        return True
    # Dispatch table: group name -> section renderer.
    render = { "owned": renderOwned,
               "draft": renderDraft,
               "active": renderActive,
               "watched": renderWatched,
               "closed": renderClosed,
               "open": renderOpen }
    empty = True
    for item in showlist:
        if item in render:
            target.comment(repr(item))
            if render[item]():
                empty = False
            # Flush after each section for progressive rendering.
            yield flush(target)
    if empty:
        document.addExternalStylesheet("resource/message.css")
        body.div("message paleyellow").h1("center").text("No reviews!")
    profiler.output(db=db, user=user, target=document)
    yield flush(None)



================================================
FILE: src/page/editresource.py
================================================

# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import dbutils import htmlutils import page.utils import configuration def renderEditResource(req, db, user): name = req.getParameter("name", None) document = htmlutils.Document(req) html = document.html() head = html.head() body = html.body() page.utils.generateHeader(body, db, user) document.addExternalStylesheet("resource/editresource.css") document.addExternalScript("resource/editresource.js") target = body.div("main") table = target.table('paleyellow', align='center') table.col(width='10%') table.col(width='60%') table.tr().td('h1', colspan=2).h1().text("Resource Editor") select_row = table.tr('select') select_row.td('heading').text('Resource:') select = select_row.td('value').select() if name is None: select.option(selected="selected").text("Select resource") select.option(value="diff.css", selected="selected" if name=="diff.css" else None).text("Diff coloring") select.option(value="syntax.css", selected="selected" if name=="syntax.css" else None).text("Syntax highlighting") help_row = table.tr('help') help_row.td('help', colspan=2).text("Select the resource to edit.") is_edited = False is_reset = False source = None if name is None: document.addInternalScript("var resource_name = null;"); source = "" else: if name not in ("diff.css", "syntax.css"): raise page.utils.DisplayMessage("Invalid resource name", body="Must be one of 'diff.css' and 'syntax.css'.") document.addInternalScript("var resource_name = %s;" % htmlutils.jsify(name)); cursor = db.cursor() cursor.execute("SELECT source FROM userresources WHERE uid=%s AND name=%s ORDER BY revision DESC FETCH FIRST ROW ONLY", (user.id, name)) row = cursor.fetchone() if row: is_edited = True source = row[0] if source is None: is_reset = is_edited source = open(configuration.paths.INSTALL_DIR + "/resources/" + name).read() document.addInternalScript("var original_source = %s;" % htmlutils.jsify(source)); table.tr('value').td('value', colspan=2).textarea(rows=source.count("\n") + 10).preformatted().text(source) 
buttons = table.tr('buttons').td('buttons', colspan=2) buttons.button('save').text("Save changes") if is_edited and not is_reset: buttons.button('reset').text("Reset to built-in version") if is_reset: buttons.button('restore').text("Restore last edited version") return document ================================================ FILE: src/page/filterchanges.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import dbutils import htmlutils import gitutils import page.utils import reviewing.utils as review_utils import log.commitset def renderFilterChanges(req, db, user): review_id = req.getParameter("review", filter=int) first_sha1 = req.getParameter("first", None) last_sha1 = req.getParameter("last", None) cursor = db.cursor() review = dbutils.Review.fromId(db, review_id) root_directories = {} root_files = {} def processFile(file_id): components = dbutils.describe_file(db, file_id).split("/") directories, files = root_directories, root_files for directory_name in components[:-1]: directories, files = directories.setdefault(directory_name, ({}, {})) files[components[-1]] = file_id if first_sha1 and last_sha1: cursor.execute("""SELECT commits.sha1 FROM commits JOIN changesets ON (changesets.child=commits.id) JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) WHERE reviewchangesets.review=%s""", (review.id,)) first_commit = gitutils.Commit.fromSHA1(db, review.repository, first_sha1) last_commit = gitutils.Commit.fromSHA1(db, review.repository, last_sha1) if len(first_commit.parents) > 1: raise page.utils.DisplayMessage( title="Filtering failed!", body=("First selected commit is a merge commit. Please go back " "and select a different range of commits."), review=review) if first_commit.parents: from_commit = gitutils.Commit.fromSHA1(db, review.repository, first_commit.parents[0]) else: from_commit = None to_commit = last_commit commits = log.commitset.CommitSet.fromRange(db, from_commit, to_commit) if not commits: raise page.utils.DisplayMessage( title="Filtering failed!", body=("The range of commits selected includes merges with " "ancestors not included in the range. 
Please go back " "and select a different range of commits."), review=review) cursor.execute("""SELECT DISTINCT reviewfiles.file FROM reviewfiles JOIN changesets ON (changesets.id=reviewfiles.changeset) JOIN commits ON (commits.id=changesets.child) WHERE reviewfiles.review=%s AND commits.sha1=ANY (%s)""", (review.id, [commit.sha1 for commit in commits])) else: cursor.execute("SELECT DISTINCT file FROM reviewfiles WHERE review=%s", (review.id,)) for (file_id,) in cursor: processFile(file_id) document = htmlutils.Document(req) html = document.html() head = html.head() body = html.body() page.utils.generateHeader(body, db, user, lambda target: review_utils.renderDraftItems(db, user, review, target), extra_links=[("r/%d" % review.id, "Back to Review")]) document.addExternalStylesheet("resource/filterchanges.css") document.addExternalScript("resource/filterchanges.js") document.addInternalScript(user.getJS()) document.addInternalScript(review.getJS()) if first_sha1 and last_sha1: document.addInternalScript("var commitRange = { first: %s, last: %s };" % (htmlutils.jsify(first_sha1), htmlutils.jsify(last_sha1))) else: document.addInternalScript("var commitRange = null;") target = body.div("main") basic = target.table('filter paleyellow', align='center', cellspacing=0) basic.col(width='10%') basic.col(width='60%') basic.col(width='30%') row = basic.tr("header") row.td('h1', colspan=2).h1().text("Filter Changes") row.td('h1 button').button("display").text("Display Diff") row = basic.tr("headings") row.td("select").text("Include") row.td("path").text("Path") row.td().text() def outputDirectory(base, name, directories, files): if name: level = base.count("/") row = basic.tr("directory", critic_level=level) row.td("select").input(type="checkbox") if level > 1: row.td("path").preformatted().innerHTML((" " * (len(base) - 2)) + "…/" + name + "/") else: row.td("path").preformatted().innerHTML(base + name + "/") row.td().text() else: row = basic.tr("directory", critic_level=-1) 
row.td("select").input(type="checkbox") row.td("path").preformatted().i().text("Everything") row.td().text() level = -1 for directory_name in sorted(directories.keys()): outputDirectory(base + name + "/" if name else "", directory_name, directories[directory_name][0], directories[directory_name][1]) for file_name in sorted(files.keys()): row = basic.tr("file", critic_file_id=files[file_name], critic_level=level + 1) row.td("select").input(type="checkbox") if level > -1: row.td("path").preformatted().innerHTML((" " * (len(base + name) - 1)) + "…/" + htmlutils.htmlify(file_name)) else: row.td("path").preformatted().innerHTML(htmlutils.htmlify(file_name)) row.td().text() outputDirectory("", "", root_directories, root_files) row = basic.tr("footer") row.td('spacer', colspan=3) row = basic.tr("footer") row.td('button', colspan=3).button("display").text("Display Diff") if user.getPreference(db, "ui.keyboardShortcuts"): page.utils.renderShortcuts(body, "filterchanges", review=review) return document ================================================ FILE: src/page/home.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import dbutils
import htmlutils
import page.utils
import gitutils
import configuration
import reviewing.filters
import profiling
import auth
import extensions.role.filterhook

from htmlutils import jsify
from textutils import json_encode

def renderHome(req, db, user):
    """Render the user's "Home" page.

    Shows the user's basic account details (name, email addresses, password,
    external accounts) and all of their review filters, grouped per
    repository.  'user' is the user whose page is shown; 'req.user' is the
    user viewing it — administrators may view (and edit) other users' pages.

    Returns the generated htmlutils.Document.
    Raises page.utils.NeedLogin for anonymous users.
    """
    if user.isAnonymous():
        raise page.utils.NeedLogin(req)

    profiler = profiling.Profiler()
    cursor = db.cursor()

    # The page is read-only by default when viewing someone else's page.
    readonly = req.getParameter("readonly", "yes" if user.name != req.user else "no") == "yes"
    repository = req.getParameter("repository", None, gitutils.Repository.FromParameter(db))
    verified_email_id = req.getParameter("email_verified", None, int)

    if not repository:
        repository = user.getDefaultRepository(db)

    # Possessive form of the user's name for the page title.
    title_fullname = user.fullname

    if title_fullname[-1] == 's':
        title_fullname += "'"
    else:
        title_fullname += "'s"

    cursor.execute("SELECT email FROM usergitemails WHERE uid=%s ORDER BY email ASC", (user.id,))
    gitemails = ", ".join([email for (email,) in cursor])

    document = htmlutils.Document(req)

    html = document.html()
    head = html.head()
    body = html.body()

    # actual_user is the viewing user when it differs from the displayed
    # user, else None (i.e. "None" means "viewing own page").
    if user == req.user:
        actual_user = None
    else:
        actual_user = req.user

    def renderHeaderItems(target):
        # Administrators viewing someone else's read-only page get an
        # "Edit" button.
        if readonly and actual_user and actual_user.hasRole(db, "administrator"):
            target.a("button", href="/home?user=%s&readonly=no" % user.name).text("Edit")

    page.utils.generateHeader(body, db, user, generate_right=renderHeaderItems, current_page="home")

    document.addExternalStylesheet("resource/home.css")
    document.addExternalScript("resource/home.js")
    document.addExternalScript("resource/autocomplete.js")

    if repository:
        document.addInternalScript(repository.getJS())
    else:
        document.addInternalScript("var repository = null;")

    if actual_user and actual_user.hasRole(db, "administrator"):
        document.addInternalScript("var administrator = true;")
    else:
        document.addInternalScript("var administrator = false;")

    document.addInternalScript(user.getJS())
    document.addInternalScript("user.gitEmails = %s;" % jsify(gitemails))
    document.addInternalScript("var verifyEmailAddresses = %s;" % jsify(configuration.base.VERIFY_EMAIL_ADDRESSES))
    document.setTitle("%s Home" % title_fullname)

    target = body.div("main")

    basic = target.table('paleyellow basic', align='center')
    basic.tr().td('h1', colspan=3).h1().text("%s Home" % title_fullname)

    def row(heading, value, help=None, extra_class=None):
        # Emit one labelled row; 'value' may be a plain string or a callable
        # that renders into the value cell.
        if extra_class:
            row_class = "line " + extra_class
        else:
            row_class = "line"
        main_row = basic.tr(row_class)
        main_row.td('heading').text("%s:" % heading)
        value_cell = main_row.td('value', colspan=2)
        if callable(value):
            value(value_cell)
        else:
            value_cell.text(value)
        basic.tr('help').td('help', colspan=3).text(help)

    def renderFullname(target):
        if readonly:
            target.text(user.fullname)
        else:
            target.input("value", id="user_fullname", value=user.fullname)
            target.span("status", id="status_fullname")
            buttons = target.span("buttons")
            buttons.button(onclick="saveFullname();").text("Save")
            buttons.button(onclick="resetFullname();").text("Reset")

    def renderEmail(target):
        # Full address list is shown to the user themselves and to
        # administrators; other viewers get a reduced view below.
        if not actual_user or actual_user.hasRole(db, "administrator"):
            cursor.execute("""SELECT id, email, verified
                                FROM useremails
                               WHERE uid=%s
                            ORDER BY id ASC""",
                           (user.id,))
            rows = cursor.fetchall()
            if rows:
                if len(rows) > 1:
                    target.addClass("multiple")
                addresses = target.div("addresses")
                for email_id, email, verified in rows:
                    checked = "checked" if email == user.email else None
                    selected = " selected" if email == user.email else ""
                    label = addresses.label("address inset flex" + selected,
                                            data_email_id=email_id)
                    if len(rows) > 1:
                        label.input(name="email", type="radio", value=email,
                                    checked=checked)
                    label.span("value").text(email)
                    actions = label.span("actions")
                    # 'verified' is NULL when no verification is required.
                    if verified is False:
                        actions.a("action unverified", href="#").text("[unverified]")
                    elif verified is True:
                        # Highlight the address that was just verified.
                        now = " now" if email_id == verified_email_id else ""
                        actions.span("action verified" + now).text("[verified]")
                    actions.a("action delete", href="#").text("[delete]")
            else:
                target.i().text("No email address")
            target.span("buttons").button("addemail").text(
                "Add email address")
        elif user.email is None:
            target.i().text("No email address")
        elif user.email_verified is False:
            # Pending verification: don't show to other users.
            target.i().text("Email address not verified")
        else:
            target.span("inset").text(user.email)

    def renderGitEmails(target):
        if readonly:
            target.text(gitemails)
        else:
            target.input("value", id="user_gitemails", value=gitemails)
            target.span("status", id="status_gitemails")
            buttons = target.span("buttons")
            buttons.button(onclick="saveGitEmails();").text("Save")
            buttons.button(onclick="resetGitEmails();").text("Reset")

    def renderPassword(target):
        cursor.execute("SELECT password IS NOT NULL FROM users WHERE id=%s", (user.id,))
        has_password = cursor.fetchone()[0]
        if not has_password:
            target.text("not set")
        else:
            target.text("****")
        if not readonly:
            # Administrators may (re)set other users' passwords directly;
            # users changing their own must supply the current one.
            if not has_password or (actual_user and actual_user.hasRole(db, "administrator")):
                target.span("buttons").button(onclick="setPassword();").text("Set password")
            else:
                target.span("buttons").button(onclick="changePassword();").text("Change password")

    row("User ID", str(user.id))
    row("User Name", user.name)
    row("Display Name", renderFullname, "This is the name used when displaying commits or comments.")
    row("Primary Email", renderEmail, "This is the primary email address, to which emails are sent.", extra_class="email")
    row("Git Emails", renderGitEmails, "These email addresses (comma-separated) are used to map Git commits to the user.")

    if configuration.base.AUTHENTICATION_MODE == "critic" \
            and auth.DATABASE.supportsPasswordChange():
        row("Password", renderPassword, extra_class="password")

    cursor.execute("""SELECT provider, account
                        FROM externalusers
                       WHERE uid=%s""",
                   (user.id,))

    # Skip providers no longer configured on this system.
    external_accounts = [(auth.PROVIDERS[provider_name], account)
                         for provider_name, account in cursor
                         if provider_name in auth.PROVIDERS]

    if external_accounts:
        basic.tr().td('h2', colspan=3).h2().text("External Accounts")

        for provider, account in external_accounts:
            def renderExternalAccount(target):
                # Called synchronously by row() below, so capturing the loop
                # variables here is safe.
                url = provider.getAccountURL(account)
                target.a("external", href=url).text(account)

            row(provider.getTitle(), renderExternalAccount)

    profiler.check("user information")

    filters = page.utils.PaleYellowTable(body, "Filters")
    filters.titleRight.a("button", href="/tutorial?item=filters").text("Tutorial")

    # Regular filters.  The NULL column aligns the result shape with the
    # extension hook filter query below (which has a filter name).
    cursor.execute("""SELECT repositories.id, repositories.name, repositories.path,
                             filters.id, filters.type, filters.path, NULL, filters.delegate
                        FROM repositories
                        JOIN filters ON (filters.repository=repositories.id)
                       WHERE filters.uid=%s""",
                   (user.id,))
    rows = cursor.fetchall()

    if configuration.extensions.ENABLED:
        cursor.execute("""SELECT repositories.id, repositories.name, repositories.path,
                                 filters.id, 'extensionhook', filters.path,
                                 filters.name, filters.data
                            FROM repositories
                            JOIN extensionhookfilters AS filters ON (filters.repository=repositories.id)
                           WHERE filters.uid=%s""",
                       (user.id,))
        rows.extend(cursor.fetchall())

    FILTER_TYPES = ["reviewer", "watcher", "ignored", "extensionhook"]

    def rowSortKey(row):
        (repository_id, repository_name, repository_path,
         filter_id, filter_type, filter_path,
         filter_name, filter_data) = row

        # Rows are grouped by repository first and type second, so sort by
        # repository name and filter type primarily.
        #
        # Secondarily sort by filter name (only for extension hook filters; is
        # None for regular filters) and filter path. This sorting is mostly to
        # achieve a stable order; it has no greater meaning.
        return (repository_name, FILTER_TYPES.index(filter_type),
                filter_name, filter_path)

    rows.sort(key=rowSortKey)

    if rows:
        repository = None
        repository_filters = None
        tbody_reviewer = None
        tbody_watcher = None
        tbody_ignored = None
        tbody_extensionhook = None
        # repository id -> list of filter ids whose matched-file count the
        # client script should fill in asynchronously.
        count_matched_files = {}

        for (repository_id, repository_name, repository_path,
             filter_id, filter_type, filter_path,
             filter_name, filter_data) in rows:
            if not repository or repository.id != repository_id:
                try:
                    repository = gitutils.Repository.fromId(db, repository_id)
                except auth.AccessDenied:
                    # Viewer may not access this repository; skip its filters.
                    continue
                repository_url = repository.getURL(db, user)
                filters.addSection(repository_name, repository_url)
                repository_filters = filters.addCentered().table("filters callout")
                # New repository section: reset the per-type table bodies.
                tbody_reviewer = tbody_watcher = tbody_ignored = tbody_extensionhook = None

            # Lazily create one tbody (with heading) per filter type.
            if filter_type == "reviewer":
                if not tbody_reviewer:
                    tbody_reviewer = repository_filters.tbody()
                    tbody_reviewer.tr().th(colspan=5).text("Reviewer")
                tbody = tbody_reviewer
            elif filter_type == "watcher":
                if not tbody_watcher:
                    tbody_watcher = repository_filters.tbody()
                    tbody_watcher.tr().th(colspan=5).text("Watcher")
                tbody = tbody_watcher
            elif filter_type == "ignored":
                if not tbody_ignored:
                    tbody_ignored = repository_filters.tbody()
                    tbody_ignored.tr().th(colspan=5).text("Ignored")
                tbody = tbody_ignored
            else:
                if not tbody_extensionhook:
                    tbody_extensionhook = repository_filters.tbody()
                    tbody_extensionhook.tr().th(colspan=5).text("Extension hooks")
                tbody = tbody_extensionhook

            row = tbody.tr()
            row.td("path").text(filter_path)

            if filter_type != "extensionhook":
                # For regular filters, 'filter_data' is the delegate list.
                delegates = row.td("delegates", colspan=2)
                if filter_data:
                    delegates.i().text("Delegates: ")
                    delegates.span("names").text(", ".join(filter_data.split(",")))
            else:
                role = extensions.role.filterhook.getFilterHookRole(db, filter_id)
                if role:
                    title = row.td("title")
                    title.text(role.title)
                    data = row.td("data")
                    data.text(filter_data)
                else:
                    row.td(colspan=2).i().text("Invalid filter")

            if filter_path == "/":
                row.td("files").text("all files")
            else:
                href = "javascript:void(showMatchedFiles(%s, %s));" % (jsify(repository.name), jsify(filter_path))
                row.td("files").a(href=href, id=("f%d" % filter_id)).text("? files")
                count_matched_files.setdefault(repository_id, []).append(filter_id)

            links = row.td("links")

            arguments = (jsify(repository.name), filter_id,
                         jsify(filter_type), jsify(filter_path),
                         jsify(filter_data))

            links.a(href="javascript:void(editFilter(%s, %d, %s, %s, %s));" % arguments).text("[edit]")

            if filter_type != "extensionhook":
                links.a(href="javascript:if (deleteFilterById(%d)) location.reload(); void(0);" % filter_id).text("[delete]")
                links.a(href="javascript:location.href='/config?filter=%d';" % filter_id).text("[preferences]")
            else:
                links.a(href="javascript:if (deleteExtensionHookFilterById(%d)) location.reload(); void(0);" % filter_id).text("[delete]")

        document.addInternalScript("var count_matched_files = %s;" % json_encode(count_matched_files.values()))
    else:
        filters.addCentered().p().b().text("No filters")

        # Additionally check if there are in fact no repositories.
        cursor.execute("SELECT 1 FROM repositories")
        if not cursor.fetchone():
            document.addInternalScript("var no_repositories = true;")

    if not readonly:
        filters.addSeparator()
        filters.addCentered().button(onclick="editFilter();").text("Add filter")

    profiler.check("filters")

    # Hidden container for the "add/edit filter" dialog markup used by the
    # client-side script.
    hidden = body.div("hidden", style="display: none")

    if configuration.extensions.ENABLED:
        filterhooks = extensions.role.filterhook.listFilterHooks(db, user)
    else:
        filterhooks = []

    with hidden.div("filterdialog") as dialog:
        paragraph = dialog.p()
        paragraph.b().text("Repository:")
        paragraph.br()

        page.utils.generateRepositorySelect(db, user, paragraph, name="repository")

        paragraph = dialog.p()
        paragraph.b().text("Filter type:")
        paragraph.br()

        filter_type = paragraph.select(name="type")
        filter_type.option(value="reviewer").text("Reviewer")
        filter_type.option(value="watcher").text("Watcher")
        filter_type.option(value="ignored").text("Ignored")

        for extension, manifest, roles in filterhooks:
            optgroup = filter_type.optgroup(label=extension.getTitle(db))
            for role in roles:
                option = optgroup.option(
                    value="extensionhook",
                    data_extension_id=extension.getExtensionID(db),
                    data_filterhook_name=role.name)
                option.text(role.title)

        paragraph = dialog.p()
        paragraph.b().text("Path:")
        paragraph.br()
        paragraph.input(name="path", type="text")
        paragraph.span("matchedfiles")

        # Inputs shown only for regular (non extension hook) filters.
        regular_div = dialog.div("regular")

        paragraph = regular_div.p()
        paragraph.b().text("Delegates:")
        paragraph.br()
        paragraph.input(name="delegates", type="text")

        paragraph = regular_div.p()
        label = paragraph.label()
        label.input(name="apply", type="checkbox", checked="checked")
        label.b().text("Apply to existing reviews")

        # Per-hook data entry sections, shown when that hook is selected.
        for extension, manifest, roles in filterhooks:
            for role in roles:
                if not role.data_description:
                    continue
                filterhook_id = "%d_%s" % (extension.getExtensionID(db), role.name)
                extensionhook_div = dialog.div(
                    "extensionhook " + filterhook_id, style="display: none")
                extensionhook_div.innerHTML(role.data_description)
                paragraph = extensionhook_div.p()
                paragraph.b().text("Data:")
                paragraph.br()
                paragraph.input(type="text")

    profiler.output(db, user, document)

    return document

================================================
FILE: src/page/loadmanifest.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2015 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import extensions
import page.utils

def renderLoadManifest(req, db, user):
    """Validate an extension's MANIFEST file and report the result.

    The "key" parameter is either "<author>/<extension>" or, for system
    extensions, just "<extension>".  Returns a plain-text response with
    either the validation error or a success message.
    """
    key = req.getParameter("key")

    if "/" in key:
        author_name, extension_name = key.split("/", 1)
    else:
        author_name, extension_name = None, key

    def load():
        # Return the error message on failure, a success message otherwise.
        try:
            extension = extensions.extension.Extension(author_name, extension_name)
        except extensions.extension.ExtensionError as error:
            return str(error)
        try:
            extension.getManifest()
        except extensions.manifest.ManifestError as error:
            return str(error)
        return "That's a valid manifest, friend."

    return page.utils.ResponseBody(load(), content_type="text/plain")

================================================
FILE: src/page/login.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import urllib import auth import page import page.utils import auth import configuration import request from page.parameters import Optional class LoginHandler(page.Page.Handler): def __init__(self, target="/", optional="no"): super(LoginHandler, self).__init__() self.target = target self.optional = optional == "yes" def generateHeader(self): self.document.addExternalStylesheet("resource/login.css") self.document.addExternalScript("resource/login.js") def generateContent(self): if not self.user.isAnonymous(): raise page.utils.MovedTemporarily(self.target, True) self.request.ensureSecure() if configuration.base.AUTHENTICATION_MODE != "critic": raise request.DoExternalAuthentication( configuration.base.AUTHENTICATION_MODE, self.target) self.document.setTitle("Sign in") def render(target): redirect_url = "redirect?" + urllib.urlencode( { "target": self.target }) form = target.form(name="login", method="POST", action=redirect_url) table = form.table("login callout", align="center") row = table.tr("status disabled") row.td(colspan=2).text() autofocus = "autofocus" for field in auth.DATABASE.getFields(): if len(field) == 3: hidden, identifier, label = field description = None else: hidden, identifier, label, description = field if hidden: field_type = "password" else: field_type = None row = table.tr("field") row.td("key").text(label) row.td("value").input("field", name=identifier, type=field_type, autofocus=autofocus) # Only autofocus the first field. 
autofocus = None row = table.tr("login") row.td(colspan=2).input("login", type="submit", value="Sign in") providers = [] for name, provider in auth.PROVIDERS.items(): providers.append((provider.getTitle(), name)) if providers: table.tr("separator1").td(colspan=2) table.tr("separator2").td(colspan=2) external = table.tr("external").td(colspan=2) first = True for title, name in sorted(providers): div = external.div("provider") url = "/externalauth/%s?%s" % (name, urllib.urlencode( { "target": self.target })) if first: div.text("Sign in using your ") first = False else: div.text("or ") div.a(href=url).text(title) if configuration.base.ALLOW_USER_REGISTRATION: table.tr("separator1").td(colspan=2) table.tr("separator2").td(colspan=2) register = table.tr("register").td(colspan=2) register.text("New to this system? ") register.a(href="/createuser").text("Create a user") register.text(" to start using it.") if self.optional and configuration.base.ALLOW_ANONYMOUS_USER: table.tr("separator1").td(colspan=2) table.tr("separator2").td(colspan=2) row = table.tr("continue") row.td(colspan=2).a(href=self.target).innerHTML( "… or, continue anonymously") paleyellow = page.utils.PaleYellowTable(self.body, "Sign in") paleyellow.addCentered(render) class Login(page.Page): def __init__(self): super(Login, self).__init__("login", { "target": Optional(str), "optional": Optional(str) }, LoginHandler) ================================================ FILE: src/page/manageextensions.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import page.utils import htmlutils import dbutils import configuration from extensions.extension import Extension, ExtensionError from extensions.manifest import (ManifestError, PageRole, InjectRole, ProcessCommitsRole, FilterHookRole, ScheduledRole) def renderManageExtensions(req, db, user): if not configuration.extensions.ENABLED: administrators = dbutils.getAdministratorContacts(db, as_html=True) raise page.utils.DisplayMessage( title="Extension support not enabled", body=(("<p>This Critic system does not support extensions.</p>" "<p>Contact %s to have it enabled, or see the " "<a href='/tutorial?item=administration#extensions'>" "section on extensions</a> in the system administration " "tutorial for more information.</p>") % administrators), html=True) cursor = db.cursor() what = req.getParameter("what", "available") selected_versions = page.utils.json_decode(req.getParameter("select", "{}")) focused = req.getParameter("focus", None) if what == "installed": title = "Installed Extensions" listed_extensions = [] for extension_id, _, _, _ in Extension.getInstalls(db, user): try: listed_extensions.append(Extension.fromId(db, extension_id)) except ExtensionError as error: listed_extensions.append(error) else: title = "Available Extensions" listed_extensions = Extension.find(db) req.content_type = "text/html; charset=utf-8" document = htmlutils.Document(req) document.setTitle("Manage Extensions") html = document.html() head = html.head() body = html.body() def generateRight(target): target.a("button", href="tutorial?item=extensions").text("Tutorial") target.text(" ") 
target.a("button", href="tutorial?item=extensions-api").text("API Documentation") page.utils.generateHeader(body, db, user, current_page="extensions", generate_right=generateRight) document.addExternalStylesheet("resource/manageextensions.css") document.addExternalScript("resource/manageextensions.js") document.addInternalScript(user.getJS()) table = page.utils.PaleYellowTable(body, title) def addTitleRightLink(url, label): if user.name != req.user: url += "&user=%s" % user.name table.titleRight.text(" ") table.titleRight.a(href=url).text("[" + label + " extensions]") if what != "installed" or focused: addTitleRightLink("/manageextensions?what=installed", "installed") if what != "available" or focused: addTitleRightLink("/manageextensions?what=available", "available") for item in listed_extensions: if isinstance(item, ExtensionError): extension_error = item extension = item.extension else: extension_error = None extension = item if focused and extension.getKey() != focused: continue extension_path = extension.getPath() if extension.isSystemExtension(): hosting_user = None else: hosting_user = extension.getAuthor(db) selected_version = selected_versions.get(extension.getKey(), False) installed_sha1, installed_version = extension.getInstalledVersion(db, user) universal_sha1, universal_version = extension.getInstalledVersion(db, None) installed_upgradeable = universal_upgradeable = False if extension_error is None: if installed_sha1: current_sha1 = extension.getCurrentSHA1(installed_version) installed_upgradeable = installed_sha1 != current_sha1 if universal_sha1: current_sha1 = extension.getCurrentSHA1(universal_version) universal_upgradeable = universal_sha1 != current_sha1 def massage_version(version): if version is None: return "live" elif version: return "version/%s" % version else: return None if selected_version is False: selected_version = installed_version if selected_version is False: selected_version = universal_version install_version = 
massage_version(selected_version) installed_version = massage_version(installed_version) universal_version = massage_version(universal_version) manifest = None if extension_error is None: try: if selected_version is False: manifest = extension.getManifest() else: manifest = extension.getManifest(selected_version) except ManifestError as error: pass elif installed_sha1: manifest = extension.getManifest(installed_version, installed_sha1) elif universal_sha1: manifest = extension.getManifest(universal_version, universal_sha1) if manifest: if what == "available" and manifest.hidden: # Hide from view unless the user is hosting the extension, or is # an administrator and the extension is a system extension. if extension.isSystemExtension(): if not user.hasRole(db, "administrator"): continue elif hosting_user != user: continue else: if hosting_user != user: continue extension_id = extension.getExtensionID(db, create=False) if not user.isAnonymous(): buttons = [] if extension_id is not None: cursor.execute("""SELECT 1 FROM extensionstorage WHERE extension=%s AND uid=%s""", (extension_id, user.id)) if cursor.fetchone(): buttons.append(("Clear storage", ("clearExtensionStorage(%s, %s)" % (htmlutils.jsify(extension.getAuthorName()), htmlutils.jsify(extension.getName()))))) if not installed_version: if manifest and install_version and install_version != universal_version: buttons.append(("Install", ("installExtension(%s, %s, %s)" % (htmlutils.jsify(extension.getAuthorName()), htmlutils.jsify(extension.getName()), htmlutils.jsify(install_version))))) else: buttons.append(("Uninstall", ("uninstallExtension(%s, %s)" % (htmlutils.jsify(extension.getAuthorName()), htmlutils.jsify(extension.getName()))))) if manifest and (install_version != installed_version or (installed_sha1 and installed_upgradeable)): if install_version == installed_version: label = "Upgrade" else: label = "Install" buttons.append(("Upgrade", ("reinstallExtension(%s, %s, %s)" % 
(htmlutils.jsify(extension.getAuthorName()), htmlutils.jsify(extension.getName()), htmlutils.jsify(install_version))))) if user.hasRole(db, "administrator"): if not universal_version: if manifest and install_version: buttons.append(("Install (universal)", ("installExtension(%s, %s, %s, true)" % (htmlutils.jsify(extension.getAuthorName()), htmlutils.jsify(extension.getName()), htmlutils.jsify(install_version))))) else: buttons.append(("Uninstall (universal)", ("uninstallExtension(%s, %s, true)" % (htmlutils.jsify(extension.getAuthorName()), htmlutils.jsify(extension.getName()))))) if manifest and (install_version != universal_version or (universal_sha1 and universal_upgradeable)): if install_version == universal_version: label = "Upgrade (universal)" else: label = "Install (universal)" buttons.append((label, ("reinstallExtension(%s, %s, %s, true)" % (htmlutils.jsify(extension.getAuthorName()), htmlutils.jsify(extension.getName()), htmlutils.jsify(universal_version))))) else: buttons = None def renderItem(target): target.span("name").innerHTML(extension.getTitle(db, html=True)) if hosting_user: is_author = manifest and manifest.isAuthor(db, hosting_user) is_sole_author = is_author and len(manifest.authors) == 1 else: is_sole_author = False if extension_error is None: span = target.span("details") span.b().text("Details: ") select = span.select("details", critic_author=extension.getAuthorName(), critic_extension=extension.getName()) select.option(value='', selected="selected" if selected_version is False else None).text("Select version") versions = extension.getVersions() if versions: optgroup = select.optgroup(label="Official Versions") for version in versions: optgroup.option(value="version/%s" % version, selected="selected" if selected_version == version else None).text("%s" % version.upper()) optgroup = select.optgroup(label="Development") optgroup.option(value='live', selected="selected" if selected_version is None else None).text("LIVE") if manifest: 
is_installed = bool(installed_version) if is_installed: target.span("installed").text(" [installed]") else: is_installed = bool(universal_version) if is_installed: target.span("installed").text(" [installed (universal)]") target.div("description").preformatted().text(manifest.description, linkify=True) if not is_sole_author: authors = target.div("authors") authors.b().text("Author%s:" % ("s" if len(manifest.authors) > 1 else "")) authors.text(", ".join(author.name for author in manifest.getAuthors())) else: is_installed = False div = target.div("description broken").preformatted() if extension_error is None: anchor = div.a(href="loadmanifest?key=%s" % extension.getKey()) anchor.text("[This extension has an invalid MANIFEST file]") else: div.text("[This extension has been deleted or has become inaccessible]") if selected_version is False: return pages = [] injects = [] processcommits = [] filterhooks = [] scheduled = [] if manifest: for role in manifest.roles: if isinstance(role, PageRole): pages.append(role) elif isinstance(role, InjectRole): injects.append(role) elif isinstance(role, ProcessCommitsRole): processcommits.append(role) elif isinstance(role, FilterHookRole): filterhooks.append(role) elif isinstance(role, ScheduledRole): scheduled.append(role) role_table = target.table("roles") if pages: role_table.tr().th(colspan=2).text("Pages") for role in pages: row = role_table.tr() url = "%s/%s" % (dbutils.getURLPrefix(db, user), role.pattern) if is_installed and "*" not in url: row.td("pattern").a(href=url).text(url) else: row.td("pattern").text(url) td = row.td("description") td.text(role.description) if injects: role_table.tr().th(colspan=2).text("Page Injections") for role in injects: row = role_table.tr() row.td("pattern").text("%s/%s" % (dbutils.getURLPrefix(db, user), role.pattern)) td = row.td("description") td.text(role.description) if processcommits: role_table.tr().th(colspan=2).text("ProcessCommits hooks") ul = role_table.tr().td(colspan=2).ul() for 
role in processcommits: li = ul.li() li.text(role.description) if filterhooks: role_table.tr().th(colspan=2).text("FilterHook hooks") for role in filterhooks: row = role_table.tr() row.td("title").text(role.title) row.td("description").text(role.description) if scheduled: role_table.tr().th(colspan=2).text("Scheduled hooks") for role in scheduled: row = role_table.tr() row.td("pattern").text("%s @ %s" % (role.frequency, role.at)) td = row.td("description") td.text(role.description) installed_by = "" if extension_id is not None: cursor.execute("""SELECT uid FROM extensioninstalls JOIN extensions ON (extensions.id=extensioninstalls.extension) WHERE extensions.id=%s""", (extension.getExtensionID(db, create=False),)) user_ids = set(user_id for user_id, in cursor.fetchall()) if user_ids: installed_by = " (installed" if None in user_ids: installed_by += " universally" user_ids.remove(None) if user_ids: installed_by += " and" if user_ids: installed_by += (" by %d user%s" % (len(user_ids), "s" if len(user_ids) > 1 else "")) installed_by += ")" table.addItem("Extension", renderItem, extension_path + "/" + installed_by, buttons) document.addInternalScript("var selected_versions = %s;" % page.utils.json_encode(selected_versions)) return document ================================================ FILE: src/page/managereviewers.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the
# License for the specific language governing permissions and limitations under
# the License.

import dbutils
import htmlutils
import page.utils
import reviewing.utils as review_utils

def renderManageReviewers(req, db, user):
    """Render the "Manage Reviewers" page for a single review.

    Builds an in-memory tree of every file touched by the review (nested
    dict pairs: ({sub-directories}, {files})) and emits one table row per
    directory and per file, each with an "Assigned" checkbox.  The actual
    (un)assignment editing is driven client-side by
    resource/managereviewers.js.

    Parameters:
      req  -- the HTTP request; must carry an integer "review" parameter
      db   -- database connection
      user -- the user viewing the page

    Returns the generated htmlutils.Document.
    """
    review_id = req.getParameter("review", filter=int)

    cursor = db.cursor()
    review = dbutils.Review.fromId(db, review_id)

    # Tree roots: directories maps name -> ({sub-dirs}, {files}),
    # files maps name -> file id.
    root_directories = {}
    root_files = {}

    def processFile(file_id):
        # Insert one file (by its full path) into the nested tree,
        # creating intermediate directory nodes as needed.
        components = dbutils.describe_file(db, file_id).split("/")
        directories, files = root_directories, root_files
        for directory_name in components[:-1]:
            directories, files = directories.setdefault(directory_name, ({}, {}))
        files[components[-1]] = file_id

    cursor.execute("SELECT file FROM reviewfiles WHERE review=%s", (review.id,))
    for (file_id,) in cursor:
        processFile(file_id)

    # All known user names, fed to the client-side autocompletion below.
    cursor.execute("SELECT name FROM users WHERE name IS NOT NULL")
    users = [user_name for (user_name,) in cursor if user_name]

    document = htmlutils.Document(req)

    html = document.html()
    head = html.head()
    body = html.body()

    page.utils.generateHeader(body, db, user, lambda target: review_utils.renderDraftItems(db, user, review, target), extra_links=[("r/%d" % review.id, "Back to Review")])

    document.addExternalStylesheet("resource/managereviewers.css")
    document.addExternalScript("resource/managereviewers.js")
    document.addInternalScript(user.getJS());
    document.addInternalScript(review.getJS());
    document.addInternalScript("var users = [ %s ];" % ", ".join([htmlutils.jsify(user_name) for user_name in sorted(users)]))

    target = body.div("main")

    basic = target.table('manage paleyellow', align='center')
    basic.col(width='10%')
    basic.col(width='60%')
    basic.col(width='30%')

    basic.tr().td('h1', colspan=3).h1().text("Manage Reviewers")

    # "Current:" row listing the review's current reviewers.
    row = basic.tr("current")
    row.td("select").text("Current:")
    cell = row.td("value")
    for index, reviewer in enumerate(review.reviewers):
        if index != 0:
            cell.text(", ")
        # NOTE(review): the second .replace() argument appears to be a
        # non-breaking space (U+00A0) so full names don't wrap — confirm the
        # literal survived text extraction.
        cell.span("reviewer", critic_username=reviewer.name).innerHTML(htmlutils.htmlify(reviewer.fullname).replace(" ", " "))
    row.td("right").text()

    # Input row for selecting which reviewer's assignments to edit.
    row = basic.tr("reviewer")
    row.td("select").text("Reviewer:")
    row.td("value").input("reviewer").span("message")
    row.td("right").button("save").text("Save")

    row = basic.tr("help")
    row.td("help", colspan=3).text("Enter the name of a current reviewer to edit assignments (or unassign.) Enter the name of another user to add a new reviewer.")

    row = basic.tr("headings")
    row.td("select").text("Assigned")
    row.td("path").text("Path")
    row.td().text()

    def outputDirectory(base, name, directories, files):
        """Recursively emit one row per directory/file in the tree.

        |base| is the already-emitted parent path ("" at the root); a falsy
        |name| marks the synthetic root node, which itself gets no row.
        """
        if name:
            level = base.count("/")
            row = basic.tr("directory", critic_level=level)
            row.td("select").input(type="checkbox")
            # Deeply nested paths are abbreviated with a leading ellipsis;
            # the leading run keeps the row visually indented.
            if level > 1:
                row.td("path").preformatted().innerHTML((" " * (len(base) - 2)) + "…/" + name + "/")
            else:
                row.td("path").preformatted().innerHTML(base + name + "/")
            row.td().text()
        else:
            level = 0

        for directory_name in sorted(directories.keys()):
            outputDirectory(base + name + "/" if name else "", directory_name, directories[directory_name][0], directories[directory_name][1])

        for file_name in sorted(files.keys()):
            row = basic.tr("file", critic_file_id=files[file_name], critic_level=level + 1)
            row.td("select").input(type="checkbox")
            row.td("path").preformatted().innerHTML((" " * (len(base + name) - 1)) + "…/" + htmlutils.htmlify(file_name))
            row.td().text()

    outputDirectory("", "", root_directories, root_files)

    return document


================================================
FILE: src/page/news.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import page.utils
import textformatting
import htmlutils

def renderNewsItem(db, user, target, text, timestamp):
    """Render a single news item into |target|.

    |text| is the raw (formatted) item text; |timestamp| is shown as the
    title-right decoration.  A "Back" link back to the news index is
    appended below the item.
    """
    table = target.table("paleyellow", align="center")
    textformatting.renderFormatted(db, user, table, text.splitlines(), toc=False, title_right=timestamp)
    table.tr("back").td("back").div().a("back", href="news").text("Back")

def renderNewsItems(db, user, target, display_unread, display_read):
    """Render the news index into |target|.

    |display_unread| / |display_read| control which items appear; an item
    is "unread" when there is no matching row in newsread for this user.
    When nothing matches, a "No ... news!" placeholder row is emitted, and
    a "Show All" link is added whenever the view is filtered.
    """
    target.setTitle("News")

    table = target.table("paleyellow", align="center")
    table.tr("h1").td("h1", colspan=3).h1().text("News")

    cursor = db.cursor()
    # uid IS NULL <=> no newsread row joined <=> item is unread by |user|.
    cursor.execute("""SELECT id, date, text, uid IS NULL
                        FROM newsitems
             LEFT OUTER JOIN newsread ON (item=id AND uid=%s)
                    ORDER BY date DESC, id DESC""",
                   (user.id,))

    nothing = True

    for item_id, date, text, unread in cursor:
        if (unread and display_unread) or (not unread and display_read):
            row = table.tr("item", critic_item_id=item_id)
            row.td("date").text(date)
            # First line of the item text serves as its title.
            row.td("title").text(text.split("\n", 1)[0])
            row.td("status").text("unread" if unread else None)
            nothing = False

    if nothing:
        row = table.tr("nothing")
        # BUG FIX: this previously read
        #   "No %s news!" % "unread" if display_unread else "read"
        # which, due to conditional-expression precedence, evaluated to the
        # bare string "read" when only read items were displayed.  The
        # parenthesized form formats the message in both cases.
        row.td("nothing", colspan=3).text("No %s news!" % ("unread" if display_unread else "read"))

    if not display_unread or not display_read:
        table.tr("show").td("show", colspan=3).div().a("show", href="news?display=all").text("Show All")

def renderNews(req, db, user):
    """Handle the /news page.

    With an "item" parameter, renders that single item (and marks it as
    read for non-anonymous users acting as themselves); otherwise renders
    the index, filtered by the "display" parameter ("unread", "read" or
    "all", defaulting to "unread").

    Returns the generated htmlutils.Document.
    """
    item_id = req.getParameter("item", None, filter=int)
    display = req.getParameter("display", "unread")

    document = htmlutils.Document(req)

    html = document.html()
    head = html.head()
    body = html.body()

    cursor = db.cursor()

    def renderButtons(target):
        # Only news writers get editing controls.
        if user.hasRole(db, "newswriter"):
            if item_id is not None:
                target.button("editnewsitem").text("Edit Item")
            target.button("addnewsitem").text("Add News Item")

    page.utils.generateHeader(body, db, user, current_page="news", generate_right=renderButtons)

    document.addExternalStylesheet("resource/tutorial.css")
    document.addExternalStylesheet("resource/comment.css")
    document.addExternalStylesheet("resource/news.css")
    document.addExternalScript("resource/news.js")
    document.addInternalStylesheet("div.main table td.text { %s }" % user.getPreference(db, "style.tutorialFont"))

    target = body.div("main")

    if item_id:
        cursor.execute("SELECT text, date FROM newsitems WHERE id=%s", (item_id,))

        text, date = cursor.fetchone()

        document.addInternalScript("var news_item_id = %d;" % item_id)
        document.addInternalScript("var news_text = %s;" % htmlutils.jsify(text))

        renderNewsItem(db, user, target, text, date.isoformat())

        # Mark the item as read, but only for the authenticated user
        # viewing their own session (not anonymous, not impersonating).
        if not user.isAnonymous() and user.name == req.user:
            cursor.execute("SELECT 1 FROM newsread WHERE item=%s AND uid=%s", (item_id, user.id))
            if not cursor.fetchone():
                cursor.execute("INSERT INTO newsread (item, uid) VALUES (%s, %s)", (item_id, user.id))
                db.commit()
    else:
        renderNewsItems(db, user, target, display in ("unread", "all"), display in ("read", "all"))

    return document


# ================================================
# FILE: src/page/parameters.py
# ================================================

import base
import dbutils

class InvalidParameterValue(base.Error):
    """Raised when a page parameter fails validation; |expected| describes
    what a valid value would have looked like."""

    def __init__(self, expected):
        self.expected = expected
class Optional(object): def __init__(self, actual): self.actual = actual class ListOf(object): def __init__(self, actual): self.actual = actual def check_integer(value, what="value"): try: value = int(value) except ValueError: raise InvalidParameterValue("an integer %s" % what) else: return value class Stateful(object): def __init__(self, req, db, user): self.req = req self.db = db self.user = user class ReviewId(Stateful): def __call__(self, value): review_id = check_integer(value, "review id") try: review = dbutils.Review.fromId(self.db, review_id) except dbutils.NoSuchReview: raise InvalidParameterValue("a valid review id") return review ================================================ FILE: src/page/processcommits.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import cStringIO import extensions import gitutils import log.commitset import page.utils def renderProcessCommits(req, db, user): review_id = req.getParameter("review", filter=int) commit_ids = map(int, req.getParameter("commits").split(",")) review = dbutils.Review.fromId(db, review_id) all_commits = [gitutils.Commit.fromId(db, review.repository, commit_id) for commit_id in commit_ids] commitset = log.commitset.CommitSet(all_commits) heads = commitset.getHeads() tails = commitset.getTails() def process(): if len(heads) != 1: return "invalid commit-set; multiple heads" if len(tails) != 1: return "invalid commit-set; multiple tails" old_head = gitutils.Commit.fromSHA1(db, review.repository, tails.pop()) new_head = heads.pop() output = cStringIO.StringIO() extensions.role.processcommits.execute(db, user, review, all_commits, old_head, new_head, output) return output.getvalue() return page.utils.ResponseBody(process(), content_type="text/plain") ================================================ FILE: src/page/rebasetrackingreview.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import page
import htmlutils
import gitutils
import request

from page.parameters import Optional, ReviewId

class RebaseTrackingReview(page.Page):
    """The /rebasetrackingreview page: rebase a review whose branch tracks
    a remote branch.

    With only the "review" parameter the page offers a form to pick/fetch a
    new remote branch; once "newbranch", "upstream", "newhead" and
    "newupstream" are all supplied, it analyses the fetched commits and
    presents the actual rebase controls.
    """

    def __init__(self):
        super(RebaseTrackingReview, self).__init__("rebasetrackingreview",
                                                   { "review": ReviewId,
                                                     "newbranch": Optional(str),
                                                     "upstream": Optional(str),
                                                     "newhead": Optional(str),
                                                     "newupstream": Optional(str) },
                                                   RebaseTrackingReview.Handler)

    class Handler(page.Page.Handler):
        def __init__(self, review, newbranch=None, upstream=None, newhead=None, newupstream=None):
            super(RebaseTrackingReview.Handler, self).__init__(review)
            self.newbranch = newbranch
            self.upstream = upstream
            self.newhead = newhead
            self.newupstream = newupstream

        def generateHeader(self):
            # Page-specific resources; autocomplete.js backs the branch inputs.
            self.document.addExternalStylesheet("resource/rebasetrackingreview.css")
            self.document.addExternalScript("resource/autocomplete.js")
            self.document.addExternalScript("resource/rebasetrackingreview.js")

        def generateContent(self):
            trackedbranch = self.review.getTrackedBranch(self.db)

            if not trackedbranch:
                # NOTE(review): this raises request.DisplayMessage while the
                # branches below raise page.utils.DisplayMessage — confirm
                # both are handled equivalently by the dispatcher.
                raise request.DisplayMessage("Not supported!", "The review r/%d is not tracking a remote branch." % self.review.id)

            self.document.addInternalScript(self.review.repository.getJS())
            self.document.addInternalScript(self.review.getJS())
            self.document.addInternalScript("var trackedbranch = { remote: %s, name: %s };" % (htmlutils.jsify(trackedbranch.remote), htmlutils.jsify(trackedbranch.name)))

            table = page.utils.PaleYellowTable(self.body, "Rebase tracking review")

            def renderRemote(target):
                target.span("value inset", id="remote").text(trackedbranch.remote)

            def renderCurrentBranch(target):
                target.span("value inset", id="currentbranch").text("refs/heads/" + trackedbranch.name)

            table.addItem("Remote", renderRemote)
            table.addItem("Current branch", renderCurrentBranch)
            table.addSeparator()

            if self.newbranch is not None and self.upstream is not None and self.newhead is not None and self.newupstream is not None:
                # All rebase parameters supplied: analyse the fetched branch
                # and present the actual rebase controls.
                import log.html
                import log.commitset

                # Commits reachable from the new head but not from the new
                # upstream, i.e. the rebased version of the review's commits.
                sha1s = self.review.repository.revlist(included=[self.newhead], excluded=[self.newupstream])

                new_commits = log.commitset.CommitSet(gitutils.Commit.fromSHA1(self.db, self.review.repository, sha1) for sha1 in sha1s)
                new_heads = new_commits.getHeads()

                if len(new_heads) != 1:
                    raise page.utils.DisplayMessage("Invalid commit-set!", "Multiple heads. (This ought to be impossible...)")

                new_upstreams = new_commits.getFilteredTails(self.review.repository)
                if len(new_upstreams) != 1:
                    raise page.utils.DisplayMessage("Invalid commit-set!", "Multiple upstreams. (This ought to be impossible...)")

                new_head = new_heads.pop()
                new_upstream_sha1 = new_upstreams.pop()

                old_commits = log.commitset.CommitSet(self.review.branch.getCommits(self.db))
                old_upstreams = old_commits.getFilteredTails(self.review.repository)

                if len(old_upstreams) != 1:
                    # NOTE(review): "mulitple" is a typo in this user-facing
                    # message ("multiple"); left untouched here.
                    raise page.utils.DisplayMessage("Rebase not supported!", "The review has mulitple upstreams and can't be rebased.")

                if len(old_upstreams) == 1 and new_upstream_sha1 in old_upstreams:
                    # Same upstream as before => the branch history itself was
                    # rewritten rather than moved onto a new upstream.
                    # This appears to be a history rewrite.
                    new_upstream = None
                    new_upstream_sha1 = None
                    rebase_type = "history"
                else:
                    old_upstream = gitutils.Commit.fromSHA1(self.db, self.review.repository, old_upstreams.pop())
                    new_upstream = gitutils.Commit.fromSHA1(self.db, self.review.repository, new_upstream_sha1)
                    # Fast-forward move when the new upstream contains the old.
                    if old_upstream.isAncestorOf(new_upstream):
                        rebase_type = "move:ff"
                    else:
                        rebase_type = "move"

                # Expose the analysis result to rebasetrackingreview.js.
                self.document.addInternalScript("var check = { rebase_type: %s, old_head_sha1: %s, new_head_sha1: %s, new_upstream_sha1: %s, new_trackedbranch: %s };" % (htmlutils.jsify(rebase_type), htmlutils.jsify(self.review.branch.head_sha1), htmlutils.jsify(new_head.sha1), htmlutils.jsify(new_upstream_sha1), htmlutils.jsify(self.newbranch[len("refs/heads/"):])))

                def renderNewBranch(target):
                    target.span("value inset", id="newbranch").text(self.newbranch)
                    target.text(" @ ")
                    target.span("value inset").text(new_head.sha1[:8] + " " + new_head.niceSummary())

                def renderUpstream(target):
                    target.span("value inset", id="upstream").text(self.upstream)
                    target.text(" @ ")
                    target.span("value inset").text(new_upstream.sha1[:8] + " " + new_upstream.niceSummary())

                table.addItem("New branch", renderNewBranch)
                if new_upstream:
                    # Skipped for history rewrites (new_upstream is None).
                    table.addItem("New upstream", renderUpstream)
                table.addSeparator()

                # Status placeholders; filled in asynchronously by the page's
                # JavaScript.
                def renderMergeStatus(target):
                    target.a("status", id="status_merge").text("N/A")

                def renderConflictsStatus(target):
                    target.a("status", id="status_conflicts").text("N/A")

                def renderHistoryRewriteStatus(target):
                    target.a("status", id="status_historyrewrite").text("N/A")

                table.addSection("Status")

                if rebase_type == "history":
                    table.addItem("History rewrite", renderHistoryRewriteStatus)
                else:
                    if rebase_type == "move:ff":
                        table.addItem("Merge", renderMergeStatus)
                    table.addItem("Conflicts", renderConflictsStatus)

                def renderRebaseReview(target):
                    target.button(id="rebasereview", onclick="rebaseReview();", disabled="disabled").text("Rebase Review")

                table.addSeparator()
                table.addCentered(renderRebaseReview)

                log.html.render(self.db, self.body, "Rebased commits", commits=list(new_commits))
            else:
                # Parameter-gathering mode: let the user pick the new remote
                # branch and upstream, optionally guided by site-specific
                # customization hooks (both optional imports).
                try:
                    from customization.branches import getRebasedBranchPattern
                except ImportError:
                    def getRebasedBranchPattern(branch_name):
                        return None

                pattern = getRebasedBranchPattern(trackedbranch.name)

                try:
                    from customization.branches import isRebasedBranchCandidate
                except ImportError:
                    isRebasedBranchCandidate = None

                if pattern or isRebasedBranchCandidate:
                    # Candidate branches on the remote, optionally narrowed by
                    # the customization predicate.
                    candidates = [name[len("refs/heads/"):] for sha1, name in gitutils.Repository.lsremote(trackedbranch.remote, pattern=pattern, include_heads=True) if name.startswith("refs/heads/")]

                    if isRebasedBranchCandidate is not None:
                        def isCandidate(name):
                            return isRebasedBranchCandidate(trackedbranch.name, name)
                        candidates = filter(isCandidate, candidates)
                else:
                    candidates = []

                if len(candidates) > 1:
                    # Multiple candidates: offer a dropdown, with an "Edit"
                    # escape hatch to free-form input.
                    def renderCandidates(target):
                        target.text("refs/heads/")
                        dropdown = target.select(id="newbranch")
                        for name in candidates:
                            dropdown.option(value=name).text(name)
                    table.addItem("New branch", renderCandidates, buttons=[("Edit", "editNewBranch(this);")])
                else:
                    # Zero or one candidate: plain text input, pre-filled with
                    # the single candidate or the currently tracked name.
                    if len(candidates) == 1:
                        default_value = candidates[0]
                    else:
                        default_value = trackedbranch.name
                    def renderEdit(target):
                        target.text("refs/heads/")
                        target.input(id="newbranch", value=default_value)
                    table.addItem("New branch", renderEdit)

                def renderUpstreamInput(target):
                    target.input(id="upstream", value="refs/heads/master")

                table.addItem("Upstream", renderUpstreamInput)

                def renderFetchBranch(target):
                    target.button(onclick="fetchBranch();").text("Fetch Branch")

                table.addSeparator()
                table.addCentered(renderFetchBranch)


================================================
FILE: src/page/repositories.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import page.utils
import htmlutils
import dbutils
import gitutils
import configuration

def renderRepositories(req, db, user):
    """Render the /repositories page: every repository with its tracked
    branches, plus controls (backed by resource/repositories.js) to add
    repositories and tracked branches.

    Returns the generated htmlutils.Document.
    """
    req.content_type = "text/html; charset=utf-8"

    document = htmlutils.Document(req)
    document.setTitle("Repositories")

    html = document.html()
    head = html.head()
    body = html.body()

    def generateRight(target):
        # Only users with the "repositories" role may add repositories.
        if user.hasRole(db, "repositories"):
            target.a("button", href="newrepository").text("Add Repository")

    page.utils.generateHeader(body, db, user, current_page="repositories", generate_right=generateRight)

    document.addExternalStylesheet("resource/repositories.css")
    document.addExternalScript("resource/repositories.js")
    document.addInternalScript(user.getJS())

    # Extra client-side privileges only for a real (non-impersonated)
    # administrator session.
    if user.name == req.user and user.hasRole(db, "administrator"):
        document.addInternalScript("user.administrator = true;")

    cursor = db.cursor()
    cursor.execute("SELECT id, name, path, parent FROM repositories ORDER BY name ASC")

    rows = cursor.fetchall()

    class Repository:
        # Lightweight per-row record; default_remote is filled in later by
        # render() below, from the repository's tracked branches.
        def __init__(self, repository_id, name, path, parent_id):
            self.id = repository_id
            self.name = name
            self.path = path
            self.parent_id = parent_id
            self.default_remote = None
            self.location = gitutils.Repository.constructURL(db, user, path)

    repositories = list(Repository(*row) for row in rows)
    repository_by_id = dict((repository.id, repository) for repository in repositories)

    def render(target):
        # Emits the main repositories table.  Note: also mutates
        # repository.default_remote as a side effect, which the
        # repositories_js block after the call depends on.
        table = target.table("repositories callout")

        headings = table.tr("headings")
        headings.th("name").text("Short name")
        headings.th("location").text("Location")
        headings.th("upstream").text("Upstream")

        table.tr("spacer").td("spacer", colspan=3)

        for repository in repositories:
            row = table.tr("repository %s" % repository.name)
            row.td("name").text(repository.name)
            row.td("location").text(repository.location)
            if repository.parent_id:
                row.td("upstream").text(repository_by_id[repository.parent_id].name)
            else:
                row.td("upstream").text()

            cursor.execute("""SELECT id, local_name, remote, remote_name, disabled
                                FROM trackedbranches
                               WHERE repository=%s
                            ORDER BY id ASC""",
                           (repository.id,))

            details = table.tr("details %s" % repository.name).td(colspan=3)

            # Review branches ("r/...") are tracked internally and are not
            # interesting here.
            branches = [(branch_id, local_name, remote, remote_name, disabled) for branch_id, local_name, remote, remote_name, disabled in cursor if not local_name.startswith("r/")]

            if branches:
                trackedbranches = details.table("trackedbranches", cellspacing=0)
                trackedbranches.tr().th("title", colspan=5).text("Tracked Branches")

                row = trackedbranches.tr("headings")
                row.th("localname").text("Local branch")
                row.th("remote").text("Repository")
                row.th("remotename").text("Remote branch")
                row.th("enabled").text("Enabled")
                row.th("users").text("Users")

                default_remote = ""

                # Sort by local branch name (branch[1]).
                for branch_id, local_name, remote, remote_name, disabled in sorted(branches, key=lambda branch: branch[1]):
                    cursor.execute("SELECT uid FROM trackedbranchusers WHERE branch=%s", (branch_id,))

                    user_ids = [user_id for (user_id,) in cursor.fetchall()]

                    row = trackedbranches.tr("branch", critic_branch_id=branch_id, critic_user_ids=",".join(map(str, user_ids)))

                    # "*" as the local name means the tracking covers tags;
                    # its remote wins as the repository's default remote,
                    # otherwise the "master" tracking's remote is used.
                    if local_name == "*":
                        row.td("localname").i().text("Tags")
                        default_remote = remote
                    else:
                        row.td("localname").text(local_name)
                        if local_name == "master" and not default_remote:
                            default_remote = remote

                    row.td("remote").text(remote)

                    if remote_name == "*":
                        row.td("remotename").i().text("N/A")
                    else:
                        row.td("remotename").text(remote_name)

                    row.td("enabled").text("No" if disabled else "Yes")

                    cell = row.td("users")
                    for index, user_id in enumerate(user_ids):
                        if index:
                            cell.text(", ")
                        trackedbranch_user = dbutils.User.fromId(db, user_id)
                        cell.span("user").text(trackedbranch_user.name)

                if default_remote:
                    repository.default_remote = default_remote

            buttons = details.div("buttons")
            buttons.button(onclick="addTrackedBranch(%d);" % repository.id).text("Add Tracked Branch")

    paleyellow = page.utils.PaleYellowTable(body, "Repositories")
    paleyellow.addCentered(render)

    # Client-side repository metadata; relies on render() above having run
    # (via addCentered) so default_remote is populated.
    repositories_js = []

    for repository in repositories:
        name = htmlutils.jsify(repository.name)
        path = htmlutils.jsify(repository.path)
        location = htmlutils.jsify(repository.location)
        default_remote = htmlutils.jsify(repository.default_remote)

        repositories_js.append(("%d: { name: %s, path: %s, location: %s, defaultRemoteLocation: %s }" % (repository.id, name, path, location, default_remote)))

    document.addInternalScript("var repositories = { %s };" % ", ".join(repositories_js))

    return document


================================================
FILE: src/page/search.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import urlparse

import htmlutils
import textutils
import page.utils

def renderSearch(req, db, user):
    """Render the /search page: a review-search form, or (when the request
    carries "q"/"q<field>" query parameters) a pre-filled query whose result
    is fetched and displayed client-side by resource/search.js."""
    document = htmlutils.Document(req)
    document.setTitle("Review Search")

    html = document.html()
    head = html.head()
    body = html.body()

    page.utils.generateHeader(body, db, user, current_page="search")

    document.addExternalStylesheet("resource/search.css")
    document.addExternalScript("resource/search.js")
    document.addExternalScript("resource/autocomplete.js")
    document.addInternalScript(user.getJS())

    cursor = db.cursor()
    cursor.execute("SELECT name, fullname FROM users")
    users = dict(cursor)

    # Exposed for client-side auto-completion of user names.
    document.addInternalScript("var users = %s;" % textutils.json_encode(users))

    def renderQuickSearch(target):
        # Callout advertising the keyboard-triggered quick-search dialog.
        wrap = target.div("quicksearch callout")
        wrap.p().text("""A Quick Search dialog can be opened from any page using the "F" keyboard shortcut.""")
        wrap.p().a(href="/tutorial?item=search").text("More information")

    def renderInput(target, label, name, placeholder=""):
        # Plain labelled text input.
        fieldset = target.fieldset("search-" + name)
        fieldset.label("input-label").text(label)
        fieldset.input(type="text", name=name, placeholder=placeholder)

    def renderInputWithOptions(target, label, name, options, placeholder=""):
        # Text input preceded by a group of checkboxes ('options' is a list of
        # dicts with "name", "label" and optionally "checked" keys).
        fieldset = target.fieldset("search-" + name)
        fieldset.label("input-label").text(label)
        checkGroup = fieldset.div("input-options checkbox-group")
        for option in options:
            opt_label = checkGroup.label()
            opt_label.input(type="checkbox", name=option["name"],
                            checked="checked" if "checked" in option else None)
            opt_label.text(option["label"])
        fieldset.input(type="text", name=name, placeholder=placeholder)

    def renderFreetext(target):
        options = [{ "name": "freetextSummary", "label": "Summary", "checked": True },
                   { "name": "freetextDescription", "label": "Description", "checked": True }]
        renderInputWithOptions(target, label="Search term", name="freetext",
                               placeholder="free text search", options=options)

    def renderState(target):
        state = target.fieldset("search-state")
        state.label("input-label").text("State")
        select = state.select(name="state")
        select.option(value="", selected="selected").text("Any state")
        select.option(value="open").text("Open")
        select.option(value="pending").text("Pending")
        select.option(value="accepted").text("Accepted")
        select.option(value="closed").text("Finished")
        select.option(value="dropped").text("Dropped")

    def renderUser(target):
        options = [{ "name": "userOwner", "label": "Owner", "checked": True },
                   { "name": "userReviewer", "label": "Reviewer" }]
        renderInputWithOptions(target, label="User", name="user",
                               placeholder="user name(s)", options=options)

    def renderRepository(target):
        fieldset = target.fieldset("search-repository")
        fieldset.label("input-label").text("Repository")
        page.utils.generateRepositorySelect(
            db, user, fieldset, name="repository", selected=False,
            placeholder_text="Any repository", allow_selecting_none=True)

    section = body.section("paleyellow section")
    section.h1("section-heading").text("Review Search")

    # Translate URL query parameters into search terms: "q=foo" becomes the
    # term "foo", and "qbranch=bar" becomes the term "branch:bar".
    url_terms = []
    for name, value in urlparse.parse_qsl(req.query):
        if name == "q":
            url_terms.append(value)
        elif name.startswith("q"):
            url_terms.append("%s:%s" % (name[1:], value))

    wrap = section.div("flex")
    search = wrap.form("search", name="search")

    if url_terms:
        # A query arrived via the URL: show a single query field plus a
        # (initially hidden) result container filled in by the script.
        row = search.div("flex")
        query = row.fieldset("search-query")
        query.label("input-label").text("Search query")
        query.input(type="text", name="query", value=" ".join(url_terms))
        result = section.div("search-result", style="display: none")
        result.h2().text("Search result")
        result.div("callout")
    else:
        # No query in the URL: show the full structured search form.
        row = search.div("flex")
        renderFreetext(row)
        renderState(row)
        renderUser(search)
        row = search.div("flex")
        renderRepository(row)
        renderInput(row, "Branch", "branch")
        renderInput(search, "Path", "path")

    buttons = search.div("search-buttons")
    if url_terms:
        buttons.button(type="submit").text("Search again")
        buttons.a("button", href="/search").text("Show full search form")
    else:
        buttons.button(type="submit").text("Search")

    renderQuickSearch(wrap)

    return document
================================================ FILE: src/page/services.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import os import socket import time import errno import page.utils import htmlutils import dbutils import configuration import textutils def renderServices(req, db, user): req.content_type = "text/html; charset=utf-8" document = htmlutils.Document(req) document.setTitle("Services") html = document.html() head = html.head() body = html.body() page.utils.generateHeader(body, db, user, current_page="services") document.addExternalStylesheet("resource/services.css") document.addExternalScript("resource/services.js") document.addInternalScript(user.getJS()) delay = 0.5 connected = False while not connected and delay <= 10: connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) # This loop is for the case where we just restarted the service manager # via the /services UI. The client-side script immediately reloads the # page after restart, which typically leads to us trying to connect to # the service manager while it's in the process of restarting. So just # try a couple of times if at first the connection fails. 
try: connection.connect(configuration.services.SERVICEMANAGER["address"]) connected = True except socket.error as error: if error[0] in (errno.ENOENT, errno.ECONNREFUSED): time.sleep(delay) delay += delay else: raise if not connected: raise page.utils.DisplayMessage("Service manager not responding!") connection.send(textutils.json_encode({ "query": "status" })) connection.shutdown(socket.SHUT_WR) data = "" while True: received = connection.recv(4096) if not received: break data += received result = textutils.json_decode(data) if result["status"] == "error": raise page.utils.DisplayMessage(result["error"]) paleyellow = page.utils.PaleYellowTable(body, "Services") def render(target): table = target.table("services callout") headings = table.tr("headings") headings.th("name").text("Name") headings.th("module").text("Module") headings.th("pid").text("PID") headings.th("rss").text("RSS") headings.th("cpu").text("CPU") headings.th("uptime").text("Uptime") headings.th("commands").text() table.tr("spacer").td("spacer", colspan=4) def formatUptime(seconds): def inner(seconds): if seconds < 60: return "%d seconds" % seconds elif seconds < 60 * 60: return "%d minutes" % (seconds / 60) elif seconds < 60 * 60 * 24: return "%d hours" % (seconds / (60 * 60)) else: return "%d days" % (seconds / (60 * 60 * 24)) return inner(int(seconds)).replace(" ", " ") def formatRSS(bytes): if bytes < 1024: return "%d B" % bytes elif bytes < 1024 ** 2: return "%.1f kB" % (float(bytes) / 1024) elif bytes < 1024 ** 3: return "%.1f MB" % (float(bytes) / 1024 ** 2) else: return "%.1f GB" % (float(bytes) / 1024 ** 3) def formatCPU(seconds): minutes = int(seconds / 60) seconds = seconds - minutes * 60 seconds = "%2.2f" % seconds if seconds.find(".") == 1: seconds = "0" + seconds return "%d:%s" % (minutes, seconds) def getProcessData(pid): try: items = open("/proc/%d/stat" % pid).read().split() return { "cpu": formatCPU(float(int(items[13]) + int(items[14])) / os.sysconf("SC_CLK_TCK")), "rss": 
formatRSS(int(items[23]) * os.sysconf("SC_PAGE_SIZE")) } except: return { "cpu": "N/A", "rss": "N/A" } for service_name, service_data in sorted(result["services"].items()): process_data = getProcessData(service_data["pid"]) row = table.tr("service") row.td("name").text(service_name) row.td("module").text(service_data["module"]) row.td("pid").text(service_data["pid"] if service_data["pid"] != -1 else "(not running)") row.td("rss").text(process_data["rss"]) row.td("cpu").text(process_data["cpu"]) row.td("uptime").innerHTML(formatUptime(service_data["uptime"])) commands = row.td("commands") commands.a(href="javascript:void(restartService(%s));" % htmlutils.jsify(service_name)).text("[restart]") commands.a(href="javascript:void(getServiceLog(%s));" % htmlutils.jsify(service_name)).text("[log]") for index, pid in enumerate(os.listdir(configuration.paths.WSGI_PIDFILE_DIR)): startup = float(open(os.path.join(configuration.paths.WSGI_PIDFILE_DIR, pid)).read()) uptime = time.time() - startup process_data = getProcessData(int(pid)) row = table.tr("service") row.td("name").text("wsgi:%d" % index) row.td("module").text() row.td("pid").text(pid) row.td("rss").text(process_data["rss"]) row.td("cpu").text(process_data["cpu"]) row.td("uptime").innerHTML(formatUptime(uptime)) commands = row.td("commands") commands.a(href="javascript:void(restartService('wsgi'));").text("[restart]") paleyellow.addCentered(render) return document ================================================ FILE: src/page/showbatch.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import page.utils import dbutils import reviewing.comment as review_comment import reviewing.utils as review_utils import htmlutils import diff from htmlutils import jsify, htmlify def renderShowBatch(req, db, user): batch_id = req.getParameter("batch", None, filter=int) review_id = req.getParameter("review", None, filter=int) cursor = db.cursor() if batch_id is None and review_id is None: return page.utils.displayMessage(db, req, user, "Missing argument: 'batch'") if batch_id: cursor.execute("SELECT review, uid, comment FROM batches WHERE id=%s", (batch_id,)) row = cursor.fetchone() if not row: raise page.utils.DisplayMessage("Invalid batch ID: %d" % batch_id) review_id, author_id, chain_id = row author = dbutils.User.fromId(db, author_id) else: chain_id = None author = user review = dbutils.Review.fromId(db, review_id) if chain_id: batch_chain = review_comment.CommentChain.fromId(db, chain_id, user, review=review) batch_chain.loadComments(db, user) else: batch_chain = None document = htmlutils.Document(req) html = document.html() head = html.head() body = html.body() page.utils.generateHeader(body, db, user, lambda target: review_utils.renderDraftItems(db, user, review, target), extra_links=[("r/%d" % review.id, "Back to Review")]) document.addExternalStylesheet("resource/showreview.css") document.addExternalStylesheet("resource/showbatch.css") document.addExternalStylesheet("resource/review.css") document.addExternalStylesheet("resource/comment.css") document.addExternalScript("resource/review.js") document.addExternalScript("resource/comment.js") 
document.addInternalScript(user.getJS()) document.addInternalScript(review.getJS()) if batch_chain: document.addInternalScript("commentChainById[%d] = %s;" % (batch_chain.id, batch_chain.getJSConstructor())) target = body.div("main") table = target.table('paleyellow basic comments', align='center') table.col(width='10%') table.col(width='80%') table.col(width='10%') table.tr().td('h1', colspan=3).h1().text("Review by %s" % htmlify(author.fullname)) if batch_chain: batch_chain.loadComments(db, user) row = table.tr("line") row.td("heading").text("Comment:") row.td("value").preformatted().div("text").text(htmlify(batch_chain.comments[0].comment)) row.td("status").text() def renderFiles(title, cursor): files = [] for file_id, delete_count, insert_count in cursor.fetchall(): files.append((dbutils.describe_file(db, file_id), delete_count, insert_count)) paths = [] deleted = [] inserted = [] for path, delete_count, insert_count in sorted(files): paths.append(path) deleted.append(delete_count) inserted.append(insert_count) if paths: diff.File.eliminateCommonPrefixes(paths) row = table.tr("line") row.td("heading").text(title) files_table = row.td().table("files callout") headers = files_table.thead().tr() headers.th("path").text("Changed Files") headers.th("lines", colspan=2).text("Lines") files = files_table.tbody() for path, delete_count, insert_count in zip(paths, deleted, inserted): file = files.tr() file.td("path").preformatted().innerHTML(path) file.td("lines").preformatted().text("-%d" % delete_count if delete_count else None) file.td("lines").preformatted().text("+%d" % insert_count if insert_count else None) row.td("status").text() def condition(table_name): if batch_id: return "%s.batch=%d" % (table_name, batch_id) else: return "review=%d AND %s.batch IS NULL AND %s.uid=%d" % (review.id, table_name, table_name, author.id) cursor.execute("""SELECT reviewfiles.file, SUM(deleted), SUM(inserted) FROM reviewfiles JOIN reviewfilechanges ON 
(reviewfilechanges.file=reviewfiles.id) WHERE %s AND reviewfilechanges.to_state='reviewed' GROUP BY reviewfiles.file""" % condition("reviewfilechanges")) renderFiles("Reviewed:", cursor) cursor.execute("""SELECT reviewfiles.file, SUM(deleted), SUM(inserted) FROM reviewfiles JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id) WHERE %s AND reviewfilechanges.to_state='pending' GROUP BY reviewfiles.file""" % condition("reviewfilechanges")) renderFiles("Unreviewed:", cursor) def renderChains(title, cursor, replies): all_chains = [review_comment.CommentChain.fromId(db, chain_id, user, review=review) for (chain_id,) in cursor] if not all_chains: return for chain in all_chains: chain.loadComments(db, user) issue_chains = filter(lambda chain: chain.type == "issue", all_chains) draft_issues = filter(lambda chain: chain.state == "draft", issue_chains) open_issues = filter(lambda chain: chain.state == "open", issue_chains) addressed_issues = filter(lambda chain: chain.state == "addressed", issue_chains) closed_issues = filter(lambda chain: chain.state == "closed", issue_chains) note_chains = filter(lambda chain: chain.type == "note", all_chains) draft_notes = filter(lambda chain: chain.state == "draft" and chain != batch_chain, note_chains) open_notes = filter(lambda chain: chain.state == "open" and chain != batch_chain, note_chains) def renderChains(target, chains): for chain in chains: row = target.tr("comment") row.td("author").text(chain.user.fullname) row.td("title").a(href="showcomment?chain=%d" % chain.id).innerHTML(chain.leader()) row.td("when").text(chain.when()) def showcomments(filter_param): params = { "review": review.id, "filter": filter_param } if batch_id: params["batch"] = batch_id return htmlutils.URL("/showcomments", **params) if draft_issues or open_issues or addressed_issues or closed_issues: h2 = table.tr().td("h2", colspan=3).h2().text(title) if len(draft_issues) + len(open_issues) + len(addressed_issues) + len(closed_issues) > 1: 
h2.a(href=showcomments("issues")).text("[display all]") if draft_issues: h3 = table.tr(id="draft-issues").td("h3", colspan=3).h3().text("Draft issues") if len(draft_issues) > 1: h3.a(href=showcomments("draft-issues")).text("[display all]") renderChains(table, draft_issues) if batch_id is not None or replies: if open_issues: h3 = table.tr(id="open-issues").td("h3", colspan=3).h3().text("Still open issues") if batch_id and len(open_issues) > 1: h3.a(href=showcomments("open-issues")).text("[display all]") renderChains(table, open_issues) if addressed_issues: h3 = table.tr(id="addressed-issues").td("h3", colspan=3).h3().text("Now addressed issues") if batch_id and len(addressed_issues) > 1: h3.a(href=showcomments("addressed-issues")).text("[display all]") renderChains(table, addressed_issues) if closed_issues: h3 = table.tr(id="closed-issues").td("h3", colspan=3).h3().text("Now closed issues") if batch_id and len(closed_issues) > 1: h3.a(href=showcomments("closed-issues")).text("[display all]") renderChains(table, closed_issues) if draft_notes or open_notes: h2 = table.tr().td("h2", colspan=3).h2().text(title) if len(draft_notes) + len(open_notes) > 1: h2.a(href=showcomments("notes")).text("[display all]") if draft_notes: h3 = table.tr(id="draft-notes").td("h3", colspan=3).h3().text("Draft notes") if len(draft_notes) > 1: h3.a(href=showcomments("draft-notes")).text("[display all]") renderChains(table, draft_notes) if open_notes: h3 = table.tr(id="notes").td("h3", colspan=3).h3().text("Notes") if batch_id and len(open_notes) > 1: h3.a(href=showcomments("open-notes")).text("[display all]") renderChains(table, open_notes) cursor.execute("SELECT id FROM commentchains WHERE %s AND type='issue'" % condition("commentchains")) renderChains("Raised issues", cursor, False) cursor.execute("""SELECT commentchains.id FROM commentchains JOIN commentchainchanges ON (commentchainchanges.chain=commentchains.id) WHERE %s AND to_state='closed'""" % condition("commentchainchanges")) 
renderChains("Resolved issues", cursor, False) cursor.execute("""SELECT commentchains.id FROM commentchains JOIN commentchainchanges ON (commentchainchanges.chain=commentchains.id) WHERE %s AND to_state='open'""" % condition("commentchainchanges")) renderChains("Reopened issues", cursor, False) cursor.execute("""SELECT commentchains.id FROM commentchains JOIN commentchainchanges ON (commentchainchanges.chain=commentchains.id) WHERE %s AND to_type='issue'""" % condition("commentchainchanges")) renderChains("Converted into issues", cursor, False) cursor.execute("""SELECT commentchains.id FROM commentchains JOIN commentchainchanges ON (commentchainchanges.chain=commentchains.id) WHERE %s AND to_type='note'""" % condition("commentchainchanges")) renderChains("Converted into notes", cursor, False) cursor.execute("SELECT id FROM commentchains WHERE %s AND type='note'" % condition("commentchains")) renderChains("Written notes", cursor, False) cursor.execute("""SELECT commentchains.id FROM commentchains JOIN comments ON (comments.chain=commentchains.id) WHERE %s AND comments.id!=commentchains.first_comment""" % condition("comments")) renderChains("Replied to", cursor, True) return document ================================================ FILE: src/page/showbranch.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import page.utils import gitutils import dbutils import htmlutils import configuration import request import log.html as log_html def renderShowBranch(req, db, user): branch_name = req.getParameter("branch") base_name = req.getParameter("base", None) review_id = req.getParameter("review", None, filter=int) repository = req.getParameter("repository", user.getPreference(db, "defaultRepository")) if not repository: raise request.MissingParameter("repository") repository = gitutils.Repository.fromParameter(db, repository) cursor = db.cursor() cursor.execute("SELECT id, type, base, head, tail FROM branches WHERE name=%s AND repository=%s", (branch_name, repository.id)) try: branch_id, branch_type, base_id, head_id, tail_id = cursor.fetchone() except: return page.utils.displayMessage(db, req, user, "'%s' doesn't name a branch!" % branch_name) branch = dbutils.Branch.fromName(db, repository, branch_name) rebased = False if base_name: base = dbutils.Branch.fromName(db, repository, base_name) if base is None: return page.utils.displayMessage(db, req, user, "'%s' doesn't name a branch!" 
% base_name) old_count, new_count, base_old_count, base_new_count = branch.rebase(db, base) if base_old_count is not None: new_base_base_name = base.base.name else: new_base_base_name = None rebased = True document = htmlutils.Document(req) html = document.html() head = html.head() body = html.body() document.addExternalStylesheet("resource/showbranch.css") def renderCreateReview(target): if not user.isAnonymous() and branch and branch.review is None and not rebased: url = htmlutils.URL("/createreview", repository=repository.id, branch=branch_name) target.a("button", href=url).text("Create Review") if review_id is not None: review = dbutils.Review.fromId(db, review_id) else: review = None if review: extra_links = [("r/%d" % review.id, "Back to Review")] document.addInternalScript(review.getJS()) else: extra_links = [] page.utils.generateHeader(body, db, user, renderCreateReview, extra_links=extra_links) document.addInternalScript(branch.getJS()) title_right = None if rebased: def renderPerformRebase(db, target): target.button("perform", onclick="rebase(%s, %s, %s, %s, %s, %s, %s)" % tuple(map(htmlutils.jsify, [branch_name, base_name, new_base_base_name, old_count, new_count, base_old_count, base_new_count]))).text("Perform Rebase") title_right = renderPerformRebase elif base_id is not None: bases = [] base = branch.base if base: if base.type == "review": bases.append("master") else: base = base.base while base: bases.append(base.name) base = base.base cursor.execute("SELECT name FROM branches WHERE base=%s", (branch.id,)) for (name,) in cursor: bases.append(name) def renderSelectBase(db, target): select = target.select("base") select.option(value="*").text("Select new base") select.option(value="*").text("---------------") for name in bases: select.option("base", value=name.split(" ")[0]).text(name) if not bases and branch.base: cursor.execute("SELECT commit FROM reachable WHERE branch=%s", (branch.id,)) commit_ids = [commit_id for (commit_id,) in cursor] for 
commit_id in commit_ids: cursor.execute("SELECT 1 FROM reachable WHERE branch=%s AND commit=%s", (branch.base.id, commit_id)) if cursor.fetchone(): bases.append("%s (trim)" % branch.base.name) break if bases: title_right = renderSelectBase target = body.div("main") if branch_type == 'normal': cursor.execute("SELECT COUNT(*) FROM reachable WHERE branch=%s", (branch_id,)) commit_count = cursor.fetchone()[0] if commit_count > configuration.limits.MAXIMUM_REACHABLE_COMMITS: offset = req.getParameter("offset", default=0, filter=int) limit = req.getParameter("limit", default=200, filter=int) head = gitutils.Commit.fromId(db, repository, head_id) tail = gitutils.Commit.fromId(db, repository, tail_id) if tail_id else None sha1s = repository.revlist([head], [tail] if tail else [], "--skip=%d" % offset, "--max-count=%d" % limit) commits = [gitutils.Commit.fromSHA1(db, repository, sha1) for sha1 in sha1s] def moreCommits(db, target): target.a(href="/log?branch=%s&offset=%d&limit=%d" % (branch_name, offset + limit, limit)).text("More commits...") log_html.renderList(db, target, branch.name, commits, title_right=title_right, bottom_right=moreCommits) return document log_html.render(db, target, branch.name, branch=branch, title_right=title_right) return document ================================================ FILE: src/page/showcomment.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations under # the License. import dbutils import htmlutils import page.utils import profiling import linkify import reviewing.comment as review_comment import reviewing.utils as review_utils import reviewing.html as review_html import changeset.utils as changeset_utils import operation.blame import log.commitset def renderShowComment(req, db, user): chain_id = req.getParameter("chain", filter=int) context_lines = req.getParameter("context", user.getPreference(db, "comment.diff.contextLines"), filter=int) default_compact = "yes" if user.getPreference(db, "commit.diff.compactMode") else "no" compact = req.getParameter("compact", default_compact) == "yes" default_tabify = "yes" if user.getPreference(db, "commit.diff.visualTabs") else "no" tabify = req.getParameter("tabify", default_tabify) == "yes" original = req.getParameter("original", "no") == "yes" chain = review_comment.CommentChain.fromId(db, chain_id, user) if chain is None or chain.state == "empty": raise page.utils.DisplayMessage("Invalid comment chain ID: %d" % chain_id) review = chain.review document = htmlutils.Document(req) html = document.html() head = html.head() body = html.body() document.setTitle("%s in review %s" % (chain.title(False), review.branch.name)) def renderHeaderItems(target): review_utils.renderDraftItems(db, user, review, target) target.div("buttons").span("buttonscope buttonscope-global") page.utils.generateHeader(body, db, user, renderHeaderItems, extra_links=[("r/%d" % review.id, "Back to Review")]) document.addExternalScript("resource/showcomment.js") document.addInternalScript(user.getJS(db)) document.addInternalScript(review.repository.getJS()) document.addInternalScript(review.getJS()) document.addInternalScript("var contextLines = %d;" % context_lines) document.addInternalScript("var keyboardShortcuts = %s;" % (user.getPreference(db, "ui.keyboardShortcuts") and "true" or "false")) if not 
user.isAnonymous() and user.name == req.user: document.addInternalScript("$(function () { markChainsAsRead([%d]); });" % chain_id) review_html.renderCommentChain(db, body.div("main"), user, review, chain, context_lines=context_lines, compact=compact, tabify=tabify, original=original, linkify=linkify.Context(db=db, request=req, review=review)) if user.getPreference(db, "ui.keyboardShortcuts"): page.utils.renderShortcuts(body, "showcomment", review=review) yield document.render(pretty=not compact) def renderShowComments(req, db, user): context_lines = req.getParameter("context", user.getPreference(db, "comment.diff.contextLines"), filter=int) default_compact = "yes" if user.getPreference(db, "commit.diff.compactMode") else "no" compact = req.getParameter("compact", default_compact) == "yes" default_tabify = "yes" if user.getPreference(db, "commit.diff.visualTabs") else "no" tabify = req.getParameter("tabify", default_tabify) == "yes" original = req.getParameter("original", "no") == "yes" review_id = req.getParameter("review", filter=int) batch_id = req.getParameter("batch", None, filter=int) filter = req.getParameter("filter", "all") blame = req.getParameter("blame", None) profiler = profiling.Profiler() review = dbutils.Review.fromId(db, review_id) review.repository.enableBlobCache() cursor = db.cursor() profiler.check("create review") if blame is not None: blame_user = dbutils.User.fromName(db, blame) cursor.execute("""SELECT commentchains.id FROM commentchains JOIN commentchainlines ON (commentchainlines.chain=commentchains.id) JOIN fileversions ON (fileversions.new_sha1=commentchainlines.sha1) JOIN changesets ON (changesets.id=fileversions.changeset) JOIN commits ON (commits.id=changesets.child) JOIN gitusers ON (gitusers.id=commits.author_gituser) JOIN usergitemails USING (email) JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id AND reviewchangesets.review=commentchains.review) WHERE commentchains.review=%s AND usergitemails.uid=%s AND 
commentchains.state!='empty' AND (commentchains.state!='draft' OR commentchains.uid=%s) ORDER BY commentchains.file, commentchainlines.first_line""", (review.id, blame_user.id, user.id)) include_chain_ids = set([chain_id for (chain_id,) in cursor]) profiler.check("initial blame filtering") else: include_chain_ids = None if filter == "toread": query = """SELECT commentchains.id FROM commentchains JOIN comments ON (comments.chain=commentchains.id) JOIN commentstoread ON (commentstoread.comment=comments.id) LEFT OUTER JOIN commentchainlines ON (commentchainlines.chain=commentchains.id) WHERE review=%s AND commentstoread.uid=%s ORDER BY file, first_line""" cursor.execute(query, (review.id, user.id)) else: query = """SELECT commentchains.id FROM commentchains LEFT OUTER JOIN commentchainlines ON (chain=id) WHERE review=%s AND commentchains.state!='empty'""" arguments = [review.id] if filter == "issues": query += " AND type='issue' AND (commentchains.state!='draft' OR commentchains.uid=%s)" arguments.append(user.id) elif filter == "draft-issues": query += " AND type='issue' AND commentchains.state='draft' AND commentchains.uid=%s" arguments.append(user.id) elif filter == "open-issues": query += " AND type='issue' AND commentchains.state='open'" elif filter == "addressed-issues": query += " AND type='issue' AND commentchains.state='addressed'" elif filter == "closed-issues": query += " AND type='issue' AND commentchains.state='closed'" elif filter == "notes": query += " AND type='note' AND (commentchains.state!='draft' OR commentchains.uid=%s)" arguments.append(user.id) elif filter == "draft-notes": query += " AND type='note' AND commentchains.state='draft' AND commentchains.uid=%s" arguments.append(user.id) elif filter == "open-notes": query += " AND type='note' AND commentchains.state='open'" else: query += " AND (commentchains.state!='draft' OR commentchains.uid=%s)" arguments.append(user.id) if batch_id is not None: query += " AND batch=%s" arguments.append(batch_id) 
# This ordering is inaccurate if comments apply to the same file but # different commits, but then, in that case there isn't really a # well-defined natural order either. Two comments that apply to the # same file and commit will at least be order by line number, and that's # better than nothing. query += " ORDER BY file, first_line" cursor.execute(query, arguments) profiler.check("main query") if include_chain_ids is None: chain_ids = [chain_id for (chain_id,) in cursor] else: chain_ids = [chain_id for (chain_id,) in cursor if chain_id in include_chain_ids] profiler.check("query result") document = htmlutils.Document(req) html = document.html() head = html.head() body = html.body() document.addInternalScript(user.getJS(db)) document.addInternalScript(review.getJS()) page.utils.generateHeader(body, db, user, lambda target: review_utils.renderDraftItems(db, user, review, target), extra_links=[("r/%d" % review.id, "Back to Review")]) profiler.check("page header") target = body.div("main") if chain_ids and not user.isAnonymous() and user.name == req.user: document.addInternalScript("$(function () { markChainsAsRead([%s]); });" % ", ".join(map(str, chain_ids))) if chain_ids: processed = set() chains = [] file_ids = set() changesets_files = {} changesets = {} if blame is not None: annotators = {} commits = log.commitset.CommitSet(review.branch.getCommits(db)) for chain_id in chain_ids: if chain_id in processed: continue else: processed.add(chain_id) chain = review_comment.CommentChain.fromId(db, chain_id, user, review=review) chains.append(chain) if chain.file_id is not None: file_ids.add(chain.file_id) parent, child = review_html.getCodeCommentChainChangeset(db, chain, original) if parent and child: changesets_files.setdefault((parent, child), set()).add(chain.file_id) profiler.check("load chains") changeset_cache = {} for (from_commit, to_commit), filtered_file_ids in changesets_files.items(): changesets[(from_commit, to_commit)] = changeset_utils.createChangeset(db, 
user, review.repository, from_commit=from_commit, to_commit=to_commit, filtered_file_ids=filtered_file_ids)[0] profiler.check("create changesets") if blame is not None: annotators[(from_commit, to_commit)] = operation.blame.LineAnnotator(db, from_commit, to_commit, file_ids=file_ids, commits=commits, changeset_cache=changeset_cache) profiler.check("create annotators") for chain in chains: if blame is not None and chain.file_id is not None: try: changeset = changesets[(chain.first_commit, chain.last_commit)] annotator = annotators[(chain.first_commit, chain.last_commit)] except KeyError: # Most likely a comment created via /showfile. Such a # comment could be in code that 'blame_user' modified in the # review, but for now, let's skip the comment. continue else: file_in_changeset = changeset.getFile(chain.file_id) if not file_in_changeset: continue try: offset, count = chain.lines_by_sha1[file_in_changeset.new_sha1] except KeyError: # Probably a chain raised against the "old" side of the diff. 
continue else: if not annotator.annotate(chain.file_id, offset, offset + count - 1, check_user=blame_user): continue profiler.check("detailed blame filtering") if chain.file_id is not None: from_commit, to_commit = review_html.getCodeCommentChainChangeset(db, chain, original) changeset = changesets.get((from_commit, to_commit)) else: changeset = None review_html.renderCommentChain(db, target, user, review, chain, context_lines=context_lines, compact=compact, tabify=tabify, original=original, changeset=changeset, linkify=linkify.Context(db=db, request=req, review=review)) profiler.check("rendering") yield document.render(stop=target, pretty=not compact) + "<script>console.log((new Date).toString());</script>" profiler.check("transfer") page.utils.renderShortcuts(target, "showcomments", review=review) else: target.h1(align="center").text("No comments.") profiler.output(db, user, document) yield document.render(pretty=not compact) ================================================ FILE: src/page/showcommit.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
def renderCommitInfo(db, target, user, repository, review, commit, conflicts=False, minimal=False):
    """Render the 'commit info' table for a single commit into 'target'.

    Emits (via the HTML-builder API on 'target'):
      - for merge commits, an 'Alternate view' link toggling the
        conflict-resolution view (?conflicts=yes),
      - the SHA-1 row (with repository name if it is not the user's
        default repository, plus a 'browse tree' link),
      - Author / Commit rows (collapsed to one 'Author/Commit' row when
        author and committer match),
      - Parent / Child navigation rows (suppressed in 'minimal' mode),
      - the commit message, with length-limit highlighting on the summary
        line, and
      - when 'review' is set, internal scripts registering the comment
        chains attached to this commit.

    'conflicts' selects the conflict-resolution presentation for merges;
    'minimal' renders a reduced table (used when embedding).  Returns None;
    all output is side effects on 'target'.
    """
    cursor = db.cursor()
    msg = commit.message.splitlines()
    commit_info = target.table("commit-info")

    # NOTE(review): this helper's 'target' parameter is never used; the body
    # appends to the enclosing function's 'cell' local (bound at call time
    # via closure), so branch spans land directly in the current table cell
    # rather than in the span passed in.  Works because every call site sets
    # 'cell' first, but looks accidental -- confirm before relying on it.
    def outputBranches(target, commit):
        cursor.execute("""SELECT branches.name, reviews.id
                            FROM branches
                            JOIN reachable ON (reachable.branch=branches.id)
                            JOIN commits ON (commits.id=reachable.commit)
                 LEFT OUTER JOIN reviews ON (reviews.branch=branches.id)
                           WHERE branches.repository=%s
                             AND commits.sha1=%s""",
                       (repository.id, commit.sha1))
        for branch, review_id in cursor:
            span = cell.span("branch")
            if review_id is None:
                # Plain branch: link to its log page.
                url = htmlutils.URL("/log", repository=repository.id, branch=branch)
                title = branch
            else:
                # Review branch: link to the review instead.
                url = "/r/%d" % review_id
                title = url
            span.text("[")
            span.a("branch", href=url).text(title)
            span.text("]")

    # List every tag pointing at 'commit' as "[name]" spans.
    def outputTags(target, commit):
        cursor.execute("SELECT name FROM tags WHERE repository=%s AND sha1=%s",
                       (repository.id, commit.sha1))
        for (tag,) in cursor:
            target.span("tag").text("[%s]" % tag)

    if len(commit.parents) > 1:
        # Merge commit: offer a link that flips between the per-parent diff
        # view and the conflict-resolution view.
        row = commit_info.tr("commit-info")
        row.th(align='right').text("Alternate view:")
        review_arg = "&review=%d" % review.id if review else ""
        if conflicts:
            row.td(align='left').a(href="/showcommit?sha1=%s&repository=%d%s" % (commit.sha1, repository.id, review_arg)).text("display changes relative to parents")
        else:
            row.td(align='left').a(href="/showcommit?sha1=%s&repository=%d%s&conflicts=yes" % (commit.sha1, repository.id, review_arg)).text("display conflict resolution changes")

    row = commit_info.tr("commit-info")
    row.th(align='right').text("SHA-1:")
    cell = row.td(align='left')

    if minimal:
        # Minimal mode links the SHA-1 back to the full commit page.
        cell.a(href="/%s/%s?review=%d" % (repository.name, commit.sha1, review.id)).text(commit.sha1)
    else:
        cell.text(commit.sha1)

    # Spell out the repository unless it is the user's default one.
    if repository.name != user.getPreference(db, "defaultRepository"):
        cell.text(" in ")
        cell.b().text(repository.getURL(db, user))

    if not minimal:
        if review:
            review_arg = "&review=%d" % review.id
        else:
            review_arg = ""

        span = cell.span("links").span("link")
        span.text("[")
        span.a("link", href="/showtree?sha1=%s%s" % (commit.sha1, review_arg)).innerHTML("browse tree")
        span.text("]")

        # Branch/tag decorations only make sense outside a review context.
        if not review:
            outputBranches(cell.span("branches"), commit)
            outputTags(cell.span("tags"), commit)

    if minimal or commit.author.email != commit.committer.email or commit.author.time != commit.committer.time:
        # Author and committer differ (or minimal mode): separate rows.
        row = commit_info.tr("commit-info")
        row.th(align='right').text("Author:")
        row.td(align='left').text(str(commit.author))
        if not minimal:
            row = commit_info.tr("commit-info")
            row.th(align='right').text("Commit:")
            row.td(align='left').text(str(commit.committer))
    else:
        # Same person, same time: collapse into one row.
        row = commit_info.tr("commit-info")
        row.th(align='right').text("Author/Commit:")
        row.td(align='left').text(str(commit.author))

    if not minimal:
        if review:
            review_url_contribution = "?review=%d" % review.id
        else:
            review_url_contribution = ""

        # One "Parent:" row per parent that is visible in this context
        # (inside a review, only parents the review contains).
        for parent_sha1 in commit.parents:
            parent = gitutils.Commit.fromSHA1(db, repository, parent_sha1)
            if not review or review.containsCommit(db, parent):
                parent_href = "/%s/%s%s" % (repository.name, parent.sha1, review_url_contribution)
                row = commit_info.tr("commit-info")
                row.th(align='right').text("Parent:")
                cell = row.td(align='left')
                cell.a(href=parent_href, rel="previous").text("%s" % parent.niceSummary())
                cell.setLink("previous", parent_href)
                if not review:
                    outputBranches(cell.span("branches"), parent)
                    outputTags(cell.span("tags"), parent)

        # Children come from the cached 'edges' table, not from git.
        cursor.execute("SELECT child FROM edges WHERE parent=%s", [commit.id])
        child_ids = cursor.fetchall()

        for (child_id,) in child_ids:
            if not review or review.containsCommit(db, child_id):
                try:
                    child = gitutils.Commit.fromId(db, repository, child_id)
                except:
                    # Child commit could not be loaded; skip it rather than
                    # failing the whole page.  (Broad except kept as-is.)
                    continue
                child_href = "/%s/%s%s" % (repository.name, child.sha1, review_url_contribution)
                row = commit_info.tr("commit-info")
                row.th(align='right').text("Child:")
                cell = row.td(align='left')
                cell.a(href=child_href).text("%s" % child.niceSummary())
                # Only a unique child can be the rel=next navigation target.
                if len(child_ids) == 1:
                    cell.setLink("next", child_href)
                if not review:
                    outputBranches(cell.span("branches"), child)
                    outputTags(cell.span("tags"), child)

    # Linkifier used when rendering the commit message: SHA-1s mentioned in
    # the message link into the review when the referenced commit is part of
    # it, otherwise to the plain commit page.
    def linkToCommit(commit):
        if review:
            cursor.execute("SELECT 1 FROM commits JOIN changesets ON (child=commits.id) JOIN reviewchangesets ON (changeset=changesets.id) WHERE sha1=%s AND review=%s", (commit.sha1, review.id))
            if cursor.fetchone():
                return "%s/%s?review=%d" % (repository.name, commit.sha1, review.id)
        return "%s/%s" % (repository.name, commit.sha1)

    highlight_index = 0

    # For fixup!/squash! commits, highlight the first non-empty line after
    # the autogenerated summary instead of the summary itself.
    if msg[0].startswith("fixup!") or msg[0].startswith("squash!"):
        for candidate_index, line in enumerate(msg[1:]):
            if line.strip():
                highlight_index = candidate_index + 1
                break

    commit_msg = commit_info.tr("commit-msg").td(colspan=2).table("commit-msg", cellspacing=0)

    for index, text in enumerate(msg):
        className = "line single"
        if index == 0:
            className += " first"
        elif index == len(msg) - 1:
            className += " last"
        # Length limits: none before the highlighted line or for merges,
        # "60-80" (soft-hard) for the summary line, "70-90" for the body.
        if index < highlight_index or len(commit.parents) > 1:
            lengthLimit = None
        elif index == highlight_index:
            lengthLimit = "60-80"
        else:
            lengthLimit = "70-90"
        if index == highlight_index:
            className += " highlight"
        row = commit_msg.tr(className)
        row.td("edge").text()
        cell = row.td("line single commit-msg", id="msg%d" % index, critic_length_limit=lengthLimit)
        if text:
            cell.preformatted().text(text, linkify=linkToCommit, repository=repository)
        else:
            cell.text()
        row.td("edge").text()

    commit_msg.script(type="text/javascript").text("applyLengthLimit($(\"table.commit-msg td.line.commit-msg\"))");

    if review:
        # Register each comment chain touching this commit so client-side
        # scripts can attach the comment UI.
        chains = review_comment.loadCommentChains(db, review, user, commit=commit)
        for chain in chains:
            commit_info.addInternalScript("commentChains.push(%s);" % chain.getJSConstructor(commit.sha1))
def renderCommitFiles(db, target, user, repository, review, changeset=None, changesets=None, file_id="f%d", approve_file_id="a%d", parent_index=None, nparents=1, conflicts=False, files=None):
    """Render the 'changed files' table for a commit (or merge commit).

    Two mutually exclusive modes:
      - nparents > 1 (merge): 'changesets' and 'files' must be provided;
        'files' is a list of [file_id, path, per-parent-file-list] entries
        and the table shows per-parent -N/+N columns.  Returns early.
      - single parent (default): 'changeset' must be provided; one row per
        file with line counts, an optional 'Additional' column (mode
        changes, submodule updates, add/remove), and -- inside a review --
        per-file 'Reviewed' checkboxes plus an 'Everything' checkbox when
        the user can review anything.

    'file_id'/'approve_file_id' are printf-style templates for element ids
    so multiple tables on one page stay unique.  All output is side effects
    on 'target'.
    """
    # Sum deleted/inserted line counts over a file's diff chunks.
    def countChanges(file):
        delete_count = 0
        insert_count = 0
        if file.chunks:
            for chunk in file.chunks:
                delete_count += chunk.delete_count
                insert_count += chunk.insert_count
        return delete_count, insert_count

    commit_files = target.table("commit-files", cellspacing=0)

    if nparents > 1:
        # --- Merge commit mode -------------------------------------------
        # 'files' entries are mutable lists; path lives at index 1.
        def getpath(x): return x[1]
        def setpath(x, p): x[1] = p

        diff.File.eliminateCommonPrefixes(files, getpath=getpath, setpath=setpath)

        # Replace each per-parent file with a (file, (deleted, inserted))
        # pair (or (None, None) when the file is untouched in that parent).
        for data in files:
            in_parent = data[2]
            for index in range(len(in_parent)):
                file_in_parent = in_parent[index]
                if file_in_parent:
                    in_parent[index] = (file_in_parent, countChanges(file_in_parent))
                else:
                    in_parent[index] = (None, None)

        section = commit_files.thead()
        row = section.tr("parents")
        if review:
            row.th("approve").text("Reviewed")
        row.th().text("Changed Files")

        review_files = []

        for index in range(nparents):
            # The last "parent" column is the conflict resolution when
            # 'conflicts' is set.
            if conflicts and index + 1 == nparents:
                text = "Conflicts"
            else:
                text = "Parent %d" % (index + 1)
            row.th("parent", colspan=2).text(text)
            if review:
                review_files.append(changesets[index].getReviewFiles(db, user, review))

        if review:
            row.th("reviewed-by").text("Reviewed By")

        section = commit_files.tbody()

        for file_id, file_path, in_parent in files:
            row = section.tr(critic_file_id=file_id)
            fully_approved = True
            if review:
                approve = row.td("approve file")
                reviewers = {}
                for index, (file, lines) in enumerate(in_parent):
                    if file:
                        span = approve.span("parent%d" % index)
                        # review_file is (can_approve, state, reviewer_ids).
                        # (dict.has_key: this file targets Python 2.)
                        if review_files[index].has_key(file.id):
                            review_file = review_files[index][file.id]
                            can_approve = review_file[0]
                            is_approved = review_file[1] == "reviewed"
                            for user_id in review_file[2]:
                                reviewers[user_id] = dbutils.User.fromId(db, user_id)
                            if not is_approved:
                                fully_approved = False
                        else:
                            can_approve = False
                            is_approved = True
                        if can_approve:
                            if is_approved:
                                checked = "checked"
                            else:
                                checked = None
                            input = span.input(type="checkbox", critic_parent_index=index, id="p%da%d" % (index, file.id), checked=checked)
                        elif not is_approved:
                            span.text("pending")
            row.td("path").a(href="#f%d" % file_id).innerHTML(file_path)
            for index, (file, lines) in enumerate(in_parent):
                if file:
                    if file.isBinaryChanges():
                        row.td("parent", colspan=2, critic_parent_index=index).i().text("binary")
                    elif file.isEmptyFile():
                        row.td("parent", colspan=2, critic_parent_index=index).i().text("empty")
                    elif file.old_mode == "160000" and file.new_mode == "160000":
                        # Submodule pointer update; link into the submodule's
                        # repository when Critic tracks it.
                        if conflicts and index + 1 == nparents:
                            row.td(colspan=2).text()
                        else:
                            module_repository = repository.getModuleRepository(db, changesets[index].child, file.path)
                            if module_repository:
                                url = "/showcommit?repository=%d&from=%s&to=%s" % (module_repository.id, file.old_sha1, file.new_sha1)
                                row.td("parent", critic_parent_index=index, colspan=2).i().a(href=url).text("updated submodule")
                            else:
                                row.td("parent", critic_parent_index=index, colspan=2).i().text("updated submodule")
                    else:
                        # Separate -N / +N cells; blank when the count is 0.
                        row.td("parent", critic_parent_index=index).text(lines[0] and "-%d" % lines[0] or "")
                        row.td("parent", critic_parent_index=index).text(lines[1] and "+%d" % lines[1] or "")
                else:
                    row.td(colspan=2).text()
            if review:
                cell = row.td("reviewed-by")
                names = sorted([user.fullname for user in reviewers.values()])
                if names:
                    # Parentheses signal "partially reviewed by these users".
                    if fully_approved:
                        cell.text(", ".join(names))
                    else:
                        cell.text("( " + ", ".join(names) + " )")
                else:
                    cell.text()
        return

    # --- Single-parent mode ---------------------------------------------
    paths = diff.File.eliminateCommonPrefixes([file.path for file in changeset.files])
    changes = map(countChanges, changeset.files)

    if review:
        review_files = changeset.getReviewFiles(db, user, review)

    # 'additional' decides whether the extra column (mode changes,
    # add/remove, submodule updates) is shown at all.
    additional = False

    for file in changeset.files:
        if (file.old_mode and file.new_mode and file.old_mode != file.new_mode) or (file.wasRemoved() and file.old_mode) or (file.wasAdded() and file.new_mode):
            additional = True
            break
        elif (file.old_mode and file.old_mode == "160000") or (file.new_mode and file.new_mode == "160000"):
            additional = True

    section = commit_files.thead()
    row = section.tr()

    ncolumns = 3

    if review:
        row.th("approve").text("Reviewed")
        ncolumns += 1

    row.th().text("Changed Files")
    row.th(colspan=2).text("Lines")

    if additional:
        row.th().text("Additional")
        ncolumns += 1

    if review:
        row.th().text("Reviewed By")
        ncolumns += 1

    if review:
        # for/else: can_approve_anything is True iff some file has the
        # is_reviewer flag set for this user.
        for is_reviewer, state, reviewers in review_files.values():
            if is_reviewer:
                can_approve_anything = True
                break
        else:
            can_approve_anything = False

    section = commit_files.tbody()

    if review and can_approve_anything:
        # "Everything" master checkbox spanning the remaining columns.
        row = section.tr()
        checkbox_everything = row.td("approve everything").input(type="checkbox", __generator__=True)
        row.td(colspan=ncolumns - 1).i().text("Everything")
    else:
        checkbox_everything = None

    all_reviewed = True

    for file, path, lines in zip(changeset.files, paths, changes):
        row = section.tr(critic_file_id=file.id)
        fully_reviewed = True
        if parent_index is not None:
            row.setAttribute("critic-parent-index", parent_index)
        if review:
            if file.id in review_files:
                review_file = review_files[file.id]
                can_review = review_file[0]
                is_reviewed = review_file[1] == "reviewed"
                reviewers = [dbutils.User.fromId(db, user_id) for user_id in review_file[2]]
            else:
                # File not assigned in this review: treat as reviewed,
                # nothing for this user to do.
                can_review = False
                is_reviewed = True
                reviewers = []
            if not is_reviewed:
                fully_reviewed = False
            if can_review:
                if is_reviewed:
                    checked = "checked"
                else:
                    checked = None
                    all_reviewed = False
                input = row.td("approve file").input(type="checkbox", critic_parent_index=parent_index, id=approve_file_id % file.id, checked=checked)
            else:
                if is_reviewed:
                    cell = row.td()
                    cell.text()
                else:
                    row.td("approve file").text("pending")
        row.td("path").a(href=("#" + file_id) % file.id).innerHTML(path)
        if file.hasChanges():
            if file.isBinaryChanges():
                row.td(colspan=2).i().text("binary")
            elif file.isEmptyFile():
                row.td(colspan=2).i().text("empty")
            else:
                row.td().text(lines[0] and "-%d" % lines[0] or "")
                row.td().text(lines[1] and "+%d" % lines[1] or "")
        else:
            row.td(colspan=2).i().text("no changes")
        # "Additional" column content, in priority order: mode change,
        # add/remove (incl. submodules), submodule pointer update, blank.
        if file.old_mode is not None and file.new_mode is not None and file.old_mode != file.new_mode:
            cell = row.td()
            cell.i().text("mode: ")
            cell.text("%s => %s" % (file.old_mode, file.new_mode))
        elif (file.wasRemoved() and file.old_mode) or (file.wasAdded() and file.new_mode):
            cell = row.td()
            if file.old_mode == "160000" or file.new_mode == "160000":
                cell.i().text("added submodule" if file.wasAdded() else "removed submodule")
            else:
                cell.i().text("added: " if file.wasAdded() else "removed: ")
                cell.text("%s" % file.new_mode if file.wasAdded() else file.old_mode)
        elif file.old_mode == "160000" and file.new_mode == "160000":
            module_repository = repository.getModuleRepository(db, changeset.child, file.path)
            if module_repository:
                url = "/showcommit?repository=%d&from=%s&to=%s" % (module_repository.id, file.old_sha1, file.new_sha1)
                row.td().i().a(href=url).text("updated submodule")
            else:
                row.td().i().text("updated submodule")
        elif additional:
            row.td().text()
        if review:
            cell = row.td("reviewed-by")
            names = sorted([user.fullname for user in reviewers])
            if names:
                if fully_reviewed:
                    cell.text(", ".join(names))
                else:
                    cell.text("( " + ", ".join(names) + " )")
            else:
                cell.text()

    if all_reviewed and checkbox_everything:
        checkbox_everything.setAttribute("checked", "checked")

    target.script().text("registerPathHandlers();")
def render(db, target, user, repository, review, changesets, commits, listed_commits=None, context_lines=3, is_merge=False, conflicts=False, moves=False, compact=False, wrap=True, tabify=False, profiler=None, rebases=None):
    """Render a commit/diff page body into 'target' (generator).

    With one changeset: renders either a squashed-history log (multiple
    commits), a commit-info header, or a bare diff header, then the file
    table and the diff itself.  With several changesets (merge commit):
    renders the commit info, a combined per-parent file table, and one diff
    section per parent, augmenting each file with "relevant commits" --
    commits on the *other* parents' sides that touched the same file
    (computed via git log and cached in the 'relevantcommits' table).

    Yields 'target' at flush points and re-yields the stops produced by
    changeset_html.render() so the caller can stream the page.
    ('is_merge' is accepted for interface compatibility but not used here.)

    Fix: the ordinal-name tuple used for "Changes relative to ... parent"
    headings was ("first", ..., "fifth", "seventh", "eight", "ninth") --
    it skipped "sixth" and misspelled "eighth", so parents beyond the
    fifth were mislabeled and a ninth parent raised IndexError.  The tuple
    is now the correct first..ninth sequence; output for the common 2-3
    parent merges is unchanged.
    """
    cursor = db.cursor()

    main = target.div("main")

    # Options forwarded to changeset_html.render(), derived from user
    # preferences and request parameters.
    options = {}

    if not user.getPreference(db, "ui.keyboardShortcuts"):
        options['show'] = True
    if user.getPreference(db, "commit.expandAllFiles"):
        options['show'] = True
        options['expand'] = True
    if compact:
        options['compact'] = True
    if tabify:
        options['tabify'] = True

    options['commit'] = changesets[0].child

    if len(changesets) == 1:
        if commits and len(commits) > 1:
            # Diff of a commit range: show a collapsible squashed log
            # instead of a single commit's info.
            columns = [(10, log_html.WhenColumn()),
                       (5, log_html.TypeColumn()),
                       (65, log_html.SummaryColumn()),
                       (20, log_html.AuthorColumn())]
            log_html.render(db, main, "Squashed History", commits=commits, listed_commits=listed_commits, rebases=rebases, review=review, columns=columns, collapsable=True)
        elif changesets[0].parent is None or (changesets[0].parent.sha1 in changesets[0].child.parents) or conflicts:
            # Plain single-commit (or conflict-resolution) view.
            if conflicts and len(changesets[0].child.parents) == 1:
                commit = changesets[0].parent
            else:
                commit = changesets[0].child
            renderCommitInfo(db, main, user, repository, review, commit, conflicts)
        else:
            main.setAttribute("style", "margin-bottom: 20px; padding-bottom: 10px")

        if moves:
            # Moved-code view: force-expand all files and show the move's
            # source/target paths as the file headers.
            def renderMoveHeaderLeft(db, target, file):
                target.text(file.move_source_file.path)
            def renderMoveHeaderRight(db, target, file):
                target.text(file.move_target_file.path)

            options['show'] = True
            options['expand'] = True
            options['support_expand'] = False
            options['header_left'] = renderMoveHeaderLeft
            options['header_right'] = renderMoveHeaderRight

            context_lines = 0
        else:
            renderCommitFiles(db, target, user, repository, review, changeset=changesets[0])

        yield target

        for stop in changeset_html.render(db, target, user, repository, changesets[0], review, context_lines=context_lines, options=options, wrap=wrap):
            yield stop
    else:
        # --- Merge commit: one changeset per parent -----------------------
        commit = changesets[0].child

        renderCommitInfo(db, main, user, repository, review, commit)

        if profiler:
            profiler.check("render commit info")

        nparents = len(changesets)

        target.addInternalScript("var parentsCount = %d;" % nparents)

        # files[file_id] = [file_id, path, per-parent file list]
        files = {}

        for index, changeset in enumerate(changesets):
            for file in changeset.files:
                files.setdefault(file.id, [file.id, file.path, [None] * nparents])[2][index] = file

        # NOTE(review): 'index' here is the leaked last loop value; the
        # file_id/approve_file_id templates are not used in the merge mode
        # of renderCommitFiles (it rebinds file_id per row), so this is
        # harmless -- but confirm before reusing the templates.
        renderCommitFiles(db, target, user, repository, review, changesets=changesets, file_id="p%df%%d" % index, approve_file_id="p%da%%d" % index, nparents=nparents, conflicts=changesets[-1].conflicts, files=diff.File.sorted(files.values(), key=lambda x: x[1]))

        if profiler:
            profiler.check("render commit files")

        mergebase = repository.mergebase(commit, db=db)

        if profiler:
            profiler.check("merge base")

        yield target

        # relevant_commits[parent_index][file_id] = [Commit, ...]: commits
        # on other parents' sides touching the same file.  Cached in the
        # 'relevantcommits' table; computed via git log on a cache miss.
        relevant_commits = []

        cursor.execute("SELECT parent, file, sha1 FROM relevantcommits JOIN commits ON (relevant=id) WHERE commit=%s", (commit.getId(db),))
        rows = cursor.fetchall()

        if rows:
            # Cache hit: rebuild the per-parent mapping from the table.
            for index in range(len(changesets)):
                relevant_commits.append({})
            commits_by_sha1 = {}
            for parent_index, file_id, sha1 in rows:
                if sha1 not in commits_by_sha1:
                    commits_by_sha1[sha1] = gitutils.Commit.fromSHA1(db, repository, sha1)
                relevant_commits[parent_index].setdefault(file_id, []).append(commits_by_sha1[sha1])
        else:
            # Cache miss: for each parent, list commits in
            # mergebase..parent touching the files this parent's changeset
            # modified, and remember them for future requests.
            values = []
            commits_by_sha1 = {}
            for index, changeset in enumerate(changesets):
                relevant_files = set([file.path for file in changeset.files])
                files = {}
                if not changeset.conflicts:
                    commit_range = "%s..%s" % (mergebase, changeset.parent.sha1)
                    relevant_lines = repository.run("log", "--name-only", "--full-history", "--format=sha1:%H", commit_range, "--", *relevant_files).splitlines()
                    for line in relevant_lines:
                        if line.startswith("sha1:"):
                            sha1 = line[5:]
                        elif line in relevant_files:
                            if sha1 not in commits_by_sha1:
                                commits_by_sha1[sha1] = gitutils.Commit.fromSHA1(db, repository, sha1)
                            relevant_commit = commits_by_sha1[sha1]
                            file_id = dbutils.find_file(db, path=line)
                            values.append((commit.getId(db), index, file_id, relevant_commit.getId(db)))
                            files.setdefault(file_id, []).append(relevant_commit)
                relevant_commits.append(files)
            cursor.executemany("INSERT INTO relevantcommits (commit, parent, file, relevant) VALUES (%s, %s, %s, %s)", values)

        if profiler:
            profiler.check("collecting relevant commits")

        if tabify:
            target.script(type="text/javascript").text("calculateTabWidth();")

        for index, changeset in enumerate(changesets):
            parent = target.div("parent", id="p%d" % index)

            options['support_expand'] = bool(changeset.conflicts)
            options['file_id'] = lambda base: "p%d%s" % (index, base)
            options['line_id'] = lambda base: "p%d%s" % (index, base)
            options['line_cell_id'] = lambda base: base is not None and "p%d%s" % (index, base) or None
            options['file_id_format'] = "p%df%%d" % index

            relevant_commits_per_file = {}

            for file in changeset.files:
                relevant_commits_per_file[file.id] = []
                for index1, changeset1 in enumerate(changesets):
                    if index1 != index:
                        relevant_commits_per_file[file.id].extend(relevant_commits[index1].get(file.id, []))

            if changeset.conflicts:
                text = "Merge conflict resolutions"
            else:
                # Fixed: was missing "sixth" and misspelled "eighth".
                text = "Changes relative to %s parent" % ("first", "second", "third", "fourth", "fifth", "sixth", "seventh", "eighth", "ninth")[index]

            parent.h1().text(text)

            # Rendered below each file's diff: the commits on other parents
            # that also touched the file.
            def renderRelevantCommits(db, target, file):
                commits = relevant_commits_per_file.get(file.id)
                if commits:
                    def linkToCommit(commit, overrides={}):
                        return "%s/%s?file=%d" % (commit.repository.name, commit.sha1, file.id)
                    columns = [(70, log_html.SummaryColumn(linkToCommit=linkToCommit)),
                               (30, log_html.AuthorColumn())]
                    log_html.renderList(db, target, "Relevant Commits", commits, columns=columns, hide_merges=True, className="log relevant")

            options['content_after'] = renderRelevantCommits
            options['parent_index'] = index
            options['merge'] = True

            for stop in changeset_html.render(db, parent, user, repository, changeset, review, context_lines=context_lines, options=options, wrap=wrap, parent_index=index):
                yield stop

            if profiler:
                profiler.check("render diff")

    if user.getPreference(db, "ui.keyboardShortcuts"):
        page.utils.renderShortcuts(target, "showcommit", review=review, merge_parents=len(changesets), squashed_diff=commits and len(commits) > 1)
class NoChangesFound(Exception):
    # Raised by commitRangeFromReview() when the selected filter matches no
    # commits at all; the caller turns this into a friendly message.
    pass

def commitRangeFromReview(db, user, review, filter_value, file_ids):
    """Compute the commit range a review filter selects.

    'filter_value' is one of 'everything', 'pending', 'reviewable',
    'relevant' or 'files' (with 'file_ids' restricting the last one).

    Returns (from_sha1, to_sha1, commits, listed_commits) where from_sha1
    may be None when the range starts at a root commit.  Raises
    NoChangesFound when the filter matches nothing, and
    page.utils.DisplayMessage for ranges that cannot be rendered as a
    single diff (empty review, overlapping upstream changes, merges in a
    pending range).
    """
    # 'edges' initially aliases the cursor (iterating it yields the query's
    # (parent, child) rows); the 'relevant' branch replaces it with a set.
    edges = cursor = db.cursor()

    if filter_value == "everything":
        cursor.execute("""SELECT DISTINCT changesets.parent, changesets.child
                            FROM changesets
                            JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id)
                           WHERE reviewchangesets.review=%s""",
                       (review.id,))
    elif filter_value == "pending":
        cursor.execute("""SELECT DISTINCT changesets.parent, changesets.child
                            FROM changesets
                            JOIN reviewfiles ON (reviewfiles.changeset=changesets.id)
                            JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                           WHERE reviewfiles.review=%s
                             AND reviewuserfiles.uid=%s
                             AND reviewfiles.state='pending'""",
                       (review.id, user.id))
    elif filter_value == "reviewable":
        cursor.execute("""SELECT DISTINCT changesets.parent, changesets.child
                            FROM changesets
                            JOIN reviewfiles ON (reviewfiles.changeset=changesets.id)
                            JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                           WHERE reviewfiles.review=%s
                             AND reviewuserfiles.uid=%s""",
                       (review.id, user.id))
    elif filter_value == "relevant":
        # "Relevant" = files the user is assigned to review plus files the
        # user's filters consider relevant.
        filters = review_filters.Filters()
        filters.setFiles(db, review=review)
        filters.load(db, review=review, user=user)
        cursor.execute("""SELECT DISTINCT changesets.parent, changesets.child, reviewfiles.file, reviewuserfiles.uid IS NOT NULL
                            FROM changesets
                            JOIN reviewfiles ON (reviewfiles.changeset=changesets.id)
                 LEFT OUTER JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id AND reviewuserfiles.uid=%s)
                           WHERE reviewfiles.review=%s""",
                       (user.id, review.id))
        edges = set()
        for parent_id, child_id, file_id, is_reviewer in cursor:
            if is_reviewer or filters.isRelevant(user, file_id):
                edges.add((parent_id, child_id))
    elif filter_value == "files":
        assert len(file_ids) != 0
        cursor.execute("""SELECT DISTINCT changesets.parent, changesets.child
                            FROM changesets
                            JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id)
                            JOIN fileversions ON (fileversions.changeset=changesets.id)
                           WHERE reviewchangesets.review=%s
                             AND fileversions.file=ANY (%s)""",
                       (review.id, list(file_ids)))
    else:
        raise page.utils.InvalidParameterValue(
            name="filter",
            value=filter_value,
            expected="one of 'everything', 'pending', 'reviewable', 'relevant' and 'files'")

    listed_commits = set()
    with_pending = set()

    for parent_id, child_id in edges:
        listed_commits.add(child_id)
        with_pending.add((parent_id, child_id))

    if len(listed_commits) == 1:
        # Single matching commit: show it directly.  ('child_id' is the
        # leaked loop variable -- valid because exactly one edge was seen.
        # NOTE(review): this path returns commit *ids* in the commits slot
        # where the other paths return Commit objects; confirm callers
        # handle both.)
        commit = gitutils.Commit.fromId(db, review.repository, child_id)
        return commit.sha1, commit.sha1, list(listed_commits), listed_commits

    if filter_value in ("everything", "reviewable", "relevant", "files"):
        # These filters show the whole branch range, with listed_commits
        # marking which commits actually match the filter.
        cursor.execute("SELECT child FROM changesets JOIN reviewchangesets ON (changeset=id) WHERE review=%s", (review.id,))
        all_commits = [gitutils.Commit.fromId(db, review.repository, commit_id) for (commit_id,) in cursor]

        commitset = CommitSet(review.branch.getCommits(db))
        tails = commitset.getFilteredTails(review.repository)

        if len(commitset) == 0:
            raise page.utils.DisplayMessage(
                title="Empty review",
                body=("This review contains no commits. It is thus not "
                      "meaningful to display a filtered view of the changes "
                      "in it."),
                review=review)
        elif len(tails) > 1:
            # Multiple upstream tails (e.g. merged-in upstream): try to pick
            # a tail whose upstream changes don't overlap the review's
            # files, otherwise refuse with an explanatory message.
            ancestor = review.repository.getCommonAncestor(tails)
            paths = []
            cursor.execute("SELECT DISTINCT file FROM reviewfiles WHERE review=%s", (review.id,))
            files_in_review = set(file_id for (file_id,) in cursor)
            if filter_value == "files":
                files_in_review &= file_ids
            paths_in_review = set(dbutils.describe_file(db, file_id) for file_id in files_in_review)
            paths_in_upstreams = set()
            for tail in tails:
                paths_in_upstream = set(review.repository.run("diff", "--name-only", "%s..%s" % (ancestor, tail)).splitlines())
                paths_in_upstreams |= paths_in_upstream
                paths.append((tail, paths_in_upstream))
            overlapping_changes = paths_in_review & paths_in_upstreams
            if overlapping_changes:
                # Candidate tails: those whose upstream side touches none of
                # the review's paths (for/else keeps only clean ones).
                candidates = []
                for index1, data in enumerate(paths):
                    for index2, (tail, paths_in_upstream) in enumerate(paths):
                        if index1 != index2 and paths_in_upstream & paths_in_review:
                            break
                    else:
                        candidates.append(data)
            else:
                candidates = paths
            if not candidates:
                # No clean tail exists; offer the least-bad diff URL but
                # refuse to render it automatically.  (list.sort(cmp=...)
                # and cmp(): Python 2 only.)
                paths.sort(cmp=lambda a, b: cmp(len(a[1]), len(b[1])))
                url = "/%s/%s..%s?file=%s" % (review.repository.name, paths[0][0][:8], review.branch.head_sha1[:8], ",".join(map(str, sorted(files_in_review))))
                message = """\
<p>It is not possible to generate a diff of the requested set of commits that contains only changes from those commits.</p>
<p>The following files would contain unrelated changes:<p>
<pre style='padding-left: 2em'>%s</pre>
<p>You can use the URL below if you want to view this diff anyway, including the unrelated changes.</p>
<pre style='padding-left: 2em'><a href='%s'>%s%s</a></pre>""" % ("\n".join(sorted(overlapping_changes)), url, dbutils.getURLPrefix(db, user), url)
                raise page.utils.DisplayMessage(title="Impossible Diff", body=message, review=review, html=True)
            else:
                # Prefer the candidate with the most upstream paths.
                candidates.sort(cmp=lambda a, b: cmp(len(b[1]), len(a[1])))
                return candidates[0][0], review.branch.head_sha1, all_commits, listed_commits

        if tails:
            from_sha1 = tails.pop()
        else:
            # Review starts with the initial commit.
            from_sha1 = None

        return from_sha1, review.branch.head_sha1, all_commits, listed_commits

    # --- 'pending' filter: must form a single linear sub-range -----------
    if not with_pending:
        raise NoChangesFound()

    cursor.execute("""SELECT parent, child
                        FROM changesets
                        JOIN reviewchangesets ON (id=changeset)
                       WHERE review=%s""",
                   (review.id,))

    children = set()
    parents = set()
    edges = {}

    # edges maps child commit id -> set of parent commit ids.
    for parent_id, child_id in cursor.fetchall():
        children.add(child_id)
        parents.add(parent_id)
        edges.setdefault(child_id, set()).add(parent_id)

    # BFS over the edges mapping: is ancestor_id reachable from
    # descendant_id via recorded parent links?
    def isAncestorOf(ancestor_id, descendant_id):
        ancestors = edges.get(descendant_id, set()).copy()
        pending = ancestors.copy()
        while pending and ancestor_id not in ancestors:
            commit_id = pending.pop()
            parents = edges.get(commit_id, set())
            pending.update(parents - ancestors)
            ancestors.update(parents)
        return ancestor_id in ancestors

    candidates = listed_commits.copy()
    heads = set()
    tails = set()

    # A head has no other listed commit as descendant; a tail has no other
    # listed commit as ancestor.  A displayable range has exactly one of
    # each.
    for candidate_id in listed_commits:
        for other_id in candidates:
            if other_id != candidate_id and isAncestorOf(candidate_id, other_id):
                break
        else:
            heads.add(candidate_id)
        for other_id in candidates:
            if other_id != candidate_id and isAncestorOf(other_id, candidate_id):
                break
        else:
            tails.add(candidate_id)

    if len(heads) != 1 or len(tails) != 1:
        raise page.utils.DisplayMessage("Filtered view not possible since it includes a merge commit.")

    head = gitutils.Commit.fromId(db, review.repository, heads.pop())
    tail = gitutils.Commit.fromId(db, review.repository, tails.pop())

    # if tail.parents greater than 1, it means it's a merge commit
    if len(tail.parents) > 1:
        raise page.utils.DisplayMessage("Filtered view not possible since it includes a merge commit.")

    if len(tail.parents) == 0:
        # Range starts at a root commit; diff against the empty tree.
        tail = None
    else:
        tail = gitutils.Commit.fromSHA1(db, review.repository, tail.parents[0])

    commits = getCommitList(db, review.repository, tail, head)

    if not commits:
        raise page.utils.DisplayMessage("Filtered view not possible since it includes a merge commit.")

    tail_to_return = tail.sha1 if tail is not None else None

    return tail_to_return, head.sha1, commits, listed_commits
def getCommitList(db, repository, from_commit=None, to_commit=None, first_commit=None, last_commit=None):
    """Return the list of commits in a range, or [] if not representable.

    Call with either from_commit/to_commit *or* first_commit/last_commit;
    the other pair must be None.  For a range starting at a root commit,
    from_commit is None (and to_commit is not).

    first/last mode: --ancestry-path rev-list plus first_commit itself.
    from/to mode: walks first-parent links from to_commit down to
    from_commit; merge commits are followed into all parents only when
    from_commit is a strict ancestor of the merge base, otherwise the
    range is declared impossible and [] is returned.
    """
    # This function should be called with either from_commit/to_commit *or*
    # first_commit/last_commit.  The other two should be None.  When called
    # for a range that starts with a root commit, from_commit will be None
    # (and to_commit not.)

    assert (from_commit is None) or (to_commit is not None)
    assert (first_commit is None) == (last_commit is None)
    assert (to_commit is None) != (last_commit is None)

    if first_commit is not None:
        # first/last mode: ancestry path between the two, inclusive.
        sha1s = repository.revlist([last_commit], [first_commit], "--ancestry-path")
        commits = [first_commit]
        commits.extend(gitutils.Commit.fromSHA1(db, repository, sha1) for sha1 in sha1s)
        return commits

    commits = set()

    # Internal signal: the range cannot be expressed as a commit list.
    class NotPossible(Exception):
        pass

    def process(iter_commit):
        # Walk from iter_commit toward from_commit, collecting commits.
        while iter_commit != from_commit and iter_commit not in commits:
            commits.add(iter_commit)
            if len(iter_commit.parents) > 1:
                try:
                    mergebase = repository.mergebase(iter_commit)
                    # Include the parents of the merge commit (and their
                    # ancestors) if 'from_commit' is an ancestor of the merge
                    # base (but isn't the merge base.)
                    # NOTE(review): with from_commit=None (root-based range)
                    # this dereferences None and raises AttributeError,
                    # which is *not* caught below -- confirm merges cannot
                    # occur on that path.
                    include_parents = (from_commit != mergebase and from_commit.isAncestorOf(mergebase))
                except gitutils.GitCommandError:
                    raise NotPossible
                if include_parents:
                    # Recurse into every parent.  (Eager map: Python 2.)
                    map(process, [gitutils.Commit.fromSHA1(db, repository, sha1) for sha1 in iter_commit.parents])
                    return
                else:
                    raise NotPossible
            else:
                if from_commit is None and len(iter_commit.parents) == 0:
                    # Reached the root commit of a root-based range.
                    return
                iter_commit = gitutils.Commit.fromSHA1(db, repository, iter_commit.parents[0])

    if from_commit == to_commit:
        return [to_commit]

    try:
        process(to_commit)
        return list(commits)
    except NotPossible:
        return []
def getApproximativeCommitList(db, repository, from_commit, to_commit, paths):
    """Approximate the range from_commit..to_commit via their merge base.

    Returns a pair of lists:
      - every Commit reachable from to_commit but not from the common
        ancestor of the two commits, and
      - the database ids of the subset of those commits that touch any of
        'paths'.
    If no common ancestor exists, returns ([], []).
    """
    try:
        merge_base = repository.getCommonAncestor([from_commit, to_commit])
    except gitutils.GitCommandError:
        # The commits share no history; there is no range to approximate.
        return [], []

    def commit_of(sha1):
        return gitutils.Commit.fromSHA1(db, repository, sha1)

    range_commits = [commit_of(sha1)
                     for sha1 in repository.revlist([to_commit], [merge_base])]
    touching_ids = [commit_of(sha1).getId(db)
                    for sha1 in repository.revlist([to_commit], [merge_base], paths=paths)]

    return range_commits, touching_ids
"commit.diff.visualTabs") else "no" tabify = req.getParameter("tabify", default_tabify) == "yes" if user.getPreference(db, "commit.diff.compactMode"): default_compact = "yes" else: default_compact = "no" compact = req.getParameter("compact", default_compact) == "yes" if moves: move_source_file_ids = req.getParameter("sourcefiles", None) move_target_file_ids = req.getParameter("targetfiles", None) if move_source_file_ids: move_source_file_ids = set(map(int, move_source_file_ids.split(","))) if move_target_file_ids: move_target_file_ids = set(map(int, move_target_file_ids.split(","))) all_commits = None listed_commits = None first_sha1 = None last_sha1 = None repository = None document = htmlutils.Document(req) document.setBase(None) if review_id is None: review = None else: review = dbutils.Review.fromId(db, review_id) if not review: raise page.utils.DisplayMessage("Invalid review ID: %d" % review_id) branch = review.branch repository = review.repository title = "" if review: title += "[r/%d] " % review.id if review_filter == "pending": title += "Pending: " elif review_filter == "reviewable": title += "Reviewable: " elif review_filter == "relevant": title += "Relevant: " elif review_filter == "everything": title += "Everything: " if not repository: parameter = req.getParameter("repository", None) if parameter: repository = gitutils.Repository.fromParameter(db, parameter) if not repository: raise page.utils.DisplayMessage("'%s' is not a valid repository!" 
% repository.name, review=review) cursor = db.cursor() def expand_sha1(sha1): if review and re.match("^[0-9a-f]+$", sha1): cursor.execute("""SELECT sha1 FROM commits JOIN changesets ON (changesets.child=commits.id) JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) WHERE reviewchangesets.review=%s AND commits.sha1 LIKE %s""", (review.id, sha1 + "%")) try: return cursor.fetchone()[0] except: pass if len(sha1) == 40: return sha1 else: return repository.revparse(sha1) sha1 = req.getParameter("sha1", None, filter=expand_sha1) if sha1 is None: from_sha1 = req.getParameter("from", None, filter=expand_sha1) to_sha1 = req.getParameter("to", None, filter=expand_sha1) if (from_sha1 is None) != (to_sha1 is None): raise page.utils.DisplayMessage("invalid parameters; one of 'from'/'to' specified but not both") if from_sha1 is None: first_sha1 = req.getParameter("first", None, filter=expand_sha1) last_sha1 = req.getParameter("last", None, filter=expand_sha1) if (first_sha1 is None) != (last_sha1 is None): raise page.utils.DisplayMessage("invalid parameters; one of 'first'/'last' specified but not both") if first_sha1 is None: if review_id and review_filter: try: from_sha1, to_sha1, all_commits, listed_commits = commitRangeFromReview(db, user, review, review_filter, file_ids) except NoChangesFound: if review_filter == "pending": raise page.utils.DisplayMessage("Your work here is done!", None, review) else: assert review_filter != "files" raise page.utils.DisplayMessage("No %s changes found." 
% review_filter, None, review) if from_sha1 == to_sha1: sha1 = to_sha1 to_sha1 = None else: raise page.utils.DisplayMessage("invalid parameters; need 'sha1', 'from'/'to' or 'first'/'last'") else: from_sha1 = None to_sha1 = None if context is None: context = user.getPreference(db, "commit.diff.contextLines") one_sha1 = filter(None, (sha1, from_sha1, to_sha1, first_sha1, last_sha1))[0] if repository: if not repository.iscommit(one_sha1): raise page.utils.DisplayMessage("'%s' is not a valid commit in the repository '%s'!" % (one_sha1, repository.name), review=review) else: repository = user.getDefaultRepository(db) if repository and not repository.iscommit(one_sha1): repository = None if not repository: repository = gitutils.Repository.fromSHA1(db, one_sha1) if first_sha1 is not None: try: first_commit = gitutils.Commit.fromSHA1(db, repository, first_sha1) except gitutils.GitReferenceError as error: raise page.utils.DisplayMessage("Invalid SHA-1", "%s is not a commit in %s" % (error.sha1, repository.path)) if len(first_commit.parents) > 1: raise page.utils.DisplayMessage("Invalid parameters; 'first' can not be a merge commit.", review=review) from_sha1 = first_commit.parents[0] if first_commit.parents else None to_sha1 = last_sha1 try: commit = gitutils.Commit.fromSHA1(db, repository, sha1) if sha1 else None from_commit = gitutils.Commit.fromSHA1(db, repository, from_sha1) if from_sha1 else None to_commit = gitutils.Commit.fromSHA1(db, repository, to_sha1) if to_sha1 else None except gitutils.GitReferenceError as error: raise page.utils.DisplayMessage("Invalid SHA-1", "%s is not a commit in %s" % (error.sha1, repository.path)) if commit: title += "%s (%s)" % (commit.niceSummary(), commit.describe(db)) elif from_commit: title += "%s..%s" % (from_commit.describe(db), to_commit.describe(db)) else: title += "..%s" % to_commit.describe(db) document.setTitle(title) if review_filter == "pending": document.setLink("next", "javascript:submitChanges();") commits = None rebases 
= None profiler.check("prologue") if to_commit: changesets = changeset_utils.createChangeset(db, user, repository, from_commit=from_commit, to_commit=to_commit, conflicts=conflicts, rescan=rescan, reanalyze=reanalyze, filtered_file_ids=file_ids) assert len(changesets) == 1 if not conflicts: if review and (review_filter in ("everything", "reviewable", "relevant") or (review_filter == "files" and all_commits)): # We're displaying the full changes in the review (possibly # filtered by file) => include rebase information when rendering # the "Squashed History" log. cursor.execute("""SELECT id, old_head, new_head, new_upstream, equivalent_merge, replayed_rebase, uid, branch FROM reviewrebases WHERE review=%s AND new_head IS NOT NULL""", (review.id,)) rebases = [(rebase_id, gitutils.Commit.fromId(db, repository, old_head), gitutils.Commit.fromId(db, repository, new_head), dbutils.User.fromId(db, user_id), gitutils.Commit.fromId(db, repository, new_upstream) if new_upstream is not None else None, gitutils.Commit.fromId(db, repository, equivalent_merge) if equivalent_merge is not None else None, gitutils.Commit.fromId(db, repository, replayed_rebase) if replayed_rebase is not None else None, branch_name) for rebase_id, old_head, new_head, new_upstream, equivalent_merge, replayed_rebase, user_id, branch_name in cursor] if all_commits: commits = all_commits else: if first_sha1: commits = getCommitList( db, repository, first_commit=first_commit, last_commit=to_commit) else: commits = getCommitList( db, repository, from_commit=from_commit, to_commit=to_commit) if not commits and not review: paths = [changed_file.path for changed_file in changesets[0].files] commits, listed_commits = getApproximativeCommitList(db, repository, from_commit, to_commit, paths) if commits: changesets[0].setCommits(commits) else: if len(commit.parents) > 1: if review: cursor.execute("SELECT COUNT(changeset) FROM reviewchangesets JOIN changesets ON (changeset=id) WHERE review=%s AND child=%s", 
(review.id, commit.getId(db))) if cursor.fetchone()[0] > len(commit.parents): full = True else: full = False if full: changesets = changeset_utils.createFullMergeChangeset(db, user, repository, commit, review=review) commits = [commit] else: changesets = changeset_utils.createChangeset(db, user, repository, commit=commit, rescan=rescan, reanalyze=reanalyze, conflicts=conflicts, filtered_file_ids=file_ids, review=review) commits = [commit] profiler.check("create changeset") if review and commits: all_files = set() pending_files = set() reviewable_files = set() cursor.execute("""SELECT reviewfiles.file, reviewfiles.state, reviewuserfiles.uid IS NOT NULL FROM commits JOIN changesets ON (changesets.child=commits.id) JOIN reviewfiles ON (reviewfiles.changeset=changesets.id) LEFT OUTER JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id AND reviewuserfiles.uid=%s) WHERE commits.sha1=ANY (%s) AND reviewfiles.review=%s""", (user.id, [commit.sha1 for commit in commits], review.id)) for file_id, current_state, is_reviewer in cursor: all_files.add(file_id) if is_reviewer: if current_state == 'pending': pending_files.add(file_id) reviewable_files.add(file_id) profiler.check("reviewfiles query") for changeset in changesets: all_files_local = all_files.copy() for file in changeset.files: if file.id in all_files_local: all_files_local.remove(file.id) for file_id in all_files_local: if not file_ids or file_id in file_ids: changeset.files.append(diff.File(file_id, dbutils.describe_file(db, file_id), None, None, repository)) if review_filter == "pending": def isPending(file): return file.id in pending_files changeset.files = filter(isPending, changeset.files) elif review_filter == "reviewable": def isReviewable(file): return file.id in reviewable_files changeset.files = filter(isReviewable, changeset.files) elif review_filter == "relevant": filters = review_filters.Filters() filters.setFiles(db, review=review) filters.load(db, review=review, user=user) def isRelevant(file): 
if file.id in reviewable_files: return True elif filters.isRelevant(user, file): return True else: return False changeset.files = filter(isRelevant, changeset.files) elif review_filter == "files": def isFiltered(file): return file.id in file_ids changeset.files = filter(isFiltered, changeset.files) profiler.check("review filtering") if moves: if len(changesets) != 1: raise page.utils.DisplayMessage("Can't detect moves in a merge commit!", review=review) move_changeset = changeset_detectmoves.detectMoves(db, changesets[0], move_source_file_ids, move_target_file_ids) if not move_changeset: raise page.utils.DisplayMessage("No moved code found!", review=review) changesets = [move_changeset] profiler.check("moves detection") html = document.html() head = html.head() body = html.body() if review: def generateButtons(target): review_utils.renderDraftItems(db, user, review, target) buttons = target.div("buttons") if user.getPreference(db, "debug.extensions.customProcessCommits"): buttons.button(onclick='customProcessCommits();').text("Process Commits") buttons.span("buttonscope buttonscope-global") page.utils.generateHeader(body, db, user, generateButtons, extra_links=[("r/%d" % review.id, "Back to Review")]) else: def generateButtons(target): buttons = target.div("buttons") if not user.isAnonymous() and (commit or commits): buttons.button(onclick='createReview();').text('Create Review') buttons.span("buttonscope buttonscope-global") page.utils.generateHeader(body, db, user, generateButtons) log_html.addResources(document) changeset_html.addResources(db, user, repository, review, compact, tabify, document) document.addInternalScript(user.getJS(db)) document.addInternalScript(repository.getJS()) document.addInternalScript("var keyboardShortcuts = %s;" % (user.getPreference(db, "ui.keyboardShortcuts") and "true" or "false")) for stop in render(db, body, user, repository, review, changesets, commits, listed_commits, context_lines=context, conflicts=conflicts, moves=moves, 
compact=compact, wrap=wrap, tabify=tabify, profiler=profiler, rebases=rebases): yield document.render(stop=stop, pretty=not compact) profiler.check("rendering") profiler.output(db, user, document) db.commit() yield document.render(pretty=not compact) ================================================ FILE: src/page/showfile.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import os import urllib import dbutils import gitutils import page.utils import htmlutils import textutils import diff import reviewing.utils as review_utils import reviewing.comment as review_comment from syntaxhighlight.request import requestHighlights def renderShowFile(req, db, user): cursor = db.cursor() sha1 = req.getParameter("sha1") path = req.getParameter("path") line = req.getParameter("line", None) review_id = req.getParameter("review", None, filter=int) default_tabify = "yes" if user.getPreference(db, "commit.diff.visualTabs") else "no" tabify = req.getParameter("tabify", default_tabify) == "yes" if line is None: first, last = None, None else: if "-" in line: first, last = map(int, line.split("-")) else: first = last = int(line) context = req.getParameter("context", user.getPreference(db, "commit.diff.contextLines"), int) first_with_context = max(1, first - context) last_with_context = last + context if user.getPreference(db, "commit.diff.compactMode"): default_compact = "yes" else: default_compact = "no" compact = req.getParameter("compact", default_compact) == "yes" if len(path) == 0 or path[-1:] == "/": raise page.utils.DisplayMessage( title="Invalid path parameter", body="<p>The path must be non-empty and must not end with a <code>/</code>.</p>", html=True) if path[0] == '/': full_path = path if path != "/": path = path[1:] else: full_path = "/" + path if not path: path = "/" if review_id is None: review = None repository_arg = req.getParameter("repository", "") if repository_arg: repository = gitutils.Repository.fromParameter(db, repository_arg) else: repository = gitutils.Repository.fromSHA1(db, sha1) else: review = dbutils.Review.fromId(db, review_id) repository = review.repository document = htmlutils.Document(req) html = document.html() head = html.head() body = html.body() if review: page.utils.generateHeader(body, db, user, lambda target: review_utils.renderDraftItems(db, user, review, target), extra_links=[("r/%d" % review.id, "Back to 
Review")]) else: page.utils.generateHeader(body, db, user) document.addExternalStylesheet("resource/showfile.css") document.addInternalStylesheet(htmlutils.stripStylesheet(user.getResource(db, "syntax.css")[1], compact)) commit = gitutils.Commit.fromSHA1(db, repository, sha1) file_sha1 = commit.getFileSHA1(full_path) file_id = dbutils.find_file(db, path=path) if file_sha1 is None: raise page.utils.DisplayMessage( title="File does not exist", body=("<p>There is no file named <code>%s</code> in the commit " "<a href='/showcommit?repository=%s&sha1=%s'>" "<code>%s</code></a>.</p>" % (htmlutils.htmlify(textutils.escape(full_path)), htmlutils.htmlify(repository.name), htmlutils.htmlify(sha1), htmlutils.htmlify(sha1[:8]))), html=True) file = diff.File(file_id, path, None, file_sha1, repository) # A new file ID might have been added to the database, so need to commit. db.commit() if file.canHighlight(): requestHighlights(repository, { file.new_sha1: (file.path, file.getLanguage()) }, "legacy") file.loadNewLines(True, request_highlight=True) if review: document.addInternalScript(user.getJS()) document.addInternalScript(review.getJS()) document.addInternalScript("var changeset = { parent: { id: %(id)d, sha1: %(sha1)r }, child: { id: %(id)d, sha1: %(sha1)r } };" % { 'id': commit.getId(db), 'sha1': commit.sha1 }) document.addInternalScript("var files = { %(id)d: { new_sha1: %(sha1)r }, %(sha1)r: { id: %(id)d, side: 'n' } };" % { 'id': file_id, 'sha1': file_sha1 }) document.addExternalStylesheet("resource/review.css") document.addExternalScript("resource/review.js") cursor.execute("""SELECT DISTINCT commentchains.id FROM commentchains JOIN commentchainlines ON (commentchainlines.chain=commentchains.id) WHERE commentchains.review=%s AND commentchains.file=%s AND commentchainlines.sha1=%s AND ((commentchains.state!='draft' OR commentchains.uid=%s) AND commentchains.state!='empty') GROUP BY commentchains.id""", (review.id, file_id, file_sha1, user.id)) comment_chain_script = "" 
for (chain_id,) in cursor.fetchall(): chain = review_comment.CommentChain.fromId(db, chain_id, user, review=review) chain.loadComments(db, user) comment_chain_script += "commentChains.push(%s);\n" % chain.getJSConstructor(file_sha1) if comment_chain_script: document.addInternalScript(comment_chain_script) document.addExternalStylesheet("resource/comment.css") document.addExternalScript("resource/comment.js") document.addExternalScript("resource/showfile.js") if tabify: document.addExternalStylesheet("resource/tabify.css") document.addExternalScript("resource/tabify.js") tabwidth = file.getTabWidth() indenttabsmode = file.getIndentTabsMode() if user.getPreference(db, "commit.diff.highlightIllegalWhitespace"): document.addInternalStylesheet(user.getResource(db, "whitespace.css")[1], compact) if first is not None: document.addInternalScript("var firstSelectedLine = %d, lastSelectedLine = %d;" % (first, last)) target = body.div("main") if tabify: target.script(type="text/javascript").text("calculateTabWidth();") table = target.table('file show expanded paleyellow', align='center', cellspacing=0) columns = table.colgroup() columns.col('edge') columns.col('linenr') columns.col('line') columns.col('middle') columns.col('middle') columns.col('line') columns.col('linenr') columns.col('edge') thead = table.thead() cell = thead.tr().td('h1', colspan=8) h1 = cell.h1() def make_url(url_path, path): params = { "sha1": sha1, "path": path } if review is None: params["repository"] = str(repository.id) else: params["review"] = str(review.id) return "%s?%s" % (url_path, urllib.urlencode(params)) h1.a("root", href=make_url("showtree", "/")).text("root") h1.span().text('/') components = path.split("/") for index, component in enumerate(components[:-1]): h1.a(href=make_url("showtree", "/".join(components[:index + 1]))).text(component, escape=True) h1.span().text('/') if first is not None: h1.a(href=make_url("showfile", "/".join(components))).text(components[-1], escape=True) else: 
h1.text(components[-1], escape=True) h1.span("right").a(href=("/download/%s?repository=%s&sha1=%s" % (urllib.quote(path), repository.name, file_sha1)), download=urllib.quote(path)).text("[download]") h1.span("right").a(href=("/download/%s?repository=%s&sha1=%s" % (urllib.quote(path), repository.name, file_sha1))).text("[view]") table.tbody('spacer top').tr('spacer top').td(colspan=8).text() tbody = table.tbody("lines") yield document.render(stop=tbody, pretty=not compact) for linenr, line in enumerate(file.newLines(True)): linenr = linenr + 1 highlight_class = "" if first is not None: if not (first_with_context <= linenr <= last_with_context): continue if linenr == first: highlight_class += " first-selected" if linenr == last: highlight_class += " last-selected" if tabify: line = htmlutils.tabify(line, tabwidth, indenttabsmode) line = line.replace("\r", "<i class='cr'></i>") row = tbody.tr("line context single", id="f%do%dn%d" % (file.id, linenr, linenr)) row.td("edge").text() row.td("linenr old").text(linenr) row.td("line single whole%s" % highlight_class, id="f%dn%d" % (file.id, linenr), colspan=4).innerHTML(line) row.td("linenr new").text(linenr) row.td("edge").text() if linenr % 500: yield document.render(stop=tbody, pretty=not compact) table.tbody('spacer bottom').tr('spacer bottom').td(colspan=8).text() yield document.render(pretty=not compact) ================================================ FILE: src/page/showfilters.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2015 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
def renderShowFilters(req, db, user):
    """Render a plain-text report of who would review/watch a given path.

    Request parameters: "path" (defaults to "/") and "repository" (defaults
    to the user's defaultRepository preference). Directory paths are probed
    via a synthetic "<dir>/dummy.txt" entry so directory-level filters match.

    Returns a page.utils.ResponseBody with content type text/plain.
    """
    requested_path = req.getParameter("path", "/")
    repo_name = req.getParameter(
        "repository", user.getPreference(db, "defaultRepository"))
    repository = gitutils.Repository.fromParameter(db, repo_name)

    show_path = requested_path

    # For directories, evaluate filters against a dummy file inside them.
    lookup_path = requested_path
    if lookup_path.endswith("/") or repository.getHead(db).isDirectory(lookup_path):
        lookup_path = lookup_path.rstrip("/") + "/dummy.txt"

    file_id = dbutils.find_file(db, path=lookup_path)

    filters = reviewing.filters.Filters()
    filters.setFiles(db, [file_id])
    filters.load(db, repository=repository, recursive=True)

    # Split matched users into reviewers and watchers.
    reviewers, watchers = [], []
    for user_id, (filter_type, _delegate) in filters.listUsers(file_id).items():
        bucket = reviewers if filter_type == 'reviewer' else watchers
        bucket.append(user_id)

    parts = ["Path: %s\n" % show_path]

    def append_section(heading, user_ids):
        # Emit the heading lazily, only if the section has members; return
        # whether anything was emitted.
        emitted = False
        for member_id in sorted(user_ids):
            if not emitted:
                parts.append(heading)
                emitted = True
            member = dbutils.User.fromId(db, member_id)
            parts.append(" %s <%s>\n" % (member.fullname, member.email))
        return emitted

    found_reviewers = append_section("\nReviewers:\n", reviewers)
    found_watchers = append_section("\nWatchers:\n", watchers)

    if not found_reviewers and not found_watchers:
        parts.append("\nNo matching filters found.\n")

    return page.utils.ResponseBody("".join(parts), content_type="text/plain")
# Use the site-specific module mapping if a customization package is
# installed; otherwise fall back to a simple top-level-directory mapping.
try:
    from customization.paths import getModuleFromFile
except ImportError:
    # Only a missing customization package should select the fallback; a
    # bare "except:" here used to hide real errors (even SyntaxError or
    # KeyboardInterrupt) raised while importing customization.paths.
    def getModuleFromFile(repository, filename):
        """Return the "module" prefix for |filename|.

        The fallback definition treats the top-level directory as the
        module: "src/page/utils.py" => "src/". Returns None for paths with
        no directory component. |repository| is unused by the fallback but
        kept for signature compatibility with the customization hook.
        """
        try:
            base, _rest = filename.split("/", 1)
            return base + "/"
        except ValueError:
            # No slash in the path => top-level file, no module.
            return None
class SummaryColumn(log.html.SummaryColumn):
    """Commit-log summary column that tags each commit row with the users
    who still have pending (unreviewed) changes in it, via a
    "critic-reviewers" attribute consumed client-side."""

    def __init__(self, review):
        log.html.SummaryColumn.__init__(self)
        self.__review = review
        # Maps commit id => set of user ids with pending assignments.
        self.__cache = {}

    def fillCache(self, db, review):
        # One query up front: every (assignee, commit) pair with pending
        # state in the review.
        cursor = db.cursor()
        cursor.execute("""SELECT DISTINCT assignee, child FROM fullreviewuserfiles JOIN changesets ON (changesets.id=changeset) WHERE review=%s AND state='pending'""", (review.id,))
        for assignee_id, commit_id in cursor:
            self.__cache.setdefault(commit_id, set()).add(assignee_id)

    def render(self, db, commit, target, overrides={}):
        pending_user_ids = self.__cache.get(commit.getId(db))
        if pending_user_ids:
            labels = sorted("%s:%s" % (person.fullname, person.status)
                            for person in dbutils.User.fromIds(db, list(pending_user_ids)))
            target.setAttribute("critic-reviewers", ",".join(labels))
        log.html.SummaryColumn.render(self, db, commit, target, overrides=overrides)
class ApprovalColumn:
    """Commit-log column showing review progress, either as a pending
    percentage (APPROVED type) or as -deleted/+inserted totals (TOTAL type).

    The shared |cache| maps commit id => a 12-element list:
      [0..2]   total   nfiles/deleted/inserted for the whole review
      [3..5]   reviewed nfiles/deleted/inserted for the whole review
      [6..8]   total   nfiles/deleted/inserted assigned to this user
      [9..11]  reviewed nfiles/deleted/inserted assigned to this user
    """

    APPROVED = 1
    TOTAL = 2

    def __init__(self, user, review, type, cache):
        self.__user = user
        self.__review = review
        # One of ApprovalColumn.APPROVED / ApprovalColumn.TOTAL.
        self.__type = type
        self.__cache = cache

    @staticmethod
    def fillCache(db, user, review, cache, profiler):
        """Populate |cache| with per-commit progress counters (see class
        docstring for the slot layout)."""
        cursor = db.cursor()

        profiler.check("fillCache")

        # Pass 1: review-wide counts per commit, grouped by file state.
        cursor.execute("""SELECT child, state, COUNT(*), SUM(deleted), SUM(inserted) FROM changesets JOIN reviewfiles ON (changeset=changesets.id) WHERE review=%s GROUP BY child, state""",
                       (review.id,))

        for commit_id, state, nfiles, deleted, inserted in cursor:
            data = cache.get(commit_id)
            if not data:
                data = cache[commit_id] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]

            if state == 'reviewed':
                data[3] += nfiles
                data[4] += deleted
                data[5] += inserted

            data[0] += nfiles
            data[1] += deleted
            data[2] += inserted

        profiler.check("fillCache: total")

        # Pass 2: counts for files assigned to |user|, using the effective
        # state (a draft reviewfilechanges row overrides the stored state).
        cursor.execute("""SELECT child, COALESCE(reviewfilechanges.to_state, reviewfiles.state) AS effective_state, COUNT(*), SUM(deleted), SUM(inserted) FROM changesets JOIN reviewfiles ON (changeset=changesets.id) JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id) LEFT OUTER JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id AND reviewfilechanges.uid=reviewuserfiles.uid AND reviewfilechanges.state='draft') WHERE review=%s AND reviewuserfiles.uid=%s GROUP BY child, effective_state""",
                       (review.id, user.id))

        for commit_id, state, nfiles, deleted, inserted in cursor:
            # NOTE(review): assumes pass 1 already created the entry for
            # every commit id this query can return (pass 2's rows are a
            # subset of pass 1's); if not, data would be None here — confirm.
            data = cache.get(commit_id)

            if state == 'reviewed':
                data[9] += nfiles
                data[10] += deleted
                data[11] += inserted

            data[6] += nfiles
            data[7] += deleted
            data[8] += inserted

        profiler.check("fillCache: user")

    def __calculate(self, db, commit):
        # All-zero default for commits with no reviewable changes cached.
        return self.__cache.get(commit.id, [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])

    def className(self, db, commit):
        """Return the CSS class for this cell; " user" is appended when the
        current user still has files of their own to review in |commit|."""
        if commit:
            (total_nfiles, total_deleted, total_inserted,
             approved_nfiles, approved_deleted, approved_inserted,
             user_total_nfiles, user_total_deleted, user_total_inserted,
             user_approved_nfiles, user_approved_deleted, user_approved_inserted) = self.__calculate(db, commit)

            if user_approved_nfiles == user_total_nfiles:
                category = ""
            else:
                category = " user"
        else:
            category = ""

        if self.__type == ApprovalColumn.APPROVED:
            return "approval" + category
        else:
            return "total" + category

    def heading(self, target):
        if self.__type == ApprovalColumn.APPROVED:
            target.text("Pending")
        else:
            target.text("Total")

    def render(self, db, commit, target, overrides={}):
        """Render the cell content.

        APPROVED type: empty when everything is reviewed, otherwise the
        remaining percentage by changed lines ("?? %" when line counts give
        no signal, i.e. all counted lines are already approved).
        TOTAL type: "-deleted/+inserted", review-wide when the user's own
        share is done, otherwise the user's own share.
        """
        (total_nfiles, total_deleted, total_inserted,
         approved_nfiles, approved_deleted, approved_inserted,
         user_total_nfiles, user_total_deleted, user_total_inserted,
         user_approved_nfiles, user_approved_deleted, user_approved_inserted) = self.__calculate(db, commit)

        if self.__type == ApprovalColumn.APPROVED:
            if user_approved_nfiles == user_total_nfiles:
                # User's own share done: show review-wide progress.
                if approved_nfiles == total_nfiles:
                    target.text()
                elif approved_deleted == total_deleted and approved_inserted == total_inserted:
                    target.span().text("?? %")
                else:
                    target.span().text("%d %%" % int(100.0 * ((total_deleted + total_inserted) - (approved_deleted + approved_inserted)) / (total_deleted + total_inserted)))
            elif user_approved_deleted == user_total_deleted and user_approved_inserted == user_total_inserted:
                target.span().text("?? %")
            else:
                target.span().text("%d %%" % int(100.0 * ((user_total_deleted + user_total_inserted) - (user_approved_deleted + user_approved_inserted)) / (user_total_deleted + user_total_inserted)))
        else:
            if user_approved_deleted == user_total_deleted and user_approved_inserted == user_total_inserted:
                target.span().text("-%d/+%d" % (total_deleted, total_inserted))
            else:
                target.span().text("-%d/+%d" % (user_total_deleted, user_total_inserted))
%") else: target.span().text("%d %%" % int(100.0 * ((user_total_deleted + user_total_inserted) - (user_approved_deleted + user_approved_inserted)) / (user_total_deleted + user_total_inserted))) else: if user_approved_deleted == user_total_deleted and user_approved_inserted == user_total_inserted: target.span().text("-%d/+%d" % (total_deleted, total_inserted)) else: target.span().text("-%d/+%d" % (user_total_deleted, user_total_inserted)) def notModified(req, db, user, review): value = req.getRequestHeader("If-None-Match") return review.getETag(db, user) == value def renderShowReview(req, db, user): profiler = profiling.Profiler() cursor = db.cursor() if user.getPreference(db, "commit.diff.compactMode"): default_compact = "yes" else: default_compact = "no" compact = req.getParameter("compact", default_compact) == "yes" highlight = req.getParameter("highlight", None) review_id = req.getParameter("id", filter=int) review = dbutils.Review.fromId(db, review_id, profiler=profiler) profiler.check("create review") if not review: raise page.utils.DisplayMessage("Invalid Review ID", "%d is not a valid review ID." 
% review_id) if review.getETag(db, user) == req.getRequestHeader("If-None-Match"): raise page.utils.NotModified profiler.check("ETag") repository = review.repository prefetch_commits = {} cursor.execute("""SELECT DISTINCT sha1, child FROM changesets JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) JOIN commits ON (commits.id=changesets.child) WHERE review=%s""", (review.id,)) prefetch_commits.update(dict(cursor)) profiler.check("commits (query)") cursor.execute("""SELECT old_head, old_head_commit.sha1, new_head, new_head_commit.sha1, new_upstream, new_upstream_commit.sha1, equivalent_merge, equivalent_merge_commit.sha1, replayed_rebase, replayed_rebase_commit.sha1 FROM reviewrebases LEFT OUTER JOIN commits AS old_head_commit ON (old_head_commit.id=old_head) LEFT OUTER JOIN commits AS new_head_commit ON (new_head_commit.id=new_head) LEFT OUTER JOIN commits AS new_upstream_commit ON (new_upstream_commit.id=new_upstream) LEFT OUTER JOIN commits AS equivalent_merge_commit ON (equivalent_merge_commit.id=equivalent_merge) LEFT OUTER JOIN commits AS replayed_rebase_commit ON (replayed_rebase_commit.id=replayed_rebase) WHERE review=%s""", (review.id,)) rebases = cursor.fetchall() if rebases: has_finished_rebases = False for (old_head_id, old_head_sha1, new_head_id, new_head_sha1, new_upstream_id, new_upstream_sha1, equivalent_merge_id, equivalent_merge_sha1, replayed_rebase_id, replayed_rebase_sha1) in rebases: if old_head_id: prefetch_commits[old_head_sha1] = old_head_id if new_head_id: prefetch_commits[new_head_sha1] = new_head_id has_finished_rebases = True if new_upstream_id: prefetch_commits[new_upstream_sha1] = new_upstream_id if equivalent_merge_id: prefetch_commits[equivalent_merge_sha1] = equivalent_merge_id if replayed_rebase_id: prefetch_commits[replayed_rebase_sha1] = replayed_rebase_id profiler.check("auxiliary commits (query)") if has_finished_rebases: cursor.execute("""SELECT commits.sha1, commits.id FROM commits JOIN reachable ON 
(reachable.commit=commits.id) WHERE branch=%s""", (review.branch.id,)) prefetch_commits.update(dict(cursor)) profiler.check("actual commits (query)") prefetch_commits = gitutils.FetchCommits(repository, prefetch_commits) document = htmlutils.Document(req) html = document.html() head = html.head() body = html.body(onunload="void(0);") def flush(target=None): return document.render(stop=target, pretty=not compact) def renderHeaderItems(target): has_draft_items = review_utils.renderDraftItems(db, user, review, target) target = target.div("buttons") if not has_draft_items: if review.state == "open": if review.accepted(db): target.button(id="closeReview", onclick="closeReview();").text("Close Review") else: if user in review.owners or user.getPreference(db, "review.pingAnyReview"): target.button(id="pingReview", onclick="pingReview();").text("Ping Review") if user in review.owners or user.getPreference(db, "review.dropAnyReview"): target.button(id="dropReview", onclick="dropReview();").text("Drop Review") if user in review.owners and not review.description: target.button(id="writeDescription", onclick="editDescription();").text("Write Description") else: target.button(id="reopenReview", onclick="reopenReview();").text("Reopen Review") target.span("buttonscope buttonscope-global") profiler.check("prologue") page.utils.generateHeader(body, db, user, renderHeaderItems, profiler=profiler) cursor.execute("SELECT 1 FROM fullreviewuserfiles WHERE review=%s AND state='pending' AND assignee=%s", (review.id, user.id)) hasPendingChanges = bool(cursor.fetchone()) if hasPendingChanges: head.setLink("next", "showcommit?review=%d&filter=pending" % review.id) profiler.check("header") document.addExternalStylesheet("resource/showreview.css") document.addExternalStylesheet("resource/review.css") document.addExternalStylesheet("resource/comment.css") document.addExternalScript("resource/showreview.js") document.addExternalScript("resource/review.js") 
document.addExternalScript("resource/comment.js") document.addExternalScript("resource/reviewfilters.js") document.addExternalScript("resource/autocomplete.js") document.addInternalScript(user.getJS()) document.addInternalScript("var isReviewFrontpage = true;") document.addInternalScript("var owners = [ %s ];" % ", ".join(owner.getJSConstructor() for owner in review.owners)) document.addInternalScript("var updateCheckInterval = %d;" % user.getPreference(db, "review.updateCheckInterval")); log.html.addResources(document) document.addInternalScript(review.getJS()) target = body.div("main") basic = target.table('paleyellow basic', align='center') basic.col(width='10%') basic.col(width='60%') basic.col(width='30%') h1 = basic.tr().td('h1', colspan=3).h1() h1.text("r/%d: " % review.id) h1.span(id="summary").text("%s" % review.summary, linkify=linkify.Context(db=db, review=review)) h1.a("edit", href="javascript:editSummary();").text("[edit]") def linkToCommit(commit): cursor.execute("SELECT 1 FROM commits JOIN changesets ON (child=commits.id) JOIN reviewchangesets ON (changeset=changesets.id) WHERE sha1=%s AND review=%s", (commit.sha1, review.id)) if cursor.fetchone(): return "%s/%s?review=%d" % (review.repository.name, commit.sha1, review.id) return "%s/%s" % (review.repository.name, commit.sha1) def row(heading, value, help, right=None, linkify=False, cellId=None): main_row = basic.tr('line') main_row.td('heading').text("%s:" % heading) if right is False: colspan = 2 else: colspan = None if callable(value): value(main_row.td('value', id=cellId, colspan=colspan).preformatted()) else: main_row.td('value', id=cellId, colspan=colspan).preformatted().text(value, linkify=linkify, repository=review.repository) if right is False: pass elif callable(right): right(main_row.td('right', valign='bottom')) else: main_row.td('right').text() if help: basic.tr('help').td('help', colspan=3).text(help) def renderBranchName(target): classes = "branch inset" if review.branch.archived: 
classes += " archived" target.code(classes).text(review.branch.name, linkify=linkify.Context()) if repository.name != user.getPreference(db, "defaultRepository"): target.text(" in ") target.code("repository inset").text(repository.getURL(db, user)) buttons = target.div("buttons") cursor.execute("""SELECT id, remote, remote_name, disabled, previous FROM trackedbranches WHERE repository=%s AND local_name=%s""", (repository.id, review.branch.name)) row = cursor.fetchone() if row: trackedbranch_id, remote, remote_name, disabled, previous = row target.p("tracking disabled" if disabled else "tracking").text("tracking") target.code("branch inset").text(remote_name, linkify=linkify.Context(remote=remote)) target.text(" in ") target.code("repository inset").text(remote, linkify=linkify.Context()) if previous: target.span("lastupdate").script(type="text/javascript").text("document.write('(last fetched: ' + shortDate(new Date(%d)) + ')');" % (calendar.timegm(previous.utctimetuple()) * 1000)) if user in review.owners or user.hasRole(db, "administrator"): if review.state == "open": if disabled: button = buttons.button("enabletracking", onclick=("enableTracking(%d, %s, %s);" % (trackedbranch_id, htmlutils.jsify(remote), htmlutils.jsify(remote_name)))) button.text("Enable Tracking") else: buttons.button("disabletracking", onclick="triggerUpdate(%d);" % trackedbranch_id).text("Update Now") buttons.button("disabletracking", onclick="disableTracking(%d);" % trackedbranch_id).text("Disable Tracking") buttons.button("rebasereview", onclick="location.assign('/rebasetrackingreview?review=%d');" % review.id).text("Rebase Review") if review.state != "open" and review.branch.archived: buttons.button("resurrect").text("Resurrect Branch") def renderPeople(target, list): for index, person in enumerate(list): if index != 0: target.text(", ") span = target.span("user %s" % person.status) span.span("name").text(person.fullname) if person.status == 'absent': span.span("status").text(" (%s)" % 
person.getAbsence(db)) elif person.status == 'retired': span.span("status").text(" (retired)") def renderOwners(target): renderPeople(target, review.owners) def renderReviewers(target): if review.reviewers: renderPeople(target, review.reviewers) else: target.i().text("No reviewers.") cursor.execute("""SELECT reviewfilters.id, reviewfilters.uid, reviewfilters.path FROM reviewfilters JOIN users ON (reviewfilters.uid=users.id) WHERE reviewfilters.review=%s AND reviewfilters.type='reviewer' AND users.status!='retired'""", (review.id,)) rows = cursor.fetchall() reviewer_filters_hidden = [] if rows: table = target.table("reviewfilters reviewers") row = table.thead().tr("h1") row.th("h1", colspan=4).text("Custom filters:") filter_data = {} reviewfilters = {} for filter_id, user_id, path in rows: filter_user = dbutils.User.fromId(db, user_id) path = path or '/' reviewfilters.setdefault(filter_user.fullname, []).append(path) filter_data[(filter_user.fullname, path)] = (filter_id, filter_user) count = 0 tbody = table.tbody() for fullname in sorted(reviewfilters.keys()): original_paths = sorted(reviewfilters[fullname]) trimmed_paths = diff.File.eliminateCommonPrefixes(original_paths[:]) first = True for original_path, trimmed_path in zip(original_paths, trimmed_paths): row = tbody.tr("filter") if first: row.td("username", rowspan=len(original_paths)).text(fullname) row.td("reviews", rowspan=len(original_paths)).text("reviews") first = False row.td("path").span().innerHTML(trimmed_path) filter_id, filter_user = filter_data[(fullname, original_path)] href = "javascript:removeReviewFilter(%d, %s, 'reviewer', %s, %s);" % (filter_id, filter_user.getJSConstructor(), htmlutils.jsify(original_path), "true" if filter_user != user else "false") row.td("remove").a(href=href).text("[remove]") count += 1 tfoot = table.tfoot() tfoot.tr().td(colspan=4).text("%d line%s hidden" % (count, "s" if count > 1 else "")) if count > 10: tbody.setAttribute("class", "hidden") 
reviewer_filters_hidden.append(True) else: tfoot.setAttribute("class", "hidden") reviewer_filters_hidden.append(False) buttons = target.div("buttons") if reviewer_filters_hidden: buttons.button("showfilters", onclick="toggleReviewFilters('reviewers', $(this));").text("%s Custom Filters" % ("Show" if reviewer_filters_hidden[0] else "Hide")) if not review.applyfilters: buttons.button("applyfilters", onclick="applyFilters('global');").text("Apply Global Filters") if review.applyfilters and review.repository.parent and not review.applyparentfilters: buttons.button("applyparentfilters", onclick="applyFilters('upstream');").text("Apply Upstream Filters") buttons.button("addreviewer", onclick="addReviewer();").text("Add Reviewer") buttons.button("manage", onclick="location.href='managereviewers?review=%d';" % review.id).text("Manage Assignments") def renderWatchers(target): if review.watchers: renderPeople(target, review.watchers) else: target.i().text("No watchers.") cursor.execute("""SELECT reviewfilters.id, reviewfilters.uid, reviewfilters.path FROM reviewfilters JOIN users ON (reviewfilters.uid=users.id) WHERE reviewfilters.review=%s AND reviewfilters.type='watcher' AND users.status!='retired'""", (review.id,)) rows = cursor.fetchall() watcher_filters_hidden = [] if rows: table = target.table("reviewfilters watchers") row = table.thead().tr("h1") row.th("h1", colspan=4).text("Custom filters:") filter_data = {} reviewfilters = {} for filter_id, user_id, path in rows: filter_user = dbutils.User.fromId(db, user_id) path = path or '/' reviewfilters.setdefault(filter_user.fullname, []).append(path) filter_data[(filter_user.fullname, path)] = (filter_id, filter_user) count = 0 tbody = table.tbody() for fullname in sorted(reviewfilters.keys()): original_paths = sorted(reviewfilters[fullname]) trimmed_paths = diff.File.eliminateCommonPrefixes(original_paths[:]) first = True for original_path, trimmed_path in zip(original_paths, trimmed_paths): row = tbody.tr("filter") if 
first: row.td("username", rowspan=len(original_paths)).text(fullname) row.td("reviews", rowspan=len(original_paths)).text("watches") first = False row.td("path").span().innerHTML(trimmed_path) filter_id, filter_user = filter_data[(fullname, original_path)] href = "javascript:removeReviewFilter(%d, %s, 'watcher', %s, %s);" % (filter_id, filter_user.getJSConstructor(), htmlutils.jsify(original_path), "true" if filter_user != user else "false") row.td("remove").a(href=href).text("[remove]") count += 1 tfoot = table.tfoot() tfoot.tr().td(colspan=4).text("%d line%s hidden" % (count, "s" if count > 1 else "")) if count > 10: tbody.setAttribute("class", "hidden") watcher_filters_hidden.append(True) else: tfoot.setAttribute("class", "hidden") watcher_filters_hidden.append(False) buttons = target.div("buttons") if watcher_filters_hidden: buttons.button("showfilters", onclick="toggleReviewFilters('watchers', $(this));").text("%s Custom Filters" % ("Show" if watcher_filters_hidden[0] else "Hide")) buttons.button("addwatcher", onclick="addWatcher();").text("Add Watcher") if user not in review.reviewers and user not in review.owners: if user not in review.watchers: buttons.button("watch", onclick="watchReview();").text("Watch Review") elif review.watchers[user] == "manual": buttons.button("watch", onclick="unwatchReview();").text("Stop Watching Review") def renderEditOwners(target): target.button("description", onclick="editOwners();").text("Edit Owners") def renderEditDescription(target): target.button("description", onclick="editDescription();").text("Edit Description") def renderRecipientList(target): cursor.execute("""SELECT uid, fullname, include FROM reviewrecipientfilters LEFT OUTER JOIN users ON (uid=id) WHERE review=%s""", (review.id,)) default_include = True included = dict((owner.fullname, owner.id) for owner in review.owners) excluded = {} for user_id, fullname, include in cursor: if user_id is None: default_include = include elif include: included[fullname] = 
user_id elif user_id not in review.owners: excluded[fullname] = user_id mode = None users = None buttons = [] opt_in_button = False opt_out_button = False if default_include: if excluded: mode = "Everyone except " users = excluded opt_out_button = user.fullname not in excluded opt_in_button = not opt_out_button else: mode = "Everyone." opt_out_button = True else: if included: mode = "No-one except " users = included opt_in_button = user.fullname not in included opt_out_button = not opt_in_button else: mode = "No-one at all." opt_in_button = True if user not in review.owners and (user in review.reviewers or user in review.watchers): if opt_in_button: buttons.append(("Include me, please!", "includeRecipient(%d);" % user.id)) if opt_out_button: buttons.append(("Exclude me, please!", "excludeRecipient(%d);" % user.id)) target.span("mode").text(mode) if users: container = target.span("users") first = True for fullname in sorted(users.keys()): if first: first = False else: container.text(", ") container.span("user", critic_user_id=users[fullname]).text(fullname) container.text(".") if buttons: container = target.div("buttons") for label, onclick in buttons: container.button(onclick=onclick).text(label) row("Branch", renderBranchName, "The branch containing the commits to review.", right=False) row("Owner%s" % ("s" if len(review.owners) > 1 else ""), renderOwners, "The users who created and/or owns the review.", right=renderEditOwners) if review.description: row("Description", review.description, "A longer description of the changes to be reviewed.", linkify=linkToCommit, cellId="description", right=renderEditDescription) row("Reviewers", renderReviewers, "Users responsible for reviewing the changes in this review.", right=False) row("Watchers", renderWatchers, "Additional users who receive e-mails about updates to this review.", right=False) row("Recipient List", renderRecipientList, "Users (among the reviewers and watchers) who will receive any e-mails about the 
review.", right=False) profiler.check("basic") review_state = review.getReviewState(db) profiler.check("review state") progress = target.table('paleyellow progress', align='center') progress_header = progress.tr().td('h1', colspan=3).h1() progress_header.text("Review Progress") progress_header_right = progress_header.span("right") progress_header_right.text("Display log: ") progress_header_right.a(href="showreviewlog?review=%d&granularity=module" % review.id).text("[per module]") progress_header_right.text() progress_header_right.a(href="showreviewlog?review=%d&granularity=file" % review.id).text("[per file]") progress_h1 = progress.tr().td('percent', colspan=3).h1() title_data = { 'id': 'r/%d' % review.id, 'summary': review.summary, 'progress': str(review_state) } if review.state == "closed": progress_h1.img(src=htmlutils.getStaticResourceURI("seal-of-approval-left.png"), style="position: absolute; margin-left: -80px; margin-top: -100px") progress_h1.text("Finished!") for branch in review.repository.getSignificantBranches(db): if review.branch.getHead(db).isAncestorOf(branch.head_sha1): remark = progress_h1.div().span("remark") remark.text("Merged to ") remark.a(href="/log?repository=%s&branch=%s" % (review.repository.name, branch.name)).text(branch.name) remark.text(".") elif review.state == "dropped": progress_h1.text("Dropped...") elif review.state == "open" and review_state.accepted: progress_h1.img(src=htmlutils.getStaticResourceURI("seal-of-approval-left.png"), style="position: absolute; margin-left: -80px; margin-top: -100px") progress_h1.text("Accepted!") progress_h1.div().span("remark").text("Hurry up and close it before anyone has a change of heart.") else: progress_h1.text(review_state.getProgress()) if review_state.issues: progress_h1.span("comments").text(" and ") progress_h1.text("%d" % review_state.issues) progress_h1.span("comments").text(" issue%s" % (review_state.issues > 1 and "s" or "")) if review_state.getPercentReviewed() != 100.0: cursor = 
db.cursor() cursor.execute("""SELECT 1 FROM reviewfiles LEFT OUTER JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id) WHERE reviewfiles.review=%s AND reviewfiles.state='pending' AND reviewuserfiles.uid IS NULL""", (review.id,)) if cursor.fetchone(): progress.tr().td('stuck', colspan=3).a(href="showreviewlog?review=%d&granularity=file&unassigned=yes" % review.id).text("Not all changes have a reviewer assigned!") cursor.execute("""SELECT uid, MIN(reviewuserfiles.time) FROM reviewfiles JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id) WHERE reviewfiles.review=%s AND reviewfiles.state='pending' GROUP BY reviewuserfiles.uid""", (review.id,)) now = datetime.datetime.now() def seconds_since(timestamp): if isinstance(timestamp, int): # We tell sqlite3 to convert TIMESTAMP values into datetime # objects, but apparently MIN(timestamp) as used in the # query above isn't typed as TIMESTAMP by SQLite, so doesn't # get converted automatically. timestamp = datetime.datetime.fromtimestamp(timestamp) delta = now - timestamp return delta.days * 60 * 60 * 24 + delta.seconds pending_reviewers = [(dbutils.User.fromId(db, user_id), seconds_since(timestamp)) for (user_id, timestamp) in cursor.fetchall() if seconds_since(timestamp) > 60 * 60 * 8] if pending_reviewers: progress.tr().td('stragglers', colspan=3).text("Needs review from") for reviewer, seconds in pending_reviewers: if reviewer.status == 'retired': continue elif reviewer.status == 'absent': warning = " absent" elif not reviewer.getPreference(db, "email.activated"): warning = " no-email" else: warning = "" if seconds < 60 * 60 * 24: hours = seconds / (60 * 60) duration = " (%d hour%s)" % (hours, "s" if hours > 1 else "") elif seconds < 60 * 60 * 24 * 7: days = seconds / (60 * 60 * 24) duration = " (%d day%s)" % (days, "s" if days > 1 else "") elif seconds < 60 * 60 * 24 * 30: weeks = seconds / (60 * 60 * 24 * 7) duration = " (%d week%s)" % (weeks, "s" if weeks > 1 else "") else: duration = " (wake up!)" 
progress.tr().td('straggler' + warning, colspan=3).text("%s%s" % (reviewer.fullname, duration)) if user in review.owners: progress.tr().td('pinging', colspan=3).span().text("Send a message to these users by pinging the review.") title_format = user.getPreference(db, 'ui.title.showReview') try: document.setTitle(title_format % title_data) except Exception as exc: document.setTitle(traceback.format_exception_only(type(exc), exc)[0].strip()) profiler.check("progress") check = profiler.start("ApprovalColumn.fillCache") approval_cache = {} ApprovalColumn.fillCache(db, user, review, approval_cache, profiler) check.stop() summary_column = SummaryColumn(review) summary_column.fillCache(db, review) profiler.check("SummaryColumn.fillCache") columns = [(10, log.html.WhenColumn()), (60, summary_column), (16, log.html.AuthorColumn()), (7, ApprovalColumn(user, review, ApprovalColumn.APPROVED, approval_cache)), (7, ApprovalColumn(user, review, ApprovalColumn.TOTAL, approval_cache))] def renderReviewPending(db, target): if not user.isAnonymous(): target.text("Filter: ") if hasPendingChanges: target.a(href="showcommit?review=%d&filter=pending" % review.id, title="All changes you need to review.").text("[pending]") target.text() if user in review.reviewers: target.a(href="showcommit?review=%d&filter=reviewable" % review.id, title="All changes you can review, including what you've already reviewed.").text("[reviewable]") target.text() target.a(href="showcommit?review=%d&filter=relevant" % review.id, title="All changes that match your filters.").text("[relevant]") target.text() target.text("Manual: ") target.a(href="filterchanges?review=%d" % review.id, title="Manually select what files to display of the changes from all commits.").text("[full]") target.text() target.a(href="javascript:void(filterPartialChanges());", title="Manually select what files to display of the changes in a selection of commits.").text("[partial]") req.addResponseHeader("ETag", review.getETag(db, user)) if 
user.getPreference(db, "review.useMustRevalidate"): req.addResponseHeader("Cache-Control", "must-revalidate") yield flush(target) try: if prefetch_commits.error is None: prefetch_commits.getCommits(db) profiler.check("FetchCommits.getCommits()") cursor.execute("""SELECT DISTINCT parent, child FROM changesets JOIN reviewchangesets ON (reviewchangesets.changeset=changesets.id) JOIN commits ON (commits.id=changesets.child) WHERE review=%s AND type!='conflicts'""", (review.id,)) commits = set() for parent_id, child_id in cursor: commits.add(gitutils.Commit.fromId(db, repository, child_id)) commits = list(commits) cursor.execute("""SELECT id, old_head, new_head, new_upstream, equivalent_merge, replayed_rebase, uid, branch FROM reviewrebases WHERE review=%s""", (review.id,)) all_rebases = [(rebase_id, gitutils.Commit.fromId(db, repository, old_head), gitutils.Commit.fromId(db, repository, new_head) if new_head else None, dbutils.User.fromId(db, user_id), gitutils.Commit.fromId(db, repository, new_upstream) if new_upstream is not None else None, gitutils.Commit.fromId(db, repository, equivalent_merge) if equivalent_merge is not None else None, gitutils.Commit.fromId(db, repository, replayed_rebase) if replayed_rebase is not None else None, branch_name) for rebase_id, old_head, new_head, new_upstream, equivalent_merge, replayed_rebase, user_id, branch_name in cursor] bottom_right = None finished_rebases = filter(lambda item: item[2] is not None, all_rebases) current_rebases = filter(lambda item: item[2] is None, all_rebases) if current_rebases: assert len(current_rebases) == 1 def renderCancelRebase(db, target): target.button("cancelrebase").text("Cancel Rebase") if user == current_rebases[0][3]: bottom_right = renderCancelRebase else: def renderPrepareRebase(db, target): target.button("preparerebase").text("Prepare Rebase") bottom_right = renderPrepareRebase if finished_rebases: cursor.execute("""SELECT commit FROM reachable WHERE branch=%s""", (review.branch.id,)) 
actual_commits = [gitutils.Commit.fromId(db, repository, commit_id) for (commit_id,) in cursor] else: actual_commits = [] log.html.render(db, target, "Commits (%d)", commits=commits, columns=columns, title_right=renderReviewPending, rebases=finished_rebases, branch_name=review.branch.name, bottom_right=bottom_right, review=review, highlight=highlight, profiler=profiler, user=user, extra_commits=actual_commits) yield flush(target) profiler.check("log") except gitutils.GitReferenceError as error: div = target.div("error") div.h1().text("Error!") div.text("The commit %s is missing from the repository." % error.sha1) except gitutils.GitError as error: div = target.div("error") div.h1().text("Error!") div.text("Failed to read commits from the repository: %s" % error.message) all_chains = review_comment.CommentChain.fromReview(db, review, user) profiler.check("chains (load)") if all_chains: issue_chains = filter(lambda chain: chain.type == "issue", all_chains) draft_issues = filter(lambda chain: chain.state == "draft", issue_chains) open_issues = filter(lambda chain: chain.state == "open", issue_chains) addressed_issues = filter(lambda chain: chain.state == "addressed", issue_chains) closed_issues = filter(lambda chain: chain.state == "closed", issue_chains) note_chains = filter(lambda chain: chain.type == "note", all_chains) draft_notes = filter(lambda chain: chain.state == "draft", note_chains) open_notes = filter(lambda chain: chain.state != "draft" and chain.state != "empty", note_chains) else: open_issues = [] open_notes = [] chains = target.table("paleyellow comments", align="center", cellspacing=0) h1 = chains.tr("h1").td("h1", colspan=3).h1().text("Comments") links = h1.span("links") if all_chains: links.a(href="showcomments?review=%d&filter=all" % review.id).text("[display all]") if not user.isAnonymous(): links.a(href="showcomments?review=%d&filter=all&blame=%s" % (review.id, user.name)).text("[in my commits]") cursor.execute("""SELECT 
count(commentstoread.comment) > 0 FROM commentchains JOIN comments ON (comments.chain=commentchains.id) JOIN commentstoread ON (commentstoread.comment=comments.id) WHERE commentchains.review=%s AND commentstoread.uid=%s""", [review.id, user.id]) if cursor.fetchone()[0]: links.a(href="showcomments?review=%d&filter=toread" % review.id).text("[display unread]") def renderChains(target, chains): for chain in chains: row = target.tr("comment %s %s" % (chain.type, chain.state)) row.td("author").text(chain.user.fullname) row.td("title").a(href="showcomment?chain=%d" % chain.id).innerHTML(chain.leader()) ncomments = chain.countComments() nunread = chain.countUnread() cell = row.td("when") if ncomments <= 1: if nunread: cell.b().text("Unread") else: cell.text("No replies") else: if nunread: cell.b().text("%d of %d unread" % (nunread, ncomments)) else: cell.text("%d repl%s" % (ncomments - 1, "ies" if ncomments > 2 else "y")) if draft_issues: h2 = chains.tr("h2", id="draft-issues").td("h2", colspan=3).h2().text("Draft Issues") h2.a(href="showcomments?review=%d&filter=draft-issues" % review.id).text("[display all]") h2.a(href="showcomments?review=%d&filter=draft-issues&blame=%s" % (review.id, user.name)).text("[in my commits]") renderChains(chains, draft_issues) if open_issues: h2 = chains.tr("h2", id="open-issues").td("h2", colspan=3).h2().text("Open Issues") h2.a(href="showcomments?review=%d&filter=open-issues" % review.id).text("[display all]") h2.a(href="showcomments?review=%d&filter=open-issues&blame=%s" % (review.id, user.name)).text("[in my commits]") renderChains(chains, open_issues) if addressed_issues: h2 = chains.tr("h2", id="addressed-issues").td("h2", colspan=3).h2().text("Addressed Issues") h2.a(href="showcomments?review=%d&filter=addressed-issues" % review.id).text("[display all]") h2.a(href="showcomments?review=%d&filter=addressed-issues&blame=%s" % (review.id, user.name)).text("[in my commits]") renderChains(chains, addressed_issues) if closed_issues: h2 = 
chains.tr("h2", id="closed-issues").td("h2", colspan=3).h2().text("Resolved Issues") h2.a(href="showcomments?review=%d&filter=closed-issues" % review.id).text("[display all]") h2.a(href="showcomments?review=%d&filter=closed-issues&blame=%s" % (review.id, user.name)).text("[in my commits]") renderChains(chains, closed_issues) if draft_notes: h2 = chains.tr("h2", id="draft-notes").td("h2", colspan=3).h2().text("Draft Notes") h2.a(href="showcomments?review=%d&filter=draft-notes" % review.id).text("[display all]") h2.a(href="showcomments?review=%d&filter=draft-notes&blame=%s" % (review.id, user.name)).text("[in my commits]") renderChains(chains, draft_notes) if open_notes: h2 = chains.tr("h2", id="notes").td("h2", colspan=3).h2().text("Notes") h2.a(href="showcomments?review=%d&filter=open-notes" % review.id).text("[display all]") h2.a(href="showcomments?review=%d&filter=open-notes&blame=%s" % (review.id, user.name)).text("[in my commits]") renderChains(chains, open_notes) buttons = chains.tr("buttons").td("buttons", colspan=3) buttons.button(onclick="CommentChain.create('issue');").text("Raise Issue") buttons.button(onclick="CommentChain.create('note');").text("Write Note") profiler.check("chains (render)") yield flush(target) cursor.execute("""SELECT DISTINCT reviewfiles.file, theirs.uid FROM reviewfiles JOIN reviewuserfiles AS yours ON (yours.file=reviewfiles.id) JOIN reviewuserfiles AS theirs ON (theirs.file=yours.file AND theirs.uid!=yours.uid) WHERE reviewfiles.review=%s AND yours.uid=%s""", (review.id, user.id)) rows = cursor.fetchall() profiler.check("shared assignments (query)") if rows: reviewers = {} for file_id, user_id in rows: reviewers.setdefault(file_id, {})[user_id] = set() shared = target.table('paleyellow shared', align='center', cellspacing=0) row = shared.tr('h1') shared_header = row.td('h1', colspan=2).h1() shared_header.text("Shared Assignments") shared_buttons = row.td('buttons', colspan=2).span(style="display: none") 
shared_buttons.button("confirm").text("Confirm") shared_buttons.button("cancel").text("Cancel") granularity = "module" def moduleFromFile(file_id): filename = dbutils.describe_file(db, file_id) return getModuleFromFile(repository, filename) or filename def formatFiles(files): paths = sorted([dbutils.describe_file(db, file_id) for file_id in files]) if granularity == "file": return diff.File.eliminateCommonPrefixes(paths) else: modules = set() files = [] for path in paths: module = getModuleFromFile(path) if module: modules.add(module) else: files.append(path) return sorted(modules) + diff.File.eliminateCommonPrefixes(files) files_per_team = review_utils.collectReviewTeams(reviewers) teams_per_modules = {} profiler.check("shared assignments (collect teams)") for team, files in files_per_team.items(): modules = set() for file_id in files: modules.add(moduleFromFile(file_id)) teams_per_modules.setdefault(frozenset(modules), set()).update(team) for modules, team in teams_per_modules.items(): row = shared.tr("reviewers") cell = row.td("reviewers") members = sorted([dbutils.User.fromId(db, user_id).fullname for user_id in team]) for member in members: cell.text(member).br() row.td("willreview").innerHTML("<span class='also'>also</span> review changes in") cell = row.td("files") for path in diff.File.eliminateCommonPrefixes(sorted(modules)): cell.span("file").innerHTML(path).br() paths = textutils.json_encode(list(modules)) user_ids = textutils.json_encode(list(team)) cell = row.td("buttons") cell.button("accept", critic_paths=paths, critic_user_ids=user_ids).text("I will review this!") cell.button("deny", critic_paths=paths, critic_user_ids=user_ids).text("They will review this!") yield flush(target) profiler.check("shared assignments") cursor.execute("""SELECT batches.id, batches.time, users.fullname, comments.comment FROM batches JOIN users ON (users.id=batches.uid) LEFT OUTER JOIN commentchains ON (commentchains.id=batches.comment) LEFT OUTER JOIN comments ON 
(comments.id=commentchains.first_comment) WHERE batches.review=%s ORDER BY batches.id DESC""", (review.id,)) rows = cursor.fetchall() if rows: numbers = {} cursor.execute("""SELECT batches.id, reviewfilechanges.to_state, SUM(deleted), SUM(inserted) FROM batches JOIN reviewfilechanges ON (reviewfilechanges.batch=batches.id) JOIN reviewfiles ON (reviewfiles.id=reviewfilechanges.file) WHERE batches.review=%s GROUP BY batches.id, reviewfilechanges.to_state""", (review.id,)) for batch_id, state, deleted, inserted in cursor: per_batch = numbers.setdefault(batch_id, {}) per_batch[state] = (deleted, inserted) cursor.execute("""SELECT batches.id, commentchains.type, COUNT(commentchains.id) FROM batches JOIN commentchains ON (commentchains.batch=batches.id) WHERE batches.review=%s GROUP BY batches.id, commentchains.type""", (review.id,)) for batch_id, commentchain_type, count in cursor: per_batch = numbers.setdefault(batch_id, {}) per_batch[commentchain_type] = count cursor.execute("""SELECT batches.id, commentchainchanges.to_state, COUNT(commentchainchanges.chain) FROM batches JOIN commentchainchanges ON (commentchainchanges.batch=batches.id) WHERE batches.review=%s AND commentchainchanges.to_state IS NOT NULL GROUP BY batches.id, commentchainchanges.to_state""", (review.id,)) for batch_id, commentchainchange_state, count in cursor: per_batch = numbers.setdefault(batch_id, {}) per_batch[commentchainchange_state] = count cursor.execute("""SELECT batches.id, COUNT(commentchainchanges.chain) FROM batches JOIN commentchainchanges ON (commentchainchanges.batch=batches.id) WHERE batches.review=%s AND commentchainchanges.to_type IS NOT NULL GROUP BY batches.id""", (review.id,)) for batch_id, count in cursor: per_batch = numbers.setdefault(batch_id, {}) per_batch["morph"] = count cursor.execute("""SELECT batches.id, COUNT(comments.id) FROM batches JOIN commentchains ON (commentchains.batch!=batches.id) JOIN comments ON (comments.batch=batches.id AND comments.chain=commentchains.id) 
WHERE batches.review=%s GROUP BY batches.id""", (review.id,)) for batch_id, count in cursor: per_batch = numbers.setdefault(batch_id, {}) per_batch["reply"] = count batches = target.table("paleyellow batches", align="center", cellspacing=0) batches.tr().td("h1", colspan=3).h1().text("Work Log") def format_lines(deleted, inserted): if deleted and inserted: return "-%d/+%d" % (deleted, inserted) elif deleted: return "-%d" % deleted else: return "+%d" % inserted def with_plural(count, one, many): return (count, many if count > 1 else one) def with_plural_s(count): return with_plural(count, "", "s") for batch_id, timestamp, user_fullname, comment in rows: row = batches.tr("batch") row.td("author").text(user_fullname) title = row.td("title clickable").a( "clickable-target", href="showbatch?batch=%d" % batch_id) if comment: title.innerHTML(textutils.summarize(comment, as_html=True)) else: title.i().text("No comment") per_batch = numbers.get(batch_id) if per_batch: items = [] reviewed = per_batch.get("reviewed") if reviewed: items.append("reviewed %s lines" % format_lines(*reviewed)) unreviewed = per_batch.get("pending") if unreviewed: items.append("unreviewed %s lines" % format_lines(*unreviewed)) issues = per_batch.get("issue") if issues: items.append("raised %d issue%s" % with_plural_s(issues)) notes = per_batch.get("note") if notes: items.append("wrote %d note%s" % with_plural_s(notes)) resolved = per_batch.get("closed") if resolved: items.append("resolved %d issue%s" % with_plural_s(resolved)) reopened = per_batch.get("open") if reopened: items.append("reopened %d issue%s" % with_plural_s(reopened)) morphed = per_batch.get("morph") if morphed: items.append("morphed %d comment%s" % with_plural_s(morphed)) replies = per_batch.get("reply") if replies: items.append("wrote %d %s" % with_plural(replies, "reply", "replies")) if items: if len(items) == 1: items = items[0] else: items = "%s and %s" % (", ".join(items[:-1]), items[-1]) else: items = "nothing" 
title.span("numbers").text(items) row.td("when").text(user.formatTimestamp(db, timestamp)) profiler.check("batches") profiler.output(db, user, target) yield flush() try: head = review.branch.getHead(db) except gitutils.GitReferenceError: # Commit missing from repository. pass else: try: head_according_to_git = repository.revparse(review.branch.name) except gitutils.GitReferenceError: # Branch missing from repository. head_according_to_git = None head_according_to_us = head.sha1 if head_according_to_git != head_according_to_us: # The git repository disagrees with us. Potentially harmful updates # to the branch will be rejected by the git hook while this is the # case, but this means that "our" head might not be referenced at # all and thus that it might be GC:ed by the git repository at some # point. To avoid that, add a keepalive reference. repository.keepalive(head_according_to_us) yield "\n<!-- branch head mismatch: git=%s, us=%s (corrected) -->" % (head_according_to_git[:8] if head_according_to_git else "N/A", head_according_to_us[:8]) ================================================ FILE: src/page/showreviewlog.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import dbutils import htmlutils import page.utils import log.html import reviewing.utils as review_utils import reviewing.html as review_html import reviewing.comment as review_comment import re import diff re_module = re.compile("^((?:data|modules|platforms)/[^/]+)/.*$") def renderShowReviewLog(req, db, user): review_id = req.getParameter("review", filter=int) granularity = req.getParameter("granularity") unassigned = req.getParameter("unassigned", "no") == "yes" cursor = db.cursor() review = dbutils.Review.fromId(db, review_id) document = htmlutils.Document(req) html = document.html() head = html.head() body = html.body() head.title("Review Log: %s" % review.branch.name) page.utils.generateHeader(body, db, user, lambda target: review_utils.renderDraftItems(db, user, review, target), extra_links=[("r/%d" % review.id, "Back to Review")]) document.addExternalStylesheet("resource/showreviewlog.css") document.addExternalStylesheet("resource/review.css") document.addExternalScript("resource/review.js") document.addInternalScript(review.getJS()) target = body.div("main") reviewed_reviewers = review_utils.getReviewedReviewers(db, review) def formatFiles(files): paths = sorted([dbutils.describe_file(db, file_id) for file_id in files]) if granularity == "file": return diff.File.eliminateCommonPrefixes(paths) else: modules = set() files = [] for path in paths: match = re_module.match(path) if match: modules.add(match.group(1)) else: files.append(path) return sorted(modules) + diff.File.eliminateCommonPrefixes(files) if reviewed_reviewers and not unassigned: reviewed = target.table("paleyellow") reviewed.col(width="30%") reviewed.col(width="10%") reviewed.col(width="60%") reviewed.tr().td('h1', colspan=3).h1().text("Reviewed Changes") teams = review_utils.collectReviewTeams(reviewed_reviewers) for team in teams: row = reviewed.tr("reviewers") cell = row.td("reviewers") users = sorted([dbutils.User.fromId(db, user_id).fullname for user_id in team]) for user in users: 
cell.text(user).br() row.td("willreview").innerHTML("reviewed") cell = row.td("files") for file in formatFiles(teams[team]): cell.span("file").innerHTML(file).br() pending_reviewers = review_utils.getPendingReviewers(db, review) if pending_reviewers: pending = target.table("paleyellow") pending.col(width="30%") pending.col(width="10%") pending.col(width="60%") pending.tr().td('h1', colspan=3).h1().text("Pending Changes") teams = review_utils.collectReviewTeams(pending_reviewers) if not unassigned: for team in teams: if team is not None: row = pending.tr("reviewers") cell = row.td("reviewers") users = sorted([dbutils.User.fromId(db, user_id).fullname for user_id in team]) for user in users: cell.text(user).br() row.td("willreview").innerHTML("should review") cell = row.td("files") for file in formatFiles(teams[team]): cell.span("file").innerHTML(file).br() if None in teams: row = pending.tr("reviewers") row.td("no-one", colspan=2).text("No reviewers found for changes in") cell = row.td("files") for file in formatFiles(teams[None]): cell.span("file").innerHTML(file).br() return document ================================================ FILE: src/page/showtree.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import os
import stat
import urllib

import dbutils
import gitutils
import page.utils
import htmlutils
import textutils

def renderShowTree(req, db, user):
    """Render a directory listing for a tree in a commit.

    Parameters come from the request: 'sha1' (commit), 'path' (directory,
    default "/") and optionally 'review' (to associate the page with a
    review) or 'repository'.  Returns the htmlutils.Document, or raises
    page.utils.DisplayMessage if the directory doesn't exist.
    """
    cursor = db.cursor()

    sha1 = req.getParameter("sha1")
    path = req.getParameter("path", "/")
    review_id = req.getParameter("review", None, filter=int)

    # Normalize: 'full_path' always has a leading slash, 'path' never has
    # one (except when it is the root itself, "/").
    if path[0] == '/':
        full_path = path
        if path != "/":
            path = path[1:]
    else:
        full_path = "/" + path
    if not path:
        path = "/"

    if review_id is None:
        review = None
        repository_arg = req.getParameter("repository", "")
        if repository_arg:
            repository = gitutils.Repository.fromParameter(db, repository_arg)
        else:
            # No explicit repository: find one containing the commit.
            repository = gitutils.Repository.fromSHA1(db, sha1)
    else:
        review = dbutils.Review.fromId(db, review_id)
        repository = review.repository

    document = htmlutils.Document(req)

    html = document.html()
    head = html.head()
    body = html.body()

    extra_links = []

    if review:
        extra_links.append(("r/%d" % review.id, "Back to Review"))

    page.utils.generateHeader(body, db, user, extra_links=extra_links)

    document.addExternalStylesheet("resource/showtree.css")

    if review:
        document.addInternalScript(review.getJS())

    target = body.div("main")

    table = target.table("tree paleyellow", align="center", cellspacing=0)
    table.col(width="10%")
    table.col(width="60%")
    table.col(width="20%")

    thead = table.thead()
    h1 = thead.tr().td('h1', colspan=3).h1()

    def make_url(url_path, path):
        # Build a link to this page (or showfile) preserving the commit and
        # the repository/review context.
        params = { "sha1": sha1, "path": path }
        if review is None:
            params["repository"] = str(repository.id)
        else:
            params["review"] = str(review.id)
        return "%s?%s" % (url_path, urllib.urlencode(params))

    # Breadcrumb header: each path component links to its parent tree.
    if path == "/":
        h1.text("/")
    else:
        h1.a("root", href=make_url("showtree", "/")).text("root")
        h1.span().text('/')

        components = path.split("/")
        for index, component in enumerate(components[:-1]):
            h1.a(href=make_url("showtree", "/".join(components[:index + 1]))) \
              .text(component, escape=True)
            h1.span().text('/')

        h1.text(components[-1], escape=True)

    row = thead.tr()
    row.td('mode').text("Mode")
    row.td('name').text("Name")
    row.td('size').text("Size")

    tree = gitutils.Tree.fromPath(
        gitutils.Commit.fromSHA1(db, repository, sha1), full_path)

    if tree is None:
        raise page.utils.DisplayMessage(
            title="Directory does not exist",
            body=("<p>There is no directory named <code>%s</code> in the commit "
                  "<a href='/showcommit?repository=%s&sha1=%s'>"
                  "<code>%s</code></a>.</p>"
                  % (htmlutils.htmlify(textutils.escape(full_path)),
                     htmlutils.htmlify(repository.name),
                     htmlutils.htmlify(sha1),
                     htmlutils.htmlify(sha1[:8]))),
            html=True)

    def compareEntries(a, b):
        # Sub-directories first, then alphabetical within each group.
        if a.type != b.type:
            if a.type == "tree":
                return -1
            else:
                return 1
        else:
            return cmp(a.name, b.name)

    tbody = table.tbody()

    for entry in sorted(tree, cmp=compareEntries):
        if entry.type in ("blob", "tree"):
            if entry.type == "blob":
                url_path = "showfile"
            else:
                url_path = "showtree"
            url = make_url(url_path, os.path.join(path, entry.name))
        else:
            # e.g. submodule ("commit") entries are not linked.
            url = None

        row = tbody.tr(entry.type)
        row.td('mode').text(str(entry.mode))

        if stat.S_ISLNK(entry.mode):
            # Symlink: show "name -> target" across two columns.
            cell = row.td('link', colspan=2)
            cell.span('name').text(entry.name, escape=True)
            cell.text(' -> ')
            cell.span('target').text(repository.fetch(entry.sha1).data)
        elif entry.type == "commit":
            row.td('name').text("%s (%s)" % (entry.name, entry.sha1),
                                escape=True)
            row.td('size').text(entry.size)
        else:
            row.td('name').a(href=url).text(entry.name, escape=True)
            row.td('size').text(entry.size)

    return document

# ================================================ FILE: src/page/statistics.py ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import page.utils
import htmlutils
import dbutils

def renderStatistics(req, db, user):
    """Render the "Statistics" page: top-10 leaderboards for lines reviewed,
    lines in owned reviews, issues raised, comments written and characters
    written.  If the current user is not in a top-10, an extra row shows
    their own figure and rank.  Uses session-local temporary tables and ends
    with db.rollback(), so nothing is persisted.
    """
    document = htmlutils.Document(req)
    document.setTitle("Statistics")

    html = document.html()
    head = html.head()
    body = html.body()

    def flush(stop):
        return document.render(stop=stop)

    page.utils.generateHeader(body, db, user, current_page="statistics")

    document.addExternalStylesheet("resource/statistics.css")

    table = body.div("main").table("paleyellow", align="center", cellspacing=0)

    def commas(number):
        # Insert thousands separators (fixed positions; works because the
        # three conditions are applied from most to least significant group).
        as_string = str(number)
        if number >= 1000000000:
            as_string = as_string[:-9] + "," + as_string[-9:]
        if number >= 1000000:
            as_string = as_string[:-6] + "," + as_string[-6:]
        if number >= 1000:
            as_string = as_string[:-3] + "," + as_string[-3:]
        return as_string

    # -- Section 1: Most Lines Reviewed ------------------------------------
    table.tr("h1").td("h1", colspan=4).h1().text("Most Lines Reviewed")
    table.tr("space").td(colspan=4)

    cursor = db.cursor()
    # Temporary table so the self-rank queries below can reuse the totals.
    cursor.execute("CREATE TEMPORARY TABLE reviewers (uid INTEGER, lines INTEGER)")
    cursor.execute("INSERT INTO reviewers (uid, lines) SELECT reviewer, SUM(deleted) + SUM(inserted) FROM reviewfiles WHERE state='reviewed' GROUP BY reviewer")
    cursor.execute("SELECT uid, lines FROM reviewers ORDER BY lines DESC LIMIT 10")

    self_included = False

    for user_id, lines in cursor:
        if user_id == user.id:
            row = table.tr("line self")
            self_included = True
        else:
            row = table.tr("line")
        row.td("left")
        row.td("user").text(dbutils.User.fromId(db, user_id).fullname)
        row.td("value").text("%s lines" % commas(lines))
        row.td("right")

    if not self_included:
        # Current user not in the top 10: show their own count and rank.
        cursor.execute("SELECT lines FROM reviewers WHERE uid=%s", (user.id,))
        data = cursor.fetchone()
        if data and data[0]:
            lines = data[0]
            cursor.execute("SELECT COUNT(*) + 1 FROM reviewers WHERE lines > %s", (lines,))
            table.tr("space").td(colspan=4)
            row = table.tr("line self extra")
            row.td("left")
            row.td("user").text(user.fullname)
            row.td("value").innerHTML("%s lines" % commas(lines))
            row.td("right").text("(your position: %d)" % cursor.fetchone()[0])

    table.tr("space").td(colspan=4)
    table.tr("space").td(colspan=4)

    # -- Section 2: Most Lines in Owned Reviews ----------------------------
    table.tr("h1").td("h1", colspan=4).h1().text("Most Lines in Owned Reviews")
    table.tr("space").td(colspan=4)

    cursor.execute("CREATE TEMPORARY TABLE owners (uid INTEGER, lines INTEGER)")
    cursor.execute("INSERT INTO owners (uid, lines) SELECT uid, SUM(deleted) + SUM(inserted) FROM reviewfiles JOIN reviewusers USING (review) JOIN reviews ON (reviewfiles.review=reviews.id) WHERE reviews.state IN ('open', 'closed') AND reviewusers.owner GROUP BY uid")
    cursor.execute("SELECT uid, lines FROM owners ORDER BY lines DESC LIMIT 10")

    self_included = False

    for user_id, lines in cursor:
        if user_id == user.id:
            row = table.tr("line self")
            self_included = True
        else:
            row = table.tr("line")
        row.td("left")
        row.td("user").text(dbutils.User.fromId(db, user_id).fullname)
        row.td("value").innerHTML("%s lines" % commas(lines))
        row.td("right")

    if not self_included:
        cursor.execute("SELECT lines FROM owners WHERE uid=%s", (user.id,))
        data = cursor.fetchone()
        if data and data[0]:
            lines = data[0]
            cursor.execute("SELECT COUNT(*) + 1 FROM owners WHERE lines > %s", (lines,))
            table.tr("space").td(colspan=4)
            row = table.tr("line self extra")
            row.td("left")
            row.td("user").text(user.fullname)
            row.td("value").innerHTML("%s lines" % commas(lines))
            row.td("right").text("(your position: %d)" % cursor.fetchone()[0])

    table.tr("space").td(colspan=4)
    table.tr("space").td(colspan=4)

    # -- Section 3: Most Issues Raised -------------------------------------
    table.tr("h1").td("h1", colspan=4).h1().text("Most Issues Raised")
    table.tr("space").td(colspan=4)

    cursor.execute("""SELECT uid, COUNT(type) AS issues FROM commentchains WHERE state IN ('open', 'addressed', 'closed') AND type='issue' GROUP BY uid ORDER BY issues DESC LIMIT 10""")

    def calculateRatio(user_id, issues):
        # Issues per thousand reviewed lines; relies on the 'reviewers'
        # temporary table created in section 1 (same DB session).
        cursor.execute("""SELECT lines FROM reviewers WHERE uid=%s""", (user_id,))
        row = cursor.fetchone()
        lines = row[0] if row else 0
        return float(issues * 1000) / float(lines) if lines else 0

    self_included = False

    # fetchall(): calculateRatio reuses the same cursor inside the loop.
    for user_id, issues in cursor.fetchall():
        if user_id == user.id:
            row = table.tr("line self")
            self_included = True
        else:
            row = table.tr("line")
        row.td("left")
        row.td("user").text(dbutils.User.fromId(db, user_id).fullname)
        row.td("value").text("%s issues" % commas(issues))
        ratio = "%.2f" % calculateRatio(user_id, issues)
        if ratio != "0.00":
            row.td("right").text("(%s issues/kloc)" % ratio)
        else:
            row.td("right")

    if not self_included:
        cursor.execute("""SELECT COUNT(type) FROM commentchains WHERE state IN ('open', 'addressed', 'closed') AND type='issue' AND uid=%s""", (user.id,))
        data = cursor.fetchone()
        if data and data[0]:
            issues = data[0]
            cursor.execute("""SELECT count(*) + 1 FROM (SELECT uid, COUNT(type) AS issues FROM commentchains WHERE state IN ('open', 'addressed', 'closed') AND type='issue' GROUP BY uid ORDER BY issues DESC) AS stats WHERE stats.issues > %s""", (issues,))
            table.tr("space").td(colspan=4)
            row = table.tr("line self extra")
            row.td("left")
            row.td("user").text(user.fullname)
            row.td("value").innerHTML("%s issues" % commas(issues))
            right = row.td("right")
            right.text("(your position: %d)" % cursor.fetchone()[0])
            ratio = "%.2f" % calculateRatio(user.id, issues)
            if ratio != "0.00":
                right.text(" (%s issues/kloc)" % ratio)

    table.tr("space").td(colspan=4)
    table.tr("space").td(colspan=4)

    # -- Section 4: Most Comments (and Replies) Written --------------------
    table.tr("h1").td("h1", colspan=4).h1().text("Most Comments (and Replies) Written")
    table.tr("space").td(colspan=4)

    cursor.execute("""SELECT uid, COUNT(state) AS comments FROM comments WHERE state='current' GROUP BY uid ORDER BY comments DESC LIMIT 10""")

    self_included = False

    for user_id, comments in cursor:
        if user_id == user.id:
            row = table.tr("line self")
            self_included = True
        else:
            row = table.tr("line")
        row.td("left")
        row.td("user").text(dbutils.User.fromId(db, user_id).fullname)
        row.td("value").innerHTML("%s comments" % commas(comments))
        row.td("right")

    if not self_included:
        cursor.execute("""SELECT COUNT(state) FROM comments WHERE state='current' AND uid=%s""", (user.id,))
        data = cursor.fetchone()
        if data and data[0]:
            # NOTE(review): 'issues' here actually holds the comment count;
            # the name is copied from section 3 and is misleading, but the
            # value is used consistently within this branch.
            issues = data[0]
            cursor.execute("""SELECT count(*) + 1 FROM (SELECT uid, COUNT(state) AS comments FROM comments WHERE state='current' GROUP BY uid ORDER BY comments DESC) AS stats WHERE stats.comments > %s""", (issues,))
            table.tr("space").td(colspan=4)
            row = table.tr("line self extra")
            row.td("left")
            row.td("user").text(user.fullname)
            row.td("value").innerHTML("%s comments" % commas(issues))
            row.td("right").text("(your position: %d)" % cursor.fetchone()[0])

    table.tr("space").td(colspan=4)
    table.tr("space").td(colspan=4)

    # -- Section 5: Most Characters Written --------------------------------
    table.tr("h1").td("h1", colspan=4).h1().text("Most Characters Written")
    table.tr("space").td(colspan=4)

    cursor.execute("""SELECT uid, SUM(CHARACTER_LENGTH(comment)) AS characters FROM comments WHERE state='current' GROUP BY uid ORDER BY characters DESC LIMIT 10""")

    self_included = False

    for user_id, characters in cursor:
        if user_id == user.id:
            row = table.tr("line self")
            self_included = True
        else:
            row = table.tr("line")
        row.td("left")
        row.td("user").text(dbutils.User.fromId(db, user_id).fullname)
        row.td("value").innerHTML("%s characters" % commas(characters))
        row.td("right")

    if not self_included:
        cursor.execute("""SELECT SUM(CHARACTER_LENGTH(comment)) FROM comments WHERE state='current' AND uid=%s""", (user.id,))
        data = cursor.fetchone()
        if data and data[0]:
            characters = data[0]
            cursor.execute("""SELECT count(*) + 1 FROM (SELECT uid, SUM(CHARACTER_LENGTH(comment)) AS characters FROM comments WHERE state='current' GROUP BY uid ORDER BY characters DESC) AS stats WHERE stats.characters > %s""", (characters,))
            table.tr("space").td(colspan=4)
            row = table.tr("line self extra")
            row.td("left")
            row.td("user").text(user.fullname)
            row.td("value").innerHTML("%s characters" % commas(characters))
            row.td("right").text("(your position: %d)" % cursor.fetchone()[0])

    # Discard the temporary tables / any transaction state.
    db.rollback()

    return document

# ================================================ FILE: src/page/tutorial.py ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import page.utils
import dbutils
import htmlutils
import configuration
import textformatting

def renderFromFile(db, user, target, name):
    """Render the tutorial text file <INSTALL_DIR>/tutorials/<name>.txt
    as a formatted table, followed by a "Back" link."""
    lines = open("%s/tutorials/%s.txt"
                 % (configuration.paths.INSTALL_DIR, name)).read().splitlines()
    table = target.table("paleyellow", align="center")
    textformatting.renderFormatted(db, user, table, lines, toc=True)
    table.tr("back").td("back").div().a(href="tutorial").text("Back")

def renderSections(db, user, target):
    """Render the tutorial index: one section per available tutorial, with
    extension and administration topics shown conditionally."""
    table = target.table("paleyellow", align="center")
    table.tr("h1").td("h1").h1().text("Tutorials")

    def section(name, title, description):
        table.tr("h2").td("h2").div().h2().text(title)
        table.tr("text").td("text").div().text(description, cdata=True)
        table.tr("goto").td("goto").div() \
             .a(href="tutorial?item=%s" % name).text("Learn More")

    section("request", "Requesting a Review", """\
Introduction to the different ways of requesting a review of changes in
Critic.  You'll be able to request a review of your bug fix in 10 seconds,
using your favorite git client!  (Though it'll take you more than 10 seconds
to read all the text…)""")

    section("review", "Reviewing Changes", """\
Introduction to the process of reviewing changes in Critic.  Covers the basic
concepts, marking changes as reviewed and raising issues, and some other
things.  Useful information both for reviewers and for those requesting the
reviews.""")

    section("filters", "Filters", """\
Information about the Filters mechanism.""")

    section("archival", "Review Branch Archival", """\
Information about the automatic review branch archival mechanism, which
deletes review branches some time after the review is finished.""")

    section("viewer", "Repository Viewer", """\
Some information about Critic's repository viewers and its peculiarities
compared to \"normal\" git repository viewers such as gitk and cgit.""")

    section("reconfigure", "Reconfiguring Critic", """\
Information about the various per-user configuration options that Critic
supports.""")

    section("rebase", "Rebasing Reviews", """\
Details on what kind of rebase operations are supported on review branches,
how to convince Critic to accept non-fast-forward updates, and some things
you really should make sure not to do.""")

    section("search", "Review Quick Search", """\
Information about the review search facility and the search query syntax.""")

    if configuration.extensions.ENABLED:
        section("extensions", "Critic Extensions", """\
Description of the Critic Extensions mechanism.""")

        section("extensions-api", "Critic Extensions API", """\
Description of the script API available to Critic Extensions.""")

    if user.hasRole(db, "administrator"):
        section("administration", "System Administration", """\
Information about various Critic system administration tasks.""")

        section("customization", "System Customization", """\
Information about Critic system customization hooks.""")

def renderTutorial(req, db, user):
    """Render either one tutorial (request parameter 'item') or the tutorial
    index page.  Returns the htmlutils.Document."""
    item = req.getParameter("item", None)

    document = htmlutils.Document(req)
    document.setBase(None)
    document.setTitle("Tutorials")

    html = document.html()
    head = html.head()
    body = html.body()

    page.utils.generateHeader(body, db, user,
                              current_page=None if item else "tutorial")

    document.addExternalStylesheet("resource/tutorial.css")
    document.addExternalScript("resource/tutorial.js")
    document.addInternalStylesheet("div.main table td.text { %s }"
                                   % user.getPreference(db, "style.tutorialFont"))

    target = body.div("main")

    # Maps request parameter values to tutorial file names.
    items = { "request": "requesting",
              "review": "reviewing",
              "filters": "filters",
              "archival": "archival",
              "viewer": "repository",
              "rebase": "rebasing",
              "reconfigure": "reconfiguring",
              "checkbranch": "checkbranch",
              "administration": "administration",
              "customization": "customization",
              "search": "search",
              "external-authentication": "external-authentication",
              "extensions": "extensions",
              "extensions-api": "extensions-api" }

    if item in items:
        renderFromFile(db, user, target, items[item])
    else:
        renderSections(db, user, target)

    return document

# ================================================ FILE: src/page/utils.py ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import re

import auth
import htmlutils
import configuration

from request import (NoDefault, MovedTemporarily, DisplayMessage,
                     InvalidParameterValue, decodeURIComponent, Request,
                     NeedLogin, NotModified)
from textutils import json_encode, json_decode

# Maps header link labels to HTML link relations (<link rel=...>).
LINK_RELS = { "Home": "home",
              "Dashboard": "contents",
              "Branches": "index",
              "Tutorial": "help",
              "Back to Review": "up" }

def YesOrNo(value):
    """Parameter filter: map "yes"/"no" to True/False, raising
    DisplayMessage for anything else."""
    if value == "yes":
        return True
    elif value == "no":
        return False
    else:
        raise DisplayMessage("invalid parameter value; expected 'yes' or 'no'")

def generateEmpty(target):
    """No-op content generator, usable where a callback is required."""
    pass

def generateHeader(target, db, user, generate_right=None, current_page=None,
                   extra_links=[], profiler=None):
    """Generate the common page header (logo, navigation links, sign in/out)
    into 'target'.

    'generate_right' optionally fills the right-hand cell; 'extra_links' is
    a list of (url, label) pairs appended to the navigation.  Returns the
    dictionary of extension-injected resources, or None when no extension
    injection took place.
    """
    target.addExternalStylesheet("resource/third-party/jquery-ui.css")
    target.addExternalStylesheet("resource/third-party/chosen.css")
    target.addExternalStylesheet("resource/overrides.css")
    target.addExternalStylesheet("resource/basic.css")
    target.addInternalStylesheet(".defaultfont, body { %s }" % user.getPreference(db, "style.defaultFont"))
    target.addInternalStylesheet(".sourcefont { %s }" % user.getPreference(db, "style.sourceFont"))
    target.addExternalScript("resource/third-party/jquery.js")
    target.addExternalScript("resource/third-party/jquery-ui.js")
    target.addExternalScript("resource/third-party/jquery-ui-autocomplete-html.js")
    target.addExternalScript("resource/third-party/chosen.js")
    target.addExternalScript("resource/basic.js")

    target.noscript().h1("noscript").blink().text("Please enable scripting support!")

    row = target.table("pageheader", width='100%').tr()
    left = row.td("left", valign='bottom', align='left')
    b = left.b()

    opera_class = "opera"
    if configuration.debug.IS_DEVELOPMENT:
        opera_class += " development"
    b.b(opera_class, onclick="location.href='/';").text("Opera")
    b.b("critic", onclick="location.href='/';").text("Critic")

    # Each link is [url, label, style, title].
    links = []

    if not user.isAnonymous():
        links.append(["home", "Home", None, None])

    links.append(["dashboard", "Dashboard", None, None])
    links.append(["branches", "Branches", None, None])
    links.append(["search", "Search", None, None])

    if user.hasRole(db, "administrator"):
        links.append(["services", "Services", None, None])
    if user.hasRole(db, "repositories"):
        links.append(["repositories", "Repositories", None, None])

    if profiler:
        profiler.check("generateHeader (basic)")

    if configuration.extensions.ENABLED:
        from extensions.extension import Extension

        updated = Extension.getUpdatedExtensions(db, user)

        if updated:
            link_title = "\n".join([("%s by %s can be updated!" % (extension_name, author_fullname))
                                    for author_fullname, extension_name in updated])
            links.append(["manageextensions", "Extensions (%d)" % len(updated),
                          "color: red", link_title])
        else:
            links.append(["manageextensions", "Extensions", None, None])

        if profiler:
            profiler.check("generateHeader (updated extensions)")

    links.append(["config", "Config", None, None])
    links.append(["tutorial", "Tutorial", None, None])

    if user.isAnonymous():
        count = 0
    else:
        # Count unread news items (no matching 'newsread' row).
        cursor = db.cursor()
        cursor.execute("""SELECT COUNT(*) FROM newsitems LEFT OUTER JOIN newsread ON (item=id AND uid=%s) WHERE uid IS NULL""", (user.id,))
        count = cursor.fetchone()[0]

    if count:
        links.append(["news", "News (%d)" % count, "color: red",
                      "There are %d unread news items!" % count])
    else:
        links.append(["news", "News", None, None])

    if profiler:
        profiler.check("generateHeader (news)")

    req = target.getRequest()

    if configuration.base.AUTHENTICATION_MODE != "host" \
            and configuration.base.SESSION_TYPE == "cookie":
        if user.isAnonymous():
            links.append(["javascript:void(location.href='/login?target='+encodeURIComponent(location.href));",
                          "Sign in", None, None])
        elif not req or (req.user == user.name and req.session_type == "cookie"):
            links.append(["javascript:signOut();", "Sign out", None, None])

    for url, label in extra_links:
        links.append([url, label, None, None])

    if req and configuration.extensions.ENABLED:
        import extensions.role.inject
        injected = {}
        extensions.role.inject.execute(db, req, user, target, links, injected,
                                       profiler=profiler)
        for url in injected.get("stylesheets", []):
            target.addExternalStylesheet(url, use_static=False, order=1)
        for url in injected.get("scripts", []):
            target.addExternalScript(url, use_static=False, order=1)
    else:
        injected = None

    ul = left.ul()

    for url, label, style, title in links:
        # Relative page names get a leading slash; absolute URLs and
        # javascript: links are left alone.
        if not re.match("[-.a-z]+:|/", url):
            url = "/" + url
        ul.li().a(href=url, style=style, title=title).text(label)
        rel = LINK_RELS.get(label)
        if rel:
            target.setLink(rel, url)

    right = row.td("right", valign='bottom', align='right')
    if generate_right:
        generate_right(right)
    else:
        right.div("buttons").span("buttonscope buttonscope-global")

    if profiler:
        profiler.check("generateHeader (finish)")

    return injected

def renderShortcuts(target, page, **kwargs):
    """Render the keyboard-shortcut footer appropriate for 'page'.

    Recognized kwargs: 'review' (adds "back to review"), 'merge_parents'
    (per-parent diff shortcuts on showcommit) and 'squashed_diff' (adds
    "blame" on showcommit).
    """
    shortcuts = []

    def addShortcut(keyCode, keyName, description):
        shortcuts.append((keyCode, keyName, description))

    if kwargs.get("review"):
        addShortcut(ord("u"), "u", "back to review")

    if page == "showcommit":
        what = "files"
        merge_parents = kwargs.get("merge_parents")
        if merge_parents > 1:
            for index in range(min(9, merge_parents)):
                # BUG FIX: the tuple previously lacked "sixth" and misspelled
                # "eighth" as "eight", and with only eight elements it raised
                # IndexError for a commit with nine or more parents.
                order = ("first", "second", "third", "fourth", "fifth",
                         "sixth", "seventh", "eighth", "ninth")[index]
                addShortcut(ord('1') + index, "%d" % (index + 1),
                            "changes relative to %s parent" % order)
    elif page == "showcomments":
        what = "comments"

    if page == "showcommit" or page == "showcomments":
        addShortcut(ord("e"), "e", "expand all %s" % what)
        addShortcut(ord("c"), "c", "collapse all %s" % what)
        addShortcut(ord("s"), "s", "show all %s" % what)
        addShortcut(ord("h"), "h", "hide all %s" % what)

    if page == "showcommit":
        addShortcut(ord("m"), "m", "detect moved code")
        if kwargs.get("squashed_diff"):
            addShortcut(ord("b"), "b", "blame")
        addShortcut(32, "SPACE", "scroll or show/expand next file")

    if page == "showcomment":
        addShortcut(ord("m"), "m", "show more context")
        addShortcut(ord("l"), "l", "show less context")

    if page == "filterchanges":
        addShortcut(ord("a"), "a", "select everything")
        addShortcut(ord("g"), "g", "go / display diff")

    container = target.div("pagefooter shortcuts")
    if shortcuts:
        container.text("Shortcuts: ")

        def renderShortcut(keyCode, ch, text, is_last=False):
            a = container.a("shortcut",
                            href="javascript:void(handleKeyboardShortcut(%d));" % keyCode)
            a.b().text("(%s)" % ch)
            a.text(" %s" % text)
            if not is_last:
                container.text(", ")

        for index, (keyCode, keyName, description) in enumerate(shortcuts):
            renderShortcut(keyCode, keyName, description,
                           index == len(shortcuts) - 1)

def generateFooter(target, db, user, current_page=None):
    """Generate the common page footer (currently just the shortcuts)."""
    renderShortcuts(target, current_page)

def displayMessage(db, req, user, title, review=None, message=None,
                   page_title=None, is_html=False):
    """Build a simple message page: a title plus an optional message, which
    may be plain text, raw HTML ('is_html') or a callable that renders into
    the target element.  Returns the htmlutils.Document."""
    document = htmlutils.Document(req)

    if page_title:
        document.setTitle(page_title)

    document.addExternalStylesheet("resource/message.css")

    html = document.html()
    head = html.head()
    body = html.body()

    if review:
        import reviewing.utils as review_utils

        def generateRight(target):
            review_utils.renderDraftItems(db, user, review, target)

        back_to_review = ("r/%d" % review.id, "Back to Review")

        document.addInternalScript(review.getJS())

        generateHeader(body, db, user, generate_right=generateRight,
                       extra_links=[back_to_review])
    else:
        generateHeader(body, db, user)

    target = body.div("message paleyellow")

    if message:
        target.h1("title").text(title)

        if callable(message):
            message(target)
        elif is_html:
            target.innerHTML(message)
        else:
            target.p().text(message)
    else:
        target.h1("center").text(title)

    return document

class PaleYellowTable:
    """Helper for building the standard "pale yellow" settings-style table
    with a title row, sections, labeled items and separators."""

    def __init__(self, target, title, columns=[10, 60, 30]):
        if not target.hasTitle():
            target.setTitle(title)
        self.table = target.div("main").table("paleyellow", align="center").tbody()
        self.columns = columns
        colgroup = self.table.colgroup()
        for column in columns:
            colgroup.col(width="%d%%" % column)
        h1 = self.table.tr().td("h1", colspan=len(columns)).h1()
        h1.text(title)
        self.titleRight = h1.span("right")
        self.table.tr("spacer").td(colspan=len(self.columns))

    def addSection(self, title, extra=None):
        """Add a section heading, optionally with extra text after it."""
        h2 = self.table.tr().td("h2", colspan=len(self.columns)).h2()
        h2.text(title)
        if extra is not None:
            h2.span().text(extra)

    def addItem(self, heading, value, description=None, buttons=None):
        """Add a "name: value" row.  'value' may be a callable that renders
        into the cell; 'buttons' is a list of (label, onclick) pairs."""
        row = self.table.tr("item")
        row.td("name").innerHTML(htmlutils.htmlify(heading).replace(" ", "&nbsp;") + ":")
        cell = row.td("value", colspan=2).preformatted()
        if callable(value):
            value(cell)
        else:
            cell.text(str(value))
        if buttons:
            div = cell.div("buttons")
            for label, onclick in buttons:
                div.button(onclick=onclick).text(label)
        if description is not None:
            self.table.tr("help").td(colspan=len(self.columns)).text(description)

    def addCentered(self, content=None):
        """Add a full-width centered row; returns the cell for further
        population when 'content' is None."""
        row = self.table.tr("centered")
        cell = row.td(colspan=len(self.columns))
        if callable(content):
            content(cell)
        elif content:
            cell.text(str(content))
        return cell

    def addSeparator(self):
        """Add a horizontal separator row."""
        self.table.tr("separator").td(colspan=len(self.columns)).div()

def generateRepositorySelect(db, user, target, allow_selecting_none=False,
                             placeholder_text=None, selected=None,
                             access_type="read", **attributes):
    """Generate a <select> of repositories the user may access.

    Repositories the user has filters in, or owns reviews in, are grouped
    under "Highlighted".  'selected' may be a repository id or name; when
    None, the user's default repository is preselected.  Repositories the
    user cannot access (per 'access_type') are omitted.
    """
    select = target.select("repository-select", **attributes)

    cursor = db.cursor()
    cursor.execute("""SELECT id, name, path FROM repositories ORDER BY name""")

    rows = cursor.fetchall()

    if not rows:
        # Note: not honoring 'placeholder_text' here; callers typically don't
        # take into account the possibility that there are no repositories.
        select.setAttribute("data-placeholder", "No repositories")
        select.option(value="", selected="selected")
        return

    if selected is None:
        default_repository = user.getDefaultRepository(db)
        if default_repository:
            selected = default_repository.name

    if not selected or allow_selecting_none:
        if placeholder_text is None:
            placeholder_text = "Select a repository"
        select.setAttribute("data-placeholder", placeholder_text)
        select.option(value="", selected="selected")

    highlighted_ids = set()

    cursor.execute("""SELECT DISTINCT repository FROM filters WHERE uid=%s""", (user.id,))
    highlighted_ids.update(repository_id for (repository_id,) in cursor)

    cursor.execute("""SELECT DISTINCT repository FROM branches JOIN reviews ON (reviews.branch=branches.id) JOIN reviewusers ON (reviewusers.review=reviews.id) WHERE reviewusers.uid=%s AND reviewusers.owner""", (user.id,))
    highlighted_ids.update(repository_id for (repository_id,) in cursor)

    if not highlighted_ids or len(highlighted_ids) == len(rows):
        # Do not group options when there will be only one group.
        highlighted = select
        other = select
    else:
        highlighted = select.optgroup(label="Highlighted")
        other = select.optgroup(label="Other")

    html_format = ("<span class=repository-name>%s</span>"
                   "<span class=repository-path>%s</span>")

    for repository_id, name, path in rows:
        try:
            repository = auth.AccessControl.Repository(repository_id, path)
            auth.AccessControl.accessRepository(db, access_type, repository)
        except auth.AccessDenied:
            # Skip repositories the user doesn't have access to.
            continue

        if repository_id in highlighted_ids:
            optgroup = highlighted
        else:
            optgroup = other

        if repository_id == selected or name == selected:
            is_selected = "selected"
        else:
            is_selected = None

        html = html_format % (name, path)

        option = optgroup.option("repository flex", value=name,
                                 selected=is_selected, data_text=name,
                                 data_html=html)
        option.text(name)

def displayFormattedText(db, req, user, source):
    """Render 'source' (a string or an iterable of lines) as a formatted
    text page and return the htmlutils.Document."""
    document = htmlutils.Document(req)
    document.setBase(None)

    document.addExternalStylesheet("resource/tutorial.css")
    document.addInternalStylesheet("div.main table td.text { %s }"
                                   % user.getPreference(db, "style.tutorialFont"))

    html = document.html()
    head = html.head()
    body = html.body()

    generateHeader(body, db, user)

    if isinstance(source, basestring):
        lines = source.splitlines()
    else:
        lines = source

    import textformatting
    # BUG FIX: previously 'source' was passed here, ignoring the computed
    # 'lines'; a plain string would then be iterated character by character
    # by renderFormatted (which expects a sequence of lines, cf.
    # renderFromFile in page/tutorial.py).
    textformatting.renderFormatted(
        db, user, body.div("main").table("paleyellow"), lines, toc=True)

    generateFooter(body, db, user)

    return document

class DisplayFormattedText(Exception):
    """Raised to have the request dispatcher render 'source' via
    displayFormattedText()."""
    def __init__(self, source):
        self.source = source

class ResponseBody(object):
    """Simple response wrapper: raw data plus its content type."""
    def __init__(self, data, content_type="text/html"):
        self.data = data
        self.content_type = content_type

# ================================================ FILE: src/page/verifyemail.py ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import page.utils

def renderVerifyEmail(req, db, user):
    """Handle an email verification link.

    Requires a signed-in user matching the request's user.  Looks up the
    (user, email, token) triple in 'useremails'; on a match marks the
    address verified, commits, and redirects to /home.  Never returns
    normally: always raises a page.utils request exception.
    """
    # Only the signed-in owner of the address may verify it.
    if user.isAnonymous():
        raise page.utils.NeedLogin(req)
    if req.user != user.name:
        raise page.utils.DisplayMessage("Invalid use!")

    address = req.getParameter("email")
    token = req.getParameter("token")

    cursor = db.cursor()
    cursor.execute("""SELECT id
                        FROM useremails
                       WHERE uid=%s
                         AND email=%s
                         AND verification_token=%s""",
                   (user.id, address, token))

    match = cursor.fetchone()
    if match is None:
        raise page.utils.DisplayMessage("Invalid verification token!")

    (email_id,) = match

    cursor.execute("""UPDATE useremails
                         SET verified=TRUE
                       WHERE id=%s""",
                   (email_id,))
    db.commit()

    raise page.utils.MovedTemporarily("/home?email_verified=%d" % email_id)

# ================================================ FILE: src/profiling.py ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import time
import re

class Profiler:
    """Lightweight wall-clock profiler accumulating named durations.

    Durations are keyed by title; repeated measurements with the same title
    are summed.  Use start()/Check.stop() to time an explicit span, or
    check() to record the time elapsed since the previous measurement.
    """

    class Check:
        """Handle for one in-progress measurement; stop() records it."""
        def __init__(self, profiler, title):
            self.__profiler = profiler
            self.__title = title
            self.__begin = time.time()

        def stop(self):
            self.__profiler.add(self.__title, self.__begin, time.time())

    def __init__(self):
        self.__previous = time.time()
        self.__checks = []    # titles in first-seen order
        self.__table = {}     # title -> accumulated seconds

    def add(self, title, begin, end):
        """Record one measured span of 'end - begin' seconds under 'title'."""
        if title not in self.__table:
            self.__checks.append(title)
            self.__table[title] = 0
        self.__table[title] += end - begin
        self.__previous = end

    def start(self, title):
        return Profiler.Check(self, title)

    def check(self, title):
        """Record the time since the previous recorded measurement."""
        self.add(title, self.__previous, time.time())

    def output(self, db=None, user=None, target=None):
        """Format all measurements (in milliseconds, longest first) plus a
        TOTAL line, optionally emitting them as an HTML comment on 'target'
        when the user's 'debug.profiling.pageGeneration' preference is set.

        Returns the formatted log as a string.
        """
        # Guard: with no measurements, max() over the empty title list would
        # raise ValueError.  (Bug fix: previously crashed on empty profilers.)
        if not self.__checks:
            return ""
        log = ""
        total = 0.0
        title_width = max(map(len, self.__checks))
        line_format = " %%-%ds : %%8.2f\n" % title_width
        # Sort descending by accumulated duration.  (Fix: replaced the
        # deprecated 'cmp=' sort argument, removed in Python 3, with 'key='.)
        for title, duration in sorted(self.__table.items(),
                                      key=lambda item: item[1],
                                      reverse=True):
            log += line_format % (title, duration * 1000)
            total += duration
        log += "\n" + line_format % ("TOTAL", total * 1000)
        if db and user and target and user.getPreference(db, 'debug.profiling.pageGeneration'):
            target.comment("\n\n" + log + "\n")
        return log

def formatDBProfiling(db):
    """Return db.profiling rendered as an aligned plain-text table.

    db.profiling maps a query string to a tuple
    (count, accumulated_ms, maximum_ms, accumulated_rows, maximum_rows);
    'accumulated_rows' may be None for statements that return no rows.
    Rows are sorted by accumulated time, descending, with a TOTAL line
    inserted directly below the table header.
    """
    lines = [" | TIME (milliseconds) | ROWS |",
             " Count | Accumulated | Maximum | Accumulated | Maximum | Query",
             " -------|-------------|----------|-------------|----------|-------"]

    items = sorted(db.profiling.items(), key=lambda item: item[1][1],
                   reverse=True)

    total_count = 0
    total_accumulated_ms = 0.0
    total_accumulated_rows = 0

    for item, (count, accumulated_ms, maximum_ms,
               accumulated_rows, maximum_rows) in items:
        total_count += count
        total_accumulated_ms += accumulated_ms
        # Collapse internal whitespace so multi-line SQL fits on one row.
        if accumulated_rows is None:
            lines.append(" %6d | %11.4f | %8.4f | | | %s"
                         % (count, accumulated_ms, maximum_ms,
                            re.sub(r"\s+", " ", item)))
        else:
            total_accumulated_rows += accumulated_rows
            lines.append(" %6d | %11.4f | %8.4f | %11d | %8d | %s"
                         % (count, accumulated_ms, maximum_ms,
                            accumulated_rows, maximum_rows,
                            re.sub(r"\s+", " ", item)))

    # Index 3 == directly after the three header lines.
    lines.insert(3, (" %6d | %11.4f | | %11d | | TOTAL"
                     % (total_count, total_accumulated_ms,
                        total_accumulated_rows)))

    return "\n".join(lines)
""" pass class HTTPResponse(Exception): def __init__(self, status): self.status = status self.body = [] self.content_type = "text/plain" def execute(self, db, req): req.setStatus(self.status) if self.body: req.setContentType(self.content_type) req.start() return self.body class NoContent(HTTPResponse): def __init__(self): super(NoContent, self).__init__(204) class NotModified(HTTPResponse): def __init__(self): super(NotModified, self).__init__(304) class Forbidden(HTTPResponse): def __init__(self, message="Forbidden"): super(Forbidden, self).__init__(403) self.body = [message] class NotFound(HTTPResponse): def __init__(self, message="Not found"): super(NotFound, self).__init__(404) self.body = [message] class Redirect(HTTPResponse): def __init__(self, status, location, no_cache=False): super(Redirect, self).__init__(status) self.location = location self.no_cache = no_cache def execute(self, db, req): from htmlutils import htmlify if not req.allowRedirect(self.status): self.status = 403 self.body = ["Cowardly refusing to redirect %s request." % req.method] else: req.addResponseHeader("Location", self.location) self.body = ["<p>Please go here: <a href=%s>%s</a>." 
% (htmlify(self.location, attributeValue=True), htmlify(self.location))] self.content_type = "text/html" return super(Redirect, self).execute(db, req) class Found(Redirect): def __init__(self, location): super(Found, self).__init__(302, location) class SeeOther(Redirect): def __init__(self, location): super(SeeOther, self).__init__(303, location) class MovedTemporarily(Redirect): def __init__(self, location, no_cache=False): super(MovedTemporarily, self).__init__(307, location) self.no_cache = no_cache def execute(self, db, req): if self.no_cache: req.addResponseHeader("Cache-Control", "no-cache") return super(MovedTemporarily, self).execute(db, req) class NeedLogin(MovedTemporarily): def __init__(self, source, optional=False): if isinstance(source, Request): target = source.getTargetURL() else: target = str(source) location = "/login?target=%s" % urllib.quote(target) if optional: location += "&optional=yes" return super(NeedLogin, self).__init__(location, no_cache=True) class RequestHTTPAuthentication(HTTPResponse): def __init__(self): super(RequestHTTPAuthentication, self).__init__(401) def execute(self, db, req): import page.utils self.body = str(page.utils.displayMessage( db, req, dbutils.User.makeAnonymous(), title="Authentication required", message=("You must provide valid HTTP authentication to access " "this system."))) self.content_type = "text/html" req.addResponseHeader("WWW-Authenticate", "Basic realm=\"Critic\"") return super(RequestHTTPAuthentication, self).execute(db, req) class DisplayMessage(base.Error): """\ Utility exception raised by pages to display a simply message. """ def __init__(self, title, body=None, review=None, html=False, status=200): self.title = title self.body = body self.review = review self.html = html self.status = status class InvalidParameterValue(DisplayMessage): """\ Exception raised by pages when a query parameter has an invalid value. 
class MissingParameter(DisplayMessage):
    """\
    Exception raised by pages when a required query parameter is missing.

    This exception is automatically raised by Request.getParameter() if the
    parameter is required and missing.
    """
    def __init__(self, name):
        DisplayMessage.__init__(self, "Missing URI Parameter!",
                                "Expected '%s' parameter." % name,
                                status=400)

class MissingWSGIRemoteUser(Exception):
    """\
    Exception raised if WSGI environ "REMOTE_USER" is missing.

    This error happens when Critic is running in "host" authentication mode
    but no REMOTE_USER variable was present in the WSGI environ dict provided
    by the web server.
    """
    pass

class Request:
    """\
    WSGI request wrapper class.

    Pages and operations should typically only need to access request
    parameters (via getParameter()) and headers (via getRequestHeader()), and
    set response status (using setStatus()) if not "200 OK" and content-type
    (using setContentType()) if not "text/html".

    The start() method must be called before any content is returned to the
    WSGI layer, but this is taken care of by the main request handling
    function (critic.py::main).

    In the case of POST requests, the request body is retrieved using the
    read() method.

    Properties:

      user -- user name from HTTP authentication
      method -- HTTP method ("GET" or "POST", typically)
      path -- URI path component, without leading forward slash
      original_path -- same as 'path', unless the path is a short-hand for
                       another path, in which case 'path' is the resolved path
      query -- URI query component
      original_query -- same as 'query', unless the path is a short-hand for
                        another path, in which case 'query' is typically
                        extended with parameters derived from the short-hand
                        path

    Primary methods:

      getParameter(name, default, filter) -- get URI query parameter
      getRequestHeader(name) -- get HTTP request header
      getRequestHeaders(name) -- get all HTTP request headers
      read() -- read HTTP request body
      setStatus(code, message) -- set HTTP response status
      setContentType(content_type) -- set Content-Type response header
      addResponseHeader(name, value) -- add HTTP response header

    Methods used by framework code:

      start() -- call the WSGI layers start_response() callback
      isStarted() -- check if start() has been called
      getContentType() -- get response content type
    """

    def __init__(self, db, environ, start_response):
        """\
        Construct request wrapper.

        The environ and start_response arguments should be the arguments to
        the WSGI application object.
        """
        self.__db = db
        self.__environ = environ
        self.__start_response = start_response
        self.__status = None
        self.__content_type = None
        self.__response_headers = []
        self.__started = False

        content_length = environ.get("CONTENT_LENGTH")
        self.__request_body_length = int(content_length) if content_length else 0
        self.__request_body_read = 0

        # Prefer the reverse-proxy supplied host name; fall back to the WSGI
        # server name, then the configured host name.
        self.server_name = \
            self.getRequestHeader("X-Forwarded-Host") \
            or environ.get("SERVER_NAME") \
            or configuration.base.HOSTNAME

        self.method = environ.get("REQUEST_METHOD", "")
        self.path = environ.get("PATH_INFO", "").lstrip("/")
        self.original_path = self.path
        self.query = environ.get("QUERY_STRING", "")
        self.parsed_query = urlparse.parse_qs(self.query,
                                              keep_blank_values=True)
        self.original_query = self.query
        self.cookies = {}

        # Minimal cookie parsing: "name=value" pairs separated by ';'.
        header = self.getRequestHeader("Cookie")
        if header:
            for cookie in map(str.strip, header.split(";")):
                name, _, value = cookie.partition("=")
                if name and value:
                    self.cookies[name] = value

        self.session_type = configuration.base.SESSION_TYPE

    def updateQuery(self, items):
        """Merge 'items' into the parsed query and regenerate self.query."""
        self.parsed_query.update(items)
        self.query = urllib.urlencode(
            sorted(self.parsed_query.items()), doseq=True)

    @property
    def user(self):
        return self.__db.user

    def getTargetURL(self):
        """Return the path+query of this request, for use as a login target."""
        target = "/" + self.path
        if self.query:
            target += "?" + self.query
        return target

    def getRequestURI(self):
        return wsgiref.util.request_uri(self.__environ)

    def getEnvironment(self):
        return self.__environ

    def getParameter(self, name, default=NoDefault, filter=lambda value: value):
        """\
        Get URI query parameter.

        If the requested parameter was not present in the URI query component,
        the supplied default value is returned instead, or, if the supplied
        default value is the NoDefault class, a MissingParameter exception is
        raised.

        If a filter function is supplied, it is called with a single argument,
        the string value of the URI parameter, and its return value is
        returned from getParameter().  If the filter function raises an
        exception (other than DisplayMessage or sub-classes thereof) an
        InvalidParameterValue exception is raised.

        Note: the filter function is not applied to default values, meaning
        that the default value can be of a different type than actual
        parameter values.
        """
        value = self.parsed_query.get(name)

        if value is None:
            if default is NoDefault:
                raise MissingParameter(name)
            return default

        def filter_value(value):
            try:
                return filter(value)
            # Project errors and access denials propagate untouched; anything
            # else is reported as a parameter problem.
            except (base.Error, auth.AccessDenied):
                raise
            except Exception:
                if filter is int:
                    expected = "integer"
                else:
                    expected = "something else"
                raise InvalidParameterValue(name, value, expected)

        value = [filter_value(element) for element in value]

        # A single occurrence is unwrapped; repeated parameters yield a list.
        if len(value) == 1:
            return value[0]
        return value

    def getParameters(self):
        """Return all query parameters as a dict (single values unwrapped)."""
        return { name: value[0] if len(value) == 1 else value
                 for name, value in self.parsed_query.items() }

    def getRequestHeader(self, name, default=None):
        """\
        Get HTTP request header by name.  The name is case-insensitive.

        If the request header was not present in the request, the default
        value is returned (or None if no default value is provided.)  If the
        request header was present, its value is returned as a string.
        """
        return self.__environ.get("HTTP_" + name.upper().replace("-", "_"),
                                  default)

    def getRequestHeaders(self):
        """\
        Get a dictionary containing all HTTP request headers.

        The header names are converted to all lower-case, and any underscores
        ('_') in the header name is replaced with a dash ('-').  The reason
        for this name transformation is that the header names are already
        transformed in the WSGI layer from their original form to all
        upper-case, with dashes replaced by underscores, so the original name
        is not available.

        The returned dictionary is a copy of the underlying storage, so the
        caller can modify it without the modifications having any
        side-effects.
        """
        headers = {}
        for name, value in self.__environ.items():
            if name.startswith("HTTP_"):
                headers[name[5:].lower().replace("_", "-")] = value
        return headers

    def getReferrer(self):
        # Best-effort: the Referer header is purely informational.  (Fix:
        # was a bare 'except:', which also swallowed KeyboardInterrupt and
        # SystemExit; narrowed to Exception.)
        try:
            return self.getRequestHeader("Referer")
        except Exception:
            return "N/A"

    def read(self, bufsize=None):
        """\
        Return the HTTP request body, or an empty string if there is none.

        With 'bufsize', at most that many bytes are returned; repeated calls
        never read past the declared Content-Length.
        """
        if self.__request_body_length:
            max_bufsize = self.__request_body_length - self.__request_body_read
            if bufsize is None:
                bufsize = max_bufsize
            else:
                bufsize = min(bufsize, max_bufsize)
        if "wsgi.input" not in self.__environ or not bufsize:
            return ""
        data = self.__environ["wsgi.input"].read(bufsize)
        self.__request_body_read += len(data)
        return data

    def write(self, data):
        """Write HTTP response body chunk.  Valid only after start()."""
        self.__write(data)

    def setStatus(self, code, message=None):
        """\
        Set the HTTP status code, and optionally the status message.

        If the message argument is None, a default status message for the
        specified HTTP status code is used.  If the specified status code is
        not one included in httplib.responses, a KeyError exception is
        raised.

        If this method is not called, the HTTP status will be "200 OK".

        This method must be called before the response is started.  (This
        really only matters for incremental pages that returns the response
        body in chunks; they can't call this method once they've yielded the
        first body chunk.)
        """
        assert not self.__started, "Response already started!"
        if message is None:
            message = httplib.responses[code]
        self.__status = "%d %s" % (code, message)

    def hasContentType(self):
        return self.__content_type is not None

    def setContentType(self, content_type):
        """\
        Set the response content type (the "Content-Type" header).

        If the specified content type doesn't have a "charset=X" addition,
        the string "; charset=utf-8" is appended to the content type.

        If this method is not called, the Content-Type header's value will be
        "text/html; charset=utf-8".

        This function must be used rather than addResponseHeader() to set the
        Content-Type header, and must be called before the response is
        started.
        """
        assert not self.__started, "Response already started!"
        if content_type.startswith("text/") and "charset=" not in content_type:
            content_type += "; charset=utf-8"
        self.__content_type = content_type

    def addResponseHeader(self, name, value):
        """\
        Add HTTP response header.

        Append a response header to the list of response headers passed to
        the WSGI start_response() callback when the response is started.

        Note: This function does not replace existing headers or merge
        headers with the same name; calling code has to handle such things.
        No headers (except Content-Type) are added automatically.

        This function must not be used to add a Content-Type header, and must
        be called before the response is started.
        """
        assert not self.__started, "Response already started!"
        assert name.lower() != "content-type", "Use Request.setContentType() instead!"
        self.__response_headers.append((name, value))

    def setCookie(self, name, value, secure=False):
        # NOTE(review): Secure and HttpOnly look mutually exclusive here even
        # though a cookie could meaningfully carry both -- confirm whether
        # this is intentional before changing it.
        if secure and configuration.base.ACCESS_SCHEME != "http":
            modifier = "Secure"
        else:
            modifier = "HttpOnly"
        self.addResponseHeader(
            "Set-Cookie",
            "%s=%s; Max-Age=31536000; Path=/; %s" % (name, value, modifier))

    def deleteCookie(self, name):
        # Only emit the expiry header if the client actually sent the cookie.
        # (Fixes: deprecated dict.has_key(); the Expires value was the
        # malformed "Thursday 01-Jan-1970 ..." which is not a valid RFC 6265
        # cookie date -- some clients would ignore the expiry.)
        if name in self.cookies:
            self.addResponseHeader(
                "Set-Cookie",
                "%s=invalid; Path=/; Expires=Thu, 01 Jan 1970 00:00:00 GMT"
                % name)

    def start(self):
        """\
        Start the response by calling the WSGI start_response() callback.

        This function is called automatically by the main request handling
        function (critic.py::main) and should typically not be called from
        any other code.

        This function can be called multiple times; repeated calls do
        nothing.
        """
        if not self.__started:
            if self.__status is None:
                self.setStatus(200)
            if self.__content_type is None:
                self.setContentType("text/plain")
            headers = [("Content-Type", self.__content_type)]
            headers.extend(self.__response_headers)
            self.__write = self.__start_response(self.__status, headers)
            self.__started = True

    def isStarted(self):
        """\
        Check if the response has been started.
        """
        return self.__started

    def getContentType(self):
        """\
        Return the currently set response content type.

        The returned value includes the automatically added "charset=utf-8".
        If the response hasn't been started yet, and setContentType() hasn't
        been called, None is returned.
        """
        return self.__content_type

    def ensureSecure(self):
        # Redirect plain-http requests to https when the system is not
        # configured for http-only access.
        if configuration.base.ACCESS_SCHEME != "http":
            current_url = self.getRequestURI()
            secure_url = re.sub("^http:", "https:", current_url)
            if current_url != secure_url:
                raise MovedTemporarily(secure_url, True)

    def requestHTTPAuthentication(self, realm="Critic"):
        self.setStatus(401)
        self.addResponseHeader("WWW-Authenticate",
                               "Basic realm=\"%s\"" % realm)
        self.start()

    def allowRedirect(self, status):
        """Return true if it is safe to redirect this request"""
        # Safe methods can always be redirected; other methods only via
        # 303 See Other, which converts them to GET.
        return self.method in ("GET", "HEAD") or status == 303
/* Build a jQuery-UI autocomplete source callback that completes the last
   comma/space separated token in the input against the given username ->
   fullname map.  Matches on username or full name prefix. */
function AutoCompleteUsers(users)
{
  function autocomplete(request, response)
  {
    var match = /^(.*?)([^\s,]*)$/.exec(request.term);
    var source = match[1];
    var term = match[2].toLowerCase();

    if (!term)
    {
      response([]);
      return;
    }

    var matches = [];

    for (var username in users)
    {
      var fullname = users[username];
      if (username.substring(0, term.length).toLowerCase() == term ||
          fullname.substring(0, term.length).toLowerCase() == term)
        matches.push({ label: fullname + " (" + username + ")",
                       value: source + username });
    }

    matches.sort(function (a, b)
                 {
                   switch (true)
                   {
                   case a.label < b.label: return -1;
                   case a.label > b.label: return 1;
                   default: return 0;
                   }
                 });

    response(matches);
  }

  return autocomplete;
}

/* Autocomplete source for path names.  'paths' is either a map from pathname
   to per-path change counts, or a function (term, callback) that fetches such
   a map asynchronously (returning an abortable operation).  At most 20
   entries are shown; the remainder is summarized as "N more matching
   paths". */
function AutoCompletePath(paths)
{
  var pending_response;
  var pending_operation;

  function autocomplete(request, response)
  {
    function hasPrefix(full, prefix)
    {
      return full.substring(0, prefix.length) == prefix;
    }

    function repeat(what, count)
    {
      return Array(count + 1).join(what);
    }

    function callResponse(paths, prefiltered)
    {
      var pathnames = Object.keys(paths), previous, matches = [],
          additional = 0;

      pathnames.sort();

      for (var index = 0; index < pathnames.length; ++index)
      {
        var pathname = pathnames[index], shortened = pathname;

        if (prefiltered || hasPrefix(pathname, request.term))
        {
          if (matches.length == 20)
          {
            if (prefiltered)
            {
              additional = pathnames.length - matches.length;
              break;
            }
            else
            {
              ++additional;
              continue;
            }
          }

          /* Visually collapse the directory prefix shared with the previous
             match into "   ..." to keep the dropdown compact. */
          if (previous)
          {
            var components = pathname.split("/"), count = 0, prefix = "",
                checked_prefix;
            while (count < components.length &&
                   hasPrefix(previous,
                             checked_prefix =
                               components.slice(0, count).join("/")))
            {
              ++count;
              prefix = checked_prefix;
            }
            if (prefix.length > 3)
              shortened = repeat(" ", prefix.length - 3) + "..." +
                          pathname.substring(prefix.length);
          }

          var counts = paths[pathname];
          if ("deleted" in counts && "inserted" in counts)
          {
            if (counts.files == 0)
              counts = "-" + counts.deleted + "/+" + counts.inserted;
            else
              counts = "(" + counts.files + " files) -" + counts.deleted +
                       "/+" + counts.inserted;
          }
          else if ("files" in counts)
            counts = "(" + counts.files + " files)";
          else
            counts = "";

          if (counts)
            counts = "<span style='float:right;font-size:smaller'>" + counts +
                     "</span>";

          matches.push({ label: ("<div class=sourcefont style='padding:0;margin:0;white-space:pre'>" +
                                 htmlify(shortened) + counts + "</div>"),
                         value: pathname });

          previous = pathname;
        }
        else if (matches.length)
          break;
      }

      /* Bug fix: the summary row previously interpolated 'matches.length'
         (always 20 at this point) instead of the actual overflow count kept
         in 'additional'. */
      if (additional)
        matches.push({ label: "<i>" + additional + " more matching paths</i>",
                       value: request.term });

      response(matches);

      pending_response = null;
      pending_operation = null;
    }

    /* Cancel any still-pending request before starting a new one. */
    if (pending_response)
    {
      pending_response([]);
      pending_response = null;
    }
    if (pending_operation)
    {
      pending_operation.abort();
      pending_operation = null;
    }

    pending_response = response;

    if (typeof paths == "function")
    {
      pending_operation = paths(request.term, callResponse);
      if (pending_operation)
        pending_response = response;
      else
        response([]);
    }
    else
      callResponse(paths);
  }

  return autocomplete;
}

/* Autocomplete source for git refs on a remote.  'remote' is a URL or a
   function returning one; 'prefix' (e.g. "refs/heads/") is stripped from
   the listed ref names.  The branch list is fetched lazily via the
   "fetchremotebranches" operation and cached until the remote changes. */
function AutoCompleteRef(remote, prefix)
{
  var branches_remote = null;
  var branches = null;
  var branches_sha1 = null;
  var branches_request = null;
  var branches_response = null;

  prefix = prefix || "";

  function getCurrentRemote()
  {
    if (typeof remote == "function")
      return remote();
    else
      return remote;
  }

  function autocomplete(request, response)
  {
    function callResponse()
    {
      function match(name)
      {
        return name.substring(0, branches_request.term.length) ==
               branches_request.term;
      }

      var matches = branches.filter(match);

      if (matches.length < 20)
      {
        matches.sort();

        function formatMatch(name)
        {
          return { label: ("<div class=sourcefont style='padding:0;margin:0;white-space:pre'>" +
                           htmlify(name) +
                           "<span style='float:right;font-size:smaller'>" +
                           branches_sha1[name].substring(0, 8) +
                           "</span></div>"),
                   value: name };
        }

        branches_response(matches.map(formatMatch));
      }
      else
        branches_response([{ label: matches.length + " matching branches",
                             value: branches_request.term }]);

      branches_request = branches_response = null;
    }

    function handleResult(result)
    {
      branches = [];
      branches_sha1 = {};

      if (result)
      {
        for (var name in result.branches)
        {
          var use_name = name.substring(prefix.length);
          branches.push(use_name);
          branches_sha1[use_name] = result.branches[name];
        }
      }

      callResponse();
    }

    if (branches_response)
      branches_response([]);

    branches_request = request;
    branches_response = response;

    var current_remote = getCurrentRemote();

    if (branches_remote != current_remote)
    {
      branches_remote = current_remote;
      branches = null;

      var operation = new Operation({ action: "fetch remote branches",
                                      url: "fetchremotebranches",
                                      data: { remote: branches_remote,
                                              pattern: "refs/heads/*" },
                                      callback: handleResult });
      operation.execute();
    }
    else if (branches)
      return callResponse();
  }

  return autocomplete;
}
*/

/* ---- Page skeleton: body, footer, keyboard-shortcut strip ---- */
body { color: #222; }
h1.noscript { color: red; text-align: center }
div.main > table { width: 95% }
.pagefooter { margin-top: 10px; border-top: 2px solid #A0A092; padding: 10px 1em 0 1em; }
div.shortcuts { text-align: right; }
div.shortcuts a.shortcut { text-decoration: none; color: #222 }

/* ---- Page header: logo cell (left) and button area (right) ---- */
table.pageheader { border-bottom: 2px solid #A0A092; }
table.pageheader td.left { vertical-align: bottom }
table.pageheader td.left b { font-family: sans-serif }
table.pageheader td.left b.opera { font-size: 40px; color: #d32226; cursor: pointer }
table.pageheader td.left b.opera.development { color: #222 }
table.pageheader td.left b.critic { font-size: 50px; color: #666666; cursor: pointer }
table.pageheader td.right { padding-bottom: 10px; text-align: right; vertical-align: bottom }
table.pageheader td.right div { display: inline-block }
table.pageheader td.right div span.buttonscope-global > :first-child { margin-left: 0.5em }

/* ---- "Pale yellow" content boxes (main content cards) ---- */
.paleyellow { margin: 1rem auto; padding: 1rem; box-sizing: border-box; background: #ffffe6; border: 1px solid #cca; border-bottom-color: #bb9; border-right-color: #bb9; border-radius: 4px; }
.paleyellow > h1 { margin: 0; padding: .25rem .5rem .5rem; border-bottom: 1px solid #cca; font-size: 1.9rem; font-weight: normal; }
table.paleyellow { margin-top: 1.5em; margin-bottom: 2em; padding: 1em 1em 1.5em 1em; }
table.paleyellow > tbody > tr > td { padding-left: 1em }
table.paleyellow > * > tr > td.h1 { border-bottom: 1px solid #cca }
table.paleyellow > * > tr > td.h1 > h1 { margin: 0 0 0.75rem; font-size: 1.9rem; font-weight: normal; }
table.paleyellow > * > tr > td.h1 > h1 > * { text-shadow: none }
table.paleyellow > * > tr > td.h1 > h1 > span.right { font-size: 50%; float: right }
table.paleyellow > tbody > tr > td.h2 { border-bottom: 1px solid #cca }
table.paleyellow > tbody > tr > td.h2 > h2 { font-size: 1rem; margin: 0.75rem 0; }
table.paleyellow > tbody > tr > td.h2 > h2 > span { font-size: 50%; margin-left: 1em }
table.paleyellow > tbody > tr > td.h2 > h2 > span.right { float: right }
table.paleyellow > tbody > tr.item > td { padding-top: 0.5em }
table.paleyellow > tbody > tr.item > td.name { font-family: serif; font-weight: bold; text-align: right; }
table.paleyellow > tbody > tr.item > td.value { font-family: monospace; white-space: pre-wrap; padding-left: 1em }
table.paleyellow > tbody > tr.item > td.value > div.buttons { float: right }
table.paleyellow > tbody > tr.item > td.value > div.buttons button { margin-left: 3px }
table.paleyellow > tbody > tr.help > td { font-style: italic; text-align: right; border-bottom: 1px solid #cca }
table.paleyellow > tbody > tr.spacer > td { padding-top: 0.3em; }
table.paleyellow > tbody > tr.separator > td { padding: 1em 0 0.5em 0 }
table.paleyellow > tbody > tr.separator > td > div { border-bottom: 1px solid #cca }
table.paleyellow > tbody > tr.centered > td { text-align: center }
table.paleyellow > tbody > tr.centered > td > button { margin-top: 1em }

/* Cap content width on wide screens. */
@media (min-width: 1348px) { .section, div.main > table, table.paleyellow { width: 1280px } }

/* ---- Callout / inset panels and jQuery-UI dialog tweaks ---- */
.callout { padding: 0 0.75rem; background: #fffff4; border: 1px solid #ddb; border-bottom-color: #cca; border-right-color: #cca; border-radius: 4px; }
table.callout { margin: 0 auto; padding-top: 0.75rem; padding-bottom: 0.75rem; border-spacing: 0; }
table.callout th { border-bottom: 1px solid #cca; }
table.callout th, table.callout td { padding: 0.25em 0.5em; text-align: left; }
.inset { padding: 0.25em 0.5em; background: #fffff0; border: 1px solid #ddb; }
.ui-dialog .inset { padding: 0.1rem 0.5rem; background: #fbfaf9; border: 1px solid #e0cfc2; }
.ui-dialog { box-shadow: 0 5px 5px rgba(0, 0, 0, 0.5), 0 7px 25px rgba(0, 0, 0, 0.5) }

/* ---- Header navigation list ("a | b | c" separators via :before) ---- */
table.pageheader td.left > ul { display: inline-block; margin: 0 0 0 1em; padding: 0; font-weight: bold }
table.pageheader td.left > ul > li { display: inline; padding: 0 }
table.pageheader td.left > ul > li > a { text-decoration: none; color: #222 }
table.pageheader td.left > ul > li:before { content: " | " }
table.pageheader td.left > ul > li:first-child:before { content: none }

/* ---- Message/error dialogs ---- */
.message-dialog pre { margin-left: 1em }
.error-dialog code, .message-dialog code { display: inline-block; border: 1px solid #cca; background-color: #fff; padding: 2px 4px }

/* ---- Repository <select> dropdown entries ---- */
.repository-select { background-color: white; text-align: left; white-space: nowrap }
.repository-select .repository { flex-flow: row wrap; }
.repository-select .repository .repository-name { font-weight: bold }
.repository-select .repository .repository-path { font-family: monospace; padding-left: 0.5rem; margin-left: auto; }

/* ---- Notification toasts ---- */
.notifications { position: fixed; width: 400px; margin: 0 }
.notification { border-radius: 5px; border: 2px solid #8a8; margin: 10px auto; padding: 5px 2em; background-color: #dfd; }

/* ---- Quick-search results and dialog ---- */
table.searchresults { width: 100% }
table.searchresults .id { text-align: right; padding-right: 0.5em }
table.searchresults .summary { padding-left: 0.5em }
table.searchresults td.link { text-align: right; vertical-align: top; }
table.searchresults tr.review td { font-family: monospace; padding-top: 3px; padding-bottom: 3px; background-color: #fff }
div.searchdialog .help { float: right }
div.searchdialog input { width: 100%; font-family: monospace; margin-top: 0.5em }

/* ---- Flexbox helpers (with legacy -ms-/-webkit- prefixes) ---- */
.flex { display: -ms-flexbox !important; display: -webkit-flex !important; display: flex !important; }
.ui-dialog > .flex { -ms-flex-direction: column; -webkit-flex-direction: column; flex-direction: column; }
.ui-dialog > .flex > * { -ms-flex: none; -webkit-flex: none; flex: none; }
.ui-dialog > .flex > .flexible { -ms-flex: auto; -webkit-flex: auto; flex: auto; min-height: 0; }

/* ---- Form controls ---- */
input:not([type]), input[type="text"], input[type="password"], input[type="date"], input[type="email"], input[type="number"], input[type="search"], input[type="tel"], input[type="time"], input[type="url"], textarea { margin: 0; -moz-box-sizing: border-box; box-sizing: border-box; padding: .3rem; background: #fff; opacity: .95; border: 1px solid #efefcf; border-top-color: #e5e6b3; border-left-color: #e5e6b3; border-radius: 2px; font-size: inherit; }
input:not([type]):focus, input[type="text"]:focus, input[type="password"]:focus, input[type="date"]:focus, input[type="email"]:focus, input[type="number"]:focus, input[type="search"]:focus, input[type="tel"]:focus, input[type="time"]:focus, input[type="url"]:focus, textarea:focus { outline: none; opacity: 1; border-color: #ddb; border-top-color: #cca; border-left-color: #cca; box-shadow: 0 0 1px 1px #fff; }
select { font-size: inherit; }
input:disabled, select:disabled { opacity: .7; }
fieldset { border: 0; padding: 0; }
.input-label { font-size: .9rem; font-weight: bold; }
.checkbox-group label { padding-right: .5rem; white-space: nowrap; }
.checkbox-group label:last-child { padding-right: 0; }
.clickable { cursor: pointer }
*/ /* -*- Mode: js; js-indent-level: 2; indent-tabs-mode: nil -*- */ function User(id, name, email, displayName, status, options) { this.id = id; this.name = name; this.email = email; this.displayName = displayName; this.status = status; this.options = options || {}; } User.prototype.toString = function () { return this.displayName + " <" + this.email + ">"; }; function Repository(id, name, path) { this.id = id; this.name = name; this.path = path; } function Branch(id, name, base) { this.id = id; this.name = name; this.base = base; } function reportError(what, specifics, title, callback) { if (!title) title = "Communication Error!"; var content = $("<div class=error-dialog title='" + title + "'><h1>Failed to " + what + ".</h1><p>" + specifics + "</p></div>"); content.dialog({ width: 800, height: 400, modal: true, buttons: { OK: function () { content.dialog("close"); if (callback) callback(); }}}); } function showMessage(title, heading, message, callback) { var content = $("<div class=message-dialog title='" + title + "'><h1>" + heading + "</h1>" + (message || "") + "</div>"); content.dialog({ width: 600, modal: true, buttons: { OK: function () { content.dialog("close"); if (callback) callback(); }}}); } function htmlify(text, attribute) { text = String(text).replace(/&/g, "&").replace(/</g, "<"); if (attribute) text = text.replace(/'/g, "'").replace(/"/g, """); return text; } function Operation(data) { this.action = data.action; this.url = data.url; this.data = data.data; this.wait = data.wait; this.cancelable = data.cancelable; this.failure = data.failure || {}; this.callback = data.callback; this.id = ++Operation.counter; if (this.callback) Operation.current[this.id] = true; } Operation.SUPPORTS_CALLBACK = true; Operation.current = {}; Operation.counter = 0; Operation.idleCallbacks = []; Operation.isBusy = function () { return Object.keys(Operation.current).length != 0; }; Operation.whenIdle = function (callback) { if (Operation.isBusy()) 
Operation.idleCallbacks.push(callback); else callback(); }; Operation.finished = function (id) { delete Operation.current[id]; }; Operation.checkIdle = function () { if (!Operation.isBusy()) { Operation.idleCallbacks.forEach(function (fn) { fn(); }); Operation.idleCallbacks = []; } }; window.addEventListener("beforeunload", function (ev) { if (Operation.isBusy()) { ev.returnValue = "There are pending requests to the server. You probably want to let them finish before you leave the page."; ev.preventDefault(); } }); Operation.prototype.execute = function () { var self = this; var result = null; var wait = null; function handleResult(result, callback) { callback = callback || function (result) { return result; }; if (result.status == "failure") { var handler = self.failure[result.code]; if (!handler || !handler(result)) showMessage("Oops...", result.title, result.message, function () { callback(null); }); return null; } else if (result.status == "error") { if (result.error.indexOf("\n") != -1) reportError(self.action, "Server reply:<pre>" + htmlify(result.error) + "</pre>", null, function () { callback(null); }); else reportError(self.action, "Server reply: <i>" + htmlify(result.error) + "</i>", null, function () { callback(null); }); return null; } else return callback(result); } function success(data) { self.ajax = null; result = data; if (data.__profiling__) console.log(self.url + "\n" + Array(self.url.length + 1).join("=") + "\n" + "Total: " + data.__time__.toPrecision(3) + " seconds\n" + data.__profiling__); if (wait) wait.dialog("close"); if (self.callback) { Operation.finished(self.id); handleResult(result, self.callback); Operation.checkIdle(); } } function error(xhr) { self.ajax = null; if (wait) wait.dialog("close"); if (!self.aborted) { if (xhr.status == 404) reportError(self.action, "<p>The operation <code>" + self.url + "</code> is not supported by the server.<p>" + "<p>Simply reloading the page and then trying again might help. 
" + "If that doesn't help, and you think an extension might be " + "involved, try reinstalling (or uninstalling) it.</p>", null, self.callback); else reportError(self.action, "Server reply:<pre>" + (xhr.responseText ? htmlify(xhr.responseText) : "N/A") + "</pre>", null, self.callback); if (self.callback) Operation.finished(self.id); } } if (this.wait) { wait = $("<div title='Please Wait' style='text-align: center; padding-top: 2em'>" + this.wait + "</div>"); var data = { modal: true }; if (this.cancelable) data.buttons = { "Cancel": function () { wait.dialog("close"); self.ajax.abort(); }}; wait.dialog(data); } this.ajax = $.ajax({ async: !!this.callback, type: "POST", url: "/" + this.url, contentType: "text/json", data: JSON.stringify(this.data), dataType: "json", success: success, error: error }); if (!this.callback) { if (wait) wait.dialog("close"); if (result) return handleResult(result); else return null; } }; Operation.prototype.abort = function () { this.aborted = true; if (this.ajax) this.ajax.abort(); }; $(document).ready(function () { $("button").button(); $("a.button").button(); /* Element (e.g. a table-cell) containing a link and with a 'click' handler that clicks the link. */ $(".clickable").click(function (ev) { if (ev.button == 0 && !$(ev.target).closest("a, button, .clickable-target").size()) /* The '.get(0)' means we call the browser's native click() instead of jQuery's. For some reason, the latter doesn't trigger the default action of the click event on the link (i.e. navigation). 
*/ $(ev.currentTarget).find(".clickable-target").get(0).click(); }); }); var keyboardShortcutHandlers = []; function handleKeyboardShortcut(key) { for (var index = 0; index < keyboardShortcutHandlers.length; ++index) if (keyboardShortcutHandlers[index](key)) return true; return false; } $(document).ready(function () { if (typeof keyboardShortcuts == "undefined" || keyboardShortcuts) $(document).keypress(function (ev) { if (ev.ctrlKey || ev.shiftKey || ev.altKey || ev.metaKey) return; if (/^(?:input|textarea)$/i.test(ev.target.nodeName)) if (ev.which == 32 || /textarea/i.test(ev.target.nodeName) || !/^(?:checkbox|radio)$/i.test(ev.target.type)) return; /* Handling non-printable keys. */ if (ev.which) { if (handleKeyboardShortcut(ev.which)) ev.preventDefault(); } }); }); if (!Object.create) { Object.create = function (proto, props) { var object = {}; try { object.__proto__ = proto; } catch (e) {} for (var name in props) { if ("value" in props[name]) object[name] = props[name].value; else { if (props[name].get) object.__defineGetter__(name, props[name].get); if (props[name].set) object.__defineSetter__(name, props[name].set); } } return object; }; } var hooks = Object.create(null, { "create-comment": { value: [] }, "display-comment": { value: [] } }); var critic = { Operation: Operation, buttons: { add: function (data) { if (!data.title || !(data.href || data.onclick) || !data.scope) throw new TypeError("invalid data; should have 'title', 'scope' and 'href'/'onclick' properties"); if (data.href) html = "<a href='" + htmlify(data.href, true) + "'>" + htmlify(data.title) + "</a>"; else if (typeof data.onclick == "function") html = "<button>" + htmlify(data.title) + "</button>"; else html = "<button onclick='" + htmlify(data.onclick, true) + "'>" + htmlify(data.title) + "</button>"; var button = $(html); if (typeof data.onclick == "function") button.click(data.onclick); button.button(); $("span.buttonscope-" + data.scope).append(button); }, remove: function (data) { if 
(!data.title || !data.scope) throw new TypeError("invalid data; should have 'title' and 'scope' properties"); $("span.buttonscope-" + data.scope + " button").filter(function () { return $(this).text() == data.title }).detach(); } }, hooks: { add: function (name, callback) { if (!(name in hooks)) throw new TypeError("invalid hook; valid alternatives are: " + Object.keys(hooks)); hooks[name].push(callback); } }, html: { escape: htmlify } }; function signOut() { var operation = new Operation({ action: "sign out", url: "endsession", data: {}}); var result = operation.execute(); if (result) { if (result.target_url) location.href = result.target_url; else location.reload(); } } function repositionNotifications() { $("body > div.notifications").position({ my: "center top", at: "center top", of: window }); } function showNotification(content, data) { data = data || {}; var notifications = $("body > div.notifications"); if (notifications.size() == 0) { notifications = $("<div class=notifications></div>"); $("body").append(notifications); repositionNotifications(); } var notification = $("<div class=notification></div>"); function displayed() { setTimeout(hide, (data.duration || 3) * 1000); } function hide() { if (notification.next("div.notification").size()) remove(); else { /* Using .animate({ opacity: 0 }) instead of .fadeOut() since the latter "helpfully" sets display:none at the end of the animation. We want to also do .slideUp(), and that only works if the element is still there. 
*/ notification.animate( { opacity: 0 }, { duration: 600, complete: remove }); } } function remove() { if (data.callback) data.callback(); notification.slideUp(400, finalize); } function finalize() { notification.remove(); } if (data.className) notification.addClass(data.className); notification.append(content); notification.fadeIn(400, displayed); notification.click(hide); notifications.append(notification); return { hide: hide, remove: remove }; } var previous_query = ""; if (typeof localStorage != "undefined") previous_query = localStorage.getItem("previous_query"); function quickSearch(external_query, callback) { function finish(result) { if (!result) { if (external_query === void 0) setTimeout(quickSearch, 0); return; } if (result.reviews.length == 0) { showMessage("Search results", "No reviews found!"); return; } var html = ("<table class=searchresults>" + "<tr><th class=id>Review</th><th class=summary>Summary</th>"); html += "</tr>"; result.reviews.forEach(function (review, index) { html += ("<tr class=review critic-review-id=" + review.id + ">" + "<td class=id>r/" + review.id + "</td>" + "<td class=summary><a href=/r/" + review.id + ">" + htmlify(review.summary) + "</a></td>" + "</tr>"); }); html += "</table></div>"; var content = $(html); content.find("tr").click(function (ev) { var target = $(ev.target); if (!target.is("a")) target.parents("tr").find("a").get(0).click(); }); if (callback) callback(content, result); else { content.wrap("<div title='Search results'></div>"); content.find("tr").first().append( "<td class=link><a>Link to this search</a></td>"); content.find("td.link a").attr("href", "/search?" 
+ result.query_string); content.find("td.summary").attr("colspan", "2"); content = content.parent(); content.dialog( { width: 800, buttons: { "Close": function () { content.dialog("close"); }}}); if (content.closest(".ui-dialog").height() > innerHeight) content.dialog("option", "height", innerHeight - 10); } } function search(query) { var operation = new Operation({ action: "search", url: "searchreview", data: { query: query }, wait: "Searching...", callback: finish }); operation.execute(); } if (external_query !== void 0) { search(external_query); return; } function start() { previous_query = content.find("input").val().trim(); if (typeof localStorage != "undefined") localStorage.setItem("previous_query", previous_query); content.dialog("close"); if (previous_query) search(previous_query); } function cancel() { content.dialog("close"); } function handleKeypress(ev) { if (ev.keyCode == 13) start(); } var content = $("<div title='Review Quick Search' class=searchdialog>" + "<div><b>Search query:</b>" + "<span class=help><a href=/tutorial?item=search>Help</a></span></div>" + "<div><input></div>" + "</div>"); content.find("input") .val(previous_query) .keypress(handleKeypress); content.dialog({ width: 800, buttons: { "Search": start, "Cancel": cancel }}); setTimeout(function () { content.find("input").select(); content.find("input").focus(); }, 0); } keyboardShortcutHandlers.push(function (key) { if (key == "f".charCodeAt(0)) { quickSearch(); return true; } if (key == "u".charCodeAt(0)) { if (window.review) { if (window.isReviewFrontpage) location.href = "/dashboard"; else location.href = "/r/" + review.id; return true; } } }); $(window).resize(repositionNotifications); ================================================ FILE: src/resources/branches.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not 
use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ body { font-size: 12px; font-family: sans-serif } div.main table tr.title td.description { text-align: right; border-bottom: 1px solid #cca; margin-left: 0; margin-top: 0; margin-bottom: 0.5em; font-style: italic } div.main table tr.title td.repositories { text-align: right; border-bottom: 1px solid #cca } div.main table.branches tr.headings td { padding-top: 0.5em; font-weight: bold; text-decoration: underline } div.main table.branches tr.headings td.when { text-align: right; width: 10% } div.main table.branches tr.headings td.name { width: 40% } div.main table.branches tr.headings td.base { width: 40% } div.main table.branches tr.headings td.commits { text-align: right; width: 10% } div.main table.branches tr.nothing td.nothing { text-align: center; font-weight: bold; padding-top: 1em } div.main table.branches tr.branch:hover { background: #eec } div.main table.branches tr.branch td.when { text-align: right; font-weight: bold } div.main table.branches tr.branch td.name { font-family: monospace; font-size: 10pt } div.main table.branches tr.branch td.name span.review { padding-left: 1em; font-size: smaller } div.main table.branches tr.branch td.name span.check { padding-left: 1em; font-size: smaller } div.main table.branches tr.branch td.base { font-family: monospace; font-size: 10pt } div.main table.branches tr.branch td.commits { text-align: right; font-weight: bold } ================================================ FILE: src/resources/branches.js ================================================ /* -*- mode: js; 
indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ $(function () { $("td.repositories select").change(function (ev) { if (typeof repository == "undefined" || ev.target.value != repository.id) location.href = "/branches?repository=" + encodeURIComponent(ev.target.value); }); $(".repository-select").chosen({ inherit_select_classes: true, generate_selected_value: function (item) { return { html: "Repository: <b>" + htmlify(item.text) + "</b>" }; }, collapsed_width: "auto", expanded_width: "600px" }); }); ================================================ FILE: src/resources/changeset.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
div.main { margin-bottom: 20px; padding-bottom: 10px; border-bottom: 2px solid #A0A092; }
/* Per-commit file list. */
table.commit-files { margin-left: 0.5em; margin-bottom: 15px }
table.commit-files > thead > tr > th { text-align: left; padding-left: 10px; padding-right: 10px }
table.commit-files > tbody > tr > td { font-family: monospace; white-space: pre; padding-left: 10px; padding-right: 10px }
table.commit-files > tbody > tr > td.approve { text-align: right; font-style: italic }
table.commit-files > tbody > tr > td.approve > span { display: none }
table.commit-files > tbody > tr > td.approve > span.show { display: inline }
table.commit-files > tbody > tr > th.parent { text-align: center; font-style: italic }
table.commit-files > tbody > tr > td > a { color: #222; text-decoration: none }
table.commit-files > tbody > tr > td.path:hover { background-color: #ccc }
table.commit-files > tbody > tr > td.parent.hover { background-color: #ccc }
/* Per-file diff tables; visibility is driven by the .show/.expanded classes
   toggled in changeset.js (hideFile/showFile/expandFile). */
table.file { border: 1px solid #cca; border-radius: 2px; margin-bottom: 1em; table-layout: fixed }
table.file col.edge { width: 1em }
table.file col.linenr { width: 3em }
table.file col.middle { width: 1em }
table.file col.line { width: 50% }
table.file { display: none }
table.file.show { display: table }
table.file > tbody, table.file > tfoot { display: none }
table.file.expanded > tbody, table.file.expanded > tfoot { display: table-row-group }
table.file > thead > tr > td, table.file > tfoot > tr > td { padding: 3px 2em; width: 50%; background-color: #ffffe6 }
table.file > thead > tr > td.left, table.file > tfoot > tr > td.left { width: 50%; text-align: left }
table.file > thead > tr > td.right, table.file > tfoot > tr > td.right { width: 50%; text-align: right }
table.file > thead + tbody > tr:first-child > td, table.file > tbody + tfoot > tr:first-child > td { border-top: 1px solid #cca }
table.file thead td.left a.showtree, table.file tfoot td.left a.showtree { text-decoration: none; color: #222 }
table.file thead td.left a.showtree.root,
table.file tfoot td.left a.showtree.root { font-size: 80% }
table.file thead td.left a.showtree:hover, table.file tfoot td.left a.showtree:hover { text-decoration: underline; color: #222 }
/* Collapsed-context spacer rows between diff hunks. */
table.file > tbody.spacer > tr > td { background-color: #eee; text-align: center }
table.file > tbody.spacer > tr > td { padding: 0 }
table.file > tbody.spacer > tr + tr.spacer { display: none }
table.file > tbody.spacer > tr.expand > td, table.file > tbody.spacer > tr.context > td { padding-top: 3px; padding-bottom: 3px }
table.file > tbody.spacer > tr.expand > td > select { background-color: #eee; border: 0 }
table.file > tbody.lines > tr > td.edge, table.file > tbody.lines > tr > td.middle, table.file > tbody.lines > tr > td.linenr { background-color: #eee }
/* Draggable gutter between the old and new sides. */
table.file > tbody.lines > tr > td.middle { cursor: col-resize; background-image: -webkit-radial-gradient(circle, #eee 4px, transparent 4px), -webkit-radial-gradient(circle, rgba(0, 0, 0, 0.3) 4px, transparent 4px); background-image: -moz-radial-gradient(circle, #eee 4px, transparent 4px), -moz-radial-gradient(circle, rgba(0, 0, 0, 0.3) 4px, transparent 4px); background-image: radial-gradient(circle, #eee 4px, transparent 4px), radial-gradient(circle, rgba(0, 0, 0, 0.3) 4px, transparent 4px); background-position: 0 0, -1px -1px }
table.file > tbody.lines > tr > td.linenr.old { text-align: right; padding-right: 3px }
table.file > tbody.lines > tr > td.linenr.new { padding-left: 3px }
table.file > tbody.lines > tr > td.line { border-left: 1px solid #888; border-right: 1px solid #888; white-space: pre-wrap; overflow: hidden; word-wrap: break-word }
table.file.resized.old-narrower > tbody.lines > tr > td.line.old, table.file.resized.new-narrower > tbody.lines > tr > td.line.new { overflow-wrap: normal; white-space: pre }
table.file > tbody.lines > tr > td.comment { white-space: pre-wrap; border-left: 1px solid #888; border-right: 1px solid #888 }
table.file > tbody.lines > tr:first-child > td.line { border-top: 1px solid #888;
padding-top: 3px }
table.file > tbody.lines > tr:last-child > td.line { border-bottom: 1px solid #888; padding-bottom: 3px }
table.file > tbody.content > tr > td { background-color: #eee }
table.file > tbody.deleted > tr > td { background-color: #eee; text-align: center }
table.file > tbody.deleted > tr > td > h2 { margin-top: 0 }
/* Binary-file placeholder with a download link. */
table.file > tbody.binary > tr > td { background-color: #eee; text-align: center; vertical-align: middle }
table.file > tbody.binary > tr > td > h2 { margin-top: 0 }
table.file > tbody.binary > tr.download > td > a { padding-left: 1em; padding-right: 1em }
table.file > tbody.binary > tr.download > td > a > img { vertical-align: middle }
/* Commit metadata box. */
table.commit-info { margin-top: 1em; width: auto !important }
table.commit-info tr.commit-info td { white-space: nowrap }
table.commit-info span.links, table.commit-info span.branches, table.commit-info span.tags { margin-left: 1em }
table.commit-info span.link, table.commit-info span.branch, table.commit-info span.tag { margin-right: 0.2em }
pre.commit-msg { padding: 0.5em 1em }
/* Syntax-highlight tweaks for changed tokens (b.t spans). */
tr.modified b.t { color: #cca }
tr.replaced > td.old b.t, tr.deleted > td.old b.t { color: #caa }
tr.replaced > td.new b.t, tr.inserted > td.new b.t { color: #aca }
input.approve { background-color: #eec; font-weight: bold }
body { font-size: 12px; font-family: sans-serif }
/* Per-parent sections for merge commits; toggled by selectParent(). */
div.parent { display: none }
div.parent.show { display: block }
div.parent > h1 { padding-left: 1em; font-size: 150%; font-weight: bold }
div.detectmoves select { width: 100%; padding: 3px }
table.commit-info tr.commit-msg > td { padding-top: 1.5em; padding-bottom: 1.5em }
table.commit-msg tr.line:hover { background-color: #eee }
table.commit-msg tr.line td.edge { padding: 0 1em 0 1em }
table.commit-msg tr.line td.line { white-space: pre; font-family: monospace; padding: 0 }
table.commit-msg tr.line.highlight td.line { font-weight: bold }
.blame-tooltip { opacity: 1 !important; max-width: none !important }
.blame-tooltip pre { margin: 5px; padding: 5px;
background-color: white; border: 1px solid #888 } ================================================ FILE: src/resources/changeset.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* -*- Mode: js; js-indent-level: 2; indent-tabs-mode: nil -*- */ var files = []; var blocks = []; function makeLine(fileId, oldOffset, oldLine, newLine, newOffset) { var row = document.createElement('tr'); row.className = 'line context'; row.id = 'f' + fileId + 'o' + oldOffset + 'n' + newOffset; var edge1 = row.insertCell(-1); edge1.className = 'edge'; var oldOffsetCell = row.insertCell(-1); oldOffsetCell.textContent = oldOffset; oldOffsetCell.className = 'linenr old'; oldOffsetCell.align = 'right'; var oldLineCell = row.insertCell(-1); oldLineCell.innerHTML = oldLine ? oldLine : " "; oldLineCell.className = 'line old'; oldLineCell.id = 'f' + fileId + 'o' + oldOffset; var middle = row.insertCell(-1); middle.innerHTML = ' '; middle.className = 'middle'; middle.colSpan = 2; var newLineCell = row.insertCell(-1); newLineCell.innerHTML = newLine ? 
newLine : " "; newLineCell.className = 'line new'; newLineCell.id = 'f' + fileId + 'n' + newOffset; var newOffsetCell = row.insertCell(-1); newOffsetCell.textContent = newOffset; newOffsetCell.className = 'linenr new'; var edge2 = row.insertCell(-1); edge2.className = 'edge'; if (typeof startCommentMarking != "undefined") { var lineCells = $(row).children("td.line"); lineCells.mousedown(startCommentMarking); lineCells.mouseover(continueCommentMarking); lineCells.mouseup(endCommentMarking); } return row; } function previousTableSection(node) { while (node.parentNode.nodeName.toLowerCase() != "table") node = node.parentNode; do node = node.previousSibling; while (node.nodeName.toLowerCase() != "tbody"); return node; } function nextTableSection(node) { while (node.parentNode.nodeName.toLowerCase() != "table") node = node.parentNode; do node = node.nextSibling; while (node.nodeName.toLowerCase() != "tbody"); return node; } function hasClass(element, cls) { return new RegExp("(^|\\s)" + cls + "($|\\s)").test(element.className) } function addClass(element, cls) { if (!hasClass(element, cls)) element.className += " " + cls; } function removeClass(element, cls) { if (hasClass(element, cls)) element.className = element.className.replace(new RegExp("(^|\\s+)" + cls + "($|(?=\\s))"), ""); } var extractedFiles = {}; var HIDE = 1; var SHOW = 2; var EXPAND = 3; var CONTEXT = 1; var DELETED = 2; var MODIFIED = 3; var REPLACED = 4; var INSERTED = 5; var WHITESPACE = 6; var CONFLICT = 7; var line_classes = [null, "context", "deleted", "modified", "replaced", "inserted", "modified whitespace", "conflict"]; function recompact(id) { var table = fileById(id), count = 0; table.each(function (index, table) { if (!table.disableCompact) for (index = 0; index < table.tBodies.length; ++index) { var tbody = table.tBodies.item(index); if (tbody.firstChild.nodeType == Node.COMMENT_NODE) { var comment = tbody.firstChild; while (comment.nextSibling) { tbody.removeChild(comment.nextSibling); 
++count; } } } }); }

/* Rebuild the full diff rows of a compacted file from the JSON payload
   stored in each tbody's leading comment node (see recompact()).  The
   payload is [file_id, sides, old_offset, new_offset, lines].
   NOTE(review): the bare " " placeholders in the HTML strings below were
   likely "&nbsp;" before HTML-entity decoding mangled this dump — confirm. */
function decompact(id) {
  var table = fileById(id);
  /* Lazily add the colgroup that fixes the column layout. */
  if (!table.children("colgroup").size())
    table.prepend("<colgroup><col class=edge><col class=linenr><col class=line><col class=middle><col class=middle><col class=line><col class=linenr><col class=edge></colgroup>");
  table.each(function (index, table) {
    /* Merge commits: each parent's table gets a "pN" id prefix. */
    var parent;
    if (table.hasAttribute("critic-parent-index"))
      parent = "p" + table.getAttribute("critic-parent-index");
    else
      parent = "";
    if (table.disableCompact)
      return;
    /* Expand the compact "<btag>" markup into "<b class=tag>". */
    function unpack(line) { return line.replace(/<([bi])([a-z]+)>/g, "<$1 class=$2>"); }
    for (index = 0; index < table.tBodies.length; ++index) {
      var tbody = table.tBodies.item(index);
      /* Only tbodies still holding just the JSON comment need rebuilding. */
      if (tbody.firstChild.nodeType == Node.COMMENT_NODE && !tbody.firstChild.nextSibling) {
        var data = JSON.parse(tbody.firstChild.nodeValue);
        var file_id = data[0];
        var sides = data[1];
        var old_offset = data[2];
        var new_offset = data[3];
        var lines = data[4];
        var html = "";
        for (var line_index = 0; line_index < lines.length; ++line_index) {
          var line = lines[line_index];
          var line_type = line[0];
          var item_index = 1;
          /* Inserted lines have no old offset; deleted/conflict lines no new. */
          var line_old_offset = 0, line_new_offset = 0;
          if (line_type != INSERTED)
            line_old_offset = old_offset++;
          if (line_type != DELETED && line_type != CONFLICT)
            line_new_offset = new_offset++;
          var line_id = parent + "f" + file_id + "o" + line_old_offset + "n" + line_new_offset;
          html += "<tr class='line " + (sides != 2 ?
"single " : "") + line_classes[line_type] + "' id='" + line_id + "'>" + "<td class=edge> </td>" + "<td class='linenr old'>";
          if (sides == 2) {
            /* Two-sided (side-by-side) rendering. */
            if (line_type != INSERTED)
              html += line_old_offset + "</td><td class='line old' id=" + parent + "f" + file_id + "o" + line_old_offset + ">" + unpack(line[item_index++]);
            else
              html += " <td class='line old'> ";
            html += "</td><td class='middle' colspan=2> </td>" + "<td class='line new'";
            if (line_type != DELETED && line_type != CONFLICT)
              html += " id=" + parent + "f" + file_id + "n" + line_new_offset + ">" + unpack(line[item_index++]) + "</td><td class='linenr new'>" + line_new_offset;
            else
              html += "> </td><td class='linenr old'> ";
          } else {
            /* Single-sided rendering: one full-width cell (colspan=4). */
            if (line_type == DELETED)
              html += line_old_offset + "</td><td class='line single old' id=" + parent + "f" + file_id + "o" + line_old_offset + " colspan=4>" + unpack(line[item_index++]) + "</td><td class='linenr new'>" + line_old_offset;
            else
              html += line_new_offset + "</td><td class='line single new' id=" + parent + "f" + file_id + "n" + line_new_offset + " colspan=4>" + unpack(line[item_index++]) + "</td><td class='linenr new'>" + line_new_offset;
          }
          html += "</td><td class=edge> </td></tr>";
        }
        tbody = $(tbody);
        tbody.append(html);
        /* Hook up comment marking on review pages. */
        if (typeof review != "undefined") {
          tbody.find("td.line").mousedown(startCommentMarking);
          tbody.find("td.line").mouseover(continueCommentMarking);
          tbody.find("td.line").mouseup(endCommentMarking);
        }
        updateBlame(parseInt(id));
      }
    }
  });
}

/* Re-attach a file table previously detached into extractedFiles, rebinding
   the comment-marking handlers that were lost with the detach. */
function restoreFile(id) {
  if (id in extractedFiles) {
    var table = extractedFiles[id][0];
    var placeholder = extractedFiles[id][1];
    delete extractedFiles[id];
    placeholder.replaceWith(table);
    if (typeof review != "undefined") {
      table.find("td.line").mousedown(startCommentMarking);
      table.find("td.line").mouseover(continueCommentMarking);
      table.find("td.line").mouseup(endCommentMarking);
    }
  }
}

/* Re-attach every detached file table. */
function restoreAllFiles() {
  for (var id in extractedFiles)
    restoreFile(id);
}

/* Toggle a file table between expanded and collapsed. */
function toggleFile(table) {
  table = $(table);
  if
(table.hasClass("expanded"))
    collapseFile(table.attr("critic-file-id"));
  else
    expandFile(table.attr("critic-file-id"));
  if (typeof CommentMarkers != "undefined")
    CommentMarkers.updateAll();
}

/* jQuery set of the table(s) for file 'id' — one per parent on merge pages
   (ids "pNfID"), otherwise the single "#fID" table. */
function fileById(id) {
  if (typeof parentsCount != "undefined") {
    var selector = [];
    for (index = 0; index < parentsCount; ++index)
      selector.push("#p" + index + "f" + id);
    return $(selector.join(", "));
  } else
    return $("#f" + id);
}

/* Collapse a file; 'implicit' suppresses the history-state save. */
function collapseFile(id, implicit) {
  var table = fileById(id);
  table.removeClass("expanded");
  recompact(id);
  if (typeof CommentMarkers != "undefined")
    CommentMarkers.updateAll();
  if (!implicit)
    saveState();
}

/* Expand a file; on merge pages first switch to a parent that has it.
   Updates the global 'currentFile' used by the space-bar shortcut. */
function expandFile(id, scroll) {
  if (typeof parentsCount != "undefined")
    if (selectedParent == null || !document.getElementById("p" + selectedParent + "f" + id))
      for (var index = 0; index < parentsCount; ++index)
        if (document.getElementById("p" + index + "f" + id)) {
          selectParent(index);
          break;
        }
  restoreFile(id);
  var table = currentFile = fileById(id);
  decompact(id);
  table.addClass("show expanded");
  if (scroll) {
    /* NOTE(review): mixes pageYOffset and scrollY (aliases in modern
       browsers) — presumably intentional legacy compatibility; confirm. */
    if (table.offset().top + table.height() > pageYOffset + innerHeight || table.offset().top < scrollY)
      scrollTo(pageXOffset, table.offset().top);
  }
  if (typeof CommentMarkers != "undefined")
    CommentMarkers.updateAll();
  saveState();
}

/* Hide a file entirely (no header row shown). */
function hideFile(id) {
  var table = fileById(id);
  table.removeClass("show");
  recompact(id);
}

/* Show a file (same parent-selection dance as expandFile). */
function showFile(id) {
  if (typeof parentsCount != "undefined")
    if (selectedParent == null || !document.getElementById("p" + selectedParent + "f" + id))
      for (var index = 0; index < parentsCount; ++index)
        if (document.getElementById("p" + index + "f" + id)) {
          selectParent(index);
          break;
        }
  restoreFile(id);
  var table = fileById(id);
  table.addClass("show expanded");
}

/* Collapse every expanded file; saves state once unless 'implicit'. */
function collapseAll(implicit) {
  var changed = false;
  $("table.file.expanded").each(function (index, table) {
    changed = true;
    table = $(table);
    table.removeClass("expanded");
    var id = table.attr("critic-file-id");
    recompact(id);
  });
  if (typeof
CommentMarkers != "undefined")
    CommentMarkers.updateAll();
  if (!implicit && changed)
    saveState();
}

/* Expand every file. */
function expandAll() {
  showAll(true);
  var changed = false;
  $("table.file").each(function (index, table) {
    table = $(table);
    var id = table.attr("critic-file-id");
    if (!table.hasClass("expanded")) {
      decompact(id);
      changed = true;
      table.addClass("expanded");
    }
  });
  if (changed)
    saveState();
  if (typeof CommentMarkers != "undefined")
    CommentMarkers.updateAll();
}

/* Current bulk mode; affects what the space-bar shortcut does next. */
var mode = "hide";

/* Hide every file (no-op on showcomment(s) pages). */
function hideAll(implicit) {
  if (/showcomments?$/.test(location.pathname))
    return;
  mode = "hide";
  $("table.file").each(function (index, table) {
    hideFile($(table).attr("critic-file-id"));
  });
  if (typeof CommentMarkers != "undefined")
    CommentMarkers.updateAll();
  if (!implicit)
    saveState();
}

/* Show every file (collapsed). */
function showAll(implicit) {
  mode = "show";
  restoreAllFiles();
  var changed = false;
  $("table.file").each(function (index, table) {
    table = $(table);
    var id = table.attr("critic-file-id");
    if (!table.hasClass("show")) {
      changed = true;
      if (table.hasClass("expanded"))
        decompact(id);
      table.addClass("show");
    }
  });
  if (!implicit && changed)
    saveState();
  if (typeof CommentMarkers != "undefined")
    CommentMarkers.updateAll();
}

/* Guards and bookkeeping for HTML5 history state snapshots. */
var isRestoringState = false;
var saveStateTimer = null;
var previousFilesView = {};

/* Shallow comparison of two id -> view-state maps. */
function isFilesViewEqual(first, second) {
  for (var id in first)
    if (first[id] != second[id])
      return false;
  return true;
}

/* Debounced saveState (1.5 s), e.g. for scroll-driven updates. */
function queueSaveState(replace) {
  if (saveStateTimer)
    clearTimeout(saveStateTimer);
  saveStateTimer = setTimeout(function () { saveState(replace); }, 1500);
}

/* Snapshot per-file view states + scroll position into history; pushes a
   new entry when the view changed, otherwise replaces the current one. */
function saveState(replace) {
  if (!isRestoringState) {
    var filesView = {};
    $("table.file").each(function (index, table) {
      table = $(table);
      var id = table.attr("critic-file-id");
      if (table.hasClass("show"))
        filesView[id] = table.hasClass("expanded") ?
EXPAND : SHOW;
      else
        filesView[id] = HIDE;
    });
    var state = { filesView: filesView, scrollLeft: pageXOffset, scrollTop: pageYOffset };
    if (isFilesViewEqual(filesView, previousFilesView))
      replace = true;
    else
      previousFilesView = filesView;
    if (!replace) {
      if (typeof history.pushState == "function")
        history.pushState(state, document.title, location.href);
    } else {
      if (typeof history.replaceState == "function")
        history.replaceState(state, document.title, location.href);
    }
  }
  clearTimeout(saveStateTimer);
  saveStateTimer = null;
}

/* Re-apply a history state snapshot produced by saveState(). */
function restoreState(state) {
  isRestoringState = true;
  hideAll(true);
  if (state) {
    for (var id in state.filesView)
      switch (state.filesView[id]) {
      case EXPAND:
        expandFile(id);
        break;
      case SHOW:
        showFile(id);
        collapseFile(id);
        break;
      case HIDE:
        hideFile(id);
        break;
      }
    if (typeof state.scrollTop == "number")
      window.scrollTo(state.scrollLeft, state.scrollTop);
  }
  isRestoringState = false;
}

/* Selected parent index on merge-commit pages (null = none yet). */
var selectedParent = null;

/* Show parent 'index' and hide all other parents' sections. */
function selectParent(index) {
  for (var other = 0; other < parentsCount; ++other)
    if (other != index)
      $(".parent" + other).removeClass("show");
  $(".parent").removeClass("show");
  $("#p" + index).addClass("show");
  $(".parent" + index).addClass("show");
  selectedParent = index;
  if (typeof CommentMarkers != "undefined")
    CommentMarkers.updateAll();
}

/* Clicking a file's header/footer (or .file-summary) toggles the file,
   except when the click landed on a link. */
document.addEventListener("click", function (ev) {
  var node = ev.target;
  while (node) {
    if (node.nodeName.toLowerCase() == "thead" ||
        node.nodeName.toLowerCase() == "tfoot" ||
        hasClass(node, "file-summary")) {
      toggleFile($(node).parents("table"));
    } else if (node.nodeName.toLowerCase() == "a")
      return;
    node = node.parentNode;
  }
}, false);

/* The file the space-bar shortcut is currently stepping through. */
var currentFile = null;

/* Changeset keyboard shortcuts: space steps/pages through files; e/c/s/h
   expand/collapse/show/hide all; m = detect moves; b = blame; digits select
   a merge parent. */
keyboardShortcutHandlers.push(function (key) {
  switch (key) {
  case 32:
    if (!currentFile)
      if (mode == "hide")
        hideAll(true);
      else
        collapseAll(true);
    /* Only advance once the current file's bottom is on screen. */
    if (pageYOffset + innerHeight >= (currentFile ? (currentFile.offset().top + currentFile.height()) : document.documentElement.scrollHeight)) {
      var nextFile = currentFile ?
currentFile.nextAll("table.file").first() : $("table.file.first"); if (currentFile && currentFile.length) { var id = currentFile.first().attr("critic-file-id"); if (mode == "hide") { $(currentFile).removeClass("show"); if (typeof CommentMarkers != "undefined") CommentMarkers.updateAll(); } else collapseFile(id, true); if (typeof markFile != "undefined") { var parent_index = currentFile.first().attr("critic-parent-index"); if (parent_index) parent_index = parseInt(parent_index); else parent_index = null; markFile("reviewed", parseInt(currentFile.first().attr("critic-file-id")), parent_index); } } if (nextFile.length) { expandFile(nextFile.first().attr("critic-file-id"), true); return true; } else currentFile = null; } saveState(); return false; case "e".charCodeAt(0): expandAll(); return true; case "c".charCodeAt(0): collapseAll(); return true; case "s".charCodeAt(0): showAll(); return true; case "h".charCodeAt(0): hideAll(); return true; case "m".charCodeAt(0): detectMoves(); return true; case "b".charCodeAt(0): blame(); return true; default: if (typeof parentsCount != "undefined") if (key >= "1".charCodeAt(0) && key <= "0".charCodeAt(0) + parentsCount) { selectParent(key - "1".charCodeAt(0)); return true; } } }); function setSpacerContext(spacer, context) { var target = $(spacer).nextAll("tr.context").find("td"); if (!target.size()) { var row = $("<tr class=context><td class=context colspan=8></td></tr>"); $(spacer).after(row); target = row.find("td"); } target.text(context); } function expand(select, file_id, path, sha1, where, oldOffset, newOffset, total) { if (select.value == "none") return; var spacerCell = select.parentNode; var spacerRow = spacerCell.parentNode; var table = spacerRow.parentNode.parentNode; var count = parseInt(select.value); var deltaOffset = 0, deltaTotal = count, deltaFactor; table.disableCompact = true; if (where != 'top') deltaOffset = count; if (where == 'middle') deltaFactor = 2; else deltaFactor = 1; deltaTotal *= deltaFactor; if 
(count == total) spacerCell.innerHTML = " "; else { select.selectedIndex = 0; var newTotal = total - deltaTotal; select.onchange = function () { expand(this, file_id, path, sha1, where, oldOffset + deltaOffset, newOffset + deltaOffset, total - deltaTotal); }; select.options[0].textContent = (total - deltaTotal) + ' lines not shown'; select.lastChild.value = newTotal; if (select.options.length == 5 && newTotal < 50 * deltaFactor) select.options[3] = null; if (select.options.length == 4 && newTotal < 25 * deltaFactor) select.options[2] = null; if (select.options.length == 3 && newTotal < 10 * deltaFactor) select.options[1] = null; select.blur(); } var ranges = []; /* Request lines above the spacer. */ if (where != "top") ranges.push({ offset: newOffset, count: count, context: false }); /* Request lines below the spacer. */ if (where != "bottom" && (where == 'top' || count < total)) ranges.push({ offset: newOffset + total - count, count: count, context: true }); var data = { repository_id: repository.id, path: path, sha1: sha1, ranges: ranges, tabify: typeof tabified != "undefined" }; var operation = new Operation({ action: "fetch lines", url: "fetchlines", data: data }); var result = operation.execute(); /* Add lines below the spacer. 
*/ if (where != 'bottom' && (where == 'top' || count < total)) { var range = result.ranges.pop(); var lines = range.lines; var tbody = nextTableSection(spacerRow); var anchor = tbody.firstChild; for (var index = 0; index < lines.length; ++index) tbody.insertBefore(makeLine(file_id, oldOffset + total - count + index, lines[index], lines[index], newOffset + total - count + index), anchor); setSpacerContext(spacerRow, range.context || ""); if (typeof CommentMarkers != "undefined") CommentMarkers.updateAll(); } if (where != "top") { var lines = result.ranges.pop().lines; var tbody = previousTableSection(spacerRow); for (var index = 0; index < lines.length; ++index) tbody.appendChild(makeLine(file_id, oldOffset + index, lines[index], lines[index], newOffset + index)); if (count == total && where == 'middle') { var next = nextTableSection(spacerRow); var spacerSection = spacerRow.parentNode; spacerSection.parentNode.removeChild(spacerSection); while (next.firstChild) tbody.appendChild(next.firstChild); next.parentNode.removeChild(next); } } if (typeof CommentMarkers != "undefined") CommentMarkers.updateAll(); } function createReview() { location.href = "/createreview?repository=" + repository.id + "&commits=" + changeset.commits.join(","); } function customProcessCommits() { location.href = "/processcommits?review=" + review.id + "&commits=" + changeset.commits.join(","); } function fetchFile(fileset, file_id, side, replace_tbody) { var data = { repository_id: repository.id, path: fileset[file_id].path, sha1: fileset[file_id][side + "_sha1"], ranges: [{ offset: 1, count: -1, context: false }], tabify: typeof tabified != "undefined" }; var operation = new Operation({ action: "fetch lines", url: "fetchlines", data: data }); var result = operation.execute(); var lines = result.ranges[0].lines; var html = "<tbody class=lines>"; var deleted = side == "old"; var row_class = deleted ? 
"deleted" : "inserted"; for (var offset = 1; offset <= lines.length; ++offset) { var line = lines[offset - 1] || " "; var row_id = "f" + file_id + (deleted ? "o" + offset + "n0" : "o0n" + offset); var cell_id = "f" + file_id + (deleted ? "o" + offset : "n" + offset); html += "<tr class='line single " + row_class + "' id=" + row_id + ">" + "<td class=edge></td>" + "<td class='linenr old'>" + offset + "</td>" + "<td class='line single " + side + "' id=" + cell_id + " colspan=4>" + line + "</td>" + "<td class='linenr new'>" + offset + "</td>" + "<td class=edge></td>" + "</tr>"; } html += "</tbody>"; var tbody = $(html); if (typeof review != "undefined") { tbody.find("td.line").mousedown(startCommentMarking); tbody.find("td.line").mouseover(continueCommentMarking); tbody.find("td.line").mouseup(endCommentMarking); } tbody.replaceAll($(replace_tbody)); } function detectMoves() { var content = $("<div title='Detect Moved Code' class='detectmoves'><p>Source file:<br><select class='source'><option value='any'></option></select></p><p>Target file:<br><select class='target'><option value='any'></option></select></p></div>"); var selects = content.find("select"); var source = selects.filter(".source"); var target = selects.filter(".target"); var fileids = {}; var paths = []; var expanded_files = []; for (var name in files) if (/^\d+$/.test(name)) { var fileid = parseInt(name); var path = files[fileid].path; fileids[path] = fileid; paths.push(path); if ($("#f" + fileid).is(".expanded")) expanded_files.push(fileid); } paths.sort(); for (var index = 0; index < paths.length; ++index) { var path = paths[index]; var fileid = fileids[path]; var selected; if (expanded_files.length == 1 && expanded_files[0] == fileid) selected = " selected"; else selected = ""; selects.append("<option value='" + fileid + "'" + selected + ">" + htmlify(path) + "</option>"); } function finish() { var source_arg = source.val() == "any" ? 
"" : "&sourcefiles=" + source.val(); var target_arg = target.val() == "any" ? "" : "&targetfiles=" + target.val(); if (typeof review != "undefined") location.href = "/" + changeset.parent.sha1 + ".." + changeset.child.sha1 + "?review=" + review.id + "&moves=yes" + source_arg + target_arg; else location.href = "/" + repository.name + "/" + changeset.parent.sha1 + ".." + changeset.child.sha1 + "?moves=yes" + source_arg + target_arg; } content.dialog({ width: 600, buttons: { Search: function () { finish(); content.dialog("close"); }, Cancel: function () { content.dialog("close"); } } }); selects.chosen({ placeholder_text: "Any", allow_single_deselect: true }); } var BLAME = null; function fetchBlame() { if (BLAME === null) { var files = []; for (var file_id in blocks) { var raw_blocks = blocks[file_id]; var fine_blocks = new Array(raw_blocks.length); for (var index = 0; index < raw_blocks.length; ++index) fine_blocks[index] = { first: raw_blocks[index][0], last: raw_blocks[index][1] }; files.push({ id: ~~file_id, blocks: fine_blocks }); } var operation = new Operation({ action: "blame lines", url: "blame", data: { repository_id: repository.id, changeset_id: changeset.id, files: files } }); var result = operation.execute(); if (result) { BLAME = result; BLAME.color_index = 0; for (var index = 0; index < BLAME.commits.length; ++index) { var commit = BLAME.commits[index]; if (commit.original) BLAME.original = commit; if (commit.current) BLAME.current = commit; } BLAME.file_by_id = {}; for (var index = 0; index < BLAME.files.length; ++index) { var file = BLAME.files[index]; BLAME.file_by_id[file.id] = file; } } } } function updateBlame(file_id) { function getColor(index) { var compentvalues = [0xff, 0x80, 0xc0, 0x40, 0xe0, 0xa0, 0x60, 0x20]; var cv1 = compentvalues[parseInt(index / 6) % 8], cv2 = parseInt(cv1 / 2), pattern; cv1 = cv1.toString(16); if (cv1.length == 1) cv1 = "0" + cv1; cv2 = cv2.toString(16); if (cv2.length == 1) cv2 = "0" + cv2; switch (index % 6) { case 
0: pattern = "hhllll"; break; case 1: pattern = "llhhll"; break; case 2: pattern = "llllhh"; break; case 3: pattern = "hhhhll"; break; case 4: pattern = "hhllhh"; break; case 5: pattern = "llhhhh"; break; } return pattern.replace(/hh/g, cv1).replace(/ll/g, cv2); } function generateTooltip() { return $(this).attr("critic-blame-tooltip"); } if (BLAME) { for (var file_index = 0; file_index < BLAME.files.length; ++file_index) { var file = BLAME.files[file_index]; if (!file_id || file.id === file_id) { for (var block_index = 0; block_index < file.blocks.length; ++block_index) { var lines = file.blocks[block_index].lines; for (var line_index = 0; line_index < lines.length; ++line_index) { var line = lines[line_index]; var commit = BLAME.commits[line.commit]; var row = $("#f" + file.id + "n" + line.offset).parent(), color_selector, tooltip_selector; function addTooltip(element, commit) { element.addClass("with-blame-tooltip"); element.attr("critic-blame-tooltip", ("<div><b><u>" + htmlify(commit.author_name) + " <" + htmlify(commit.author_email) + "></u></b>" + "<pre>" + htmlify(commit.message) + "</pre></div>")); } if (commit.original) addTooltip(row.children("td.line"), commit); else { if (!commit.color) commit.color = getColor(BLAME.color_index++); if (row.children("td.line.single").size()) row.children("td.linenr").css("background-color", "#" + commit.color); else row.children("td.middle, td.linenr.new").css("background-color", "#" + commit.color); if (!row.hasClass("inserted")) addTooltip(row.children("td.line.old"), BLAME.original); addTooltip(row.children("td.line.new"), commit); } } } } } /* This is a workaround for an issue where a tooltip isn't always removed when the mouse pointer is moved to a different element, leading to multiple tooltips on-top of each other. 
*/ var current_tooltip = null; function tooltipOpened(event, ui) { if (current_tooltip !== null) $(current_tooltip.tooltip).remove(); current_tooltip = ui; } function tooltipClosed(event, ui) { current_tooltip = null; } $(document).mouseover( function (ev) { if (current_tooltip && !$(ev.target).closest("td.with-blame-tooltip").size() && !$(ev.target).is("td.with-blame-tooltip")) $("td.with-blame-tooltip").tooltip("close"); }); /* End of workaround. */ $("td.with-blame-tooltip").tooltip({ content: generateTooltip, items: "td.with-blame-tooltip", tooltipClass: "blame-tooltip", track: true, hide: false, open: tooltipOpened, close: tooltipClosed }); } } function blame() { fetchBlame(); updateBlame(); } function registerPathHandlers() { $("table.commit-files td.path").click(function (ev) { try { if (mode == "hide") hideAll(true); else collapseAll(true); file_id = ev.currentTarget.parentNode.getAttribute("critic-file-id"); expandFile(file_id, true); } catch (e) { console.log(e.message + "\n" + e.stacktrace); } ev.preventDefault(); ev.target.blur(); }); } $(document).ready(function () { var match = /#f(\d+)([on])(\d+)/.exec(location.hash); if (match) { expandFile(parseInt(match[1])); location.hash = location.hash; } $("table.commit-files td.parent").mouseover(function (ev) { var target = $(ev.currentTarget); target.addClass("hover"); if (target.prev("td.parent").first().attr("critic-parent-index") == target.attr("critic-parent-index")) target.prev("td.parent").first().addClass("hover"); if (target.next("td.parent").first().attr("critic-parent-index") == target.attr("critic-parent-index")) target.next("td.parent").first().addClass("hover"); }); $("table.commit-files td.parent").mouseout(function (ev) { var target = $(ev.currentTarget); target.removeClass("hover"); if (target.prev("td.parent").first().attr("critic-parent-index") == target.attr("critic-parent-index")) target.prev("td.parent").first().removeClass("hover"); if 
(target.next("td.parent").first().attr("critic-parent-index") == target.attr("critic-parent-index")) target.next("td.parent").first().removeClass("hover"); }); $("table.commit-files td.parent").click(function (ev) { var target = $(ev.currentTarget); var file_id = target.parentsUntil("table").filter("tr").attr("critic-file-id"); var parent = target.attr("critic-parent-index"); if (mode == "hide") hideAll(true); else collapseAll(true); selectParent(parent); expandFile(file_id, true); ev.preventDefault(); }); }); function applyLengthLimit(lines) { lines.each( function (index, element) { var limit = element.getAttribute("critic-length-limit"); if (limit) { var match = /(\d+)-(\d+)/.exec(limit); var low_limit = parseInt(match[1]); var high_limit = parseInt(match[2]); if (element.textContent.length > low_limit) { var iterator = document.createNodeIterator(element, NodeFilter.SHOW_TEXT, null, false); var texts = [], text, seen = 0; while (text = iterator.nextNode()) texts.push(text); for (var index = 0; index < texts.length; ++index) { var text = texts[index]; var html = ""; var offset = Math.min(Math.max(0, low_limit - seen), text.length), end = Math.min(text.length, Math.max(0, high_limit - seen)); if (offset > 0) { html += htmlify(text.data.substring(0, offset)); seen += offset; } for (; offset < end; ++offset) { var redness = Math.min(100, 100 * (seen - low_limit) / (high_limit - low_limit)).toFixed(1); html += "<span style='color: rgb(" + redness + "%, 0%, 0%)'>" + htmlify(text.data.substring(offset, offset + 1)) + "</span>"; ++seen; } if (offset < text.length) html += "<span style='color: rgb(100%, 0%, 0%)'>" + htmlify(text.data.substring(offset)) + "</span>" $("<div>" + html + "</div>").contents().replaceAll($(text)); } } } }); } (function() { /*Handle resizing of the left and right diff views by dragging divider between them. 
*/ var currentTable = null; ///< cached reference to the table whose panes are being resized var currentCols = null; ///< cached reference to the col elements that are being resized var tableCoord = { left: 0, width: 0 } var HALF_DIVIDER_WIDTH = 15; ///< half of the width of the divider between diff views (somewhat arbitrary) document.addEventListener('mousedown', handleMouseDown); document.addEventListener('dblclick', handleDblClick); function handleMouseDown(e) { if (e.button != 0) return; var mid_cell = $(e.target); if (!mid_cell.is('td.middle')) return; var table = mid_cell.parents('table'); if (!table.length) return; currentTable = table; currentCols = table.find('colgroup col.line'); if (currentCols.length != 2) return; /* Store clicked element's offset relative to the table. It can change during wrapping and we want to restore previous position the element had on screen. */ var offset_before = mid_cell.offset().top; table.addClass("resized"); window.scrollBy(0, mid_cell.offset().top - offset_before); /* Calculate offsets from the sibling cells of the clicked one. WebKit is unable to get dimensions from the col elements. 
*/ var panes = mid_cell.parent().find("td.line"); tableCoord.left = $(panes[0]).offset().left; tableCoord.width = $(panes[0]).width() + $(panes[1]).width(); document.addEventListener('mousemove', handleMouseMove); document.addEventListener('mouseup', handleMouseUp); e.preventDefault(); } function handleMouseUp(e) { currentTable = currentCols = null; document.removeEventListener('mouseup', handleMouseUp); document.removeEventListener('mousemove', handleMouseMove); } function handleMouseMove(e) { if (currentCols) { var leftDiffPaneWidth = e.pageX - tableCoord.left - HALF_DIVIDER_WIDTH; leftDiffPaneWidth = Math.min(tableCoord.width, Math.max(0, leftDiffPaneWidth)); var rightDiffPaneWidth = (tableCoord.width - leftDiffPaneWidth); $(currentCols[0]).css('width', leftDiffPaneWidth + 'px'); $(currentCols[1]).css('width', rightDiffPaneWidth + 'px'); if (typeof CommentMarkers != "undefined") CommentMarkers.updateAll(); if (leftDiffPaneWidth < rightDiffPaneWidth) currentTable.removeClass("new-narrower").addClass("old-narrower"); else if (leftDiffPaneWidth > rightDiffPaneWidth) currentTable.removeClass("old-narrower").addClass("new-narrower"); else currentTable.removeClass("old-narrower new-narrower"); } } function handleDblClick(e) { var mid_cell = $(e.target); if (!mid_cell.is('td.middle')) return; var table = mid_cell.parents('table'); var cols = table.find('colgroup col.line'); if (cols.length == 2) { /* Center diff division (reset to default). 
*/ table.removeClass("resized old-narrower new-narrower"); cols.removeAttr('style'); if (typeof CommentMarkers != "undefined") CommentMarkers.updateAll(); } } })(); window.addEventListener("popstate", function (ev) { if (ev.state) restoreState(ev.state); }, false); if (typeof history.replaceState == "function") { document.addEventListener("DOMContentLoaded", function (ev) { saveState(true); }); window.addEventListener("scroll", function (ev) { queueSaveState(true); }); } ================================================ FILE: src/resources/checkbranch.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ body { font-size: 12px; font-family: sans-serif } div.main table.branchstatus, div.main table.data { table-layout: fixed } div.main table td.h1 p { padding-left: 1em } div.main table td.h1 p span.command { font-family: monospace; background: #eec; padding: 3px 6px } div.main table.branchstatus thead tr.headings th { padding-top: 0.5em; text-decoration: underline } div.main table.branchstatus thead tr.headings th.sha1 { text-align: center } div.main table.branchstatus thead tr.headings th.user, div.main table.branchstatus thead tr.headings th.summary { padding-left: 1em; text-align: left } div.main table.branchstatus thead tr.headings th.review { padding-left: 0; padding-right: 0; text-align: right } div.main table.branchstatus tbody tr.commit td.sha1 { padding-left: 0 } div.main table.branchstatus tbody tr.commit td.sha1 div { padding-top: 3px; padding-bottom: 3px } div.main table.branchstatus tbody.reviewed tr.commit td.sha1 div { background-color: lime } div.main table.branchstatus tbody.pending tr.commit td.sha1 div { background-color: red } div.main table.branchstatus tbody.note tr.commit td.sha1 div { background-color: yellow } div.main table.branchstatus tbody.unknown tr.commit td.sha1 div { background-color: red } div.main table.branchstatus tbody.unknown tr.commit.own td.summary a { color: red; font-weight: bold } div.main table.branchstatus tbody tr.commit td { padding-top: 3px; padding-bottom: 3px } div.main table.branchstatus tbody tr.commit td.sha1 { padding-top: 0; padding-bottom: 0 } div.main table.branchstatus tbody tr.empty td { padding-top: 3px; padding-bottom: 3px } div.main table.branchstatus tbody tr.commit td.sha1 { font-family: monospace; text-align: center; vertical-align: top } div.main table.branchstatus tbody tr.commit td.user { font-family: monospace; font-weight: bold; vertical-align: top } div.main table.branchstatus tbody tr.commit td.summary { font-family: monospace; } div.main table.branchstatus tbody tr.commit td.review { 
vertical-align: top; text-align: right } div.main table.branchstatus tbody tr td.edit { vertical-align: top; text-align: right } div.main table.branchstatus tbody tr.note td.note { font-family: serif; text-align: right } div.main table.branchstatus tbody tr.note td.note span.user { font-weight: bold } div.main table.branchstatus tbody:hover tr.commit, div.main table.branchstatus tbody:hover tr.note { background-color: #eec } div.main table.branchstatus tbody tr.commit:hover { background-color: #998; } div.main table.branchstatus tbody tr td.edit a.edit { color: #eec; text-decoration: none } div.main table.branchstatus tbody:hover tr td.edit a.edit { color: #cca } div.main table.branchstatus tbody tr:hover td.edit a.edit { color: #222 } div.main table.branchstatus tbody.unknown tr.commit.own td.edit a.edit { color: red; font-weight: bold } div.main table.branchstatus tbody tr td.edit a.edit:hover { text-decoration: underline } div.main table td.value input { font-family: monospace; width: 40em } div.main table td.value input[name="fetch"] { width: auto } div.main table td.value select { width: 40em } div.comment > p { margin: 5px 0 } div.comment > p > span.review-id { font-family: monospace } div.comment > p > select { width: 100% } div.comment > p > a { float: right } div.comment > div.warning { font-weight: bold; color: #f00 } div.comment > div.header { padding-top: 10px } div.comment > div.header > span.author { font-weight: bold } div.comment > div.text { font-family: monospace; font-size: 11px; background-color: #fff; border: 1px solid #bbb; margin-top: 3px; padding: 5px; white-space: pre-wrap } div.comment > div.text > textarea { font-family: monospace; font-size: 11px; background-color: #fff; border: none; width: 100%; resize: none } div.comment-dialog > div.resolution, div.comments > div.resolution { font-weight: bold; font-size: 130%; text-align: center; padding-top: 10px } div.legend { margin-top: 10px; border-top: 2px solid #A0A092; text-align: right; 
padding-top: 10px; padding-right: 1em; font-weight: bold } div.legend span { padding: 3px 2em; font-family: monospace } div.legend span.red { background-color: red } div.legend span.yellow { background-color: yellow } div.legend span.green { background-color: lime } ================================================ FILE: src/resources/checkbranch.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ function deleteNote(sha1, parentDialog) { var content = $("<div class='comment' title='Delete Note'?>Are you sure?</div>"); function finish() { $.ajax({ async: false, url: "/deletecheckbranchnote?repository=" + repository.id + "&branch=" + branch + "&upstream=" + upstream + "&sha1=" + sha1, dataType: "text", success: function (data) { if (data == "ok") finished = true; else reportError("delete note", "Server reply: <i>" + data + "</i>"); }, error: function () { reportError("delete note", "Request failed."); } }); if (finished) { content.dialog("close"); location.reload(); } } content.dialog({ modal: true, buttons: { Delete: function () { content.dialog("close"); if (finish()) { parentDialog.dialog("close"); } }, Cancel: function () { content.dialog("close"); }}}); } function editCommit(sha1, commit_id, has_note, old_review_id) { var row = $("tr.commit#" + sha1); var text = row.parent("tbody.note").find("span.text").text(); var suggestions = ""; if (old_review_id == void 0) { var operation = new Operation({ action: "suggest reviews", url: "suggestreviews", data: { repository_id: repository.id, sha1: sha1 }}); var result = operation.execute(); if (result) { suggestions = "<p><b>Suggested reviews:</b><br><select><option>(nothing selected)</option>"; for (var id in result.reviews) suggestions += "<option value=" + id + ">[r/" + id + "] " + result.reviews[id] + "</option>"; suggestions += "</select></p>"; } else return; } function rebase(review_id) { function proceed() { var operation = new Operation({ action: "rebase review", url: "rebasereview", data: { review_id: review_id, sha1: sha1, branch: branch }, wait: "Rebasing review..." }); if (operation.execute()) { confirm.dialog("close"); location.reload(); } } var confirm = $("<div class='comment' title='Confirm Review Rebase'>The review <a href='/r/" + review_id + "'>r/" + review_id + "</a> can be rebased to contain this single commit. 
If the commit is a squash of all changes in the review, this is the appropriate thing to do.</div>"); confirm.dialog({ width: 400, modal: true, buttons: { "Rebase Review": function () { proceed(); }, "Don't Rebase": function () { confirm.dialog("close"); content.dialog("close"); location.reload(); } } }); } function finish() { var new_review_id = content.find("input[type=text]").val(); var new_text = content.find("textarea").val(); if (new_review_id) if (!/^[1-9][0-9]*$/.test(new_review_id)) { alert("Invalid review ID; must be a positive integer!"); return; } else new_review_id = parseInt(new_review_id); if (!new_review_id && /^\s*$/.test(new_text)) { alert("You must enter either a review ID or a comment (or both.)"); return; } var finished = false; $.ajax({ async: false, type: "POST", url: "/addcheckbranchnote?repository=" + repository.id + "&branch=" + branch + "&upstream=" + upstream + "&sha1=" + sha1 + (new_review_id ? "&review=" + new_review_id : ""), contentType: "text/plain", data: new_text, dataType: "text", success: function (data) { if (data == "rebase") rebase(new_review_id); else if (data == "ok") finished = true; else reportError("rebase review", "Server reply: <i>" + data + "</i>"); }, error: function () { reportError("Request failed."); } }); if (finished) { content.dialog("close"); location.reload(); } } if (old_review_id === void 0) old_review_id = ""; var content = $("<div class='comment flex' title='Edit Commit Meta-Data'>" + "<p><b>Review ID:</b> <span class='review-id'>r/<input type='text' value='" + old_review_id + "'></span>" + suggestions + "<p><b>Comment:</b></p>" + "<textarea class='text flexible' rows=5>" + htmlify(text) + "</textarea>" + "</div>"); content.find("select").change(function () { content.find("input[type=text]").val(content.find("select").val()); }); content.find("a").button(); var buttons = {}; if (has_note) buttons["Delete"] = function () { deleteNote(sha1, content); } buttons["Save"] = function () { finish(); }; 
buttons["Cancel"] = function () { content.dialog("close"); }; content.dialog({ width: 600, modal: true, buttons: buttons }); } $(document).ready(function () { $("button.check").click(function (ev) { var repository = $("select[name='repository']").val(); var commit = $("input[name='commit']").val(); var fetch = $("input[name='fetch']:checked").size(); var upstream = $("input[name='upstream']").val(); location.href = "/checkbranch?repository=" + encodeURIComponent(repository) + "&commit=" + encodeURIComponent(commit) + "&fetch=" + (fetch ? "yes" : "no") + "&upstream=" + encodeURIComponent(upstream); }); $("a.button").button(); }); ================================================ FILE: src/resources/comment.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ div.comment.draft > div.header > span.time:after { content: " (draft)"; font-weight: bold; color: #f00 } span.draft { font-weight: bold; color: #f00 } div.comment > p { margin: 0 } div.comment > p.state { margin: 0; margin-bottom: 5px; padding-bottom: 5px; border-bottom: 1px solid black; font-weight: bold } div.comment > div.warning { font-weight: bold; color: #f00 } div.comment > div.header { padding-top: 10px } div.comment > div.header > span.author { font-weight: bold } div.comment > .text { font-family: monospace; font-size: 11px; background-color: #fff; border: 1px solid #bbb; margin-top: 3px; padding: 5px; white-space: pre-wrap } div.comment > textarea { width: 100%; resize: none; display: block; outline: 0 } div.comment-dialog > div.resolution, div.comments > div.resolution { font-weight: bold; font-size: 130%; text-align: center; padding-top: 10px } .comment-tooltip { max-width: 40em } .comment-tooltip div.tooltip > div.header { font-weight: bold } .comment-tooltip div.tooltip > div.text { padding: 5px; background-color: white; border: 1px solid #bbb; margin-top: 3px; white-space: pre-wrap } div.marker { position: absolute; border: 2px solid; width: 7px } div.marker.issue { background-color: #f88; border-color: #b66; z-index: 2 } div.marker.issue.addressed, div.marker.issue.closed { background-color: #8f8; border-color: #6b6; } div.marker.note { background-color: #ff8; border-color: #bb6; z-index: 1 } div.marker.new { background-color: #88f; border-color: #66b; z-index: 3 } div.marker.right { border-top-right-radius: 10px; border-bottom-right-radius: 10px; } div.marker.left { border-top-left-radius: 10px; border-bottom-left-radius: 10px; } div.comments { max-width: 80em; } div.comments.left { margin-left: 0 } div.comments.center { margin-left: auto; margin-right: auto } div.comments.right { margin-left: auto; margin-right: 0 } div.comments > div.buttons { text-align: right; margin-top: 3px; font-size: 12px } div.comment-chain { margin-top: 1em } 
div.comment-chain > table.file span.comment-chain-title { font-weight: bold } div.comment-chain tr.content table.commit-info { background-color: #fff; margin-left: auto; margin-right: auto; border: 1px solid #bbb } button.hidden { display: none } ================================================ FILE: src/resources/comment.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* -*- Mode: js; js-indent-level: 2; indent-tabs-mode: nil -*- */ var commentChainsPerFile = {}; var commentChainById = {}; var commentChains = []; function Comment(id, author, time, state, text) { this.id = id; this.author = author; this.time = time; this.state = state; this.text = text; } Comment.prototype.getLeader = function () { var leader = htmlify(this.text.substr(0, 80)); var linebreak = leader.indexOf("\n"); if (linebreak != -1) leader = leader.substr(0, linebreak); var period = leader.indexOf(". 
"); if (period != -1) leader = leader.substr(0, period + 1); if (this.text.length > 80) leader += "…"; return leader; }; function CommentLines(file, sha1, firstLine, lastLine) { this.file = file; this.sha1 = sha1; this.firstLine = firstLine; this.lastLine = lastLine; } CommentLines.prototype.getFirstLine = function (chain) { for (var linenr = this.firstLine; linenr <= this.lastLine; ++linenr) { var base_id, line; if (this.file !== null) { var file = files[this.sha1]; base_id = "f" + this.file + file.side + linenr; if (typeof file.parent == "number") base_id = "p" + file.parent + base_id; else if (window.selectedParent != null) base_id = "p" + selectedParent + base_id; } else base_id = "msg" + this.firstLine; line = document.getElementById("c" + chain.id + base_id); if (line) return line; line = document.getElementById(base_id); if (line) return line; } //console.log("first line missing: f" + this.file + files[this.sha1].side + this.firstLine); return null; }; CommentLines.prototype.getLastLine = function (chain) { for (var linenr = this.lastLine; linenr >= this.firstLine; --linenr) { var base_id, line; if (this.file !== null) { var file = files[this.sha1]; base_id = "f" + this.file + file.side + linenr; if (typeof file.parent == "number") base_id = "p" + file.parent + base_id; else if (window.selectedParent != null) base_id = "p" + selectedParent + base_id; } else base_id = "msg" + this.lastLine; line = document.getElementById("c" + chain.id + base_id); if (line) return line; line = document.getElementById(base_id); if (line) return line; } //console.log("last line missing: f" + this.file + files[this.sha1].side + this.lastLine); return null; }; function CommentChain(id, user, type, type_is_draft, state, closed_by, addressed_by, comments, lines, markers) { this.id = id; this.user = user; this.type = type; this.type_is_draft = type_is_draft; this.state = state; this.closed_by = closed_by; this.addressed_by = addressed_by; this.comments = comments; this.lines = 
lines; this.markers = markers || null; } CommentChain.extraButtons = {}; CommentChain.create = function (type_or_markers) { var chain_type = null; var markers = null; var paused = false; if (typeof type_or_markers == "string") chain_type = type_or_markers; else markers = type_or_markers; var message = ""; function abort() { markers.remove(); currentMarkers = null; } var markersLocation; var useChangeset; var useFiles; if (markers) { var m1 = /(?:p(\d+))?f(\d+)([on])(\d+)/.exec(markers.firstLine.id); if (m1) { var side = m1[3]; var parent; if (m1[1] !== undefined) { parent = parseInt(m1[1]); useChangeset = changeset[parent]; useFiles = files[parent]; } else { useChangeset = changeset; useFiles = files; } var file = parseInt(m1[2]); var sha1 = side == 'o' ? useFiles[file].old_sha1 : useFiles[file].new_sha1; var firstLine = parseInt(m1[4]); var m2 = /(?:p\d+)?f\d+[on](\d+)/.exec(markers.lastLine.id); var lastLine = parseInt(m2[1]); if (side == 'o' && markers.linesModified()) { message = "<p>" + "<b>Warning:</b> An issue raised against the old version of " + "modified lines will never be marked as addressed, and " + "will thus need to be resolved manually." + "</p>"; } else { var data = { review_id: review.id, origin: side == 'o' ? "old" : "new", parent_id: useChangeset.parent.id, child_id: useChangeset.child.id, file_id: file, offset: firstLine, count: lastLine + 1 - firstLine }; var operation = new Operation({ action: "validate commented lines", url: "validatecommentchain", data: data }); var result = operation.execute(); if (result.verdict == "modified") { var content = $("<div title='Warning!'>" + "<p>" + "One or more of the lines you are commenting are modified by a " + "<a href='/" + result.parent_sha1 + ".." + result.child_sha1 + "?review=" + review.id + "#f" + file + "o" + result.offset + "'>later commit</a> " + "in this review." 
+ "</p>" + "<p>" + "An issue raised against already modified lines " + "will never be marked as addressed, and will thus " + "need to be resolved manually." + "</p>" + "</div>"); content.dialog({ modal: true, width: 400, buttons: { "Comment Anyway": function () { content.dialog("close"); start(); }, "Cancel": function () { content.dialog("close"); abort(); }} }); paused = true; } else if (result.verdict == "transferred") { message = "<p>" + "<b>Note:</b> This file is modified by " + (result.count > 1 ? result.count + " later commits " : "a later commit ") + "in this review, without affecting the commented lines. " + "This comment will appear against each version of the file." + "</p>"; } else if (result.verdict == "invalid") { var content = $("<div title='Error!'>" + "<p>" + "<b>It is not possible to comment these lines.</b>" + "</p>" + "<p>" + "This is probably because this/these commits are not part of the review." + "</p>" + "</div>"); content.dialog({ modal: true, buttons: { "OK": function () { content.dialog("close"); }} }); abort(); return; } } markersLocation = "file"; } else { var m1 = /msg(\d+)/.exec(markers.firstLine.id); firstLine = parseInt(m1[1]); var m2 = /msg(\d+)/.exec(markers.lastLine.id); lastLine = parseInt(m2[1]); if ("child" in changeset) useChangeset = changeset; else useChangeset = changeset[0]; markersLocation = "commit"; } } var content; function finish(chain_type) { var text = content.find("textarea").val(); var data = { review_id: review.id, chain_type: chain_type, text: text }; if (markers) { if (markersLocation == "file") { data.file_context = { origin: side == 'o' ? 
"old" : "new", file_id: file, child_id: useChangeset.child.id, offset: firstLine, count: lastLine + 1 - firstLine }; if (useChangeset.parent) data.file_context.parent_id = useChangeset.parent.id; } else data.commit_context = { commit_id: useChangeset.child.id, offset: firstLine, count: lastLine + 1 - firstLine }; } var operation = new Operation({ action: "create comment", url: "createcommentchain", data: data }); var result = operation.execute(); if (result.status == "ok") { var comment = new Comment(result.comment_id, user, "now", "draft", text); if (markers) { var lines = new CommentLines(file, sha1, firstLine, lastLine); var chain = new CommentChain(result.chain_id, user, chain_type, false, "draft", null, null, [comment], lines, markers); markers.commentChain = chain; commentChains.push(chain); if (!(file in commentChainsPerFile)) commentChainsPerFile[file] = []; commentChainsPerFile[file].push(markers.commentChain); markers.setType(chain_type); } else { var chain = new CommentChain(result.chain_id, user, chain_type, false, "draft", null, null, [comment], null, null); var html = "<tr class='comment draft " + chain_type + "'><td class='author'>" + htmlify(user.displayName) + "</td><td class='title'><a href='/showcomment?chain=" + chain.id + "'>" + chain.comments[0].getLeader() + "</a></td><td class='when'>now</td></tr>"; if (chain_type == "issue") { target = $("tr#draft-issues"); if (target.length == 0) $("table.comments tr.h1").after("<tr id='draft-issues'><td class='h2' colspan='3'><h2>Draft Issues<a href='/showcomments?review=" + review.id + "&filter=draft-issues'>[display all]</h2></td></tr>" + html); else target.after(html); } else { target = $("tr#draft-notes"); if (target.length == 0) { target = $("tr#notes"); if (target.length == 0) target = $("tr.buttons"); target.before("<tr id='draft-notes'><td class='h2' colspan='3'><h2>Draft Notes<a href='/showcomments?review=" + review.id + "&filter=draft-notes'>[display all]</h2></td></tr>" + html); } else 
target.after(html); } /* Force "compatible history navigation" from now on. */ unload = function () {} } updateDraftStatus(result.draft_status); return true; } return success; } function start() { content = $("<div class='comment flex' title='Create Comment'>" + message + "<textarea class='text flexible' rows=8></textarea></div>"); var buttons; if (chain_type != null) buttons = { Save: function () { if (finish(chain_type)) { content.dialog("close"); } } }; else { buttons = { "Add issue": function () { if (finish("issue")) { markers = null; content.dialog("close"); } }, "Add note": function () { if (finish("note")) { markers = null; content.dialog("close"); } } }; function wrapDialogFunction(fn) { return function () { if (fn()) content.dialog("close"); }; } for (var title in CommentChain.extraButtons) buttons[title] = wrapDialogFunction(CommentChain.extraButtons[title]); } var data = {}; if (markers) { data.context = markersLocation; if (markersLocation == "file") { data.changeset = useChangeset.id; data.path = useFiles[file].path; data.sha1 = useFiles[file][side == "o" ? 
"old_sha1" : "new_sha1"]; data.lineIndex = firstLine - 1; } else { data.sha1 = useChangeset.child.sha1; data.lineIndex = firstLine; } data.lineCount = lastLine - firstLine + 1; } else data.context = "general"; var hook_results = hooks["create-comment"].map(function (callback) { try { return callback(data); } catch (e) { return []; } }); for (var index1 = 0; index1 < hook_results.length; ++index1) { var hook_result = hook_results[index1]; if (hook_result) for (var index2 = 0; index2 < hook_result.length; ++index2) { (function (hooked) { if (hooked.href) buttons[hooked.title] = function () { content.dialog("close"); location.href = hooked.href; }; else buttons[hooked.title] = function () { if (hooked.callback(content.find("textarea").val())) content.dialog("close"); }; })(hook_result[index2]); } } buttons["Cancel"] = function () { content.dialog("close"); }; function close() { if (markers && chain_type == null) markers.remove(); currentMarkers = null; } content.dialog({ width: 600, buttons: buttons, closeOnEscape: false, close: close }); } if (!paused) start(); }; CommentChain.prototype.getFirstLine = function () { return this.lines.getFirstLine(this); }; CommentChain.prototype.getLastLine = function () { return this.lines.getLastLine(this); }; CommentChain.prototype.removeDraftStatus = function () { if (this.state == "draft") { this.state = "open"; var comment = this.comments[this.comments.length - 1]; if (comment.state == "draft") comment.state = "current"; } }; CommentChain.currentDialog = null; CommentChain.reopening = null; CommentChain.prototype.display = function () { if (CommentChain.currentDialog) { CommentChain.currentDialog.dialog("close"); CommentChain.currentDialog = null; } var self = this; var html = "<div class='comment-dialog' title='" + (this.type == "issue" ? 
"Issue raised" : "Note") + " by " + htmlify(this.comments[0].author.displayName) + "'>"; for (var index = 0; index < this.comments.length; ++index) { var comment = this.comments[index]; html += "<div class='comment" + (comment.state == "draft" ? " draft" : "") + "'><div class='header'><span class='author'>" + htmlify(comment.author.displayName) + "</span> posted <span class='time'>" + comment.time + "</span></div><div class='text'>" + htmlify(comment.text) + "</div></div>"; } if (this.state != "draft" && this.state != "open") { var text; switch (this.state) { case "addressed": text = "Addressed by <a href='/showcommit?review=" + review.id + "&sha1=" + this.addressed_by + "'>" + this.addressed_by.substr(0, 8) + "</a>"; break; case "closed": text = "Resolved by " + htmlify(this.closed_by.displayName); break; } html += "<div class='resolution'>" + text + "</div>"; } html += "</div>"; var content = $(html); var buttons = {}; if (this.state == "draft" || comment.state == "draft") { buttons["Edit"] = function () { self.editComment(comment, content); }; buttons["Delete"] = function () { self.deleteComment(comment, content); }; } else buttons["Reply"] = function () { self.reply(content); }; if (this.state == "closed" || this.addressed_by) buttons["Reopen issue"] = function () { content.dialog("close"); self.reopen(); }; var back = this.type_is_draft ? 
"back " : ""; if (this.type == "issue") { if (this.state == "open" && !this.type_is_draft) buttons["Resolve issue"] = function () { self.resolve(content); }; if (back || user.options.ui.convertIssueToNote) buttons["Convert " + back + "to note"] = function () { self.morph(content); }; } else buttons["Convert " + back + "to issue"] = function () { self.morph(content); }; var data = {}; if (this.markers) { var m1 = /(?:p(\d+))?f(\d+)([on])(\d+)/.exec(this.markers.firstLine.id); var m2 = /(?:p\d+)?f\d+[on](\d+)/.exec(this.markers.lastLine.id); if (m1 && m2) { var side = m1[3]; if (m1[1] !== undefined) { var parent = parseInt(m1[1]); useChangeset = changeset[parent]; useFiles = files[parent]; } else { useChangeset = changeset; useFiles = files; } var file = parseInt(m1[2]); data.context = "file"; data.changeset = useChangeset.id; data.path = useFiles[file].path; data.sha1 = useFiles[file][side == "o" ? "old_sha1" : "new_sha1"]; data.lineIndex = parseInt(m1[4]) - 1; data.lineCount = parseInt(m2[1]) - data.lineIndex; } else { var m1 = /msg(\d+)/.exec(this.markers.firstLine.id); var m2 = /msg(\d+)/.exec(this.markers.lastLine.id); data.context = "commit"; data.sha1 = ("child" in changeset) ? 
changeset.child.sha1 : changeset[0].child.sha1; data.lineIndex = parseInt(m1[1]); data.lineCount = parseInt(m2[1]) - data.lineIndex + 1; } } else data.context = "general"; var hook_results = hooks["display-comment"].map(function (callback) { try { return callback(data); } catch (e) { return []; } }); for (var index1 = 0; index1 < hook_results.length; ++index1) { var hook_result = hook_results[index1]; if (hook_result) for (var index2 = 0; index2 < hook_result.length; ++index2) { (function (hooked) { if (hooked.href) buttons[hooked.title] = function () { content.dialog("close"); location.href = hooked.href; }; else buttons[hooked.title] = function () { if (hooked.callback(content.find("textarea").val())) content.dialog("close"); }; })(hook_result[index2]); } } buttons["Close"] = function () { content.dialog("close"); }; content.dialog({ width: 600, buttons: buttons, close: function () { CommentChain.currentDialog = null; }}); if (content.closest(".ui-dialog").height() > innerHeight) content.dialog("option", "height", innerHeight - 10); CommentChain.currentDialog = content; }; CommentChain.prototype.reply = function (parentDialog) { var self = this; var content = $("<div class='comment flex' title='Write Reply'>" + "<textarea class='text flexible' rows=8></textarea></div>"); function finish() { var text = content.find("textarea").val(); var data = { chain_id: self.id, text: text }; var success = false; var operation = new Operation({ action: "add reply", url: "createcomment", data: data }); var result = operation.execute(); if (result) { var comment = new Comment(result.comment_id, user, "now", "draft", text); self.comments.push(comment); var container = $("div.comment-chain#c" + self.id); if (container.length) { var buttons = container.find("div.comments").find("div.buttons"); var resolution = container.find("div.comments").find("div.resolution"); var target = resolution.size() ? 
resolution : buttons; target.before("<div class='comment draft' id='c" + self.id + "c" + comment.id + "'>" + "<div class='header'><span class='author'>" + htmlify(user.displayName) + "</span> posted <span class='time'>now</span></div>" + "<div class='text' id='c" + comment.id + "text'>" + htmlify(text) + "</div>" + "</div>"); buttons.children("button.reply").addClass("hidden").before("<button class='edit'>Edit</button><button class='delete'>Delete</button>"); buttons.children("button.edit").button().click(function () { self.editComment(comment, null); }); buttons.children("button.delete").button().click(function () { self.deleteComment(comment, null); }); CommentMarkers.updateAll(); } updateDraftStatus(result.draft_status); return true; } else return false; } content.dialog({ width: 600, buttons: { Save: function () { if (finish()) { content.dialog("close"); if (parentDialog) parentDialog.dialog("close"); } }, Cancel: function () { content.dialog("close"); }}, closeOnEscape: false, modal: true }); }; CommentChain.prototype.reopen = function (from_showcomment, from_onload) { var self = this; var content; function cancel() { content.dialog("close"); CommentChain.reopening = null; } function finish(markers) { var operation; if (markers) { var m1 = /(?:p(\d+))?f(\d+)[on](\d+)/.exec(markers.firstLine.id); var useFiles; if (m1[1] !== undefined) useFiles = files[parseInt(m1[1])]; else useFiles = files; var file = parseInt(m1[2]); var sha1 = useFiles[file].new_sha1; var firstLine = parseInt(m1[3]); var m2 = /(?:p\d+)?f\d+[on](\d+)/.exec(markers.lastLine.id); var lastLine = parseInt(m2[1]); operation = new Operation({ action: "reopen issue", url: "reopenaddressedcommentchain", data: { chain_id: self.id, commit_id: changeset.child.id, sha1: sha1, offset: firstLine, count: lastLine + 1 - firstLine }}); } else operation = new Operation({ action: "reopen issue", url: "reopenresolvedcommentchain", data: { chain_id: self.id }}); var result = operation.execute(); if (result) { if 
(result.new_state == "open") { self.state = "open"; if (markers) { self.markers.setType(self.type, self.state); self.lines.sha1 = sha1; self.lines.firstLine = firstLine; self.lines.lastLine = lastLine; self.markers.updatePosition(); } } else { showMessage("Reopen Issue", "Issue still addressed!", "The issue was successfully transferred to the selected lines, " + "but those lines were in turn modified by a later commit in the " + "review, so the issue is still marked as addressed."); } updateDraftStatus(result.draft_status); var container = $("div.comment-chain#c" + self.id); if (container.length) { container.find("div.resolution").remove(); CommentMarkers.updateAll(); } } markers.remove(); cancel(); } if (this.addressed_by) { if (from_showcomment || changeset.child.sha1 != this.addressed_by) { content = $("<div title='Reopen Issue'>Addressed issues can only be reopened from a regular diff of the commit that addressed the issue. Would you like to go there?</div>"); function goThere() { content.dialog("close"); location.href = "/showcommit?review=" + review.id + "&sha1=" + self.addressed_by + "&reopen=" + self.id; } function stayHere() { content.dialog("close"); } content.dialog({ width: 600, buttons: { "Yes, go there": goThere, "No, stay here": stayHere }, resizable: false }); } else { this.finish = finish; content = $("<div title='Reopen Issue'>Please select the lines in the new version of the file where the comment should be transferred to.</div>"); content.dialog({ width: 800, position: "top", buttons: { Cancel: cancel }, resizable: false }); CommentChain.reopening = this; } } else if (this.state == "closed") finish(null); }; CommentChain.prototype.resolve = function (dialog) { var self = this; function finish() { var operation = new Operation({ action: "resolve issue", url: "resolvecommentchain", data: { chain_id: self.id }}); var result = operation.execute(); if (result) { self.state = 'closed'; self.closed_by = user; if (self.markers) 
self.markers.setType(self.type, self.state); var container = $("#c" + self.id); if (container.length) { container.find("div.buttons").before("<div class='resolution'>Resolved by " + htmlify(user.displayName) + "</div>"); container.find("button.resolve").remove(); CommentMarkers.updateAll(); } updateDraftStatus(result.draft_status); if (dialog) dialog.dialog("close"); } } if (user.options.ui.resolveIssueWarning && user.id != this.user.id) { var content = $("<div title='Please Confirm'><p><b>You did not raise this issue.</b> Are you sure you mean to resolve it explicitly?</p><p>If you fixed the code, you should push a commit with the fixes, which often closes the issue automatically. And even if it does not, you may want to let the reviewer who raised the issue resolve it after reviewing your fix.</p></div>"); content.dialog({ width: 400, modal: true, buttons: { "Resolve issue": function () { content.dialog("close"); finish(); }, "Do nothing": function () { content.dialog("close"); }}}); } else finish(); }; CommentChain.prototype.morph = function (dialog, button) { var self = this; var new_type = this.type == 'issue' ? 'note' : 'issue'; var operation = new Operation({ action: "change comment type", url: "morphcommentchain", data: { chain_id: this.id, new_type: new_type }}); var result = operation.execute(); if (result) { self.type = new_type; if (self.markers) self.markers.setType(self.type, self.state); var title = $("#c" + self.id + " .comment-chain-title"); if (new_type == 'note') title.text(title.text().replace("Issue raised by", "Note by")); else title.text(title.text().replace("Note by", "Issue raised by")); self.type_is_draft = !self.type_is_draft; var back = self.type_is_draft ? 
"back " : ""; if (button) if (new_type == 'note') $(button).button("option", "label", "Convert " + back + "to issue"); else if (back || user.options.ui.convertIssueToNote) $(button).button("option", "label", "Convert " + back + "to note"); updateDraftStatus(result.draft_status); if (dialog) dialog.dialog("close"); } }; CommentChain.prototype.editComment = function (comment, parentDialog) { var self = this; var content = $("<div class='comment flex' title='Edit Comment'>" + "<textarea class='text flexible' rows=8></textarea></div>"); var textarea = content.find("textarea"); textarea.val(comment.text); function finish() { var new_text = textarea.val(); var operation = new Operation({ action: "update comment", url: "updatecomment", data: { comment_id: comment.id, new_text: new_text }}); var result = operation.execute(); if (result) { comment.text = new_text; $("#c" + comment.id + "text").text(new_text); updateDraftStatus(result.draft_status); return true; } else return false; } content.dialog({ width: 600, buttons: { Save: function () { if (finish()) { content.dialog("close"); if (parentDialog) parentDialog.dialog("close"); } }, Cancel: function () { content.dialog("close"); }}, closeOnEscape: false, modal: true }); }; CommentChain.prototype.deleteComment = function (comment, parentDialog) { var self = this; var content = $("<div class='dialog' title='Delete Comment'?>Are you sure?</div>"); function finish() { var operation = new Operation({ action: "delete comment", url: "deletecomment", data: { comment_id: comment.id }}); var result = operation.execute(); if (result) { self.comments.pop(); if (self.comments.length == 0) if (self.markers) { commentChains.splice(commentChains.indexOf(self), 1); commentChainsPerFile[self.lines.file].splice(commentChainsPerFile[self.lines.file].indexOf(self), 1); self.markers.remove(); } else { $("#c" + self.id).remove(); if ($("table.file").length == 0) location.href = "/showreview?id=" + review.id; } else { $("#c" + self.id + "c" + 
comment.id).remove(); var buttons = $("#c" + self.id + " div.buttons"); buttons.children("button.edit, button.delete").remove(); buttons.children("button.reply").removeClass("hidden"); } updateDraftStatus(result.draft_status); return true; } else return false; } content.dialog({ modal: true, buttons: { Delete: function () { content.dialog("close"); if (finish() && parentDialog) { parentDialog.dialog("close"); } }, Cancel: function () { content.dialog("close"); }}}); }; CommentChain.prototype.toolTip = function () { var html = "<div class='tooltip'>"; html += "<div class='header'>"; html += (this.type == "issue" ? "Issue raised" : "Note"); html += " by "; html += htmlify(this.comments[0].author.displayName); if (this.closed_by) { html += " (closed by " + this.closed_by + ")"; } else if (this.addressed_by) { html += " (addressed by " + this.addressed_by.substring(0, 7) + ")"; } html += "</div>"; html += "<div class='text sourcefont'>" + htmlify(this.comments[0].text) + "</div>"; html += "</div>"; return html; }; CommentChain.removeAll = function () { if (typeof commentChains != "undefined") { for (var index = 0; index < commentChains.length; ++index) commentChains[index].markers.remove(); commentChains = []; } }; function CommentMarkers(commentChain) { var self = this; allMarkers.push(this); if (this.commentChain = commentChain) { this.firstLine = commentChain.getFirstLine(); this.lastLine = commentChain.getLastLine(); } else this.firstLine = this.lastLine = null; this.bothMarkers = $("<div class='marker left'></div><div class='marker right'></div>"); this.leftMarker = this.bothMarkers.first(); this.rightMarker = this.bothMarkers.last(); if (commentChain) this.setType(commentChain.type, commentChain.state); else this.setType("new"); this.bothMarkers.tooltip({ content: function () { if (self.commentChain) return self.commentChain.toolTip() }, items: "div.marker", tooltipClass: "comment-tooltip", track: true, hide: false }); this.leftMarker.click(function () { if 
(self.commentChain) self.commentChain.display(); }); this.rightMarker.click(function () { if (self.commentChain) self.commentChain.display(); }); $(document.body).append(this.bothMarkers); this.updatePosition(); } CommentMarkers.prototype.setLines = function (firstLine, lastLine) { this.firstLine = firstLine; this.lastLine = lastLine; this.updatePosition(); } CommentMarkers.prototype.setType = function (type, state) { this.bothMarkers.removeClass("issue note new open addressed closed"); this.bothMarkers.addClass(type); if (type == "issue" && typeof state != "undefined") this.bothMarkers.addClass(state); } CommentMarkers.prototype.updatePosition = function () { if (this.commentChain) { this.firstLine = this.commentChain.getFirstLine(); this.lastLine = this.commentChain.getLastLine(); } if (this.firstLine) { var firstLine = $(this.firstLine); var lastLine = $(this.lastLine); if (firstLine.parents("table.file").is(".show.expanded") || firstLine.parents("table.commit-msg").size()) { this.leftMarker.css("display", "block"); this.rightMarker.css("display", "block"); var top = firstLine.offset().top - 2; var bottom = lastLine.offset().top + lastLine.height(); if (firstLine.hasClass("whole")) { var linenr = firstLine.prevAll("td.linenr.old"); this.leftMarker.offset({ top: top, left: linenr.offset().left - this.leftMarker.width() - 4 }); } else if (firstLine.hasClass("old") || firstLine.hasClass("single") && !firstLine.hasClass("commit-msg")) { var edge = firstLine.prevAll("td.edge"); this.leftMarker.offset({ top: top, left: edge.offset().left + edge.width() - this.leftMarker.width() - 4 }); } else this.leftMarker.offset({ top: top, left: firstLine.offset().left - this.leftMarker.width() - 6 }); if (firstLine.hasClass("new") || firstLine.hasClass("single")) this.rightMarker.offset({ top: top, left: firstLine.nextAll("td.edge").offset().left }); else this.rightMarker.offset({ top: top, left: firstLine.nextAll("td.middle").offset().left + 2 }); this.leftMarker.height(bottom - 
top - 1); this.rightMarker.height(bottom - top - 1); return; } } this.leftMarker.css("display", "none"); this.rightMarker.css("display", "none"); }; CommentMarkers.prototype.remove = function () { this.leftMarker.remove(); this.rightMarker.remove(); allMarkers.splice(allMarkers.indexOf(this), 1); }; CommentMarkers.prototype.linesModified = function () { var iter = $(this.firstLine).closest("tr"); var stop = $(this.lastLine).closest("tr"); do { if (!iter.hasClass("context")) return true; if (iter.is(stop)) break; iter = iter.next("tr"); } while (iter.size()); return false; }; CommentMarkers.updateAll = function () { try { for (var index = 0; index < allMarkers.length; ++index) allMarkers[index].updatePosition(); } catch (e) { } } var activeMarkers = null, anchorLine = null, currentMarkers = null, allMarkers = []; function startCommentMarking(ev) { if (ev.ctrlKey || ev.shiftKey || ev.altKey || ev.metaKey || /showcomments?$/.test(location.pathname) || ev.button != 0) return; if (ev.currentTarget.id && !activeMarkers && !currentMarkers) { if (CommentChain.reopening && CommentChain.reopening.lines.file != $(ev.currentTarget).parents("table.file").first().attr("critic-file-id")) { showMessage("Not supported", "Not supported", "Reopening an issue against lines in a different file is not supported."); return; } anchorLine = ev.currentTarget; activeMarkers = new CommentMarkers; activeMarkers.setLines(anchorLine, anchorLine); ev.preventDefault(); } } function continueCommentMarking(ev) { if (activeMarkers && ev.currentTarget.id) if (ev.currentTarget.parentNode.parentNode == activeMarkers.firstLine.parentNode.parentNode && ev.currentTarget.cellIndex == anchorLine.cellIndex) { var firstLine, lastLine; if (ev.currentTarget.parentNode.sectionRowIndex < anchorLine.parentNode.sectionRowIndex) { firstLine = ev.currentTarget; lastLine = anchorLine; } else { firstLine = anchorLine; lastLine = ev.currentTarget; } activeMarkers.setLines(firstLine, lastLine); } } /* This function is 
overridden on some pages. */ function handleMarkedLines(markers) { CommentChain.create(markers); } function endCommentMarking(ev) { if (activeMarkers) { if (CommentChain.reopening) CommentChain.reopening.finish(activeMarkers); else { currentMarkers = activeMarkers; handleMarkedLines(activeMarkers); } activeMarkers = null; ev.preventDefault(); } } function markChainsAsRead(chain_ids) { var operation = new Operation({ action: "mark comments as read", url: "markchainsasread", data: { chain_ids: chain_ids }, callback: function () {} }); operation.execute(); } $(document).ready(function () { if (typeof commentChains != "undefined") $.each(commentChains, function (index, commentChain) { try { if (commentChain.lines.file !== null) { if (!(commentChain.lines.file in commentChainsPerFile)) commentChainsPerFile[commentChain.lines.file] = []; commentChainsPerFile[commentChain.lines.file].push(commentChain); } commentChain.markers = new CommentMarkers(commentChain); } catch (e) { //console.log(e); } }); if (typeof review != "undefined") $("td.line") .mousedown(startCommentMarking) .mouseover(continueCommentMarking) .mouseup(endCommentMarking); CommentMarkers.updateAll(); }); $(window).load(function () { CommentMarkers.updateAll(); var match = /(?:\?|&)reopen=(\d+)(?:&|$)/.exec(location.search); if (match) { for (var index in commentChains) if (commentChains[index].id == match[1]) { var chain = commentChains[index]; var file_id = chain.lines.file; expandFile(file_id); var first_line = $(chain.markers.firstLine); var last_line = $(chain.markers.lastLine); scrollTo(0, first_line.offset().top - innerHeight / 2 + (last_line.offset().top + last_line.height() - first_line.offset().top) / 2); setTimeout(function () { chain.reopen(false, true); }, 10); } } }); onresize = function () { CommentMarkers.updateAll(); }; ================================================ FILE: src/resources/config.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- 
Copyright 2012 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may
 not use this file except in compliance with the License.  You may obtain
 a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 License for the specific language governing permissions and limitations
 under the License. */

/* Preferences table layout. */
div.main table.preferences { table-layout: fixed }
div.main table.preferences tr.line td { padding-top: 1em }

/* Per-setting heading; bold when the setting is customized. */
div.main table.preferences td.heading { font-family: serif; vertical-align: top }
div.main table.preferences tr.customized td.heading { font-weight: bold; }

/* Setting value cells and their input widgets. */
div.main table.preferences td.value { font-family: monospace; white-space: pre-wrap; }
div.main table.preferences td.value div.text { white-space: pre }
div.main table.preferences td.value input.setting[type=text] { font-family: monospace }
div.main table.preferences td.value select.setting { min-width: 30% }

/* "Reset" control, revealed on hover of customized rows. */
div.main table.preferences tr td.value span.reset { display: none }
div.main table.preferences tr.customized:hover td.value span.reset { display: inline; float: right }

div.main table.preferences td.value span.also-configurable-per { float: right; padding-left: 1em; font-family: sans-serif; font-style: italic; color: #444 }
div.main table.preferences td.help { font-style: italic; text-align: right; border-bottom: 1px solid #cca }

/* Extension-provided settings sections. */
div.main table.preferences td.extension { padding-top: 1.5em; border-bottom: 1px solid #cca }
div.main table.preferences td.extension h2 span.name { font-size: 150%; padding-right: 0.5em }
div.main table.preferences td.extension h2 span.author { font-size: 120%; padding-left: 0.5em }

/* Installed-commit footer. */
div.installed_sha1 { text-align: center; color: gray }
div.installed_sha1 a { text-decoration: none; color: gray }
div.installed_sha1 a:hover { text-decoration: underline; } .notification.saved pre { margin: 5px 0 3px 1em } .url-type { flex-flow: row wrap; } .url-type .label { font-weight: bold } .url-type .prefix { font-family: monospace; padding-left: 0.5rem; margin-left: auto; } ================================================ FILE: src/resources/config.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ var timer_id = null; var notifications = {}; var saves_in_progress = 0; function scheduleSaveSettings() { if (user.id === null) /* Don't (try to) save if user is anonymous. */ return; if (timer_id !== null) clearTimeout(timer_id); timer_id = setTimeout(saveSettings, 500); } function saveSettings(reset_item) { if (user.id === null) /* Don't (try to) save if user is anonymous. 
*/ return; timer_id = null; var data = { settings: [] }; var per_url = {}; function processElement(index, element) { var url = element.getAttribute("critic-url"), value; if (url) { var modified = false; if (element instanceof HTMLInputElement) if (element.type == "checkbox") { value = element.checked; modified = value !== element.hasAttribute("checked"); if (modified) if (value) element.setAttribute("checked", "checked"); else element.removeAttribute("checked"); } else { value = element.value; modified = value !== element.getAttribute("value"); if (modified) element.setAttribute("value", value); } else { value = element.value; modified = value !== element.getAttribute("critic-value"); if (modified) element.setAttribute("critic-value", value); } if (!modified) return; var items = per_url[url]; if (!items) { items = per_url[url] = {}; Object.defineProperty(items, "CRITIC-EXTENSION", { value: element.getAttribute("critic-extension") }); } items[element.getAttribute("name")] = value; if (value != JSON.parse(element.getAttribute("critic-default"))) $(element).parents("tr.line").addClass("customized"); else $(element).parents("tr.line").removeClass("customized"); } else { if (element.type == "checkbox") value = element.checked; else if (element.type == "number") value = parseInt(element.value); else value = String($(element).val()); if (value != JSON.parse(element.getAttribute("critic-current"))) { element.setAttribute("critic-current", JSON.stringify(value)); data.settings.push({ item: element.name, value: value }); $(element).parents("tr.line").addClass("customized"); } } } if (defaults) data.defaults = true; if (filter_id !== null) data.filter_id = filter_id; else if (repository_id !== null) data.repository_id = repository_id; if (reset_item) { var element = document.getElementsByName(reset_item)[0]; var default_value = JSON.parse(element.getAttribute("critic-default")); var item = { item: reset_item }; if (defaults && repository_id === null) /* Editing global 
defaults => reset the default value rather than deleting the override. */ item.value = default_value; data.settings.push(item); if (element.type == "checkbox") element.checked = default_value; else { element.value = default_value; if (element instanceof HTMLSelectElement) $(element).trigger("chosen:updated"); } $(element).parents("tr.line").removeClass("customized"); element.setAttribute("critic-current", JSON.stringify(default_value)); } else { $("td.value > input.setting, td.value > select.setting") .each(processElement); } function builtInSaved(result) { --saves_in_progress; function showSavedNotification(title, details) { var html = "<b>" + title + "</b>"; if (details) html += "<pre>" + details + "</pre>"; if (notifications[html]) notifications[html].remove(); notifications[html] = showNotification( html, { className: "saved", callback: function () { notifications[html] = null; }}); } if (result) { if (result.saved_settings.length) { var title = reset_item ? "Reset to default:" : "Saved settings:"; var details = ""; result.saved_settings.forEach( function (item) { details += htmlify(item) + "\n"; }); showSavedNotification(title, details); } } for (var url in per_url) { var items = per_url[url]; $.ajax({ async: false, url: url, type: "POST", contentType: "text/json", data: JSON.stringify(items), dataType: "text", success: function () { --saves_in_progress; showSavedNotification("Saved settings for extension " + htmlify(items["CRITIC-EXTENSION"]) + "."); }, error: function (xhr) { --saves_in_progress; reportError("save settings for extension " + items["CRITIC-EXTENSION"], "Request failed: " + xhr.responseText); } }); ++saves_in_progress; } } if (data.settings.length) { var operation = new Operation({ action: "save settings", url: "savesettings", data: data, callback: builtInSaved }); operation.execute(); ++saves_in_progress; } else builtInSaved(); } $(document).ready(function () { $("input[name='review.createViaPush']").click(function (ev) { if 
(ev.target.checked) showMessage("Important Note!", "Important Note!", "<p>Please note that when creating a review by pushing a branch whose name starts with <code>r/</code>, only the first (head) commit on the branch will be added to the review, and you will not be able to add its ancestor commits later.</p><p><strong>This feature cannot be used to create a review of multiple commits!</strong></p>"); }); $("td.h1 .repository-select") .change(function (ev) { var repository = ev.target.value; var params = {}; if (repository) params.repository = repository; if (defaults) params.defaults = "yes"; var url = "/config"; if (Object.keys(params).length) url += "?" + Object.keys(params).map(function (name) { return name + "=" + encodeURIComponent(params[name]); }).join("&"); location.assign(url); }) .chosen({ inherit_select_classes: true, allow_single_deselect: true, collapsed_width: "auto", expanded_width: "600px" }); $("td.value > select.setting") .chosen({ inherit_select_classes: true, disable_search: true, collapsed_width: "auto", expanded_width: "30em" }); $("td.value > .repository-select") .chosen({ inherit_select_classes: true, allow_single_deselect: true, collapsed_width: "auto", expanded_width: "40em" }) .addClass("setting"); $(".setting").bind("change input", scheduleSaveSettings); window.addEventListener("beforeunload", function (ev) { saveSettings(); if (saves_in_progress > 0) /* Firefox/IE looks at ev.returnValue; Chrome/Safari looks at the function's return value. Opera either doesn't fire the event at all or behaves as Chrome. */ return ev.returnValue = ("Modified settings are being saved. 
You may want " + "to wait a few seconds before leaving the page."); }); }); ================================================ FILE: src/resources/confirmmerge.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ div.main td.heading { font-family: serif; font-weight: bold; text-align: right; vertical-align: top } div.main td.value { font-family: monospace; white-space: pre-wrap; vertical-align: top } ================================================ FILE: src/resources/confirmmerge.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ overrideShowSquashedDiff = function (from_sha1) { location.href = "/confirmmerge?id=" + confirmation_id + "&tail=" + from_sha1; } $(document).ready(function () { $("button").button(); $("button.confirmAll").click(function (ev) { var tail = ""; if (typeof tail_sha1 == "string") tail = "&tail=" + tail_sha1; location.href = "/confirmmerge?id=" + confirmation_id + "&confirm=yes" + tail; }); $("button.confirmNone").click(function (ev) { location.href = "/confirmmerge?id=" + confirmation_id + "&confirm=yes&tail=" + merge_sha1; }); $("button.cancel").click(function (ev) { location.href = "/confirmmerge?id=" + confirmation_id + "&cancel=yes"; }); if (confirmed) { var content = $("<div title='Merge Confirmed'><p>Please repeat the 'git push' command that failed and redirected you here. It will now allow this merge commit, and the additional commits it contributes listed on this page, to be added to the review.</p></div>"); content.dialog({ width: 600, height: 225, modal: true, buttons: { OK: function () { content.dialog("close"); }}}); } }); ================================================ FILE: src/resources/createreview.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/

body { font-size: 12px; font-family: sans-serif }

input.submit { font-size: 20px; background-color: lime }

/* Basic review information table. */
div.main table.basic tr.line td { padding-top: 0.5em }
div.main table.basic td.heading { font-family: serif; font-weight: bold; text-align: right; width: 20% }
div.main table.basic tr.line.description td.heading,
div.main table.basic tr.line.recipients td.heading { vertical-align: top; }
div.main table.basic td.value { font-family: monospace; }
div.main table.basic td.value input,
div.main table.basic td.value textarea { font: 10pt monospace; width: 90%; }

#branch_name {
  /* subtract width of "r/" prefix */
  width: calc(90% - 1.34em);
}

div.main table.basic td.value span.mode { font-style: italic }
div.main table.basic td.value div.buttons { float: right }
div.main table.basic td.status { text-align: right; width: 20%; font-weight: bold }
div.main table.basic td.help { font-style: italic; border-bottom: 1px solid #cca; padding-left: 30% }

/* Filters table: reviewers and watchers assignment preview. */
div.main table.filters tr.applyfilters td.value { padding-top: 0.5em; text-align: right }
div.main table.filters tr.applyfilters td.legend { padding-top: 0.5em; font-weight: bold }
div.main table.filters tr.reviewers td { padding-top: 0.5em }
div.main table.filters tr.reviewers td.reviewers { font-weight: bold; text-align: right; width: 30% }
div.main table.filters tr.reviewers td.files { font-family: monospace; width: 70% }
div.main table.filters tr.reviewers td.files span.file { white-space: pre }
div.main table.filters tr.reviewers td.no-one { text-align: right; color: red; font-weight: bold }
div.main table.filters tr.watchers td.spacer { padding-top: 0; border-bottom: 1px solid #cca }
div.main table.filters tr.watchers td.heading { padding-top: 0.5em; text-align: right; font-weight: bold }
div.main table.filters tr.watchers td.watchers { padding-top: 0.5em; }
div.main table.filters tr.buttons td.spacer { padding-top: 0; border-bottom: 1px solid #cca }
div.main table.filters tr.buttons td.buttons { padding-top: 0.5em; text-align: right }

/* Inline comment display/editing. */
div.comment > div.text { font-family: monospace; font-size: 11px; background-color: #fff; border: 1px solid #bbb; margin-top: 3px; padding: 5px; white-space: pre-wrap }
div.comment > div.text > textarea { font-family: monospace; font-size: 11px; background-color: #fff; border: none; width: 100%; resize: none }

/* Recipient-list dialog. */
div#recipients table { width: 100% }
div#recipients .key { font-weight: bold }
div#recipients select { width: 100% }
div#recipients input { font-family: monospace; width: 100% }

p.remotehost { margin-top: 0; margin-bottom: 3px }
p.remotepath { margin-top: 3px; margin-bottom: 0 }

code.inset > a { color: #222 }
code.inset > a:hover { color: blue; text-decoration: underline }

/* Remote-branch fetch widgets. */
select.repository-select, input.remote, input.workbranch, input.upstreamcommit { width: 40em }
input.remote, input.workbranch, input.upstreamcommit { font-family: monospace }

================================================
FILE: src/resources/createreview.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

 Copyright 2012 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may
 not use this file except in compliance with the License.  You may obtain
 a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 License for the specific language governing permissions and limitations
 under the License.
*/ /* -*- Mode: js; js-indent-level: 2; indent-tabs-mode: nil -*- */ var reviewfilters = {}; var recipients_mode = "opt-out", recipients_included = {}, recipients_excluded = {}; function splitReviewFilters(reviewfilters) { var result = []; for (var key in reviewfilters) { var data = JSON.parse(key); data.type = reviewfilters[key]; result.push(data); } return result; } function submitReview() { var branch_name = document.getElementById("branch_name"); var summary = document.getElementById("summary"); var description = document.getElementById("description").value.trim(); if (invalid_branch_name && branch_name.value == invalid_branch_name) { alert("You need to edit the branch name, lazy!"); branch_name.focus(); return; } if (branch_name.value.length <= 4) { alert("A branch name that short is not a good review identifier. Please elaborate a little bit."); branch_name.focus(); return; } if (summary.value.length <= 8) { alert("A summary that short is not very meaningful. Please elaborate a little bit."); summary.focus(); return; } var data = { repository_id: repository.id, commit_ids: review_data.commit_ids, branch: "r/" + branch_name.value, summary: summary.value.trim(), reviewfilters: splitReviewFilters(reviewfilters), recipientfilters: { mode: recipients_mode, included: Object.keys(recipients_included), excluded: Object.keys(recipients_excluded) }, applyfilters: $("input.applyfilters:checked").size() != 0, applyparentfilters: $("input.applyparentfilters:checked").size() != 0 }; if (description) data.description = description; if (typeof fromBranch == "string") data.frombranch = fromBranch; if (typeof trackedbranch == "object") data.trackedbranch = trackedbranch; var operation = new Operation({ action: "create review", url: "submitreview", data: data }); var result = operation.execute(); if (result) { if (result.extensions_output) showMessage("Review Created", "Extension Output", "<pre>" + htmlify(result.extensions_output) + "</pre>", function () { location.href = 
"/r/" + result.review_id; }); else location.href = "/r/" + result.review_id; } } function updateReviewersAndWatchers(new_reviewfilters) { var success = false; if (!new_reviewfilters) new_reviewfilters = reviewfilters; var data = { repository_id: repository.id, commit_ids: review_data.commit_ids, reviewfilters: splitReviewFilters(new_reviewfilters), applyfilters: $("input.applyfilters:checked").size() != 0, applyparentfilters: $("input.applyparentfilters:checked").size() != 0 }; var operation = new Operation({ action: "update filters", url: "reviewersandwatchers", data: data }); var result = operation.execute(); if (result) { $("table.filters").replaceWith(result.html); $("table.filters").find("button").button(); connectApplyFilters(); reviewfilters = new_reviewfilters; return true; } else return false; } function updateFilters(filter_type) { function addFilters(names, path) { new_reviewfilters = {}; for (var key in reviewfilters) new_reviewfilters[key] = reviewfilters[key]; names.forEach( function (name) { var key = JSON.stringify({ username: name, path: path }); new_reviewfilters[key] = filter_type; }); return updateReviewersAndWatchers(new_reviewfilters); } addReviewFiltersDialog({ filter_type: filter_type, callback: addFilters, reload_page: false }); } function addReviewer() { updateFilters("reviewer"); } function addWatcher() { updateFilters("watcher"); } function editRecipientList() { var recipient_list_dialog = $("<div id='recipients' title='Edit Recipient List'>" + "<p>The recipient list determines the list of users that receive " + "e-mails about various updates to the review. The recipient " + "list is constructed from the list of users associated with the " + "review (reviewers and watchers) either in an opt-in or opt-out " + "fashion. The default is opt-out, meaning all associated users " + "receive e-mails unless they specifically ask not to. 
By " + "choosing opt-in mode, the review owner can restrict the list " + "of recipients.</p>" + "<p>Note: the review owner (you) is always included in the " + "recipient list.</p>" + "<table>" + "<tr><td class=key>Mode:</td><td class=value>" + "<select id='mode'>" + "<option value='opt-out'>Opt-out (all users not specified below receive e-mails)</option>" + "<option value='opt-in'>Opt-in (only users specified below receive e-mails)</option>" + "</select>" + "</td></tr>" + "<tr><td class=key>Users:</td><td class=value>" + "<input id='users'>" + "</td></tr>" + "</table>" + "</div>"); if (recipients_mode == "opt-out") names = Object.keys(recipients_excluded); else names = Object.keys(recipients_included); recipient_list_dialog.find("#mode").val(recipients_mode); recipient_list_dialog.find("#users").val(names.join(", ")); function save() { recipients_mode = recipient_list_dialog.find("#mode").val(); recipients_included = {}; recipients_excluded = {}; var users = recipient_list_dialog.find("#users").val().split(/[\s,]+/g); for (var index = 0; index < users.length; ++index) { var name = users[index]; if (name) if (recipients_mode == "opt-in") recipients_included[name] = true; else recipients_excluded[name] = true; } var mode; if (recipients_mode == "opt-in") if (Object.keys(recipients_included).length != 0) { mode = "No-one except "; users = Object.keys(recipients_included); } else mode = "No-one at all"; else if (Object.keys(recipients_excluded).length != 0) { mode = "Everyone except "; users = Object.keys(recipients_excluded); } else mode = "Everyone"; $("span.mode").text(mode); if (users) $("span.users").text(users.join(", ")); recipient_list_dialog.dialog("close"); } function cancel() { recipient_list_dialog.dialog("close"); } function handleKeypress(ev) { if (ev.keyCode == 13) save(); } recipient_list_dialog.find("#users").keypress(handleKeypress); recipient_list_dialog.dialog({ width: 620, modal: true, buttons: { Save: save, Cancel: cancel }}); function 
enableAutoCompletion(result) { recipient_list_dialog.find("#users").autocomplete( { source: AutoCompleteUsers(result.users) }); } var operation = new Operation({ action: "get auto-complete data", url: "getautocompletedata", data: { values: ["users"] }, callback: enableAutoCompletion }); operation.execute(); } function connectApplyFilters() { $("tr.applyfilters").click(function (ev) { if (ev.target.nodeName.toLowerCase() != "input") { var checkbox = $(ev.currentTarget).find("input"); checkbox.get(0).checked = !checkbox.get(0).checked; updateReviewersAndWatchers(); } }); $("tr.applyfilters input").click(function (ev) { updateReviewersAndWatchers(); }); } $(document).ready(function () { connectApplyFilters(); $(".repository-select") .change( function () { var name = $(this).val(); if (default_remotes[name]) $("input.remote").val(default_remotes[name]); if (default_branches[name]) $("input.upstreamcommit").val(default_branches[name] ? "refs/heads/" + default_branches[name] : ""); }) .chosen({ inherit_select_classes: true }); function getCurrentRemote() { var remote = $("input.remote").val(); if (!remote) return undefined; return remote.trim(); } var input_workbranch = $("input.workbranch"); input_workbranch.autocomplete({ source: AutoCompleteRef(getCurrentRemote, "refs/heads/"), html: true }); input_workbranch.keypress( function (ev) { if (ev.keyCode == 13) $("button.fetchbranch").click(); }); var input_upstreamcommit = $("input.upstreamcommit"); input_upstreamcommit.autocomplete({ source: AutoCompleteRef(), html: true }); $("button.fetchbranch").click( function () { var branch = $("input.workbranch").val().trim(); var upstream = $("input.upstreamcommit").val().trim(); if (!branch) { showMessage("Invalid input!", "Invalid input!", "Please provide a non-empty branch name."); return; } if (!upstream) { showMessage("Invalid input!", "Invalid input!", "Please provide a non-empty upstream commit reference."); return; } function finish(result) { if (result) location.href = 
("/createreview" + "?repository=" + encodeURIComponent($("select.repository-select").val().trim()) + "&commits=" + encodeURIComponent(result.commit_ids) + "&remote=" + encodeURIComponent(getCurrentRemote()) + "&branch=" + encodeURIComponent(branch) + "&upstream=" + encodeURIComponent(upstream) + "&reviewbranchname=" + encodeURIComponent(branch)); } var operation = new Operation({ action: "fetch remote branch", url: "fetchremotebranch", data: { repository_name: $("select.repository-select").val().trim(), remote: getCurrentRemote(), branch: branch, upstream: upstream }, wait: "Fetching branch...", callback: finish }); operation.execute(); }); if (getCurrentRemote()) input_workbranch.focus(); else $("input.remote").focus(); }); ================================================ FILE: src/resources/createuser.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2014 the Critic contributors, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/

/* Registration form container. */
table.createuser { margin-top: 1em; padding: 1em; border: 1px solid #cca; border-radius: 2px; background-color: #fffff4 }
table.createuser tr.header td { text-align: left; padding-left: 20%; padding-bottom: 1rem; font-family: sans-serif; font-size: 120%; text-decoration: underline }

/* External-authentication link styled as a button-like box. */
table.createuser td.value a.external { font-family: monospace; background-color: white; border: 1px solid #cca; padding: 2px 6px }

table.createuser tr.button td { padding-top: 1rem }
table.createuser tr.separator1 td { border-bottom: 1px solid #cca; padding-top: 1em }
table.createuser tr.separator2 td { padding-top: 1em }

/* Error/status message row; hidden while disabled. */
table.createuser tr.status td { color: red; font-weight: bold; text-align: center; padding: 0 0 1em 1em; }
table.createuser tr.status td > .message { max-width: 40em; text-align: left }
table.createuser tr.status.disabled { display: none }

table.createuser td.key { padding-left: 2em; padding-right: 0.5em; font-weight: bold; text-align: right }
table.createuser td.value { font-family: monospace; padding-left: 0.5em; padding-right: 2em; text-align: left }
table.createuser td.value input { font-family: monospace }
table.createuser tr.button td { text-align: center }

================================================
FILE: src/resources/createuser.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

 Copyright 2014 the Critic contributors, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may
 not use this file except in compliance with the License.  You may obtain
 a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 License for the specific language governing permissions and limitations
 under the License.
*/ function createUser() { var data = { username: $("#newusername").val().trim(), fullname: $("#fullname").val().trim(), email: $("#email").val().trim() }; if (external) { data.external = external; } else { var password1 = $("#password1").val(); var password2 = $("#password2").val(); if (password1 != password2) { showMessage("Invalid input", "Password mismatch!", "The password must be input twice."); return; } data["password"] = password1; } var operation = new Operation({ action: "create user", url: "registeruser", data: data }); var result = operation.execute(); if (result) { if (result.message) { $(".status").removeClass("disabled").find(".message").html(result.message); if (result.focus) $(result.focus).select().focus(); } else if (typeof target != "undefined") { location.replace(target); } else { location.replace("/"); } } } $(function () { if ($(".status .message").text().trim()) $(".status").removeClass("disabled"); $(".create").click(createUser); }); ================================================ FILE: src/resources/dashboard.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ body { font-size: 12px; font-family: sans-serif } div.main table.reviews td.h2 a { font-size: 50%; margin-left: 1em } div.main table.reviews tr.review:hover { background: #eec } div.main table.reviews tr.review td { padding-top: 3px; padding-bottom: 3px } div.main table.reviews tr.review td.name { font-weight: bold } div.main table.reviews tr.review td.lines, div.main table.reviews tr.review td.comments { text-align: right } a.repository, a.branch { color: #222; text-decoration: none } a.branch { white-space: nowrap } ================================================ FILE: src/resources/dashboard.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ function markChainsAsRead(review_ids) { var operation = new Operation({ action: "mark comments as read", url: "markchainsasread", data: { review_ids: review_ids }, callback: function (result) { if (result) location.reload(); } }); operation.execute(); } $(document).ready(function () { $("h1[title], h2[title]").tooltip({ items: "h1[title], h2[title]" }); $("div.main").sortable({ handle: "td.h1", stop: function () { if (typeof history.replaceState == "function") { var items = []; $("div.main > table.reviews").each(function (index, element) { items.push(element.id); }); var href = location.href.replace(/(?:([?&]show=)[^&#]+|$)/, function (all, group1) { return (group1 ? 
group1 : "?show=") + items; }); history.replaceState(null, document.title, href); } } }); }); ================================================ FILE: src/resources/diff.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* The coloring of diffs (deleted => red, inserted => green, et.c.) Basic line HTML: <tr class="line [type]"> ... <td class="line old"> [old code] </td> ... <td class="line new"> [new code] </td> ... </tr> The [type] is one of the following: context => a line that wasn't changed whitespace => line with only white-space changes replaced => changed line where old and new version has little in common modified => changed line with inter-line diff inserted => line that only exists in new version ([old code] is empty) deleted => line that only exists in old version ([new code] is empty) */ /* White background color for context lines, the left side where the right side is an inserted line and the right side where the left side is a deleted line; and slightly darker white when hovered. */ tr.line.context > td.line, tr.line.inserted > td.line.old, tr.line.deleted > td.line.new { background-color: #fff } tr.line.context:hover > td.line { background-color: #eee } /* Red background color on the left side of deleted or replaced lines; and slightly darker red when hovered. 
*/ tr.line.deleted > td.line.old, tr.line.replaced > td.line.old { background-color: #fdd } tr.line.deleted:hover > td.line.old, tr.line.replaced:hover > td.line.old { background-color: #ecc } /* Green background color on the left side of inserted or replaced lines; and slightly darker green when hovered. */ tr.line.inserted > td.line.new, tr.line.replaced > td.line.new { background-color: #dfd } tr.line.inserted:hover > td.line.new, tr.line.replaced:hover > td.line.new { background-color: #cec } /* Yellowish background color on both sides of modified lines; and slightly darker yellowish when hovered. */ tr.line.modified > td.line { background-color: #ffffe6 } tr.line.modified:hover > td.line { background-color: #eec } /* White background color for white-space change lines and slightly darker white when hovered. This overrides the 'modified' style which otherwise also applies to white-space lines. */ tr.line.whitespace > td.line { background-color: #fff } tr.line.whitespace:hover > td.line { background-color: #eee } /* Blue background color on left side of conflict markers; and slightly darker blue when hovered. 
*/ tr.line.conflict > td.line.old { background-color: #8af !important } tr.line.conflict:hover > td.line.old { background-color: #79e !important } /* Inter-line diff highlight is done using i (italics) elements, with the classes: r => replaced (both sides) d => deleted (left side only) i => inserted (right side only) Additionally, to represent changes in line ending on last line: eol => marker element signaling missing line break */ /* We don't actually want italics: */ tr.line i { font-style: normal } /* Slightly darker color for replaced/edited portions of the line (and slightly darker still when hovered): */ tr.line td.old i.r, tr.line.modified i.d { background-color: #fdd } tr.line:hover td.old i.r, tr.line.modified:hover i.d { background-color: #ecc } tr.line td.new i.r, tr.line.modified i.i { background-color: #dfd } tr.line:hover td.new i.r, tr.line.modified:hover i.i { background-color: #cec } /* On line with white-space changes, render deleted white-space in red (the line will otherwise be rendered as a context line.) */ tr.line.whitespace i.d { background-color: #fdd } tr.line.whitespace:hover i.d { background-color: #ecc } /* On line with white-space changes, render inserted white-space in green (the line will otherwise be rendered as a context line.) */ tr.line.whitespace i.i { background-color: #dfd } tr.line.whitespace:hover i.i { background-color: #cec } /* This element will contain the text "[missing linebreak]" and be added last on the last line of the file if it was changed and is missing a linebreak. */ tr.line i.eol { font-weight: bold; float: right; padding-right: 1em } ================================================ FILE: src/resources/editresource.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ div.main table.paleyellow tr.select td.heading { font-family: serif; font-weight: bold; text-align: right; vertical-align: top } div.main table.paleyellow tr.select td.value { vertical-align: top } div.main table.paleyellow tr.help td.help { font-style: italic; text-align: right; border-bottom: 1px solid #cca } div.main table.paleyellow tr.value td.value { padding-top: 1em; padding-bottom: 1em } div.main table.paleyellow tr.value td.value textarea { width: 100% } ================================================ FILE: src/resources/editresource.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ function saveResource() { var source = $("textarea").first().val(); var operation = new Operation({ action: "save resource", url: "storeresource", data: { name: resource_name, source: source }, wait: "Saving changes..." 
}); if (operation.execute()) original_source = source; } function resetResource() { function proceed() { var operation = new Operation({ action: "reset resource", url: "resetresource", data: { name: resource_name }}); return operation.execute() != null; } var content = $("<div title='Confirm'><p><b>Are you sure you want to stop using your edited resource?</b></p><p>Note that you will be able to switch back to your current edited version later on, unless you save another edited version.</p></div>"); content.dialog({ modal: true, width: 600, buttons: { "Reset to built-in version": function () { if (proceed()) { content.dialog("close"); location.reload(); }}, "Keep edited version": function () { content.dialog("close"); }}}); } function restoreResource() { var operation = new Operation({ action: "restore resource", url: "restoreresource", data: { name: resource_name }}); if (operation.execute()) location.reload(); } function switchResource(name) { if (name && name != resource_name) { function switchNow() { location.replace("/editresource?name=" + name); } $("select").val(resource_name); if (resource_name && $("textarea").val() != original_source) { var content = $("<div title='Save First?'><p>You have edited this resource. 
Do you want to save it before selecting another resource?</p></div>"); content.dialog({ modal: true, width: 600, buttons: { "Save and switch": function () { if (saveResource()) { content.dialog("close"); switchNow(); } }, "Don't switch": function () { content.dialog("close"); }, "Switch without saving": function () { content.dialog("close"); switchNow(); }}}); } else switchNow(); } else $("select").val(resource_name); } $(document).ready(function () { $("tr.select td.value select").change(function (ev) { switchResource(ev.target.value); }); $("button.save").click(saveResource); $("button.reset").click(resetResource); $("button.restore").click(restoreResource); }); ================================================ FILE: src/resources/filterchanges.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ div.main table.filter { table-layout: fixed } div.main table.filter tr.header td.button { vertical-align: top } div.main table.filter tr.footer td.spacer { padding-top: 1em } div.main table.filter tr.footer td.button { border-top: 1px solid #cca } div.main table.filter tr.footer td { padding-top: 1em } div.main table.filter td.button { text-align: right } div.main table.filter tr.headings td { font-weight: bold } div.main table.filter tr.headings td { padding-top: 1em } div.main table.filter tr td.select { text-align: right; padding-right: 1em } div.main table.filter tr td.path { white-space: pre; font-family: monospace } div.main table.filter tr td.right { text-align: right } div.main table.filter tr td.help { font-style: italic; text-align: right; border-bottom: 1px solid #cca } ================================================ FILE: src/resources/filterchanges.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ function checkDirectory(line) { var level = parseInt(line.getAttribute("critic-level")); var all_checked = true; var dirline = line; $(line).nextAll("tr").each(function (index, line) { if (parseInt(line.getAttribute("critic-level")) <= level) return false; if (line.className == "file") $(line).find("input").each(function () { if (!this.checked) all_checked = false; }); return all_checked; }); $(line).find("input").each(function () { this.checked = all_checked; }); } function checkDirectories() { $("tr.directory").each(function (index, line) { checkDirectory(line); }); } $(document).ready(function () { $("tr.directory").click(function (ev) { var line = $(ev.currentTarget); var level = parseInt(line.attr("critic-level")); var checkbox = line.find("input"); var on; if (ev.target.nodeName.toLowerCase() != "input") { checkbox.each(function () { on = this.checked = !this.checked; }); ev.preventDefault(); } else checkbox.each(function () { on = this.checked; }); line.nextAll("tr").each(function (index, line) { if (parseInt(line.getAttribute("critic-level")) <= level) return false; $(line).find("input").each(function () { this.checked = on; }); }); line.prevAll("tr.directory").each(function (index, line) { var line_level = parseInt(this.getAttribute("critic-level")); if (line_level < level) { if (!ev.currentTarget.checked) $(line).find("input").each(function () { this.checked = false; }); else checkDirectory(line); level = line_level; } }); }); $("tr.file").click(function (ev) { var line = $(ev.currentTarget); var level = parseInt(line.attr("critic-level")); if (ev.target.nodeName.toLowerCase() != "input") { $(ev.currentTarget).find("input").each(function () { this.checked = !this.checked; }); ev.preventDefault(); } line.prevAll("tr.directory").each(function (index, line) { var line_level = parseInt(this.getAttribute("critic-level")); if (line_level < level) { if (!ev.currentTarget.checked) $(line).find("input").each(function () { this.checked = false; }); else 
checkDirectory(line); level = line_level; } }); }); $("button.display").click(function () { var files = []; $("tr.file").each(function (index, line) { var selected; $(line).find("input").each(function () { selected = this.checked; }); if (selected) files.push(line.getAttribute("critic-file-id")); }); if (files.length != 0) if (commitRange) location.href = "/showcommit?review=" + review.id + "&first=" + commitRange.first + "&last=" + commitRange.last + "&filter=files&file=" + files.join(","); else location.href = "/showcommit?review=" + review.id + "&filter=files&file=" + files.join(","); else alert("No files selected!"); }); }); keyboardShortcutHandlers.push(function (key) { if (key == "g".charCodeAt(0)) { $("button.display").click(); return true; } else if (key == "a".charCodeAt(0)) { $("tr.directory[critic-level=-1]").click(); return true; } }); ================================================ FILE: src/resources/home.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ body { font-size: 12px; font-family: sans-serif } div.main table td.repositories { text-align: right; border-bottom: 1px solid #cca } div.main table.basic tr.line td { padding-top: 0.5em; vertical-align: baseline } div.main table.basic td.heading { font-family: serif; font-weight: bold; text-align: right; width: 20% } div.main table.basic td.value { font-family: monospace; } div.main table.basic td.value input.value { font-family: monospace; width: 32em } div.main table.basic td.value .buttons { float: right } div.main table.basic td.value .status { padding-left: 1em; font-weight: bold } div.main table.basic td.help { font-style: italic; text-align: right; border-bottom: 1px solid #cca } div.main table.basic tr.email td.value.multiple { vertical-align: top } div.main table.basic td.value .addresses { display: inline-block } div.main table.basic td.value .addresses label { margin: 2px 0 } .addresses .address input { margin: 0 0.5rem 0 0 } .addresses .address.selected .value { font-weight: bold } .addresses .address:first-child { margin-top: 0 } .addresses .address:last-child { margin-bottom: 0 } .addresses .actions { padding-left: 1rem; margin-left: auto } .addresses .actions .action { padding-left: 0.4rem; text-decoration: none; color: black } .addresses .actions a.action:hover { text-decoration: underline } .addresses .actions .verified { color: green } .addresses .actions .verified.now { font-weight: bold } .addresses .actions .unverified { color: red } div.main table.basic td.value a.external { background-color: white; border: 1px solid #cca; padding: 2px 6px } table.filters { width: 90%; margin-top: 1rem; } table.filters tbody th { padding-top: 1em; } table.filters tbody:first-child th { padding-top: 0; } table.filters td.path, table.filters td.delegates, table.filters td.files { font-family: monospace; } table.filters td.files, table.filters td.links { text-align: right; } table.filters td.path { width: 25%; } table.filters td.delegates { width: 35%; } 
table.filters td.title { width: 20%; } table.filters td.data { width: 20%; font-family: monospace } table.filters td.files { width: 10%; } table.filters td.links { width: 25%; } table.filters td.links a { margin-left: 0.5em; font-weight: bold; } table.filters a { color: #222; text-decoration: none } table.filters a:hover { text-decoration: underline } div.password input { width: 100%; } div.filterdialog select { width: 100% } div.filterdialog input[type='text'] { font-family: monospace; width: 100% } div.filterdialog span.matchedfiles { float: right; font-style: italic } div.filterdialog span.matchedfiles.clickable:hover { text-decoration: underline } div.reapplyresult th { text-align: left } div.reapplyresult td.id { text-align: right; padding-right: 0.5em; font-family: monospace } div.reapplyresult td.summary { font-family: monospace; background-color: #fffff4; padding: 3px 1em 3px 1em; border-left: 1px dotted #cca; border-bottom: 1px dotted #cca; border-right: 1px dotted #cca } div.reapplyresult tr.first td.summary { border-top: 1px dotted #cca } div.matchedfiles select { width: 99% } div.unverified-dialog .address { font-family: monospace } div.add-email-dialog input { width: 100%; font-family: monospace; } ================================================ FILE: src/resources/home.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ function hasClass(element, cls) { return new RegExp("(^|\\s)" + cls + "($|\\s)").test(element.className) } function addClass(element, cls) { if (!hasClass(element, cls)) element.className += " " + cls; } function removeClass(element, cls) { if (hasClass(element, cls)) element.className = element.className.replace(new RegExp("(^|\\s+)" + cls + "($|(?=\\s))"), ""); } function saveFullname() { var input = $("#user_fullname"); var status = $("#status_fullname"); var value = input.val().trim(); if (value == user.displayName) status.text("Value not changed"); else if (!value) status.text("Empty name not saved"); else { var operation = new Operation({ action: "save changes", url: "setfullname", data: { user_id: user.id, value: value }}); if (operation.execute()) { status.text("Value saved"); user.displayName = value; } } } function resetFullname() { var input = $("#user_fullname"); var status = $("#status_fullname"); input.val(user.displayName); status.text(""); } function saveGitEmails() { var input = $("#user_gitemails"); var status = $("#status_gitemails"); var value = input.val().trim(); if (value == user.gitEmails) status.text("Value not changed"); else if (!value) status.text("Empty name not saved"); else { var operation = new Operation({ action: "save changes", url: "setgitemails", data: { subject_id: user.id, value: value.split(/,\s*|\s+/g) }}); if (operation.execute()) { status.text("Value saved"); user.gitEmails = value; } } } function resetGitEmails() { var input = $("#user_gitemails"); var status = $("#status_gitemails"); input.val(user.gitEmails); status.text(""); } function setPassword() { var dialog = $("<div class=password title='Set password'>" + "<p><b>New password:</b><br>" + "<input id=newpw1 type=password>" + "</p>" + "<p><b>New password, again:</b><br>" + "<input id=newpw2 type=password>" + "</p>" + "</div>"); function save() { var newpw1 = $("#newpw1").val(); var newpw2 = $("#newpw2").val(); if (newpw1 != newpw2) { showMessage("Invalid input", 
"New password mismatch!", "The new password must be input twice."); return; } var operation = new Operation({ action: "set password", url: "changepassword", data: { subject: user.id, new_pw: newpw1 }}); if (operation.execute()) { dialog.dialog("close"); showMessage("Success", "Password set!", null, function () { location.reload(); }); } } function cancel() { dialog.dialog("close"); } dialog.find("input").keypress( function (ev) { if (ev.keyCode == 13) { if ($("#newpw1").is(":focus")) $("#newpw2").focus(); else if ($("#newpw2").is(":focus")) save(); } }); dialog.dialog({ width: 400, modal: true, buttons: { "Save": save, "Cancel": cancel }}); $("newpw1").focus(); } function changePassword() { var dialog = $("<div class=password title='Change password'>" + "<p><b>Current password:</b><br>" + "<input id=currentpw type=password>" + "</p>" + "<p><b>New password:</b><br>" + "<input id=newpw1 type=password>" + "</p>" + "<p><b>New password, again:</b><br>" + "<input id=newpw2 type=password>" + "</p>" + "</div>"); function save() { var currentpw = $("#currentpw").val(); var newpw1 = $("#newpw1").val(); var newpw2 = $("#newpw2").val(); if (newpw1 != newpw2) { showMessage("Invalid input", "New password mismatch!", "The new password must be input twice."); return; } var operation = new Operation({ action: "change password", url: "changepassword", data: { subject: user.id, current_pw: currentpw, new_pw: newpw1 } }); if (operation.execute()) { dialog.dialog("close"); showMessage("Success", "Password changed!"); } } function cancel() { dialog.dialog("close"); } dialog.find("input").keypress( function (ev) { if (ev.keyCode == 13) { if ($("#currentpw").is(":focus")) $("#newpw1").focus(); else if ($("#newpw1").is(":focus")) $("#newpw2").focus(); else if ($("#newpw2").is(":focus")) save(); } }); dialog.dialog({ width: 400, modal: true, buttons: { "Save": save, "Cancel": cancel }}); $("#currentpw").focus(); } function ModificationChecker(current, input, status) { var is_modified_last = 
false; setInterval( function () { var is_modified_now = input.val() != current(); if (is_modified_last != is_modified_now) { status.text(is_modified_now ? "Modified" : ""); input.nextAll("button").prop("disabled", !is_modified_now); } }, 100); } function showUnverifiedAddressDialog(ev) { ev.preventDefault(); var context = $(this).closest(".address"); var address = context.find(".value").text(); var content = $("<div class='unverified-dialog' title='Unverified email address'>" + "<p>The address <span class='address inset'>" + htmlify(address) + "</span> needs to be verified as valid and in your control. " + "A verification email has been sent to the address already " + "and should arrive shortly.</p>" + "<p>If you suspect it has been lost in transit, you can " + "request another one.</p>" + "</div>"); function sendVerificationEmail() { var operation = new Operation({ action: "request verification email", url: "requestverificationemail", data: { email_id: context.data("email-id") } }); if (operation.execute()) content.dialog("close"); } function close() { content.dialog("close"); } content.dialog({ modal: true, width: 600, buttons: { "Send verification email": sendVerificationEmail, "Close": close } }); } function showDeleteAddressDialog(ev) { ev.preventDefault(); var context = $(this).closest(".address"); var is_current = context.is(".selected"); function deleteAddress(dialog) { var operation = new Operation({ action: "delete address", url: "deleteemailaddress", data: { email_id: context.data("email-id") } }); if (operation.execute()) { if (dialog) dialog.dialog("close"); location.reload(); } } if (is_current) { if (context.closest(".addresses").children(".address").size() > 1) { showMessage("Not allowed", "Will not delete current address", "This email address is your current address. 
Please select " + "one of the other addresses as your current address before " + "deleting it."); return; } else { var content = $("<div class='delete-current-dialog' title='Delete current address?'>" + "<p>Deleting your current email address means Critic will " + "stop sending emails to you. Are you sure you want that?</p>" + "</div>"); content.dialog({ modal: true, width: 600, buttons: { "Delete address": function () { deleteAddress(content); }, "Do nothing": function () { content.dialog("close"); } } }); } } else { deleteAddress(); } } function showSelectEmailAddressDialog(ev) { var context = $(this).closest(".address"); var is_unverified = context.find(".unverified").size() != 0; function selectAddress(dialog) { var operation = new Operation({ action: "select address", url: "selectemailaddress", data: { email_id: context.data("email-id") } }); if (operation.execute()) { context.closest(".addresses") .find(".address").not(context).removeClass("selected"); context.addClass("selected"); context.find("input").prop("checked", true); if (dialog) dialog.dialog("close"); } } if (is_unverified) { var content = $("<div class='select-unverified-dialog' title='Select unverified address?'>" + "<p>Selecting an unverified email address means Critic will stop " + "sending emails to you until the address has been verified. Are you " + "sure you want that?</p>" + "</div>"); content.dialog({ modal: true, width: 600, buttons: { "Select address": function () { selectAddress(content); }, "Do nothing": function () { $(".address.selected input").prop("checked", true); content.dialog("close"); } } }); ev.preventDefault(); } else { selectAddress(); } } function showAddEmailAddressDialog() { var content = $("<div class='add-email-dialog' title='Add primary address'>" + "<p>Add a primary email address. 
You can have several addresses registered, " + "but emails will only be sent to the one that is selected.</p>" + "</div>"); if (verifyEmailAddresses) { content.append("<p>Note that a verification email will be sent to the added " + "email address, containing a link that must be followed before " + "Critic will send any other emails to the address.</p>"); } content.append("<p><b>Email address:</b><br><input placeholder='user@domain'></p>"); function isValidAddress() { var address = content.find("input").val().trim(); return /^[^@]+@[^.]+(?:\.[^.]+)*$/.test(address); } function addAddress() { if (!isValidAddress()) { showMessage("Invalid email address", "Invalid email address", "That does not look like a valid email address. " + "Please try again."); } else { var operation = new Operation({ action: "add email address", url: "addemailaddress", data: { subject_id: user.id, email: content.find("input").val().trim() } }); if (operation.execute()) { content.dialog("close"); location.reload(); } } } content.dialog({ modal: true, width: 600, buttons: { "Add address": function () { addAddress(); }, "Do nothing": function () { content.dialog("close"); } } }); content.find("input").keypress(function (ev) { if (ev.keyCode == 13 && isValidAddress()) addAddress(); }); } $(function () { var fullname_input = $("#user_fullname"); var fullname_status = $("#status_fullname"); if (fullname_input.size() && fullname_status.size()) new ModificationChecker(function () { return user.displayName; }, fullname_input, fullname_status); $(".unverified").click(showUnverifiedAddressDialog); $(".delete").click(showDeleteAddressDialog); $(".address input").click(showSelectEmailAddressDialog); $(".addemail").click(showAddEmailAddressDialog); if (/^\?email_verified=\d+/.test(location.search)) { if (typeof history.replaceState == "function") { var new_url = "/home"; var match = /&(.+)$/.exec(location.search); if (match) new_url += "?" 
+ match[1]; history.replaceState(null, document.title, new_url); } } var gitemails_input = $("#user_gitemails"); var gitemails_status = $("#status_gitemails"); if (gitemails_input.size() && gitemails_status.size()) new ModificationChecker(function () { return user.gitEmails; }, gitemails_input, gitemails_status); }); function deleteFilterById(filter_id) { var operation = new Operation({ action: "delete filter", url: "deletefilter", data: { filter_id: filter_id }}); return !!operation.execute(); } function deleteExtensionHookFilterById(filter_id) { var operation = new Operation({ action: "delete filter", url: "deleteextensionhookfilter", data: { subject_id: user.id, filter_id: filter_id }}); return !!operation.execute(); } function editFilter(repository_name, filter_id, filter_type, filter_path, filter_data) { function getPaths(prefix, callback) { var repository_name = repository.val(); if (repository_name) { var operation = new Operation({ action: "fetch path suggestions", url: "getrepositorypaths", data: { prefix: prefix, repository_name: repository_name }, callback: function (result) { if (result) callback(result.paths, true); }}); operation.execute(); return operation; } else return null; } if (typeof no_repositories != "undefined") { /* There are no repositories. */ showMessage("Impossible!", "No repositories", ("There are no repositories in this Critic system, and it is " + "consequently impossible to create filters. 
You might want to " + "<a href=/newrepository>add a repository</a>.")); return; } var dialog = $("div.hidden > div.filterdialog").clone(); dialog.addClass("active"); if (filter_id) dialog.attr("title", "Edit Filter"); else dialog.attr("title", "Add Filter"); var repository = dialog.find("select[name='repository']"); var type = dialog.find("select[name='type']"); var path = dialog.find("input[name='path']"); var matchedfiles = dialog.find("span.matchedfiles"); var delegates = dialog.find("input[name='delegates']"); var apply = dialog.find("input[name='apply']"); var matchedfiles_repository = null; var matchedfiles_path = null; var matchedfiles_error = null; matchedfiles.click( function () { if (matchedfiles_error) showMessage("Error", "Invalid pattern!", matchedfiles_error); else if (matchedfiles_repository && matchedfiles_path) showMatchedFiles(matchedfiles_repository, matchedfiles_path); }); function updateMatchedFiles() { if (!repository.val()) return; var repository_value = repository.val(); var path_value = path.val().trim(); function update(result) { if (result) { matchedfiles.text("Matches " + result.count + " file" + (result.count == 1 ? 
"" : "s")); if (result.count != 0) { matchedfiles.addClass("clickable"); matchedfiles_repository = repository_value; matchedfiles_path = path_value; } else { matchedfiles.removeClass("clickable"); matchedfiles_repository = matchedfiles_path = null; } matchedfiles_error = null; } } function invalid(result) { matchedfiles.text("Invalid pattern!"); matchedfiles.addClass("clickable"); matchedfiles_repository = matchedfiles_path = null; matchedfiles_error = result.message; return true; } if (path_value && path_value != "/") { var operation = new Operation({ action: "count matched files", url: "countmatchedpaths", data: { single: { repository_name: repository.val(), path: path_value }}, callback: update, failure: { invalidpattern: invalid }}); operation.execute(); } else { matchedfiles.text("Matches all files."); matchedfiles.removeClass("clickable"); matchedfiles_repository = matchedfiles_path = null; matchedfiles_error = null; } } if (filter_id !== void 0) { repository.val(repository_name); type.val(filter_type); path.val(filter_path); updateFilterType().val(filter_data); updateMatchedFiles(); } else { type.val("reviewer"); path.val(""); delegates.val(""); delegates.prop("disabled", false); } function updateFilterType() { switch (type.val()) { case "reviewer": dialog.find(".regular").show(); dialog.find(".extensionhook").hide(); delegates.prop("disabled", false); return delegates; case "watcher": case "ignored": dialog.find(".regular").show(); dialog.find(".extensionhook").hide(); delegates.prop("disabled", true); return $(); case "extensionhook": var key = selectedExtensionHookKey(); dialog.find(".regular, .extensionhook").hide(); dialog.find("." + key).show(); return dialog.find("." 
+ key + " input"); } } function selectedExtensionHookKey() { var option = type.find(":selected"); return option.data("extension-id") + "_" + option.data("filterhook-name"); } function saveExtensionHookFilter(path_value) { var option = type.find(":selected"); var data = { subject_id: user.id, extension_id: option.data("extension-id"), repository_name: repository.val(), filterhook_name: option.data("filterhook-name"), path: path_value }; var data_input = dialog.find("." + selectedExtensionHookKey() + " input"); if (data_input.length) data.data = data_input.val(); if (filter_id !== void 0) data.replaced_filter_id = filter_id; var operation = new Operation({ action: "save filter", url: "addextensionhookfilter", data: data }); var result = operation.execute(); if (result) { dialog.dialog("close"); location.reload(); } } function saveFilter() { var type_value = type.val(); var path_value = path.val().trim(); var delegates_value; if (type_value == "reviewer") delegates_value = delegates.val().trim().split(/\s*,\s*|\s+/g); else delegates_value = []; if (!repository.val()) { showMessage("Invalid input", "No repository selected!", "Please select a repository.", function () { repository.focus(); }); return; } if (type_value == "extensionhook") { saveExtensionHookFilter(path_value); return; } var data = { filter_type: type_value, path: path_value, delegates: delegates_value, repository_name: repository.val() }; if (filter_id !== void 0) data.replaced_filter_id = filter_id; var operation = new Operation({ action: "save filter", url: "addfilter", data: data }); var result = operation.execute(); if (result) { var do_apply = apply.is(":checked"); dialog.dialog("close"); if (do_apply) reapplyFilters(result.filter_id, true); else location.reload(); } } function deleteFilter() { if (deleteFilterById(filter_id)) { dialog.dialog("close"); location.reload(); } } function closeDialog() { dialog.dialog("close"); } var buttons = {}; buttons["Save"] = saveFilter; if (filter_id) { 
buttons["Delete"] = deleteFilter; buttons["Close"] = closeDialog; } else buttons["Cancel"] = closeDialog; dialog.dialog({ width: 600, modal: true, buttons: buttons }); dialog.find(".repository-select").chosen({ inherit_select_classes: true }); dialog.find("select[name='type']").chosen({ disable_search: true }); if (!repository.val()) repository.focus(); else path.focus(); function handleKeypress(ev) { if (ev.keyCode == 13) saveFilter(); } path.keypress(handleKeypress); delegates.keypress(handleKeypress); path.change(updateMatchedFiles); type.change(updateFilterType); path.autocomplete({ source: AutoCompletePath(getPaths), html: true }); } function showMatchedFiles(repository_name, path) { function finished(result) { if (result) { var options = []; for (var index = 0; index < result.paths.length; ++index) { options.push("<option>" + htmlify(result.paths[index]) + "</option>"); } var dialog = $("<div class=matchedfiles><select multiple>" + options.join("") + "</select></div>"); dialog.attr("title", options.length + " file" + (options.length != 1 ? 
"s" : "") + " matched by " + path); dialog.find("select").attr("size", Math.min(20, options.length)); dialog.dialog({ width: 600, buttons: { "Close": function () { dialog.dialog("close"); }}}); } } var operation = new Operation({ action: "fetch matched paths", url: "getmatchedpaths", data: { repository_name: repository_name, path: path, user_id: user.id }, wait: "Fetching matched paths...", cancelable: true, callback: finished }); operation.execute(); } function reapplyFilters(filter_id, reload_when_finished) { function finished(result) { if (result && filter_id === void 0) { var changes, first; if (result.assigned_reviews.length == 0 && result.watched_reviews.length == 0) changes = "<tr><th colspan=2>No changes.</th></tr>"; else { changes = ""; if (result.assigned_reviews.length > 0) { changes += "<tr><th colspan=2>Reviews with new changes to review:</th></tr>"; first = " class=first"; result.assigned_reviews.forEach( function (review_id) { changes += "<tr" + first + "><td class=id><a href=/r/" + review_id + ">r/" + review_id + "</a></td><td class=summary>" + htmlify(result.summaries[review_id]) + "</td></tr>"; first = ""; }); } if (result.watched_reviews.length > 0) { changes += "<tr><th colspan=2>New watched reviews:</th></tr>"; first = " class=first"; result.watched_reviews.forEach( function (review_id) { changes += "<tr" + first + "><td><a href=/r/" + review_id + ">r/" + review_id + "</a></td><td>" + htmlify(result.summaries[review_id]) + "</td></tr>"; first = ""; }); } } var dialog = $("<div class=reapplyresult>" + "<h1>Result:</h1>" + "<table>" + changes + "</table>" + "</div>"); dialog.dialog({ width: 800, buttons: { Close: function () { dialog.dialog("close"); }}, close: function () { if (reload_when_finished) location.reload(); }}); } else if (reload_when_finished) location.reload(); } var operation = new Operation({ action: "reapply filters", url: "reapplyfilters", data: { filter_id: filter_id }, wait: "Please wait...", callback: finished }); 
operation.execute(); } function countMatchedFiles() { if (typeof count_matched_files == "undefined" || count_matched_files.length == 0) return; var item = count_matched_files.shift(); function update(result) { if (result) { result.filters.forEach( function (filter) { var link = $("#f" + filter.id); if (filter.count) link.text(filter.count + " file" + (filter.count == 1 ? "" : "s")); else link.replaceWith("no files") }); countMatchedFiles(); } } var operation = new Operation({ action: "count matched files", url: "countmatchedpaths", data: { multiple: item, user_id: user.id }, callback: update }); operation.execute(); } $(function () { countMatchedFiles(); }); ================================================ FILE: src/resources/log.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ table.log { background: #ffffe6; table-layout: fixed } div.main > table.log, tbody.content table.log { padding: 1em; margin-top: 1em; border: 1px solid #cca; border-radius: 4px } tbody.content table.log { width: 70% } div.main > table.log > thead > tr.headings > td, tbody.content table.log > thead > tr.headings > td { padding-top: 0.5em } table.log table.log { width: 98% } table.log tr.title td { padding-left: 1em } table.log tr.title td { border-bottom: 1px solid #cca } table.log tr.title td h1 { margin-top: 0; margin-bottom: 0.5em } table.log tr.headings td { padding-left: 1em; font-weight: bold; text-decoration: underline } table.log tr.headings td.when { text-align: right; padding-right: 1em } table.log tr.headings td.type { text-decoration: none } table.log tr.headings td.author { text-align: right } table.log tr.commit:hover { background: #eec } table.log tr.commit td { padding-left: 1em; padding-top: 3px; padding-bottom: 3px } table.log tr.commit td.when { text-align: right; font-weight: bold; padding-right: 1em } table.log tr.commit td.type { font-weight: bold } table.log tr.commit td.summary { font-family: monospace; font-size: 10pt; background-color: #fffff4; border-left: 1px dotted #cca; border-bottom: 1px dotted #cca; border-right: 1px dotted #cca; white-space: nowrap; overflow: hidden } table.log tr.commit:hover td.summary { background-color: #eed } table.log tr.commit.highlight td.summary { background-color: #dfd } table.log tr.commit td.summary.selected { background-color: #eed } table.log tr.commit:first-child td.summary { border-top: 1px dotted #cca } table.log tr.commit td.author { text-align: right; font-weight: bold } table.log tr.commit.selected { background: #ddb } table.log tr.commit td.summary .nocomment, table.log tr.commit td.summary .rebase { font-style: italic; text-decoration: none } table.log > thead > tr.basemerge > td, table.log > thead > tr.rebase > td { padding-top: 0.5em; padding-bottom: 0.5em; font-weight: bold } table.log > 
thead > tr.upstream > td { padding-top: 0.5em; font-weight: bold } table.log > thead > tr.error > td { padding-top: 0.5em; padding-bottom: 0.5em; font-weight: bold } table.log > tfoot > tr > td { text-align: right; border-top: 1px solid #cca; padding-top: 0.5em } table.log.collapsable.collapsed > thead.title { display: table-row-group } table.log.collapsable.collapsed > thead.title > tr.headings { display: none } table.log.collapsable.collapsed > thead, table.log.collapsable.collapsed > tbody { display: none } tr.sublog > td { padding-bottom: 0.5em } tr.sublog > td > table.log { padding-top: 0.5em; padding-bottom: 0.5em } tr.sublog > td > table.log tr > td.when { border-left: 3px solid #cca; } div.main div.submit { margin-left: 30%; margin-right: 30%; background-color: #8f8; border: 3px solid #6b6; text-align: center; margin-top: 1em; margin-bottom: 1em; border-radius: 10px; color: #141 } /* fixed: "hand" is a nonstandard IE-only cursor value; the standard value is "pointer" */ div.main div.submit:hover { background-color: #7e7; border-color: #5a5; cursor: pointer } div.main div.submit:active { background-color: #6d6; border-color: #494; cursor: pointer } div.marker { position: absolute; border: 2px solid; width: 7px; background-color: #88f; border-color: #66b } div.marker.right { border-top-right-radius: 10px; border-bottom-right-radius: 10px; } div.marker.left { border-top-left-radius: 10px; border-bottom-left-radius: 10px; } select.base option.base { font-weight: bold } table.log.relevant tr.commit.merge { display: none } table.log.relevant td h1 a { font-size: 12px; margin-left: 1em } .followup-tooltip { max-width: 600px } .followup-tooltip .header { vertical-align: top; font-style: italic; white-space: nowrap; padding-right: 0.5em } .followup-tooltip .summary { font-style: normal; font-family: monospace } ================================================ FILE: src/resources/log.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, 
Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ var anchorCommit = null, focusCommit = null, commitMarkers = null; function CommitMarkers(commits) { this.bothMarkers = $("<div class='marker left'></div><div class='marker right'></div>"); $(document.body).append(this.bothMarkers); this.leftMarker = this.bothMarkers.first(); this.rightMarker = this.bothMarkers.last(); this.firstCommit = this.lastCommit = null; this.setCommits(commits); } CommitMarkers.prototype.setCommits = function (commits) { if (this.firstCommit) this.firstCommit.parent().removeClass("first"); if (this.lastCommit) this.lastCommit.parent().removeClass("last"); $("td.summary.selected").removeClass("selected"); this.firstCommit = commits.first().children("td.summary"); this.lastCommit = commits.last().children("td.summary"); this.firstCommit.addClass("selected"); this.lastCommit.addClass("selected"); if (this.firstCommit.get(0) != this.lastCommit.get(0)) { this.lastCommit.parent().addClass("last"); this.firstCommit.parent().nextUntil("tr.commit.last").children("td.summary").addClass("selected"); this.lastCommit.parent().removeClass("last"); } this.updatePosition(); }; CommitMarkers.prototype.updatePosition = function () { var firstOffset = this.firstCommit.offset(); var top = firstOffset.top - parseInt(this.firstCommit.css("padding-top")); var bottom = this.lastCommit.offset().top + this.lastCommit.height() + parseInt(this.lastCommit.css("padding-bottom")); this.leftMarker.offset({ top: top, left: firstOffset.left - parseInt(this.leftMarker.outerWidth()) - 1 }); 
this.rightMarker.offset({ top: top, left: firstOffset.left + this.firstCommit.outerWidth() + 1 }); this.bothMarkers.height(bottom - top + 2); }; CommitMarkers.prototype.remove = function () { $("td.summary.selected").removeClass("selected"); this.leftMarker.remove(); this.rightMarker.remove(); }; function rebase(name, base, newBaseBase, oldCount, newCount, baseOldCount, baseNewCount) { function finish() { $.ajax({ async: false, url: "/rebasebranch?repository=" + repository.id + "&name=" + encodeURIComponent(name) + "&base=" + encodeURIComponent(base), dataType: "text", success: function (data) { if (data == "ok") location.replace("/log?repository=" + repository.id + "&branch=" + encodeURIComponent(name)); else reportError("update base branch", "Server reply: <i style='white-space: pre'>" + htmlify(data) + "</i>"); }, error: function () { reportError("update base branch", "Request failed."); } }); } var content = $("<div title=Please Confirm'><p>You are about to update Critic's record of the branch <b>" + htmlify(name) + "</b>. It used to contain " + oldCount + " commits and will now contain these " + newCount + " commits instead.</p>" + (typeof baseOldCount == "number" ? 
"<p>In addition, the branch <b>" + htmlify(base) + "</b> will be modified to have <b>" + htmlify(newBaseBase) + "</b> as its new base branch instead of <b>" + htmlify(name) + "</b>, and will contain " + baseNewCount + " commits instead of " + baseOldCount + " commits.</p>" : "") + "<p><b>Note:</b> The git repository will not be affected at all by this.</p><p>Are you sure you want to do this?</p></div>"); content.dialog({ width: 400, modal: true, buttons: { "Perform Rebase": finish, "Do Nothing": function () { content.dialog("close"); }}}); } function showRelevantMerges(ev) { $(ev.currentTarget).parents("table.log.relevant").find("tr.commit.merge").show(); var text = ev.currentTarget.firstChild; text.nodeValue = text.nodeValue.replace("Show", "Hide"); ev.currentTarget.onclick = hideRelevantMerges; } function hideRelevantMerges(ev) { $(ev.currentTarget).parents("table.log.relevant").find("tr.commit.merge").hide(); var text = ev.currentTarget.firstChild; text.nodeValue = text.nodeValue.replace("Hide", "Show"); ev.currentTarget.onclick = showRelevantMerges; } var overrideShowSquashedDiff = null; function resetSelection() { if (anchorCommit && commitMarkers) { commitMarkers.remove(); commitMarkers = null; } if (typeof automaticAnchorCommit == "string") anchorCommit = $("#" + automaticAnchorCommit); else anchorCommit = null; focusCommit = null; } function executeSelection(commit) { if (anchorCommit && commitMarkers) { if (commit.size() && commit.get(0).parentNode == anchorCommit.get(0).parentNode) { var re_sha1 = /[0-9a-f]{8,40}(?=\?|$)/; var to_sha1 = re_sha1.exec($("td.summary.selected > a").first().attr("href"))[0]; //.parent("tr.commit").attr("id"); var from_sha1 = re_sha1.exec($("td.summary.selected > a").last().attr("href"))[0]; //.parent("tr.commit").attr("id"); if (overrideShowSquashedDiff) overrideShowSquashedDiff(from_sha1, to_sha1); else location.href = "/showcommit?first=" + from_sha1 + "&last=" + to_sha1 + (typeof review != "undefined" && typeof review.id == 
"number" ? "&review=" + review.id : ""); } resetSelection(); return true; } return false; } $(document).ready(function () { $("tr.commit td.summary").click(function (ev) { resetSelection(); }); $("tr.commit td.summary").mousedown(function (ev) { if (ev.button != 0 || ev.ctrlKey || ev.shiftKey || ev.altKey || ev.metaKey) return; if (!executeSelection($(ev.target).parents("tr.commit"))) { anchorCommit = $(ev.currentTarget).parent("tr.commit"); focusCommit = null; ev.preventDefault(); } }); $("tr.commit td.summary").mouseover(function (ev) { if (anchorCommit) { var commit = $(ev.currentTarget).parent("tr.commit"); if (commit.size() && commit.get(0).parentNode == anchorCommit.get(0).parentNode) if (!commitMarkers) { if (commit.get(0) != anchorCommit.get(0)) { focusCommit = commit; commitMarkers = new CommitMarkers(anchorCommit.add(focusCommit)); } } else { if (commit.get(0) == anchorCommit.get(0)) { commitMarkers.remove(); commitMarkers = null; } else { focusCommit = commit; commitMarkers.setCommits(anchorCommit.add(focusCommit)); } } if (commitMarkers) commitMarkers.updatePosition(); } ev.stopPropagation(); }); $(document).mouseover(function (ev) { if (typeof automaticAnchorCommit == "string") resetSelection(); }); $(document).mouseup(function (ev) { if (!executeSelection($(ev.target).parents("tr.commit"))) resetSelection(); }); $("select.base").change(function (ev) { if (ev.target.value != "*") location.replace("/log?repository=" + repository.id + "&branch=" + encodeURIComponent(branch.name) + "&base=" + encodeURIComponent(ev.target.value)); }); $("span.squash, span.fixup").tooltip({ items: "span.fixup, span.fixup", content: function () { var element = $(this); return $("<table class='tooltip'><tr><td class=header>" + (element.hasClass("squash") ? 
"Squash into" : "Fixup of") + "</td><td class=summary>" + htmlify(element.attr("critic-ref")) + "</td></tr></table>"); }, track: true, hide: false, tooltipClass: "followup-tooltip" }); resetSelection(); var highlight = $("tr.commit.highlight td"); if (highlight.size()) window.scrollTo(pageXOffset, highlight.offset().top - (innerHeight - highlight.height()) / 2); $("table.log.collapsable > thead.title").click(function (ev) { $(ev.currentTarget.parentNode).toggleClass("collapsed"); }); }); ================================================ FILE: src/resources/login.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ table.login { margin-top: 1em; } table.login tr.separator1 td { border-bottom: 1px solid #cca; padding-top: 1em } table.login tr.separator2 td { padding-top: 1em } table.login tr.status td { color: red; font-weight: bold; text-align: center; padding-bottom: 1em } table.login tr.status.disabled { display: none } table.login td.key { padding-left: 2em; padding-right: 0.5em; font-weight: bold; text-align: right } table.login td.value { padding-left: 0.5em; padding-right: 2em } table.login tr.login td { text-align: center; padding-top: 1em } table.login tr.external td div, table.login tr.register td, table.login tr.continue td { text-align: right; font-weight: bold } table.login tr.continue a, table.login tr.continue a:visited { color: #222 } ================================================ FILE: src/resources/login.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ $(function () { var fields = $("input.field"); var submit = $("input.login"); var form = $("form"); fields.each(function (index) { $(this).keypress(function (ev) { if (ev.keyCode == 13) { if (index == fields.length - 1) submit.click(); else fields[index + 1].focus(); } }); }); submit.button(); form.submit(function (ev) { var data = { fields: {} }; fields.each(function () { data.fields[this.name] = this.value; }); var operation = new Operation({ action: "login", url: "validatelogin", data: data }); var result = operation.execute(); if (!result || result.message) { ev.preventDefault(); if (result) { $("tr.status td").text(result.message); $("tr.status").removeClass("disabled"); } } }); }); ================================================ FILE: src/resources/manageextensions.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ tr.item > td.value > span.name { border-bottom: 1px solid #cca } tr.item > td.value > span.installed { font-weight: bold; color: #0e0 } tr.item > td.value > span.details { float: right } tr.item > td.value > div.description { padding-top: 1em; padding-bottom: 1em; font-family: serif } tr.item > td.value > div.description.broken { font-weight: bold; color: red } tr.item > td.value > div.description.broken > a { text-decoration: none } tr.item > td.value > div.description.broken > a:hover { text-decoration: underline } tr.item > td.value > div.authors > b { padding-right: 1em } table.roles { margin-left: 1em } table.roles > tbody > tr > th { padding-top: 0.5em; border-bottom: 1px solid #cca; } table.roles > tbody > tr > td.description { font-family: serif } table.roles > tbody > tr > td.pattern { padding-right: 1em } table.roles > tbody > tr > td > ul { padding-left: 1.5em; margin: 0; font-family: serif } table.roles span.inactive { font-weight: bold; color: red } ================================================ FILE: src/resources/manageextensions.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ function installExtension(author_name, extension_name, version, universal) { $("button").prop("disabled", true); var data = { extension_name: extension_name, version: version, universal: Boolean(universal) }; if (author_name) data.author_name = author_name; var operation = new Operation({ action: "install extension", url: "installextension", data: data }); var result = operation.execute(); if (result) showMessage("Extension installed!", extension_name + " installed!", "The extension was installed successfully.", function () { location.reload(); }); } function uninstallExtension(author_name, extension_name, universal) { $("button").prop("disabled", true); var data = { extension_name: extension_name, universal: Boolean(universal) }; if (author_name) data.author_name = author_name; var operation = new Operation({ action: "uninstall extension", url: "uninstallextension", data: data }); var result = operation.execute(); if (result) location.reload(); } function reinstallExtension(author_name, extension_name, version, universal) { $("button").prop("disabled", true); var data = { extension_name: extension_name, version: version, universal: Boolean(universal) }; if (author_name) data.author_name = author_name; var operation = new Operation({ action: "reinstall extension", url: "reinstallextension", data: data }); var result = operation.execute(); if (result) location.reload(); } function clearExtensionStorage(author_name, extension_name) { function clear() { var data = { extension_name: extension_name }; if (author_name) data.author_name = author_name; var operation = new Operation({ action: "clear extension storage", url: "clearextensionstorage", data: data }); if (operation.execute()) { close(); location.reload(); } } function close() { dialog.dialog("close"); } var dialog = $( "<div title='Please confirm'>" + "<p>Clearing an extension's storage deletes whatever state the extension " + "has stored about your use of it since you first installed it. 
The " + "state can not be restored!</p><p><b>Are you sure?</b></p>" + "</div>"); dialog.dialog({ width: 600, modal: true, buttons: { "Clear storage": clear, "Do nothing": close } }); } $(function () { $("a.button").button(); $("select.details").change(function (ev) { var select = $(ev.currentTarget); var previous = JSON.stringify(selected_versions); var value = select.val(); var author_name = select.attr("critic-author"); var extension_name = select.attr("critic-extension"); var key; if (author_name) key = author_name + "/" + extension_name; else key = extension_name; if (!value) delete selected_version[key]; else if (value == "live") selected_versions[key] = null; else /* value = "version/*" */ selected_versions[key] = value.substring(8); var next = JSON.stringify(selected_versions); if (next != previous) { /* Restore state before we leave the page: */ selected_versions = JSON.parse(previous); location.href = "/manageextensions?select=" + encodeURIComponent(next) + "&focus=" + encodeURIComponent(key); } }); }); ================================================ FILE: src/resources/managereviewers.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ div.main table.manage { table-layout: fixed; } div.main table.manage tr.current td.select, div.main table.manage tr.reviewer td.select, div.main table.manage tr.headings td { font-weight: bold } div.main table.manage tr.current td, div.main table.manage tr.reviewer td, div.main table.manage tr.headings td { padding-top: 1em } div.main table.manage tr.reviewer span.message { font-weight: normal; font-style: italic; padding-left: 2em } div.main table.manage tr.reviewer input.reviewer { font-family: monospace } div.main table.manage tr td.select { text-align: right; padding-right: 1em } div.main table.manage tr.current td.select { vertical-align: top } div.main table.manage tr.current td.value { font-family: monospace } div.main table.manage tr td.path { white-space: pre; font-family: monospace } div.main table.manage tr td.right { text-align: right } div.main table.manage tr td.help { font-style: italic; text-align: right; border-bottom: 1px solid #cca } ================================================ FILE: src/resources/managereviewers.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ var currentReviewer = null; function selectReviewer(reset) { var reviewer = $("input.reviewer").val(); if (reviewer == currentReviewer) return; if (reviewer == "") if (reset) $("input.reviewer").val(reviewer = currentReviewer); else return; function handleNoSuchUser(result) { $("tr.reviewer span.message").text("No such user."); return true; } $("tr.reviewer span.message").text(""); var operation = new Operation({ action: "fetch assigned changes", url: "getassignedchanges", data: { review_id: review.id, user_name: reviewer }, failure: { nosuchuser: handleNoSuchUser }}); var result = operation.execute(); if (result) { var files = {}; for (var index = 0; index < result.files.length; ++index) files[result.files[index]] = true; currentReviewer = reviewer; $("tr.file").each( function () { $(this).find("input").get(0).checked = $(this).attr("critic-file-id") in files; }); checkDirectories(); $("input.reviewer").autocomplete("close"); } } function checkDirectory(line) { line = $(line); var level = parseInt(line.attr("critic-level")); var all_checked = true; var dirline = line; line.nextAll("tr").each(function (index, line) { if (parseInt(line.getAttribute("critic-level")) <= level) return false; if (line.className == "file") $(line).find("input").each(function () { if (!this.checked) all_checked = false; }); return all_checked; }); line.find("input").each(function () { this.checked = all_checked; }); } function checkDirectories() { $("tr.directory").each(function (index, line) { checkDirectory(line); }); } $(document).ready(function () { $("tr.directory").click(function (ev) { var line = $(ev.currentTarget); var level = parseInt(line.attr("critic-level")); var checkbox = line.find("input").get(0); var on; if (ev.target.nodeName.toLowerCase() != "input") { on = checkbox.checked = !checkbox.checked; ev.preventDefault(); } else on = checkbox.checked; line.nextAll("tr").each( function (index, line) { if (parseInt(line.getAttribute("critic-level")) <= level) return false; 
$(line).find("input").each(function () { this.checked = on; }); }); line.prevAll("tr.directory").each( function (index, line) { var line_level = parseInt(line.getAttribute("critic-level")); if (line_level < level) { if (!checkbox.checked) $(line).find("input").each(function () { this.checked = false; }); else checkDirectory(line); level = line_level; } }); }); $("tr.file").click(function (ev) { var line = $(ev.currentTarget); var level = parseInt(line.attr("critic-level")); var checkbox = line.find("input").get(0); var on; if (ev.target.nodeName.toLowerCase() != "input") { on = checkbox.checked = !checkbox.checked; ev.preventDefault(); } else on = checkbox.checked; line.prevAll("tr.directory").each( function (index, line) { var line_level = parseInt(this.getAttribute("critic-level")); if (line_level < level) { if (!checkbox.checked) $(line).find("input").each(function () { this.checked = false; }); else checkDirectory(line); level = line_level; } }); }); $("input.reviewer").autocomplete({ source: users }); $("input.reviewer").keypress(function (ev) { if (ev.keyCode == 13) selectReviewer(false); }); $("input.reviewer").blur(function (ev) { setTimeout(function () { selectReviewer(true); }, 100); }); $("button.save").click(function () { var files = [], reviewer = $("input.reviewer").val(); $("tr.file").each(function () { var row = $(this); if (row.find("input").get(0).checked) files.push(parseInt(row.attr("critic-file-id"))); }); var operation = new Operation({ action: "assign changes", url: "setassignedchanges", data: { review_id: review.id, user_name: reviewer, files: files }}); if (operation.execute()) $("tr.reviewer span.message").text("Assignments saved."); }); $("input.reviewer").val(user.name); selectReviewer(); $("span.reviewer").click(function (ev) { $("input.reviewer").val($(this).attr("critic-username")); selectReviewer(); }); }); ================================================ FILE: src/resources/message.css 
================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ .message { width: 50%; } .message h1.center { text-align: center; border-bottom: 0; } .message > p { margin-left: 0.5rem; margin-right: 0.5rem; font-size: 1rem; } .message > .pre { white-space: pre; font-family: monospace; } ================================================ FILE: src/resources/newrepository.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/

/* Base typography for the "new repository" page. */
body {
  font-size: 12px;
  font-family: sans-serif
}

/* Row heading cells: serif, bold, pushed toward the input column. */
div.main table.paleyellow td.heading {
  font-family: serif;
  font-weight: bold;
  text-align: right;
  width: 20%
}

/* Fixed text shown before the editable value (e.g. a path prefix). */
div.main table.paleyellow td.prefix {
  font-family: monospace;
  text-align: right;
  white-space: nowrap
}

div.main table.paleyellow td.value {
  font-family: monospace;
  padding-left: 5px
}

/* Value inputs fill their cell. */
div.main table.paleyellow td.value input {
  font-family: monospace;
  width: 100%
}

div.main table.paleyellow td.suffix {
  font-family: monospace;
  padding-left: 5px
}

/* Inline help rows: italic, right-aligned, underlined by a pale rule. */
div.main table.paleyellow tr.help td {
  font-style: italic;
  text-align: right;
  border-bottom: 1px solid #cca
}

div.main table.paleyellow tr.buttons td {
  padding-top: 1em
}

================================================
FILE: src/resources/newrepository.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

 Copyright 2012 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License.  You may obtain a copy
 of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 License for the specific language governing permissions and limitations
 under the License.
*/ $(document).ready( function () { function updateBranchDisabled() { if ($("input[name='remote']").val().trim()) $("input[name='branch']").prop("disabled", false); else $("input[name='branch']").prop("disabled", true); } $("input[name='remote']") .keyup(updateBranchDisabled) .change(updateBranchDisabled) .bind("input", updateBranchDisabled); $("button.add").click( function (ev) { var name = $("input[name='name']").val(); var path = $("input[name='path']").val(); var remote = $("input[name='remote']").val(); var branch = $("input[name='branch']").val(); var data = { name: name, path: path }; if (remote.trim()) data.mirror = { remote_url: remote, remote_branch: branch, local_branch: branch }; var operation = new Operation({ action: "add repository", url: "addrepository", data: data }); if (operation.execute()) location.href = "/repositories#" + name; }); function handleKeypress(ev) { if (ev.keyCode == 13) $("button.add").click(); } $("input[name='name']").keypress(handleKeypress); $("input[name='path']").keypress(handleKeypress); $("input[name='remote']").keypress(handleKeypress); $("input[name='branch']").keypress(handleKeypress); $("input[name='name']").focus(); }); ================================================ FILE: src/resources/news.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ table tr.item:hover { background-color: #eec; cursor: pointer } table tr.item td.date { text-align: right; width: 15%; padding-right: 1em } table tr.item td.title { font-weight: bold } table tr.item td.status { text-align: right; font-weight: bold; color: red; width: 15% } table tr.nothing td.nothing { text-align: center; font-weight: bold; padding-top: 1em } div.main table td.show { text-align: center; padding-left: 0; padding-top: 1em } div.main table td.show div { padding-top: 1em; border-top: 1px solid #cca } ================================================ FILE: src/resources/news.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ function addOrEditNewsItem(edit_item_id, edit_text) { function finish() { var text = content.find("textarea").val(); var operation; if (edit_item_id) operation = new Operation({ action: "edit news item", url: "editnewsitem", data: { item_id: edit_item_id, text: text }}); else operation = new Operation({ action: "add news item", url: "addnewsitem", data: { text: text }}); return !!operation.execute(); } var verb = edit_item_id ? 
"Edit" : "Create" var content = $("<div class='comment flex' title='" + verb + " News Item'>" + "<textarea class='text flexible' rows=8></textarea></div>"); if (edit_text) content.find("textarea").val(edit_text); var buttons = { Save: function () { if (finish()) { $(content).dialog("close"); location.reload(); } }, Cancel: function () { $(content).dialog("close"); } }; content.dialog({ width: 600, buttons: buttons, closeOnEscape: false }); } $(document).ready(function () { $("button, a.show, a.back").button(); $("button.addnewsitem").click(function () { addOrEditNewsItem(false); }); $("button.editnewsitem").click(function () { addOrEditNewsItem(news_item_id, news_text); }); $("tr.item").click(function (ev) { var target = $(ev.currentTarget); location.search = "item=" + target.attr("critic-item-id"); }); }); ================================================ FILE: src/resources/overrides.css ================================================ .ui-widget, .ui-widget input, .ui-widget select, .ui-widget textarea, .ui-widget button, .repository-select { font-family: Verdana, Helvetica, Arial, sans-serif } .chosen-container .chosen-drop { background-color: #f2ece0; } .chosen-container-single .chosen-single { border-color: #cdc3b7; } .chosen-container .chosen-results li.group-result { color: #444; border-bottom: 1px solid #444; } .chosen-container-multi .chosen-choices li.search-field input[type="text"] { -webkit-box-sizing: content-box; -moz-box-sizing: content-box; box-sizing: content-box; } .chosen-container-single .chosen-single, .chosen-container-active.chosen-with-drop .chosen-single, .chosen-container-multi .chosen-choices li.search-choice { background: #ede4d4 url(third-party/images/ui-bg_glass_70_ede4d4_1x400.png) 50% 50% repeat-x; } .chosen-container .chosen-drop, .chosen-container-active .chosen-single, .chosen-container-active.chosen-with-drop .chosen-single, .chosen-container-active .chosen-choices { border-color: #f5ad66; } 
================================================
FILE: src/resources/rebasetrackingreview.css
================================================
/* -*- mode: css; indent-tabs-mode: nil -*-

 Copyright 2012 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License.  You may obtain a copy
 of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 License for the specific language governing permissions and limitations
 under the License.

*/

/* Branch selector and text inputs span half the form width. */
select, input {
  width: 50%
}

/* Ref names and SHA-1s read best in a fixed-width font. */
input {
  font-family: monospace;
}

================================================
FILE: src/resources/rebasetrackingreview.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

 Copyright 2012 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License.  You may obtain a copy
 of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 License for the specific language governing permissions and limitations
 under the License.
*/ "use strict"; function editNewBranch(button) { var dropdown = $("select#newbranch"); var selected = dropdown.val(); dropdown.replaceWith("<input id=newbranch>"); $("input#newbranch").val(selected); $(button).remove(); } function fetchBranch() { var newbranch = "refs/heads/" + $("#newbranch").val().trim(); var upstream = $("#upstream").val().trim(); if (!newbranch) { showMessage("Invalid input!", "Invalid input!", "Please provide a non-empty branch name."); return; } if (!upstream) { showMessage("Invalid input!", "Invalid input!", "Please provide a non-empty upstream."); return; } function finish(result) { if (result) location.href = ("/rebasetrackingreview" + "?review=" + encodeURIComponent(review.id) + "&newbranch=" + encodeURIComponent(newbranch) + "&upstream=" + encodeURIComponent(upstream) + "&newhead=" + encodeURIComponent(result.head_sha1) + "&newupstream=" + encodeURIComponent(result.upstream_sha1)); } var operation = new Operation({ action: "fetch remote branch", url: "fetchremotebranch", data: { repository_name: repository.name, remote: trackedbranch.remote, branch: newbranch, upstream: upstream }, wait: "Fetching branch...", callback: finish }); operation.execute(); } function rebaseReview() { function finish(result) { if (result) location.replace("/r/" + review.id); } var data = { review_id: review.id, new_head_sha1: check.new_head_sha1, new_trackedbranch: check.new_trackedbranch }; if (check.new_upstream_sha1) data.new_upstream_sha1 = check.new_upstream_sha1; var operation = new Operation({ action: "rebase review", url: "rebasereview", data: data, wait: "Rebasing review...", callback: finish }); operation.execute(); } $(function () { $("input#newbranch").autocomplete({ source: AutoCompleteRef(trackedbranch.remote, "refs/heads/"), html: true });; $("input#upstream").autocomplete({ source: AutoCompleteRef(trackedbranch.remote), html: true });; function updateConflictsStatus(result) { if (result) { var message; if (result.has_conflicts && 
result.has_changes) message = "Has conflicts and other changes."; else if (result.has_conflicts) message = "Has conflicts."; else if (result.has_changes) message = "Has unexpected changes!"; var status_conflicts = $("#status_conflicts"); status_conflicts.text(message || "Clean."); if (message) status_conflicts.attr("href", result.url); $("button#rebasereview").removeAttr("disabled").button("refresh"); } } function updateMergeStatus(result) { if (result) { var message; if (result.has_conflicts) message = "Will need review."; var status_merge = $("#status_merge"); status_merge.text(message || "Clean."); if (message) status_merge.attr("href", "/showcommit?repository=" + repository.id + "&sha1=" + result.merge_sha1); var conflicts_status = new Operation({ action: "check conflicts status", url: "checkconflictsstatus", data: { review_id: review.id, merge_sha1: result.merge_sha1 }, callback: updateConflictsStatus }); conflicts_status.execute(); $("#status_conflicts").text("Checking..."); } } function updateHistoryRewriteStatus(result) { if (result) { var status_historyrewrite = $("#status_historyrewrite"); status_historyrewrite.text(result.valid ? "Clean." 
: "Not valid!"); if (result.valid) $("button#rebasereview").removeAttr("disabled").button("refresh"); } } if (typeof check != "undefined") { if (check.rebase_type == "history") { var historyrewrite_status = new Operation({ action: "check history rewrite status", url: "checkhistoryrewritestatus", data: { review_id: review.id, new_head_sha1: check.new_head_sha1 }, callback: updateHistoryRewriteStatus }); historyrewrite_status.execute(); $("#status_historyrewrite").text("Checking..."); } else if (check.rebase_type == "move:ff") { var merge_status = new Operation({ action: "check merge status", url: "checkmergestatus", data: { review_id: review.id, new_head_sha1: check.new_head_sha1, new_upstream_sha1: check.new_upstream_sha1 }, callback: updateMergeStatus }); merge_status.execute(); $("#status_merge").text("Checking..."); } else { var conflicts_status = new Operation({ action: "check conflicts status", url: "checkconflictsstatus", data: { review_id: review.id, new_head_sha1: check.new_head_sha1, new_upstream_sha1: check.new_upstream_sha1 }, callback: updateConflictsStatus }); conflicts_status.execute(); $("#status_conflicts").text("Checking..."); } } }); ================================================ FILE: src/resources/repositories.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/

/* Repository listing table. */
table.repositories {
  min-width: 60%;
  margin-top: 1rem;
  font-family: monospace;
}

/* Repository rows are clickable (they toggle the details row). */
table.repositories tr.repository:hover {
  background-color: #eed;
  cursor: pointer;
}

/* Per-repository details rows are hidden until toggled via .show. */
table.repositories .details { display: none }
table.repositories .details.show { display: table-row }

table.repositories .details .buttons {
  text-align: right;
  padding-top: 0.5em
}

table.repositories .details > td {
  padding: 0.25rem 0 0;
}

table.repositories .upstream {
  text-align: right;
}

/* Tracked branches table inside a repository's details row. */
table.trackedbranches { width: 100% }
table.trackedbranches .title { text-align: center; }

table.trackedbranches th {
  border-bottom: 1px solid #222;
  font-family: serif
}

table.trackedbranches tr.branch td {
  padding-top: 3px;
  padding-bottom: 3px
}

/* Branch rows are clickable too. */
table.trackedbranches tr.branch:hover {
  background-color: #eed;
  cursor: pointer
}

table.trackedbranches .localname,
table.trackedbranches .remote,
table.trackedbranches .remotename { padding-right: 1em }
table.trackedbranches .enabled { text-align: right }
table.trackedbranches .users { padding-left: 1em }
table.trackedbranches td.buttons { text-align: right }

/* Tracked-branch update-log dialog. */
div.trackedbranchlog .log {
  max-height: 500px;
  overflow: auto
}
div.trackedbranchlog .log > table { width: 100% }

div.trackedbranchlog .from,
div.trackedbranchlog .to { font-family: monospace }
div.trackedbranchlog .range > a { text-decoration: none }
div.trackedbranchlog .output { padding-bottom: 0.5em }

/* Hook output is shown verbatim in a bordered box. */
div.trackedbranchlog .output pre {
  font-family: monospace;
  white-space: pre;
  background-color: #fff;
  margin: 0;
  border: 1px solid #888;
  padding: 3px;
}

/* "Add tracked branch" dialog. */
div#addtrackedbranch table { width: 100% }
div#addtrackedbranch td { white-space: nowrap }
div#addtrackedbranch td.key { font-weight: bold }
div#addtrackedbranch td.value { font-family: monospace }

div#addtrackedbranch td.note {
  font-family: serif;
  font-style: italic;
  white-space: normal;
  padding-bottom: 0.5em
}

div#addtrackedbranch input {
  font-family: monospace;
  width: 100%
}

================================================
FILE: src/resources/repositories.js
================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* -*- mode: js; indent-tabs-mode: nil -*- */ $(function () { $("a.button").button(); $("tr.repository").click( function (ev) { $(ev.currentTarget).next("tr.details").toggleClass("show"); ev.preventDefault(); }); if (location.hash) $("tr.details." + location.hash.substring(location.hash.indexOf("#") + 1)).addClass("show"); $("tr.branch").click( function (ev) { var branch_row = $(ev.currentTarget); var branch_id = parseInt(branch_row.attr("critic-branch-id")); var user_ids = branch_row.attr("critic-user-ids").split(",").map(Number); var operation = new Operation({ action: "fetch log", url: "trackedbranchlog", data: { branch_id: branch_id } }); var result = operation.execute(); var html = "<div class='trackedbranchlog' title='Update Log'>"; if (result.items.length) { html += "<div class='log'><table><tr><th colspan=2>Update log:</th></tr>"; for (var index = 0; index < result.items.length; ++index) { var item = result.items[index]; html += "<tr><td class='when'>" + new Date(item.time * 1000) + "</td>" + "<td class='range'>"; if (item.from_sha1 !== null && item.to_sha1 !== null) { html += "<a href='" + result.repository.name + "/" + item.from_sha1 + ".." + item.to_sha1 + "'>" + item.from_sha1.substring(0, 8) + ".." 
+ item.to_sha1.substring(0, 8) + "</a>"; } else { html += "N/A"; } html += "</td></tr>"; if (item.hook_output.trim()) html += "<tr><td class='output' colspan=2><pre>" + htmlify(item.hook_output) + "</pre></td></tr>"; } html += "</table></div>"; } html += "<p>"; html += "<b>Last check:</b> " + (result.previous === null ? "Never" : new Date(result.previous * 1000)) + "<br>"; html += "<b>Next scheduled check:</b> " + (result.next === null ? "ASAP" : new Date(result.next * 1000)); html += "</p>"; html += "</div>"; var dialog = $(html); function triggerUpdate() { var operation = new Operation({ action: "trigger update", url: "triggertrackedbranchupdate", data: { branch_id: branch_id }}); var result = operation.execute(); if (result) dialog.dialog("close"); } function disable() { function finish() { confirm.dialog("close"); var operation = new Operation({ action: "disable tracking", url: "disabletrackedbranch", data: { branch_id: branch_id }}); var result = operation.execute(); if (result) { dialog.dialog("close"); branch_row.find("td.enabled").text("No"); } } var confirm = $("<div title='Disable Branch Tracking'><p>Are you sure you want to disable the tracking of this branch?</p></div>"); confirm.dialog({ buttons: { "Disable the tracking": finish, "Do nothing": function () { confirm.dialog("close"); }}}); } function enable() { var operation = new Operation({ action: "enable tracking", url: "enabletrackedbranch", data: { branch_id: branch_id }}); var result = operation.execute(); if (result) { dialog.dialog("close"); branch_row.find("td.enabled").text("Yes"); } } function deleteTrackedBranch() { function finish() { confirm.dialog("close"); var operation = new Operation({ action: "delete tracking", url: "deletetrackedbranch", data: { branch_id: branch_id }}); var result = operation.execute(); if (result) { dialog.dialog("close"); branch_row.remove(); } } var confirm = $("<div title='Delete Branch Tracking'><p>Are you sure you want to delete the tracking of this 
branch?</p></div>"); confirm.dialog({ buttons: { "Delete the tracking": finish, "Do nothing": function () { confirm.dialog("close"); }}}); } var buttons = {}; if (user_ids.indexOf(user.id) != -1 || user.administrator) { if (branch_row.find("td.enabled").text() == "Yes") { buttons["Update now"] = triggerUpdate; buttons["Disable"] = disable; } else if (branch_row.find("td.enabled").text() == "No") buttons["Enable"] = enable; buttons["Delete"] = deleteTrackedBranch; } buttons["Close"] = function () { dialog.dialog("close"); }; dialog.dialog({ width: 600, buttons: buttons }); ev.preventDefault(); }); }); function addTrackedBranch(repository_id) { var repository = repositories[repository_id]; var dialog = $("<div title='Add Tracked Branch' id=addtrackedbranch>" + "<table>" + "<tr><td class=key>Source repository:</td>" + "<td class=value><input id=sourcelocation value='" + htmlify(repository.defaultRemoteLocation) + "'></td></tr>" + "<tr><td class=key></td><td class=note>Source repository location/URL.</td></tr>" + "<tr><td class=key>Source branch name:</td><td class=value><input id=sourcename></td></tr>" + "<tr><td class=key></td><td class=note>Name of the branch in the source repository, without the leading \"refs/heads/\".</td></tr>" + "<tr><td class=key>Target repository:</td><td class=value>" + htmlify(repository.location) + "</td></tr>" + "<tr><td class=key>Target branch name:</td><td class=value><input id=targetname></td></tr>" + "<tr><td class=key></td><td class=note>Name of the branch in Critic's repository, without the leading \"refs/heads/\".</td></tr>" + "<tr><td class=key>Users:</td><td class=value><input id=users value='" + htmlify(user.name) + "'></td></tr>" + "<tr><td class=key></td><td class=note>Space or comma separated list of users to send emails to if the tracking fails.</td></tr>" + "</div>"); var sourcename = dialog.find("#sourcename"); var targetname = dialog.find("#targetname"); sourcename.change( function () { if (targetname.val() == "") 
targetname.val(sourcename.val()); }); function finish() { var source_location = dialog.find("#sourcelocation").val().trim(); var source_name = dialog.find("#sourcename").val().trim(); var target_name = dialog.find("#targetname").val().trim(); var users = dialog.find("#users").val().split(/\s*,\s*|\s+/g); var errors = []; if (source_location == "") errors.push("Empty source repository."); if (source_name == "") errors.push("Empty source branch name."); if (target_name == "") errors.push("Empty target branch name."); if (errors.length == 0) { var operation = new Operation({ action: "add tracked branch", url: "addtrackedbranch", data: { repository_id: repository_id, source_location: source_location, source_name: source_name, target_name: target_name, users: users }}); var result = operation.execute(); if (result) { dialog.dialog("close"); location.reload(); } } else alert(errors.join("\n")); } function cancel() { dialog.dialog("close"); } var buttons = { "Add Tracked Branch": finish, "Cancel": cancel }; dialog.dialog({ width: 600, buttons: buttons }); if (repository.defaultRemoteLocation) dialog.find("#sourcename").focus(); else dialog.find("#sourcelocation").focus(); } ================================================ FILE: src/resources/review.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ div#draftStatus { text-align: right; font-weight: bold; font-size: larger } div#draftStatus span.draft { color: red } div#draftStatus span.buttons { font-size: 12px } div.editowner input { font-family: monospace; margin-top: 0.5em; width: 100% } ================================================ FILE: src/resources/review.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* -*- Mode: js; js-indent-level: 2; indent-tabs-mode: nil -*- */ function updateDraftStatus(data) { if (typeof data == "string") { var match = /^ok:(?:.*,)?approved=(\d+),disapproved=(\d+),(?:approvedBinary=(\d+),disapprovedBinary=(\d+),)?comments=(\d+),reopened=(\d+),closed=(\d+),morphed=(\d+)$/.exec(data); if (!match) return; data = { reviewedNormal: parseInt(match[1]), unreviewedNormal: parseInt(match[2]), reviewedBinary: parseInt(match[3]), unreviewedBinary: parseInt(match[4]), writtenComments: parseInt(match[5]), reopenedIssues: parseInt(match[6]), resolvedIssues: parseInt(match[7]), morphedChains: parseInt(match[8]) } } var items = []; function renderCount(count, what) { return count + " " + what + (count != 1 ? 
"s" : ""); } if (data.reviewedNormal != '0') items.push("<span class='approved'>reviewed " + renderCount(data.reviewedNormal, "line") + "</span>"); if (data.unreviewedNormal != '0') items.push("<span class='disapproved'>unreviewed " + renderCount(data.unreviewedNormal, "line") + "</span>"); if (data.reviewedBinary != '0') items.push("<span class='approved'>reviewed " + renderCount(data.reviewedBinary, "binary file") + "</span>"); if (data.unreviewedBinary != '0') items.push("<span class='disapproved'>unreviewed " + renderCount(data.unreviewedBinary, "binary file") + "</span>") if (data.writtenComments != '0') items.push("<span class='comments'>wrote " + renderCount(data.writtenComments, "comment") + "</span>"); if (data.reopenedIssues != '0') items.push("<span class='reopened'>reopened " + renderCount(data.reopenedIssues, "issue") + "</span>"); if (data.resolvedIssues != '0') items.push("<span class='closed'>resolved " + renderCount(data.resolvedIssues, "issue") + "</span>"); if (data.morphedChains != '0') items.push("<span class='morphed'>morphed " + renderCount(data.morphedChains, "comment") + "</span>"); var draftStatus = $("#draftStatus"); if (items.length == 0) draftStatus.empty().nextAll("div.buttons").show(); else { draftStatus.html("<span class='draft'>Draft:</span>" + " " + items.join(", ") + " " + "<span class='buttons'>" + "<button onclick='previewChanges();'>Preview</button>" + "<button onclick='submitChanges();'>Submit</button>" + "<button onclick='cancelChanges();'>Abort</button>" + "</span>"); draftStatus.find("button").button(); draftStatus.nextAll("div.buttons").hide(); } if (typeof CommentMarkers != "undefined") CommentMarkers.updateAll(); } function markFile(status, file_id, parent_index) { var id_prefix = parent_index !== null ? 
"p" + parent_index : ""; var checkbox = document.getElementById(id_prefix + "a" + file_id); if (!checkbox) return; var changeset_ids; var reviewableFiles; if (parent_index !== null) { changeset_ids = [changeset[parent_index].id]; reviewableFiles = changeset[parent_index].reviewableFiles; } else { changeset_ids = changeset.ids; reviewableFiles = changeset.reviewableFiles; } if (reviewableFiles[file_id] == status) return; var checked = status == "reviewed"; if (checkbox.checked != checked) checkbox.checked = checked; var data = { review_id: review.id, reviewed: status == "reviewed", changeset_ids: changeset_ids, file_ids: [file_id] }; function finish(result) { if (result) { reviewableFiles[file_id] = status; updateDraftStatus(result.draft_status); } else checkbox.checked = !checked; var all_checked; if (checkbox.checked) { all_checked = true; $(checkbox).parents("table.commit-files").find("td.approve.file > input").each(function (index, element) { if (!element.checked) all_checked = false; }); } else all_checked = false; $(checkbox).parents("table.commit-files").find("td.approve.everything > input").each(function (index, element) { element.checked = all_checked; }); } var callback; if (user.options.ui.asynchronousReviewMarking) callback = finish; else callback = null; var operation = new Operation({ action: "mark files as " + status, url: "markfiles", data: data, callback: callback }); if (callback) operation.execute(); else finish(operation.execute()); } function markAllFiles(status) { var changeset_ids = changeset.ids; var file_ids = []; var reviewableFiles = changeset.reviewableFiles; for (var file_id in reviewableFiles) if (/^\d+$/.test(file_id) && reviewableFiles[file_id] != status) file_ids.push(parseInt(file_id)); if (!file_ids.length) return; var data = { review_id: review.id, reviewed: status == "reviewed", changeset_ids: changeset_ids, file_ids: file_ids }; var operation = new Operation({ action: "mark files as " + status, url: "markfiles", data: data }); 
var result = operation.execute(); if (result) { for (var index = 0; index < file_ids.length; ++index) reviewableFiles[file_ids[index]] = status; updateDraftStatus(result.draft_status); } else checkbox.checked = status == "pending"; } function previewChanges() { location.href = "/showbatch?review=" + review.id; } function submitChanges() { function start() { function finish(remark) { var success = false; var data = { review_id: review.id }; if (!/^\s*$/.test(remark)) data.remark = remark; var operation = new Operation({ action: "submit changes", url: "submitchanges", data: data, wait: "Submitting changes..." }); var result = operation.execute(); if (result) { if (result.profiling) console.log(result.profiling); return true; } else return false; } var operation = new Operation({ action: "determine review state change", url: "reviewstatechange", data: { review_id: review.id }}); var result = operation.execute(); var state_change = ""; if (result) if (result.current_state == "open" && result.new_state == "accepted") state_change = "<p class='state'>With these changes, the review will be ACCEPTED.</p>"; else if (result.current_state == "accepted" && result.new_state == "open") state_change = "<p class='state'>With these changes, the review will NO LONGER be ACCEPTED.</p>"; var content = $("<div class='comment flex' title='Submit Changes'>" + state_change + "<p class='message' style='margin: 0'>Additional note (optional):</p>" + "<textarea class='text flexible' rows=8></textarea></div>"); var buttons; buttons = { Submit: function () { if (finish(content.find("textarea").val())) { $(content).dialog("close"); location.reload(); } }, Cancel: function () { $(content).dialog("close"); } }; content.dialog({ width: 600, buttons: buttons }); } Operation.whenIdle(start); } function cancelChanges() { function finish() { var data = { review_id: review.id, what: {} }; if (0 != ((data.what["approval"] = approval.is(":checked")) + (data.what["comments"] = comments.is(":checked")) + 
(data.what["metacomments"] = metacomments.is(":checked")))) { var operation = new Operation({ action: "abort changes", url: "abortchanges", data: data }); var result = operation.execute(); if (result) { location.reload(); if (result.profiling) console.log(result.profiling); } else return false; } return true; } var content = $("<div title='Warning!'><p><b>Aborted changes will be lost permanently.</b></p><legend><input type=checkbox id=what_approval checked>Abort reviewed/unreviewed changes</legend><legend><input type=checkbox id=what_comments checked>Abort written comments</legend><legend><input type=checkbox id=what_metacomments checked>Abort reopened/morphed comments</legend></div>"); var approval = content.find("input#what_approval"); var comments = content.find("input#what_comments"); var metacomments = content.find("input#what_metacomments"); content.find("legend").click(function (ev) { if (ev.target.nodeName.toLowerCase() != "input") $(ev.currentTarget).find("input").click(); }); content.dialog({ width: 400, modal: true, buttons: { "Abort Changes": function () { if (finish()) content.dialog("close"); }, "Do Nothing": function () { content.dialog("close"); }}}); } function closeReview() { function proceed() { var operation = new Operation({ action: "close review", url: "closereview", data: { review_id: review.id }}); if (operation.execute()) location.reload(); } var is_owner = false; for (var index = 0; index < review.owners.length; ++index) { if (user.id == review.owners[index].id) { is_owner = true; break; } } if (!is_owner) { var content = $("<div title=Please Confirm'><p><b>You are not the owner of this review.</b> Are you sure you mean to close it?</p></div>"); content.dialog({ width: 400, modal: true, buttons: { "Close Review": function () { content.dialog("close"); proceed(); }, "Do Nothing": function () { content.dialog("close"); }}}); } else proceed(); } function dropReview() { function proceed() { var operation = new Operation({ action: "drop 
review", url: "dropreview", data: { review_id: review.id }}); if (operation.execute()) location.reload(); } var content = $("<div title='Please Confirm'><p>Are you sure you mean to drop the review?</p></div>"); content.dialog({ width: 400, modal: true, buttons: { "Drop Review": function () { content.dialog("close"); proceed(); }, "Do Nothing": function () { content.dialog("close"); }}}); } function reopenReview() { var operation = new Operation({ action: "reopen review", url: "reopenreview", data: { review_id: review.id }}); if (operation.execute()) location.reload(); } function pingReview() { var content = $("<div class='comment flex' title='Ping Review'>" + "<textarea class='text flexible' rows=8></textarea></div>"); function finish(type) { var operation = new Operation({ action: "ping review", url: "pingreview", data: { review_id: review.id, note: content.find("textarea").val() }}); return operation.execute() != null; } var buttons = { "Send Ping": function () { if (finish()) { $(content).dialog("close"); } }, "Cancel": function () { $(content).dialog("close"); } }; content.dialog({ width: 600, buttons: buttons, closeOnEscape: false }); } function editSummary() { function finish(type) { var operation = new Operation({ action: "update review", url: "updatereview", data: { review_id: review.id, new_summary: content.find("input").val() }}); return operation.execute() != null; } function checkFinished() { if (finish()) { $(content).dialog("close"); location.reload(); } } function handleKeypress(ev) { if (ev.keyCode == 13) checkFinished(); } var content = $("<div class='comment' title='Edit Summary'><div class='text'>Enter new summary:<br><input></div></div>"); content.find("input").val($("#summary").text()); content.find("input").keypress(handleKeypress); var buttons = { "Save": function () { checkFinished(); }, Cancel: function () { $(content).dialog("close"); } }; content.dialog({ width: 600, buttons: buttons }); } function editDescription() { function 
finish(type) { var operation = new Operation({ action: "update review", url: "updatereview", data: { review_id: review.id, new_description: content.find("textarea").val() }}); return operation.execute() != null; } var self = this; var content = $("<div class='comment flex' title='Edit Description'>" + "<textarea class='text flexible' rows=8></textarea></div>"); content.find("textarea").val($("#description").text()); var buttons = { Save: function () { if (finish()) { $(content).dialog("close"); location.reload(); } }, Cancel: function () { $(content).dialog("close"); } }; content.dialog({ width: 600, buttons: buttons, closeOnEscape: false }); } function editOwners() { function finish(type) { var operation = new Operation({ action: "update review", url: "updatereview", data: { review_id: review.id, new_owners: content.find("input").val().split(/[,\s]+/g) }}); return operation.execute() != null; } function checkFinished() { if (finish()) { $(content).dialog("close"); location.reload(); } } function handleKeypress(ev) { if (ev.keyCode == 13) checkFinished(); } var self = this; var content = $("<div class='editowner' title='Change Review Owner'><p>Please enter the user name(s) of the new review owner(s):<br><input></p></div>"); content.find("input").val(owners.map(function (user) { return user.name; }).join(", ")); content.find("input").keypress(handleKeypress); var buttons = { Save: function () { checkFinished(); }, Cancel: function () { $(content).dialog("close"); } }; content.dialog({ width: 400, modal: true, buttons: buttons }); } $(document).ready(function () { $("td.approve input").change(function (ev) { var target = $(ev.currentTarget); if (target.parents("td").hasClass("everything")) { markAllFiles(ev.currentTarget.checked ? 
"reviewed" : "pending"); target.parents("table").find("td.approve.file input").each(function (index, element) { element.checked = ev.currentTarget.checked; }); } else { var row = target.parents("tr"); var file_id = parseInt(row.attr("critic-file-id")); var parent_index = target.attr("critic-parent-index"); if (parent_index) parent_index = parseInt(parent_index); else parent_index = null; markFile(ev.currentTarget.checked ? "reviewed" : "pending", file_id, parent_index); } }); if (typeof updateCheckInterval != "undefined" && updateCheckInterval) { function processResult(result) { if (result.stale) { var content = $("<div title='Review Updated'>" + "<p>The review has been updated since you loaded " + "this page. Would you like to reload?</p>" + "</div>"); content.dialog({ modal: true, buttons: { "Reload": function () { content.dialog("close"); location.reload(); }, "Do Nothing": function () { content.dialog("close"); }} }); return; } else { updateCheckInterval = result.interval; } if (updateCheckInterval) setTimeout(checkSerial, updateCheckInterval * 1000); } function checkSerial() { var operation = new Operation({ action: "check serial", url: "checkserial", data: { "review_id": review.id, "serial": review.serial }, callback: processResult }); operation.execute(); } setTimeout(checkSerial, updateCheckInterval * 1000); } }); ================================================ FILE: src/resources/reviewfilters.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2013 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License. */ "use strict"; function addReviewFiltersDialog(options) { var title, message; if (options.filter_type == "reviewer") { title = "Add Reviewer"; message = "<p>Make specified users reviewers of given path during this review.</p>"; } else { title = "Add Watcher"; message = "<p>Make specified users watchers of given path during this review. " + "If a user would normally be a reviewer of the path, he/she is " + "reduced to just a watcher.</p>"; } var content = $("<div class='comment' title='" + title + "'>" + message + "<p>" + "<b>User name(s):</b><br>" + "<input class='name sourcefont' style='width: 100%'><br>" + "<b>Directory:</b><br>" + "<input class='path sourcefont' style='width: 100%'" + " placeholder='Leave empty for \"everything\"'>" + "</p>" + "</div>"); function finish() { var names = content.find("input.name").val().trim().split(/[, ]+/); var path = content.find("input.path").val().trim(); /* Filter out empty names. */ names = names.filter(function (name) { return name; }); if (!path) path = "/"; if (names.length) return options.callback(names, path); else return false; } function checkFinished() { if (finish()) { $(content).dialog("close"); if (options.reload_page) location.reload(); } } function handleKeypress(ev) { if (ev.keyCode == 13) checkFinished(); } content.find("input").keypress(handleKeypress); var buttons = { "Add Filter": function () { checkFinished(); }, "Cancel": function () { content.dialog("close"); } }; content.dialog({ width: 600, height: "auto", modal: true, buttons: buttons }); function enableAutoCompletion(result) { content.find("input.name").autocomplete({ source: AutoCompleteUsers(result.users) }); content.find("input.path").autocomplete({ source: AutoCompletePath(result.paths), html: true }); } var data = { values: [ "users", "paths" ] }; if (window.review) /* Called from review front-page. 
*/ data.review_id = review.id;
  else
    /* Called from "Create Review" page. */
    data.changeset_ids = review_data.changeset_ids;
  // Fetch user names and paths to feed the dialog's auto-completion fields.
  var operation = new Operation({ action: "get auto-complete data",
                                  url: "getautocompletedata",
                                  data: data,
                                  callback: enableAutoCompletion });
  operation.execute();
}

================================================
FILE: src/resources/ruler.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

 Copyright 2013 Rafał Chłodnicki, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License. You may obtain a copy
 of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 License for the specific language governing permissions and limitations
 under the License. */

/* Draw a vertical "ruler" at column `rulerColumn` (a page-supplied global)
   in source views: measure the rendered pixel width of `rulerColumn` spaces
   in the source font, then position the ruler.png background image of each
   td.line at that offset. */
$(document).ready(function () {
  var spaces = (new Array(rulerColumn + 1)).join(" ");
  // Temporarily insert the spaces into the document so they can be measured.
  var space = $('<span class="sourcefont" style="white-space: pre">' + spaces + '</span>');
  $("body").append(space);
  var space_width = space.width();
  space.remove();
  $("head").append('<style>td.line { background-image: url(/static-resource/ruler.png);' +
                   'background-position: ' + space_width + 'px 0;' +
                   'background-repeat: repeat-y; }</style>');
});

================================================
FILE: src/resources/search.css
================================================
/* -*- mode: css; indent-tabs-mode: nil -*-

 Copyright 2012 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ .search, .callout.quicksearch { -ms-flex: 1; -webkit-flex: 1; flex: 1; } .callout.quicksearch { -ms-flex-item-align: start; -webkit-align-self: flex-start; align-self: flex-start; max-width: 19rem; margin: 1.5rem .5rem 0 1.5rem; } .search { max-width: 50em; margin: 1.5rem .5rem; } .search > * { margin-bottom: 1.5rem; } .search > *:last-child { margin-bottom: 0; } .input-options { float: right; font-size: .8em; } .search input[type=text], .search select { margin-top: .25rem; width: 100%; } .search-query { width: 100%; } .search-freetext, .search-repository { margin-right: 1.5rem; } .search-repository, .search-branch { -ms-flex: 1; -webkit-flex: 1; flex: 1; } .search-freetext { -ms-flex: 5; -webkit-flex: 5; flex: 5; } .search-state { -ms-flex: 2; -webkit-flex: 2; flex: 2; } .search-buttons > * { margin-right: 0.5rem } .search-result { margin: 0 20% } .search-result h2 { font-weight: normal; font-size: 1.4rem; margin: 1rem 0 0.5rem 0 } .search-result tr.review td { background-color: inherit !important } ================================================ FILE: src/resources/search.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ "use strict"; $(function () { var searchForm = document.forms.search; function doSearch(query) { function handleResult(table, result) { $(".search-result") .css({ display: "block" }) .children(".callout").empty().append(table); history.replaceState(null, null, "/search?" + result.query_string); } quickSearch(query, searchForm.query ? handleResult : null); } if (!searchForm.query) { // Disable the freetext input if none of its checkboxes are checked [ searchForm.freetextSummary, searchForm.freetextDescription ].forEach( function (checkbox, idx, checkboxes) { checkbox.addEventListener('click', function () { var someChecked = checkboxes.some( function (cbox) { return cbox.checked; } ); if (searchForm.freetext.disabled === someChecked) { searchForm.freetext.disabled = !someChecked; } }); }); } searchForm.addEventListener('submit', function (evt) { evt.preventDefault(); var form = this; function phrases(value) { return value.match(/"[^"]+"|'[^']+'|\S+/g).map( function (phrase) { var match = /^'([^']+)'|"([^"]+)"$/.exec(phrase); if (match) return match[1] || match[2] || ""; else return phrase; }); } function tokens(value) { return value.split(/[\s,]+/g).map( function (item) { return item.trim(); }).filter( Boolean ); } function with_keyword(keyword) { return function (term) { return term ? 
keyword + ":'" + term + "'" : ""; }; } var query; if (form.query) { query = form.query.value.trim(); } else { var terms = []; var freetext = form.freetext.value.trim(); if (freetext) { var textphrases = phrases(freetext); if (form.freetextSummary.checked && form.freetextDescription.checked) { terms.push.apply(terms, textphrases.map(with_keyword("text"))); } else if (form.freetextSummary.checked) { terms.push.apply(terms, textphrases.map(with_keyword("summary"))); } else if (form.freetextDescription.checked) { terms.push.apply(terms, textphrases.map(with_keyword("description"))); } } var users = tokens(form.user.value.trim()), usersKey; if (form.userOwner.checked && form.userReviewer.checked) { usersKey = "owner-or-reviewer"; } else if (form.userOwner.checked) { usersKey = "owner"; } else if (form.userReviewer.checked) { usersKey = "reviewer"; } else { usersKey = "user"; } terms.push.apply(terms, users.map(with_keyword(usersKey))); var repository = form.repository.value; if (repository) { terms.push(with_keyword("repository")(repository)); } var branch = form.branch.value.trim(); if (branch) { terms.push(with_keyword("branch")(branch)); } var paths = tokens(form.path.value.trim()); terms.push.apply(terms, paths.map(with_keyword("path"))); var state = form.state.value; if (state) { terms.push(with_keyword("state")(state)); } query = terms.join(" "); } doSearch(query); }); $(document.forms.search.user) .autocomplete({ source: AutoCompleteUsers(users) }); $("select[name='state']").chosen(); $(".repository-select") .chosen({ inherit_select_classes: true, allow_single_deselect: true, collapsed_width: "100%", expanded_width: "40em" }); if (searchForm.query && searchForm.query.value.trim()) { doSearch(searchForm.query.value.trim()); } }); ================================================ FILE: src/resources/services.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed 
under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ table.services { margin-top: 1rem; font-family: monospace } tr.service:hover { background-color: #eed; } tr.service > td.commands a { visibility: hidden; text-decoration: none } tr.service:hover > td.commands a { visibility: visible } table.services .pid, table.services .rss, table.services .cpu, table.services .uptime { text-align: right; } div.servicelog > pre { border: 1px solid #222; padding: 0.5em; background: white; overflow: auto } ================================================ FILE: src/resources/services.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ "use strict"; function restartService(service_name) { var operation = new Operation({ action: "restart service", url: "restartservice", data: { service_name: service_name }, wait: "Restarting service..." 
}); if (operation.execute()) location.reload(); }

/* Fetch the log of the named background service via the "getservicelog"
   operation and show its lines in a jQuery UI dialog. */
function getServiceLog(service_name) {
  var content;
  var operation = new Operation({ action: "fetch service log",
                                  url: "getservicelog",
                                  data: { service_name: service_name },
                                  wait: "Fetching service log..." });
  var result = operation.execute();
  if (result) {
    content = $("<div class='servicelog flex' title='Service Log'>" +
                "<pre class=flexible></pre></div>");
    content.find("pre").text(result.lines.join("\n"));
    // Dialog width: document width minus a 100px margin, capped at 1024px.
    content.dialog({ width: Math.min($(document).width() - 100, 1024),
                     buttons: { Close: function () { content.dialog("close"); }} });
  }
}

================================================
FILE: src/resources/showbatch.css
================================================
/* -*- mode: css; indent-tabs-mode: nil -*-

 Copyright 2012 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License. You may obtain a copy
 of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 License for the specific language governing permissions and limitations
 under the License.
*/ table.files { margin: 0; } table.files tbody { font-family: monospace; } table.files .path { text-align: left; padding-left: 0.25em; padding-right: 1em; white-space: pre; } table.files .lines { text-align: right; } div.main table.comments td.h2 { padding-top: 2em; padding-left: 2em; } div.main table.comments td.h3 { padding-top: 1.5em; padding-left: 1.5em; } div.main table.comments td.h3 h3 { font-size: 0.9rem; border-bottom: 1px solid #cca; display: inline-block; margin-top: 0; margin-bottom: 0.5em } div.main table.comments td.h3 h3 a { font-size: 60%; margin-left: 1em } ================================================ FILE: src/resources/showbranch.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ div.main { margin-bottom: 1em } ================================================ FILE: src/resources/showcomment.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 License for the specific language governing permissions and limitations
 under the License. */

/* Keyboard shortcuts for the single-comment page: 'm' shows more context
   around the comment, 'l' shows less, then the page is reloaded with the
   new &context= value.  Returns true when the key was handled. */
keyboardShortcutHandlers.push(function (key) {
  /* Grab the first key of commentChainById, if any.  Legacy idiom: the
     for-in initializer sets chain_id to null, the loop assigns the first
     enumerable key and immediately breaks; if the object is empty the body
     never runs and chain_id stays null.  NOTE(review): a for-in loop with
     an initializer is a pre-ES5 sloppy-mode construct — confirm target
     engines still accept it before touching this. */
  for (var chain_id = null in commentChainById)
    break;
  if (chain_id === null)
    return false;
  // 'm' = more context (grow by 1.5x, at least 1), 'l' = less context.
  if (key == "m".charCodeAt(0))
    contextLines = Math.ceil(contextLines * 1.5) || 1;
  else if (key == "l".charCodeAt(0))
    contextLines = Math.floor(contextLines / 1.5);
  else
    return false;
  location.replace("/showcomment?chain=" + chain_id + "&context=" + contextLines);
  return true;
});

================================================
FILE: src/resources/showfile.css
================================================
/* -*- mode: css; indent-tabs-mode: nil -*-

 Copyright 2012 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License.
*/ div.main table.file { table-layout: fixed } div.main table.file td.h1 { border-bottom: none } div.main table td.line { padding-left: 0 } div.main table td.h1 h1 > a { text-decoration: none; color: #222 } div.main table td.h1 h1 > a.root { font-size: 80% } div.main table td.h1 h1 > a:hover { text-decoration: underline; color: #222 } div.main table td.h1 h1 span { padding-left: 3px; padding-right: 3px; } table.file col.edge { width: 1em } table.file col.linenr { width: 4em } table.file col.middle { width: 1em } table.file col.line { width: 50% } table.file > tbody.spacer > tr > td { background-color: #eee; text-align: center; border-left: 1px solid #888; border-right: 1px solid #888 } table.file > tbody.spacer.bottom > tr > td { border-bottom: 1px solid #888 } table.file > tbody.spacer.top > tr > td { border-top: 1px solid #888 } table.file > tbody.spacer > tr > td { padding: 0 } table.file > tbody.spacer > tr + tr.spacer { display: none } table.file > tbody.spacer > tr.expand > td, table.file > tbody.spacer > tr.context > td { padding-top: 3px; padding-bottom: 3px } table.file > tbody.spacer > tr.expand > td > select { background-color: #eee; border: 0 } table.file > tbody.lines > tr > td.edge, table.file > tbody.lines > tr > td.middle, table.file > tbody.lines > tr > td.linenr { background-color: #eee } table.file > tbody.lines > tr > td.edge:first-child { border-left: 1px solid #888 } table.file > tbody.lines > tr > td.edge:last-child { border-right: 1px solid #888 } table.file > tbody.lines > tr > td.linenr.old { text-align: right; padding-right: 3px } table.file > tbody.lines > tr > td.linenr.new { padding-left: 3px } table.file > tbody.lines > tr > td.line { background-color: white; font-family: monospace; border-left: 1px solid #888; border-right: 1px solid #888; white-space: pre-wrap; overflow: hidden; padding-left: 3px } table.file > tbody.lines > tr > td.comment { white-space: pre-wrap; border-left: 1px solid #888; border-right: 1px solid #888 } 
table.file > tbody.lines > tr:first-child > td.line { border-top: 1px solid #888; padding-top: 3px } table.file > tbody.lines > tr:last-child > td.line { border-bottom: 1px solid #888; padding-bottom: 3px } table.file > tbody.lines > tr.context:hover > td.line, table.file > tbody.lines > tr.context.markold > td.line.old, table.file > tbody.lines > tr.context.marknew > td.line.new { background-color: #eee } table.file > tbody.lines > tr > td.line.highlight { background-color: #dfd } table.file > tbody.lines > tr:hover > td.line.highlight { background-color: #cec } td.line.highlight b.t { color: #aca } table.file > tbody.lines > tr.commented > td.line.commented { background-color: #eef } table.file > tbody.lines > tr.commented:hover > td.line.commented { background-color: #dde } table.file > tbody.lines > tr.commenthead > td.commenthead { font-family: sans-serif; background-color: #ccf; font-weight: bold; border-top: 1px solid #99b; border-bottom: 1px solid #99b; text-align: center } table.file > tbody.lines > tr.commentchain > td.commentchain, table.file > tbody.lines > tr.commentchain > td.comment { font-family: sans-serif; background-color: #ccf; border-bottom: 1px solid #99b; padding-left: 3px; padding-right: 3px; } table.file > tbody.lines > tr.commentchain > td.comment > div { padding: 3px } table.file > tbody.lines > tr.commentchain > td.comment > div.comment.draft > span.time:after { content: " (draft)"; font-weight: bold; color: #f00 } table.file > tbody.lines > tr.commentchain > td.comment > div.warning { font-weight: bold; color: #f00 } table.file > tbody.lines > tr.commentchain > td > div > span.type { font-weight: bold } table.file > tbody.lines > tr.commentchain > td > div > span.author { font-weight: bold } table.file > tbody.lines > tr.commentchain > td > div > span.draft { font-weight: bold; color: #f00 } table.file > tbody.lines > tr.commentchain > td.comment > div > div.text { font-family: monospace; background-color: #ddf; border: 1px solid #99b; 
margin-top: 3px; padding: 5px; white-space: pre-wrap } table.file > tbody.lines > tr.commentchain > td.comment > div > div.text > textarea { background-color: #ddf; border: 0; width: 100% } table.file > tbody.lines > tr.commentchain > td.comment > div.buttons { text-align: right; padding-top: 3px } table.file > tbody.content > tr > td { background-color: #eee } table.file > tbody.deleted > tr > td { background-color: #eee; text-align: center } table.file > tbody.deleted > tr > td > h2 { margin-top: 0 } table.file > tbody.binary > tr > td { background-color: #eee; text-align: center } table.file > tbody.binary > tr > td > h2 { margin-top: 0 } table.commit-info { margin-top: 1em; width: auto !important } table.commit-info span.branches, table.commit-info span.tags { margin-left: 1em } table.commit-info span.branch, table.commit-info span.tag { margin-right: 0.2em } pre.commit-msg { padding: 0.5em 1em } b.tab:before { content: "\2192" } b.tab { color: #ccc } tr.modified b.tab { color: #cca } tr.replaced > td.old b.tab, tr.deleted > td.old b.tab { color: #caa } tr.replaced > td.new b.tab, tr.inserted > td.new b.tab { color: #aca } input.approve { background-color: #eec; font-weight: bold } body { font-size: 12px; font-family: sans-serif } div.playground { white-space: pre; font-family: monospace } div.parent { display: none } div.parent.show { display: block } div.parent > h1 { padding-left: 1em; font-size: 150%; font-weight: bold } div.detectmoves select { width: 100%; background-color: #fff; padding: 3px } table.commit-info tr.commit-msg > td { padding-top: 1.5em; padding-bottom: 1.5em } table.commit-msg tr.line:hover { background-color: #eee } table.commit-msg tr.line td.edge { padding: 0 1em 0 1em } table.commit-msg tr.line td.line { white-space: pre; font-family: monospace; padding: 0 } table.commit-msg tr.line.first td.line { font-weight: bold } ================================================ FILE: src/resources/showfile.js 
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

 Copyright 2012 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License. You may obtain a copy
 of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 License for the specific language governing permissions and limitations
 under the License. */

/* -*- mode: text; indent-tabs-mode: nil -*- */

/* Rewrite the URL's &line= parameter to cover the line span selected by
   `markers`, so the highlighted lines can be linked to.  Marker line element
   ids match /f\d+n\d+/ ("f<file-id>n<line-number>"). */
function highlightLines(markers) {
  var start = /f\d+n(\d+)/.exec(markers.firstLine.id)[1];
  var end = /f\d+n(\d+)/.exec(markers.lastLine.id)[1];
  var href = location.href;
  // Strip any existing &line=N or &line=N-M parameter from the current URL.
  var match = /^(.*)&line=\d+(?:-\d+)?(.*)$/.exec(href);
  if (match)
    href = match[1] + match[2];
  if (start == end)
    location.href = href + "&line=" + start;
  else
    location.href = href + "&line=" + start + "-" + end;
  markers.remove();
  currentMarkers = null;
}

/* Wrap the page-global handleMarkedLines: outside a review, marking lines
   just links to them; inside a review, add a "Link to Lines" extra button to
   the comment-chain UI and defer to the original handler. */
var defaultHandleMarkedLines = handleMarkedLines;
handleMarkedLines = function (markers) {
  if (typeof review == "undefined")
    highlightLines(markers);
  else {
    CommentChain.extraButtons = { "Link to Lines": function () { highlightLines(currentMarkers); return true; } };
    defaultHandleMarkedLines(markers);
  }
};

$(document).ready(function () {
  // Enable drag-marking of source lines.
  $("td.line").mousedown(startCommentMarking);
  $("td.line").mouseover(continueCommentMarking);
  $("td.line").mouseup(endCommentMarking);
  // If the URL pre-selected a line range, mark it as an open issue.
  if (typeof firstSelectedLine == "number") {
    var markers = new CommentMarkers;
    markers.setLines($("td.first-selected"), $("td.last-selected"));
    markers.setType("issue", "open");
  }
});

================================================
FILE: src/resources/showreview.css
================================================
/* -*- mode: css; indent-tabs-mode: nil -*-

 Copyright 2012 Jens Lindström,
Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ div.main table.basic td.h1 h1 a.edit, div.main table.progress td.h1 h1 a { font-size: 12px } div.main table.basic td.h1 h1 a.edit { padding-left: 1em } div.main table.basic tr.line > td { padding-top: 0.5em } div.main table.basic tr.line > td div.buttons { float: right } div.main table.basic tr.line > td div.buttons button { margin-left: 3px } div.main table.basic td.heading { font-family: serif; font-weight: bold; text-align: right; vertical-align: top } div.main table.basic td.value { font-family: monospace; white-space: pre-wrap; vertical-align: top } div.main table.basic code.branch.archived { text-decoration: line-through } div.main table.basic span.user.retired > span.name { text-decoration: line-through } div.main table.basic span.user > span.status { color: red } div.main table.basic table.reviewfilters { white-space: normal; margin-top: 1em } div.main table.basic table.reviewfilters tbody.hidden, div.main table.basic table.reviewfilters tfoot.hidden { display: none } div.main table.basic table.reviewfilters tfoot tr td { font-style: italic } div.main table.basic table.reviewfilters tr.h1 th.h1 { font-family: serif; font-weight: bold; border-bottom: 1px solid #222 } div.main table.basic table.reviewfilters td { vertical-align: top } div.main table.basic table.reviewfilters td.username { font-weight: bold; text-align: right } div.main table.basic table.reviewfilters td.reviews { font-family: sans-serif; text-align: center; 
padding-left: 0.5em; padding-right: 0.5em } div.main table.basic table.reviewfilters td.path { white-space: pre } div.main table.basic table.reviewfilters tr.filter td.remove { text-align: right; padding-left: 0.5em } div.main table.basic table.reviewfilters tr.filter td.remove a { visibility: hidden } div.main table.basic table.reviewfilters tr.filter:hover td.remove a { visibility: visible; color: red; text-decoration: none } div.main table.basic table.reviewfilters tr.filter:hover td.remove a:hover { text-decoration: underline } div.main table.basic td.value div.text { white-space: pre-wrap } div.main table.basic td.value input.value { font-family: monospace; width: 32em } div.main table.basic td.right { text-align: right } div.main table.basic td.help { font-style: italic; text-align: right; border-bottom: 1px solid #cca } div.main table.basic td.value span.mode { font-style: italic } div.main table.progress { text-align: center; } div.main table.progress .h1 h1 { text-align: left; } div.main table.progress td.percent h1 { font-size: 100px; margin: 10px } div.main table.progress td.percent h1 span.comments { font-size: 30%; padding-left: 1em; padding-right: 1em } div.main table.progress td.percent h1 div { margin-top: -60px; } div.main table.progress td.percent h1 div span.remark { font-size: 12px; padding-left: 1em; padding-right: 1em } div.main table.progress td.stuck { padding-bottom: 10px } div.main table.progress td.stuck a { font-weight: bold; color: red; text-decoration: none } div.main table.progress td.stragglers { font-weight: bold; text-decoration: underline; padding-bottom: 3px } div.main table.progress td.straggler { font-family: monospace } div.main table.progress td.straggler.absent:after { content: " (absent)"; color: red } div.main table.progress td.pinging { font-style: italic; padding-top: 5px } div.main table.progress td.pinging span { border-top: 1px solid #cca } div.main table.shared tr.h1 td.buttons { text-align: right; border-bottom: 1px 
solid #cca } div.main table.shared tr.reviewers td { padding-top: 0.5em; vertical-align: top } div.main table.shared tr.reviewers td.reviewers { font-weight: bold; text-align: right; width: 30% } div.main table.shared tr.reviewers td.files { font-family: monospace; width: 40% } div.main table.shared tr.reviewers td.files span.file { white-space: pre } div.main table.shared tr.reviewers td.files { width: 30% } div.main table.log td.approval, div.main table.log td.total { text-align: right; white-space: pre } div.main table.log td.approval.user span, div.main table.log td.total.user span { outline: 3px dotted #f66; padding-left: 3px; padding-right: 3px } div.main table.log td.approval span, div.main table.log td.total span { font-family: monospace } div.main table.log td.approval.user span, div.main table.log td.total.user span { font-weight: bold } div.main table.log td.approval.approved span { background-color: #dfd } div.main table.log td.approval.pending span { background-color: #fdd } div.main table.comments > tr > td, div.main table.batches > tr > td { padding-left: 1em } div.main table.comments td.h1, div.main table.batches td.h1 { border-bottom: 1px solid #cca } div.main table.comments td.h1 h1, div.main table.batches td.h1 h1 { margin-top: 0; margin-bottom: 0.5em } div.main table.comments td.h1 h1 a { font-size: 50%; margin-left: 1em } div.main table.comments td.h2 { padding-top: 1em; padding-left: 2em; border-bottom: 1px solid #cca } div.main table.comments td.h2 h2 { margin-top: 0; margin-bottom: 0.5em } div.main table.comments td.h2 a { font-size: 50%; margin-left: 1em } div.main table.comments tr.comment td, div.main table.batches tr.batch td { padding-top: 3px; padding-bottom: 3px } div.main table.comments tr.comment:hover, div.main table.batches tr.batch:hover { background: #eec } div.main table.comments td.author, div.main table.batches td.author { font-weight: bold; white-space: pre } div.main table.comments td.title, div.main table.batches td.title 
{ font-family: monospace; white-space: nowrap; text-overflow: ellipsis; overflow: hidden; width: 70% } div.main table.comments td.when, div.main table.batches td.when { text-align: right; white-space: pre; padding-right: 1em } div.main table.comments td.buttons { border-top: 1px solid #cca; padding-top: 15px; text-align: right } div.main table.batches td.title span.numbers { color: black; float: right; font-family: sans-serif; font-size: 90% } div.summary-tooltip > div.header { font-weight: bold; text-decoration: underline } div.summary-tooltip > div.reviewer { font-family: monospace } div.summary-tooltip > div.reviewer > span.absent { color: red } div.text input { margin-top: 1em; padding: 3px; width: 95% } dt { font-weight: bold } dd { padding-top: 0.5em; padding-bottom: 1em } dd .notsupported { font-weight: bold; color: red; margin-bottom: 0.5em } div.specifyupstream input[name='sha1'] { font-family: monospace; width: 100% } div.specifyupstream select { width: 100% } div.removefilter div { padding-left: 1em } div.removefilter .user { font-weight: bold } div.removefilter .path { font-family: monospace } div.main > div.error { width: 50%; margin-left: auto; margin-right: auto; border: 10px solid red; border-radius: 20px; padding: 0.5em 2em 2em 2em; font-weight: bold } div.main > div.error > h1 { text-decoration: underline } code.inset > a { color: #222; text-decoration: none } code.inset > a:hover { color: blue; text-decoration: underline } p.tracking { font-style: italic; padding-left: 2em } p.tracking.disabled { text-decoration: line-through; color: red } span.lastupdate { font-style: italic; padding-left: 1em } div.enabletracking input { font-family: monospace; margin-top: 0.5em; width: 100% } div.operation-performed h1 { text-align: center; font-size: 120% } ================================================ FILE: src/resources/showreview.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, 
Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ function archiveBranch() { function finished(result) { if (result) { var done = $("<div class=operation-performed title=Status>" + "<h1>Branch archived</h1>" + "</div>"); done.dialog({ modal: true, buttons: { OK: function () { done.dialog("close"); location.reload(); } } }); } } var operation = new Operation({ action: "archive branch", url: "archivebranch", data: { review_id: review.id }, callback: finished }); operation.execute(); } function resurrectBranch() { function finish(result) { if (result) { var scheduled_archival_note = ""; if (result.delay) { var delay = String(result.delay) + " day"; if (result.delay > 1) delay += "s"; scheduled_archival_note = ( "<p>Note that since the review is not open, its branch was " + "scheduled to be archived again in " + delay + ".</p>"); } var done = $("<div class=operation-performed title=Status>" + "<h1>Branch resurrected</h1>" + scheduled_archival_note + "</div>"); done.dialog({ width: scheduled_archival_note ? 
600 : void 0, modal: true, buttons: { OK: function () { done.dialog("close"); location.reload(); } } }); } } var operation = new Operation({ action: "resurrect branch", url: "resurrectbranch", data: { review_id: review.id }, callback: finish }); operation.execute(); } function shortDate(date) { function pad(number, width) { var result = String(number); while (result.length < width) result = "0" + result; return result; } return (pad(date.getFullYear(), 4) + "-" + pad(date.getMonth() + 1, 2) + "-" + pad(date.getDate(), 2) + " " + pad(date.getHours(), 2) + ":" + pad(date.getMinutes(), 2)); } function triggerUpdate(branch_id) { var operation = new Operation({ action: "trigger update", url: "triggertrackedbranchupdate", data: { branch_id: branch_id }}); if (operation.execute()) { var done = $("<div title='Status' style='text-align: center; padding-top: 2em'>Branch update triggered.</div>"); done.dialog({ modal: true, buttons: { OK: function () { done.dialog("close"); }}}); } } function enableTracking(branch_id, remote, current_remote_name) { function finish() { var operation = new Operation({ action: "enable tracking", url: "enabletrackedbranch", data: { branch_id: branch_id, new_remote_name: remote_name.val() }}); return Boolean(operation.execute()); } var self = this; var content = $("<div class='enabletracking' title='Enable Tracking'><p><b>Remote branch name:</b><br><input></p></div>"); var remote_name = content.find("input"); remote_name .val(current_remote_name) .autocomplete({ source: AutoCompleteRef(remote, "refs/heads/"), html: true }); var buttons = { "Enable Tracking": function () { if (finish()) { content.dialog("close"); location.reload(); } }, "Cancel": function () { content.dialog("close"); } }; content.dialog({ width: 400, buttons: buttons }); } function disableTracking(branch_id) { var operation = new Operation({ action: "disable tracking", url: "disabletrackedbranch", data: { branch_id: branch_id }}); if (operation.execute()) location.reload(); } 
function watchReview() { var operation = new Operation({ "action": "watch review", "url": "watchreview", "data": { "review_id": review.id, "subject_name": user.name }}); if (operation.execute()) location.reload(); } function unwatchReview() { var operation = new Operation({ "action": "unwatch review", "url": "unwatchreview", "data": { "review_id": review.id, "subject_name": user.name }}); if (operation.execute()) location.reload(); } function filterPartialChanges() { var content = $("<div title='Filter Partial Changes'>Please select the desired range of commits below using click-and-drag.</div>"); function cancel() { content.dialog("close"); overrideShowSquashedDiff = null; } content.dialog({ width: 800, position: "top", buttons: { Cancel: cancel }, resizable: false }); overrideShowSquashedDiff = function (from_sha1, to_sha1) { overrideShowSquashedDiff = null; content.dialog("close"); location.href = "/filterchanges?review=" + review.id + "&first=" + from_sha1 + "&last=" + to_sha1; }; } function updateFilters(filter_type) { function addFilters(names, path) { var operation = new Operation({ action: "add review filters", url: "addreviewfilters", data: { review_id: review.id, filters: [{ type: filter_type, user_names: names, paths: [path] }] } }); return operation.execute() != null; } addReviewFiltersDialog({ filter_type: filter_type, callback: addFilters, reload_page: true }); } function addReviewer() { updateFilters("reviewer"); } function addWatcher() { updateFilters("watcher"); } function removeReviewFilter(filter_id, filter_user, filter_type, filter_path, confirm) { function finish() { var operation = new Operation({ action: "remove review filter", url: "removereviewfilter", data: { filter_id: filter_id }}); if (operation.execute()) { location.reload(); return true; } else return false; } if (confirm) { var content = $("<div class='removefilter' title='Confirm'><p>Please confirm that you mean to remove the filter that makes</p><div class=user>" + 
htmlify(filter_user) + "</div><p>a " + filter_type + " of</p><div class=path>" + filter_path + "</div><p>An email will be sent the user about the change and its effect on assignments.</p></div>"); content.dialog({ width: 400, buttons: { "Remove the filter": function () { if (finish()) content.dialog("close"); }, "Do nothing": function () { content.dialog("close"); } }, modal: true }); } else finish(); } function applyFilters(what) { var query_url, apply_url; if (what == "global") { query_url = "queryglobalfilters"; apply_url = "applyglobalfilters"; } else { query_url = "queryparentfilters"; apply_url = "applyparentfilters"; } function openDialog(result) { if (!result) return; function proceed() { function finish(result) { if (result) { dialog.dialog("close"); location.reload(); } } var operation = new Operation({ action: "update review filters", url: apply_url, data: { review_id: review.id }, wait: "Applying filters ...", callback: finish }); operation.execute(); } function cancel() { dialog.dialog("close"); } var html = "<div title='Apply " + what + " filters'>"; var buttons = {}; if (result.reviewers.length || result.watchers.length) { html += ("<p>By applying " + what + " filters to this review, the " + "following new reviewers and watchers would be added:</p>"); if (result.reviewers.length) { html += "<p>New reviewers:</p><ul>"; result.reviewers.forEach( function (user) { html += "<li>" + htmlify(user.displayName + " <" + user.email + ">") + "</li>"; }); html += "</ul>"; } if (result.watchers.length) { html += "<p>New watchers:</p><ul>"; result.watchers.forEach( function (user) { html += "<li>" + htmlify(user.displayName + " <" + user.email + ">") + "</li>"; }); html += "</ul>"; } buttons["Apply Filters"] = proceed; } else { html += ("<p>Applying " + what + " filters to this review would " + "not cause any immediate changes. 
It may however affect " + "what happens when adding additional changes to the review " + "in the future.</p>"); } buttons["Cancel"] = cancel; html += "</div>"; var dialog = $(html); dialog.dialog({ width: 400, modal: true, buttons: buttons }); } var operation = new Operation({ action: "query review filters", url: query_url, data: { review_id: review.id }, wait: "Listing new reviewers and watchers ...", callback: openDialog }); operation.execute(); } function toggleReviewFilters(type, button) { var table = $("table.reviewfilters." + type); var tbody = table.find("tbody"); var tfoot = table.find("tfoot"); if (tbody.hasClass("hidden")) { tbody.removeClass("hidden"); tfoot.addClass("hidden"); button.button("option", "label", "Hide Custom Filters"); } else { tbody.addClass("hidden"); tfoot.removeClass("hidden"); button.button("option", "label", "Show Custom Filters"); } } function prepareRebase() { var rebase_type_dialog; function finish() { var inplace = rebase_type_dialog.find("input#inplace:checked").size() != 0; if (inplace) { var operation = new Operation({ action: "prepare rebase", url: "preparerebase", data: { review_id: review.id }}); if (operation.execute()) { rebase_type_dialog.dialog("close"); var finished = $("<div title='Rebase Prepared!'>" + "<p>" + "You may now push the rebased branch, using \"git push -f\". " + "Any attempt to push changes to this review by other users will " + "be rejected until you've completed the rebase, or aborted it." + "</p>" + "<p>" + "<b>Note:</b> Remember that one commit on the rebased branch must " + "reference a tree that is identical to the one referenced by the " + "current head of the review branch. If this is not the case, your " + "push will be rejected." 
+ "</p>" + "</div>"); finished.dialog({ width: 400, modal: true, close: function() { location.reload(); }, buttons: { Close: function () { finished.dialog("close"); }} }); } } else { rebase_type_dialog.dialog("close"); var select_upstream_dialog = $("<div class='specifyupstream' title='Specify New Upstream Commit'>" + "<p>" + "Unless you squashed the whole branch into a single commit, please specify " + "the new upstream commit onto which the review branch is rebased, either by " + "entering a SHA-1 sum or by selecting one of the suggested tags:" + "</p>" + "<p>" + "<label><input name='single' type='checkbox'>Branch squashed into a single commit.</label>" + "</p>" + "<p>" + "<b>SHA-1:</b><input name='sha1' size=40 spellcheck='false'>" + "</p>" + "<p>" + "<b>Tag:</b>" + "<select disabled>" + "<option value='none'>Fetching suggestions...</option>" + "</select>" + "</p>" + "</div>"); var select_upstream_dialog_closed = false; function populateSuggestedUpstreams(result) { if (result) { var upstreams = result.upstreams.map( function (tag) { return "<option value='" + htmlify(tag) + "'>" + htmlify(tag) + "</option>"; }); var select = select_upstream_dialog.find("select").get(0); if (upstreams.length != 0) { select.innerHTML = "<option value='none'>Found " + upstreams.length + " likely upstreams:</option>" + upstreams.join(""); select.disabled = single.checked; } else select.innerHTML = "<option value='none'>(No likely upstreams found.)</option>"; } } var fetch_upstreams = new Operation({ action: "fetch suggested upstream commits", url: "suggestupstreams", data: { review_id: review.id }, callback: populateSuggestedUpstreams }); fetch_upstreams.execute(); var single = select_upstream_dialog.find("input").get(0); var sha1 = select_upstream_dialog.find("input").get(1); var tag = select_upstream_dialog.find("select").get(0); single.onclick = function () { sha1.disabled = single.checked; tag.disabled = single.checked || tag.options.length == 1; }; function finishMove() { var 
upstream; if (single.checked) upstream = "0000000000000000000000000000000000000000"; else if (tag.value != "none" && sha1.value != "") alert("Ambiguous input! Please leave either SHA-1 or tag empty."); else if (tag.value == "none" && !/^[0-9a-f]{40}$/i.test(sha1.value)) alert("Invalid input! Please specify a full 40-character SHA-1 sum."); else if (sha1.value != "") upstream = sha1.value; else upstream = tag.value; if (typeof upstream == "string") { var operation = new Operation({ action: "prepare rebase", url: "preparerebase", data: { review_id: review.id, new_upstream: upstream }}); if (operation.execute()) { select_upstream_dialog.dialog("close"); var finished = $("<div title='Rebase Prepared!'>" + "<p>" + "You may now push the rebased branch, using \"git push -f\". " + "Any attempt to push changes to this review by other users will " + "be rejected until you've completed the rebase, or aborted it." + "</p>" + "<p>" + "<b>Important:</b> Remember not to push any new changes to the " + "review with this push; such changes will be very difficult to " + "see or review." + "</p>" + "</div>"); finished.dialog({ width: 400, modal: true, close: function() { location.reload(); }, buttons: { Close: function () { finished.dialog("close"); }} }); } } } select_upstream_dialog.dialog({ width: 400, modal: true, buttons: { Continue: function () { finishMove(); }, Cancel: function () { select_upstream_dialog.dialog("close"); }}, close: function () { select_upstream_dialog_closed = true; } }); } return true; } function start(supports_move) { rebase_type_dialog = $("<div title='Prepare Rebase'>" + "<p>Please select rebase type:</p>" + "<dl>" + "<dt><label><input id='inplace' type='radio' name='rebasetype' checked>History Rewrite / In-place</label></dt>" + "<dd>Rebase on-top of the same upstream commit that only changes the history on the branch.</dd>" + "<dt><label><input id='move' type='radio' name='rebasetype'" + (supports_move ? 
"" : " disabled") + ">New Upstream / Move</label></dt>" + "<dd>" + (supports_move ? "" : "<div class='notsupported'>[Not supported for this review!]</div>") + "Rebase on-top of a different upstream commit. Can also change the history on the branch in the process.</dd>" + "</dl>" + "</div>"); rebase_type_dialog.dialog({ width: 400, modal: true, buttons: { Continue: function () { finish(); }, Cancel: function () { rebase_type_dialog.dialog("close"); }} }); } var operation = new Operation({ action: "check rebase possibility", url: "checkrebase", data: { review_id: review.id }}); var result = operation.execute(); if (result) start(result.available == "both"); } function cancelRebase() { var operation = new Operation({ action: "cancel rebase", url: "cancelrebase", data: { review_id: review.id }}); if (operation.execute()) location.reload(); } function revertRebase(rebase_id) { var confirm_dialog = $("<div title=Please Confirm'><p>Are you sure you want to revert the rebase?</p></div>"); function finish() { var operation = new Operation({ action: "revert rebase", url: "revertrebase", data: { review_id: review.id, rebase_id: rebase_id }}); if (operation.execute()) { confirm_dialog.dialog("close"); location.reload(); } } confirm_dialog.dialog({ width: 400, modal: true, buttons: { "Revert Rebase": function () { finish(); }, "Do Nothing": function () { confirm_dialog.dialog("close"); }} }); } function excludeRecipient(user_id) { var operation = new Operation({ action: "exclude recipient", url: "addrecipientfilter", data: { review_id: review.id, user_id: user_id, include: false }}); if (operation.execute()) location.reload(); } function includeRecipient(user_id) { var operation = new Operation({ action: "include recipient", url: "addrecipientfilter", data: { review_id: review.id, user_id: user_id, include: true }}); if (operation.execute()) location.reload(); } $(document).ready(function () { $("button.archive").click(archiveBranch); 
$("button.resurrect").click(resurrectBranch); $("tr.commit td.summary").each(function (index, element) { var users = $(element).attr("critic-reviewers"); if (users) { users = users.split(","); $(element).find("a.commit").tooltip({ items: 'a.commit', content: function () { var html = "<div class='summary-tooltip'><div class='header'>Needs review from</div>"; for (var index = 0; index < users.length; ++index) { var match = /([^:]+):(current|absent|retired)/.exec(users[index]); var fullname = match[1]; var status = match[2]; if (status != "retired") { html += "<div class='reviewer'>" + htmlify(fullname); if (status == "absent") html += "<span class='absent'> (absent)</span>"; html += "</div>"; } } return $(html + "</div>"); }, track: true, hide: false }); } }); $("td.straggler.no-email").each(function (index, element) { $(element).tooltip({ items: 'td.straggler.no-email', content: function () { return $("<div class='no-email-tooltip'><strong>This user has not enabled the <u>email.activated</u> preference!</strong></div>"); }, track: true, hide: false }); }); $("a[title]").tooltip({ fade: 250 }); var reviewfilters = []; $("table.shared button.accept").click(function (ev) { var target = $(ev.currentTarget); var paths = JSON.parse(target.attr("critic-paths")); var user_ids = JSON.parse(target.attr("critic-user-ids")); reviewfilters.push({ type: "watcher", user_ids: user_ids, paths: paths }); $("table.shared td.buttons > span").css("display", "inline"); target.parents("td.buttons").children("button").css("visibility", "hidden"); target.parents("tr.reviewers").children("td.willreview").css("text-decoration", "line-through"); }); $("table.shared button.deny").click(function (ev) { var target = $(ev.currentTarget); var paths = JSON.parse(target.attr("critic-paths")); reviewfilters.push({ type: "watcher", user_ids: [user.id], paths: paths }); $("table.shared td.buttons > span").css("display", "inline"); target.parents("td.buttons").children("button").css("visibility", 
"hidden"); target.parents("tr.reviewers").find("td.willreview span.also").css("text-decoration", "line-through"); }); $("table.shared button.cancel").click(function (ev) { location.reload(); }); $("table.shared button.confirm").click(function (ev) { var operation = new Operation({ action: "add review filters", url: "addreviewfilters", data: { review_id: review.id, filters: reviewfilters }}); if (operation.execute()) { $("table.shared td.buttons > span").css("display", "none"); reviewfilters = []; location.reload(); } }); $("button.preparerebase").click(prepareRebase); $("button.cancelrebase").click(cancelRebase); }); ================================================ FILE: src/resources/showreviewlog.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ body { font-size: 12px; font-family: sans-serif } div.main > table td { vertical-align: top } div.main > table tr.reviewers td { padding-top: 0.5em } div.main > table tr.reviewers td.reviewers { font-weight: bold; text-align: right } div.main > table tr.reviewers td.willreview { text-align: center } div.main > table tr.reviewers td.files { font-family: monospace; } div.main > table tr.reviewers td.files span.file { white-space: pre } div.main > table tr.reviewers td.no-one { text-align: right; color: red; font-weight: bold } ================================================ FILE: src/resources/showtree.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ div.main table.tree { table-layout: fixed } div.main table.tree td { padding-left: 1em } div.main table td.h1 h1 a { text-decoration: none; color: #222 } div.main table td.h1 h1 a.root { font-size: 80% } div.main table td.h1 h1 a:hover { text-decoration: underline; color: #222 } div.main table td.h1 h1 span { padding-left: 3px; padding-right: 3px; } div.main table.tree thead td.mode, div.main table.tree thead td.name, div.main table.tree thead td.size { padding-top: 1em; font-weight: bold; text-decoration: underline } div.main table.tree tbody td { padding-top: 0.5em } div.main table.tree td.mode { font-family: monospace } div.main table.tree td.name { font-family: monospace } div.main table.tree td.name a { text-decoration: none } div.main table.tree td.size { text-align: right; font-family: monospace } div.main table.tree tr.tree td.name { font-weight: bold } div.main table.tree td.link { font-family: monospace; color: gray } div.main table.tree td.link span { color: #222 } ================================================ FILE: src/resources/statistics.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ tr.line td { padding: 3px 3px 3px 1em } tr.line td.user { text-align: right; font-weight: bold; white-space: pre } tr.line td.value { text-align: right; white-space: pre } tr.line.self td.user, tr.line.self td.value { border-bottom: 1px solid #222; border-top: 1px solid #222; background-color: #eec } tr.space td { padding-top: 1em } div.main table td.right { width: 90%; text-align: left } ================================================ FILE: src/resources/syntax.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* C++ syntax highlighting is done using b (bold) elements, with the classes: com => comment pp => pre-processing directive kw => keyword op => operator id => identifier str => string literal ch => character literal fp => floating point literal int => integer literal */ /* We didn't really want bold: */ td.line b { font-weight: normal } /* Comments. */ td.line b.com { font-style: italic; color: darkblue } /* Preprocessing directives. */ td.line b.pp { color: maroon } /* Keywords and operators. */ td.line b.kw, td.line b.op { font-weight: bold; color: #222 } /* Identifiers. */ td.line b.id { color: #222 } /* String and character literals. */ td.line b.str, td.line b.ch { color: blue } /* Numeric literals. 
*/ td.line b.fp, td.line b.int { color: green } ================================================ FILE: src/resources/tabify.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ div.playground { white-space: pre } b.t:before { content: "\2192" } b.t { color: #ccc } b.t.ill { color: red; font-weight: bold } ================================================ FILE: src/resources/tabify.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ var tabified = true, tab_width_calculated = false, tabify_style_added = false; function calculateTabWidth() { if (!tabify_style_added) { $("head").append('<style>' + ' div.playground { white-space: pre }' + ' b.t:before { content: "\\2192" }' + ' b.t { color: #ccc }' + ' b.t.ill { color: red; font-weight: bold }' + '</style>'); tabify_style_added = true; } if (!tab_width_calculated) { document.write("<div class='playground sourcefont'><span id='playground-space'> </span></div>"); var playground = $(".playground"); var space_width = $("#playground-space").width(); if (space_width != 0) { var stylesheet = ""; for (var tabwidth = 2; tabwidth <= 8; ++tabwidth) { var tab_width_extra = (tabwidth - 1) * space_width; // NOTE: I don't know why " + 1" is necessary. var tab_margin_before = (tab_width_extra / 2) << 0; var tab_margin_after = tab_width_extra - tab_margin_before; stylesheet += "b.w" + tabwidth + " { padding-left: " + tab_margin_before + "px; padding-right: " + tab_margin_after + "px }\n"; } $("head").append("<style>" + stylesheet + "</style>"); tab_width_calculated = true; } playground.remove(); } } ================================================ FILE: src/resources/third-party/chosen.css ================================================ /*! Chosen, a Select Box Enhancer for jQuery and Prototype by Patrick Filler for Harvest, http://getharvest.com Version 1.0.0 Full source at https://github.com/harvesthq/chosen Copyright (c) 2011 Harvest http://getharvest.com MIT License, https://github.com/harvesthq/chosen/blob/master/LICENSE.md This file is generated by `grunt build`, do not edit it by hand. 
*/ /* @group Base */ .chosen-container { position: relative; display: inline-block; vertical-align: middle; font-size: 13px; zoom: 1; *display: inline; -webkit-user-select: none; -moz-user-select: none; user-select: none; } .chosen-container .chosen-drop { position: absolute; top: 100%; left: -9999px; z-index: 1010; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; width: 100%; border: 1px solid #aaa; border-top: 0; background: #fff; box-shadow: 0 4px 5px rgba(0, 0, 0, 0.15); } .chosen-container.chosen-with-drop .chosen-drop { left: 0; } .chosen-container a { cursor: pointer; } /* @end */ /* @group Single Chosen */ .chosen-container-single .chosen-single { position: relative; display: block; overflow: hidden; padding: 0 0 0 8px; height: 23px; border: 1px solid #aaa; border-radius: 5px; background-color: #fff; background: -webkit-gradient(linear, 50% 0%, 50% 100%, color-stop(20%, #ffffff), color-stop(50%, #f6f6f6), color-stop(52%, #eeeeee), color-stop(100%, #f4f4f4)); background: -webkit-linear-gradient(top, #ffffff 20%, #f6f6f6 50%, #eeeeee 52%, #f4f4f4 100%); background: -moz-linear-gradient(top, #ffffff 20%, #f6f6f6 50%, #eeeeee 52%, #f4f4f4 100%); background: -o-linear-gradient(top, #ffffff 20%, #f6f6f6 50%, #eeeeee 52%, #f4f4f4 100%); background: linear-gradient(top, #ffffff 20%, #f6f6f6 50%, #eeeeee 52%, #f4f4f4 100%); background-clip: padding-box; box-shadow: 0 0 3px white inset, 0 1px 1px rgba(0, 0, 0, 0.1); color: #444; text-decoration: none; white-space: nowrap; line-height: 24px; } .chosen-container-single .chosen-default { color: #999; } .chosen-container-single .chosen-single span { display: block; overflow: hidden; margin-right: 26px; text-overflow: ellipsis; white-space: nowrap; } .chosen-container-single .chosen-single-with-deselect span { margin-right: 50px; } .chosen-container-single .chosen-single abbr { position: absolute; top: 6px; right: 26px; display: block; width: 12px; height: 12px; background: 
url('chosen-sprite.png') -42px 1px no-repeat; font-size: 1px; } .chosen-container-single .chosen-single abbr:hover { background-position: -42px -10px; } .chosen-container-single.chosen-disabled .chosen-single abbr:hover { background-position: -42px -10px; } .chosen-container-single .chosen-single div { position: absolute; top: 0; right: 0; display: block; width: 18px; height: 100%; } .chosen-container-single .chosen-single div b { display: block; width: 100%; height: 100%; background: url('chosen-sprite.png') no-repeat 0px 2px; } .chosen-container-single .chosen-search { position: relative; z-index: 1010; margin: 0; padding: 3px 4px; white-space: nowrap; } .chosen-container-single .chosen-search input[type="text"] { -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; margin: 1px 0; padding: 4px 20px 4px 5px; width: 100%; height: auto; outline: 0; border: 1px solid #aaa; background: white url('chosen-sprite.png') no-repeat 100% -20px; background: url('chosen-sprite.png') no-repeat 100% -20px, -webkit-gradient(linear, 50% 0%, 50% 100%, color-stop(1%, #eeeeee), color-stop(15%, #ffffff)); background: url('chosen-sprite.png') no-repeat 100% -20px, -webkit-linear-gradient(#eeeeee 1%, #ffffff 15%); background: url('chosen-sprite.png') no-repeat 100% -20px, -moz-linear-gradient(#eeeeee 1%, #ffffff 15%); background: url('chosen-sprite.png') no-repeat 100% -20px, -o-linear-gradient(#eeeeee 1%, #ffffff 15%); background: url('chosen-sprite.png') no-repeat 100% -20px, linear-gradient(#eeeeee 1%, #ffffff 15%); font-size: 1em; font-family: sans-serif; line-height: normal; border-radius: 0; } .chosen-container-single .chosen-drop { margin-top: -1px; border-radius: 0 0 4px 4px; background-clip: padding-box; } .chosen-container-single.chosen-container-single-nosearch .chosen-search { position: absolute; left: -9999px; } /* @end */ /* @group Results */ .chosen-container .chosen-results { position: relative; overflow-x: hidden; overflow-y: auto; 
margin: 0 4px 4px 0; padding: 0 0 0 4px; max-height: 240px; -webkit-overflow-scrolling: touch; } .chosen-container .chosen-results li { display: none; margin: 0; padding: 5px 6px; list-style: none; line-height: 15px; -webkit-touch-callout: none; } .chosen-container .chosen-results li.active-result { display: list-item; cursor: pointer; } .chosen-container .chosen-results li.disabled-result { display: list-item; color: #ccc; cursor: default; } .chosen-container .chosen-results li.highlighted { background-color: #3875d7; background-image: -webkit-gradient(linear, 50% 0%, 50% 100%, color-stop(20%, #3875d7), color-stop(90%, #2a62bc)); background-image: -webkit-linear-gradient(#3875d7 20%, #2a62bc 90%); background-image: -moz-linear-gradient(#3875d7 20%, #2a62bc 90%); background-image: -o-linear-gradient(#3875d7 20%, #2a62bc 90%); background-image: linear-gradient(#3875d7 20%, #2a62bc 90%); color: #fff; } .chosen-container .chosen-results li.no-results { display: list-item; background: #f4f4f4; } .chosen-container .chosen-results li.group-result { display: list-item; font-weight: bold; cursor: default; } .chosen-container .chosen-results li.group-option { padding-left: 15px; } .chosen-container .chosen-results li em { font-style: normal; text-decoration: underline; } /* @end */ /* @group Multi Chosen */ .chosen-container-multi .chosen-choices { position: relative; overflow: hidden; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; box-sizing: border-box; margin: 0; padding: 0; width: 100%; height: auto !important; height: 1%; border: 1px solid #aaa; background-color: #fff; background-image: -webkit-gradient(linear, 50% 0%, 50% 100%, color-stop(1%, #eeeeee), color-stop(15%, #ffffff)); background-image: -webkit-linear-gradient(#eeeeee 1%, #ffffff 15%); background-image: -moz-linear-gradient(#eeeeee 1%, #ffffff 15%); background-image: -o-linear-gradient(#eeeeee 1%, #ffffff 15%); background-image: linear-gradient(#eeeeee 1%, #ffffff 15%); cursor: text; } 
.chosen-container-multi .chosen-choices li { float: left; list-style: none; } .chosen-container-multi .chosen-choices li.search-field { margin: 0; padding: 0; white-space: nowrap; } .chosen-container-multi .chosen-choices li.search-field input[type="text"] { margin: 1px 0; padding: 5px; height: 15px; outline: 0; border: 0 !important; background: transparent !important; box-shadow: none; color: #666; font-size: 100%; font-family: sans-serif; line-height: normal; border-radius: 0; } .chosen-container-multi .chosen-choices li.search-field .default { color: #999; } .chosen-container-multi .chosen-choices li.search-choice { position: relative; margin: 3px 0 3px 5px; padding: 3px 20px 3px 5px; border: 1px solid #aaa; border-radius: 3px; background-color: #e4e4e4; background-image: -webkit-gradient(linear, 50% 0%, 50% 100%, color-stop(20%, #f4f4f4), color-stop(50%, #f0f0f0), color-stop(52%, #e8e8e8), color-stop(100%, #eeeeee)); background-image: -webkit-linear-gradient(#f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eeeeee 100%); background-image: -moz-linear-gradient(#f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eeeeee 100%); background-image: -o-linear-gradient(#f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eeeeee 100%); background-image: linear-gradient(#f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eeeeee 100%); background-clip: padding-box; box-shadow: 0 0 2px white inset, 0 1px 0 rgba(0, 0, 0, 0.05); color: #333; line-height: 13px; cursor: default; } .chosen-container-multi .chosen-choices li.search-choice .search-choice-close { position: absolute; top: 4px; right: 3px; display: block; width: 12px; height: 12px; background: url('chosen-sprite.png') -42px 1px no-repeat; font-size: 1px; } .chosen-container-multi .chosen-choices li.search-choice .search-choice-close:hover { background-position: -42px -10px; } .chosen-container-multi .chosen-choices li.search-choice-disabled { padding-right: 5px; border: 1px solid #ccc; background-color: #e4e4e4; background-image: -webkit-gradient(linear, 50% 0%, 
50% 100%, color-stop(20%, #f4f4f4), color-stop(50%, #f0f0f0), color-stop(52%, #e8e8e8), color-stop(100%, #eeeeee)); background-image: -webkit-linear-gradient(top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eeeeee 100%); background-image: -moz-linear-gradient(top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eeeeee 100%); background-image: -o-linear-gradient(top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eeeeee 100%); background-image: linear-gradient(top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eeeeee 100%); color: #666; } .chosen-container-multi .chosen-choices li.search-choice-focus { background: #d4d4d4; } .chosen-container-multi .chosen-choices li.search-choice-focus .search-choice-close { background-position: -42px -10px; } .chosen-container-multi .chosen-results { margin: 0; padding: 0; } .chosen-container-multi .chosen-drop .result-selected { display: list-item; color: #ccc; cursor: default; } /* @end */ /* @group Active */ .chosen-container-active .chosen-single { border: 1px solid #5897fb; box-shadow: 0 0 5px rgba(0, 0, 0, 0.3); } .chosen-container-active.chosen-with-drop .chosen-single { border: 1px solid #aaa; -moz-border-radius-bottomright: 0; border-bottom-right-radius: 0; -moz-border-radius-bottomleft: 0; border-bottom-left-radius: 0; background-image: -webkit-gradient(linear, 50% 0%, 50% 100%, color-stop(20%, #eeeeee), color-stop(80%, #ffffff)); background-image: -webkit-linear-gradient(#eeeeee 20%, #ffffff 80%); background-image: -moz-linear-gradient(#eeeeee 20%, #ffffff 80%); background-image: -o-linear-gradient(#eeeeee 20%, #ffffff 80%); background-image: linear-gradient(#eeeeee 20%, #ffffff 80%); box-shadow: 0 1px 0 #fff inset; } .chosen-container-active.chosen-with-drop .chosen-single div { border-left: none; background: transparent; } .chosen-container-active.chosen-with-drop .chosen-single div b { background-position: -18px 2px; } .chosen-container-active .chosen-choices { border: 1px solid #5897fb; box-shadow: 0 0 5px rgba(0, 0, 0, 0.3); } 
.chosen-container-active .chosen-choices li.search-field input[type="text"] { color: #111 !important; } /* @end */ /* @group Disabled Support */ .chosen-disabled { opacity: 0.5 !important; cursor: default; } .chosen-disabled .chosen-single { cursor: default; } .chosen-disabled .chosen-choices .search-choice .search-choice-close { cursor: default; } /* @end */ /* @group Right to Left */ .chosen-rtl { text-align: right; } .chosen-rtl .chosen-single { overflow: visible; padding: 0 8px 0 0; } .chosen-rtl .chosen-single span { margin-right: 0; margin-left: 26px; direction: rtl; } .chosen-rtl .chosen-single-with-deselect span { margin-left: 38px; } .chosen-rtl .chosen-single div { right: auto; left: 3px; } .chosen-rtl .chosen-single abbr { right: auto; left: 26px; } .chosen-rtl .chosen-choices li { float: right; } .chosen-rtl .chosen-choices li.search-field input[type="text"] { direction: rtl; } .chosen-rtl .chosen-choices li.search-choice { margin: 3px 5px 3px 0; padding: 3px 5px 3px 19px; } .chosen-rtl .chosen-choices li.search-choice .search-choice-close { right: auto; left: 4px; } .chosen-rtl.chosen-container-single-nosearch .chosen-search, .chosen-rtl .chosen-drop { left: 9999px; } .chosen-rtl.chosen-container-single .chosen-results { margin: 0 0 4px 4px; padding: 0 4px 0 0; } .chosen-rtl .chosen-results li.group-option { padding-right: 15px; padding-left: 0; } .chosen-rtl.chosen-container-active.chosen-with-drop .chosen-single div { border-right: none; } .chosen-rtl .chosen-search input[type="text"] { padding: 4px 5px 4px 20px; background: white url('chosen-sprite.png') no-repeat -30px -20px; background: url('chosen-sprite.png') no-repeat -30px -20px, -webkit-gradient(linear, 50% 0%, 50% 100%, color-stop(1%, #eeeeee), color-stop(15%, #ffffff)); background: url('chosen-sprite.png') no-repeat -30px -20px, -webkit-linear-gradient(#eeeeee 1%, #ffffff 15%); background: url('chosen-sprite.png') no-repeat -30px -20px, -moz-linear-gradient(#eeeeee 1%, #ffffff 15%); 
background: url('chosen-sprite.png') no-repeat -30px -20px, -o-linear-gradient(#eeeeee 1%, #ffffff 15%); background: url('chosen-sprite.png') no-repeat -30px -20px, linear-gradient(#eeeeee 1%, #ffffff 15%); direction: rtl; } .chosen-rtl.chosen-container-single .chosen-single div b { background-position: 6px 2px; } .chosen-rtl.chosen-container-single.chosen-with-drop .chosen-single div b { background-position: -12px 2px; } /* @end */ /* @group Retina compatibility */ @media only screen and (-webkit-min-device-pixel-ratio: 2), only screen and (min-resolution: 144dpi) { .chosen-rtl .chosen-search input[type="text"], .chosen-container-single .chosen-single abbr, .chosen-container-single .chosen-single div b, .chosen-container-single .chosen-search input[type="text"], .chosen-container-multi .chosen-choices .search-choice .search-choice-close, .chosen-container .chosen-results-scroll-down span, .chosen-container .chosen-results-scroll-up span { background-image: url('chosen-sprite@2x.png') !important; background-size: 52px 37px !important; background-repeat: no-repeat !important; } } /* @end */ ================================================ FILE: src/resources/third-party/chosen.js ================================================ /*! Chosen, a Select Box Enhancer for jQuery and Prototype by Patrick Filler for Harvest, http://getharvest.com Version 1.0.0 Full source at https://github.com/harvesthq/chosen Copyright (c) 2011 Harvest http://getharvest.com MIT License, https://github.com/harvesthq/chosen/blob/master/LICENSE.md This file is generated by `grunt build`, do not edit it by hand. 
*/ (function() { var $, AbstractChosen, Chosen, SelectParser, _ref, __hasProp = {}.hasOwnProperty, __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }; SelectParser = (function() { function SelectParser() { this.options_index = 0; this.parsed = []; } SelectParser.prototype.add_node = function(child) { if (child.nodeName.toUpperCase() === "OPTGROUP") { return this.add_group(child); } else { return this.add_option(child); } }; SelectParser.prototype.add_group = function(group) { var group_position, option, _i, _len, _ref, _results; group_position = this.parsed.length; this.parsed.push({ array_index: group_position, group: true, label: this.escapeExpression(group.label), children: 0, disabled: group.disabled }); _ref = group.childNodes; _results = []; for (_i = 0, _len = _ref.length; _i < _len; _i++) { option = _ref[_i]; _results.push(this.add_option(option, group_position, group.disabled)); } return _results; }; SelectParser.prototype.add_option = function(option, group_position, group_disabled) { if (option.nodeName.toUpperCase() === "OPTION") { if (option.text !== "") { if (group_position != null) { this.parsed[group_position].children += 1; } this.parsed.push({ array_index: this.parsed.length, options_index: this.options_index, value: option.value, text: option.getAttribute("data-text") || option.text, html: option.getAttribute("data-html") || option.innerHTML, selected: option.selected, disabled: group_disabled === true ? 
group_disabled : option.disabled, group_array_index: group_position, classes: option.className, style: option.style.cssText }); } else { this.parsed.push({ array_index: this.parsed.length, options_index: this.options_index, empty: true }); } return this.options_index += 1; } }; SelectParser.prototype.escapeExpression = function(text) { var map, unsafe_chars; if ((text == null) || text === false) { return ""; } if (!/[\&\<\>\"\'\`]/.test(text)) { return text; } map = { "<": "<", ">": ">", '"': """, "'": "'", "`": "`" }; unsafe_chars = /&(?!\w+;)|[\<\>\"\'\`]/g; return text.replace(unsafe_chars, function(chr) { return map[chr] || "&"; }); }; return SelectParser; })(); SelectParser.select_to_array = function(select) { var child, parser, _i, _len, _ref; parser = new SelectParser(); _ref = select.childNodes; for (_i = 0, _len = _ref.length; _i < _len; _i++) { child = _ref[_i]; parser.add_node(child); } return parser.parsed; }; AbstractChosen = (function() { function AbstractChosen(form_field, options) { this.form_field = form_field; this.options = options != null ? options : {}; if (!AbstractChosen.browser_is_supported()) { return; } this.is_multiple = this.form_field.multiple; this.set_default_text(); this.set_default_values(); this.setup(); this.set_up_html(); this.register_observers(); } AbstractChosen.prototype.set_default_values = function() { var _this = this; this.click_test_action = function(evt) { return _this.test_active_click(evt); }; this.activate_action = function(evt) { return _this.activate_field(evt); }; this.active_field = false; this.mouse_on_container = false; this.results_showing = false; this.result_highlighted = null; this.allow_single_deselect = (this.options.allow_single_deselect != null) && (this.form_field.options[0] != null) && this.form_field.options[0].text === "" ? 
this.options.allow_single_deselect : false; this.disable_search_threshold = this.options.disable_search_threshold || 0; this.disable_search = this.options.disable_search || false; this.enable_split_word_search = this.options.enable_split_word_search != null ? this.options.enable_split_word_search : true; this.group_search = this.options.group_search != null ? this.options.group_search : true; this.search_contains = this.options.search_contains || false; this.single_backstroke_delete = this.options.single_backstroke_delete != null ? this.options.single_backstroke_delete : true; this.max_selected_options = this.options.max_selected_options || Infinity; this.inherit_select_classes = this.options.inherit_select_classes || false; this.display_selected_options = this.options.display_selected_options != null ? this.options.display_selected_options : true; return this.display_disabled_options = this.options.display_disabled_options != null ? this.options.display_disabled_options : true; }; AbstractChosen.prototype.set_default_text = function() { if (this.form_field.getAttribute("data-placeholder")) { this.default_text = this.form_field.getAttribute("data-placeholder"); } else if (this.is_multiple) { this.default_text = this.options.placeholder_text_multiple || this.options.placeholder_text || AbstractChosen.default_multiple_text; } else { this.default_text = this.options.placeholder_text_single || this.options.placeholder_text || AbstractChosen.default_single_text; } return this.results_none_found = this.form_field.getAttribute("data-no_results_text") || this.options.no_results_text || AbstractChosen.default_no_result_text; }; AbstractChosen.prototype.mouse_enter = function() { return this.mouse_on_container = true; }; AbstractChosen.prototype.mouse_leave = function() { return this.mouse_on_container = false; }; AbstractChosen.prototype.input_focus = function(evt) { var _this = this; if (this.is_multiple) { if (!this.active_field) { return setTimeout((function() { return 
_this.container_mousedown(); }), 50); } } else { if (!this.active_field) { return this.activate_field(); } } }; AbstractChosen.prototype.input_blur = function(evt) { var _this = this; if (!this.mouse_on_container) { this.active_field = false; return setTimeout((function() { return _this.blur_test(); }), 100); } }; AbstractChosen.prototype.results_option_build = function(options) { var content, data, _i, _len, _ref; content = ''; _ref = this.results_data; for (_i = 0, _len = _ref.length; _i < _len; _i++) { data = _ref[_i]; if (data.group) { content += this.result_add_group(data); } else { content += this.result_add_option(data); } if (options != null ? options.first : void 0) { if (data.selected && this.is_multiple) { this.choice_build(data); } else if (data.selected && !this.is_multiple) { this.single_set_selected_text(this.selected_value(data)); } } } return content; }; AbstractChosen.prototype.result_add_option = function(option) { var classes, option_el; if (!option.search_match) { return ''; } if (!this.include_option_in_results(option)) { return ''; } classes = []; if (!option.disabled && !(option.selected && this.is_multiple)) { classes.push("active-result"); } if (option.disabled && !(option.selected && this.is_multiple)) { classes.push("disabled-result"); } if (option.selected) { classes.push("result-selected"); } if (option.group_array_index != null) { classes.push("group-option"); } if (option.classes !== "") { classes.push(option.classes); } option_el = document.createElement("li"); option_el.className = classes.join(" "); option_el.style.cssText = option.style; option_el.setAttribute("data-option-array-index", option.array_index); option_el.innerHTML = this.get_search_text() ? 
option.search_text : option.html; return this.outerHTML(option_el); }; AbstractChosen.prototype.result_add_group = function(group) { var group_el; if (!(group.search_match || group.group_match)) { return ''; } if (!(group.active_options > 0)) { return ''; } group_el = document.createElement("li"); group_el.className = "group-result"; group_el.innerHTML = group.search_text; return this.outerHTML(group_el); }; AbstractChosen.prototype.results_update_field = function() { this.set_default_text(); if (!this.is_multiple) { this.results_reset_cleanup(); } this.result_clear_highlight(); this.results_build(); if (this.results_showing) { return this.winnow_results(); } }; AbstractChosen.prototype.reset_single_select_options = function() { var result, _i, _len, _ref, _results; _ref = this.results_data; _results = []; for (_i = 0, _len = _ref.length; _i < _len; _i++) { result = _ref[_i]; if (result.selected) { _results.push(result.selected = false); } else { _results.push(void 0); } } return _results; }; AbstractChosen.prototype.results_toggle = function() { if (this.results_showing) { return this.results_hide(); } else { return this.results_show(); } }; AbstractChosen.prototype.results_search = function(evt) { if (this.results_showing) { return this.winnow_results(); } else { return this.results_show(); } }; AbstractChosen.prototype.winnow_results = function() { var escapedSearchText, option, regex, regexAnchor, results, results_group, searchText, startpos, text, zregex, _i, _len, _ref; this.no_results_clear(); results = 0; searchText = this.get_search_text(); escapedSearchText = searchText.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); regexAnchor = this.search_contains ? 
"" : "^"; regex = new RegExp(regexAnchor + escapedSearchText, 'i'); zregex = new RegExp(escapedSearchText, 'i'); _ref = this.results_data; for (_i = 0, _len = _ref.length; _i < _len; _i++) { option = _ref[_i]; option.search_match = false; results_group = null; if (this.include_option_in_results(option)) { if (option.group) { option.group_match = false; option.active_options = 0; } if ((option.group_array_index != null) && this.results_data[option.group_array_index]) { results_group = this.results_data[option.group_array_index]; if (results_group.active_options === 0 && results_group.search_match) { results += 1; } results_group.active_options += 1; } if (!(option.group && !this.group_search)) { option.search_text = option.group ? option.label : option.text; option.search_match = this.search_string_match(option.search_text, regex); if (option.search_match && !option.group) { results += 1; } if (option.search_match) { if (searchText.length) { startpos = option.search_text.search(zregex); text = option.search_text.substr(0, startpos + searchText.length) + '</em>' + option.search_text.substr(startpos + searchText.length); option.search_text = text.substr(0, startpos) + '<em>' + text.substr(startpos); } if (results_group != null) { results_group.group_match = true; } } else if ((option.group_array_index != null) && this.results_data[option.group_array_index].search_match) { option.search_match = true; } } } } this.result_clear_highlight(); if (results < 1 && searchText.length) { this.update_results_content(""); return this.no_results(searchText); } else { this.update_results_content(this.results_option_build()); return this.winnow_results_set_highlight(); } }; AbstractChosen.prototype.search_string_match = function(search_string, regex) { var part, parts, _i, _len; if (regex.test(search_string)) { return true; } else if (this.enable_split_word_search && (search_string.indexOf(" ") >= 0 || search_string.indexOf("[") === 0)) { parts = search_string.replace(/\[|\]/g, 
"").split(" "); if (parts.length) { for (_i = 0, _len = parts.length; _i < _len; _i++) { part = parts[_i]; if (regex.test(part)) { return true; } } } } }; AbstractChosen.prototype.choices_count = function() { var option, _i, _len, _ref; if (this.selected_option_count != null) { return this.selected_option_count; } this.selected_option_count = 0; _ref = this.form_field.options; for (_i = 0, _len = _ref.length; _i < _len; _i++) { option = _ref[_i]; if (option.selected) { this.selected_option_count += 1; } } return this.selected_option_count; }; AbstractChosen.prototype.choices_click = function(evt) { evt.preventDefault(); if (!(this.results_showing || this.is_disabled)) { return this.results_show(); } }; AbstractChosen.prototype.keyup_checker = function(evt) { var stroke, _ref; stroke = (_ref = evt.which) != null ? _ref : evt.keyCode; this.search_field_scale(); switch (stroke) { case 8: if (this.is_multiple && this.backstroke_length < 1 && this.choices_count() > 0) { return this.keydown_backstroke(); } else if (!this.pending_backstroke) { this.result_clear_highlight(); return this.results_search(); } break; case 13: evt.preventDefault(); if (this.results_showing) { return this.result_select(evt); } break; case 27: if (this.results_showing) { this.results_hide(); } return true; case 9: case 38: case 40: case 16: case 91: case 17: break; default: return this.results_search(); } }; AbstractChosen.prototype.container_width = function(mode, initial) { if (mode === "collapsed" && this.options.collapsed_width) { return this.options.collapsed_width; } else if (mode === "expanded" && this.options.expanded_width) { return this.options.expanded_width; } else if (initial) { if (this.options.width != null) { return this.options.width; } else { return "" + this.form_field.offsetWidth + "px"; } } }; AbstractChosen.prototype.include_option_in_results = function(option) { if (this.is_multiple && (!this.display_selected_options && option.selected)) { return false; } if 
(!this.display_disabled_options && option.disabled) { return false; } if (option.empty) { return false; } return true; }; AbstractChosen.prototype.search_results_touchstart = function(evt) { this.touch_started = true; return this.search_results_mouseover(evt); }; AbstractChosen.prototype.search_results_touchmove = function(evt) { this.touch_started = false; return this.search_results_mouseout(evt); }; AbstractChosen.prototype.search_results_touchend = function(evt) { if (this.touch_started) { return this.search_results_mouseup(evt); } }; AbstractChosen.prototype.outerHTML = function(element) { var tmp; if (element.outerHTML) { return element.outerHTML; } tmp = document.createElement("div"); tmp.appendChild(element); return tmp.innerHTML; }; AbstractChosen.browser_is_supported = function() { if (window.navigator.appName === "Microsoft Internet Explorer") { return document.documentMode >= 8; } if (/iP(od|hone)/i.test(window.navigator.userAgent)) { return false; } if (/Android/i.test(window.navigator.userAgent)) { if (/Mobile/i.test(window.navigator.userAgent)) { return false; } } return true; }; AbstractChosen.default_multiple_text = "Select Some Options"; AbstractChosen.default_single_text = "Select an Option"; AbstractChosen.default_no_result_text = "No results match"; return AbstractChosen; })(); $ = jQuery; $.fn.extend({ chosen: function(options) { if (!AbstractChosen.browser_is_supported()) { return this; } return this.each(function(input_field) { var $this, chosen; $this = $(this); chosen = $this.data('chosen'); if (options === 'destroy' && chosen) { chosen.destroy(); } else if (!chosen) { $this.data('chosen', new Chosen(this, options)); } }); } }); Chosen = (function(_super) { __extends(Chosen, _super); function Chosen() { _ref = Chosen.__super__.constructor.apply(this, arguments); return _ref; } Chosen.prototype.setup = function() { this.form_field_jq = $(this.form_field); this.current_selectedIndex = this.form_field.selectedIndex; return this.is_rtl = 
this.form_field_jq.hasClass("chosen-rtl"); }; Chosen.prototype.set_up_html = function() { var container_classes, container_props; container_classes = ["chosen-container"]; container_classes.push("chosen-container-" + (this.is_multiple ? "multi" : "single")); if (this.inherit_select_classes && this.form_field.className) { container_classes.push(this.form_field.className); } if (this.is_rtl) { container_classes.push("chosen-rtl"); } container_props = { 'class': container_classes.join(' '), 'title': this.form_field.title }; if (this.form_field.id.length) { container_props.id = this.form_field.id.replace(/[^\w]/g, '_') + "_chosen"; } this.main_container = $("<div />", container_props); this.drop_container = $("<div />", container_props); this.container = $([this.main_container.get(0), this.drop_container.get(0)]); this.container.css({ width: this.container_width("collapsed", true) }); if (this.is_multiple) { this.main_container.html('<ul class="chosen-choices"><li class="search-field"><input type="text" value="' + this.default_text + '" class="default" autocomplete="off" style="width:25px;" /></li></ul>'); this.drop_container.html('<div class="chosen-drop"><ul class="chosen-results"></ul></div>'); } else { this.main_container.html('<a class="chosen-single chosen-default" tabindex="-1"><span>' + this.default_text + '</span><div><b></b></div></a>'); this.drop_container.html('<div class="chosen-drop"><div class="chosen-search"><input type="text" autocomplete="off" /></div><ul class="chosen-results"></ul></div>'); } this.form_field_jq.hide().after(this.main_container); this.dropdown = this.drop_container.first(); this.search_field = this.container.find('input').first(); this.search_results = this.drop_container.find('ul.chosen-results').first(); this.search_field_scale(); this.search_no_results = this.container.find('li.no-results').first(); if (this.is_multiple) { this.search_choices = this.container.find('ul.chosen-choices').first(); this.search_container = 
this.container.find('li.search-field').first(); } else { this.search_container = this.container.find('div.chosen-search').first(); this.selected_item = this.container.find('.chosen-single').first(); } this.drop_container.css({ position: "absolute", left: "-9999px", top: 0 }); $("body").append(this.drop_container); this.results_build(); this.set_tab_index(); this.set_label_behavior(); return this.form_field_jq.trigger("chosen:ready", { chosen: this }); }; Chosen.prototype.register_observers = function() { var _this = this; this.container.bind('mousedown.chosen', function(evt) { _this.container_mousedown(evt); }); this.container.bind('mouseup.chosen', function(evt) { _this.container_mouseup(evt); }); this.container.bind('mouseenter.chosen', function(evt) { _this.mouse_enter(evt); }); this.container.bind('mouseleave.chosen', function(evt) { _this.mouse_leave(evt); }); this.search_results.bind('mouseup.chosen', function(evt) { _this.search_results_mouseup(evt); }); this.search_results.bind('mouseover.chosen', function(evt) { _this.search_results_mouseover(evt); }); this.search_results.bind('mouseout.chosen', function(evt) { _this.search_results_mouseout(evt); }); this.search_results.bind('mousewheel.chosen DOMMouseScroll.chosen', function(evt) { _this.search_results_mousewheel(evt); }); this.search_results.bind('touchstart.chosen', function(evt) { _this.search_results_touchstart(evt); }); this.search_results.bind('touchmove.chosen', function(evt) { _this.search_results_touchmove(evt); }); this.search_results.bind('touchend.chosen', function(evt) { _this.search_results_touchend(evt); }); this.form_field_jq.bind("chosen:updated.chosen", function(evt) { _this.results_update_field(evt); }); this.form_field_jq.bind("chosen:activate.chosen", function(evt) { _this.activate_field(evt); }); this.form_field_jq.bind("chosen:open.chosen", function(evt) { _this.container_mousedown(evt); }); this.form_field_jq.bind("chosen:close.chosen", function(evt) { _this.input_blur(evt); }); 
this.search_field.bind('blur.chosen', function(evt) { _this.input_blur(evt); }); this.search_field.bind('keyup.chosen', function(evt) { _this.keyup_checker(evt); }); this.search_field.bind('keydown.chosen', function(evt) { _this.keydown_checker(evt); }); this.search_field.bind('focus.chosen', function(evt) { _this.input_focus(evt); }); if (this.is_multiple) { return this.search_choices.bind('click.chosen', function(evt) { _this.choices_click(evt); }); } else { return this.container.bind('click.chosen', function(evt) { evt.preventDefault(); }); } }; Chosen.prototype.destroy = function() { $(document).unbind("click.chosen", this.click_test_action); if (this.search_field[0].tabIndex) { this.form_field_jq[0].tabIndex = this.search_field[0].tabIndex; } this.container.remove(); this.form_field_jq.removeData('chosen'); return this.form_field_jq.show(); }; Chosen.prototype.search_field_disabled = function() { this.is_disabled = this.form_field_jq[0].disabled; if (this.is_disabled) { this.container.addClass('chosen-disabled'); this.search_field[0].disabled = true; if (!this.is_multiple) { this.selected_item.unbind("focus.chosen", this.activate_action); } return this.close_field(); } else { this.container.removeClass('chosen-disabled'); this.search_field[0].disabled = false; if (!this.is_multiple) { return this.selected_item.bind("focus.chosen", this.activate_action); } } }; Chosen.prototype.container_mousedown = function(evt) { if (!this.is_disabled) { if (evt && evt.type === "mousedown" && !this.results_showing) { evt.preventDefault(); } if (!((evt != null) && ($(evt.target)).hasClass("search-choice-close"))) { if (!this.active_field) { if (this.is_multiple) { this.search_field.val(""); } $(document).bind('click.chosen', this.click_test_action); this.results_show(); } else if (!this.is_multiple && evt && (($(evt.target)[0] === this.selected_item[0]) || $(evt.target).parents("a.chosen-single").length)) { evt.preventDefault(); this.results_toggle(); } return 
this.activate_field(); } } }; Chosen.prototype.container_mouseup = function(evt) { if (evt.target.nodeName === "ABBR" && !this.is_disabled) { return this.results_reset(evt); } }; Chosen.prototype.search_results_mousewheel = function(evt) { var delta; if (evt.originalEvent) { delta = -evt.originalEvent.wheelDelta || evt.originalEvent.detail; } if (delta != null) { evt.preventDefault(); if (evt.type === 'DOMMouseScroll') { delta = delta * 40; } return this.search_results.scrollTop(delta + this.search_results.scrollTop()); } }; Chosen.prototype.blur_test = function(evt) { if (!this.active_field && this.container.hasClass("chosen-container-active")) { return this.close_field(); } }; Chosen.prototype.close_field = function() { $(document).unbind("click.chosen", this.click_test_action); this.active_field = false; this.results_hide(); this.container.removeClass("chosen-container-active"); this.clear_backstroke(); this.show_search_field_default(); return this.search_field_scale(); }; Chosen.prototype.activate_field = function() { this.container.addClass("chosen-container-active"); this.active_field = true; this.search_field.val(this.search_field.val()); return this.search_field.focus(); }; Chosen.prototype.test_active_click = function(evt) { var active_container; active_container = $(evt.target).closest('.chosen-container'); if (active_container.length && this.container[0] === active_container[0]) { return this.active_field = true; } else { return this.close_field(); } }; Chosen.prototype.results_build = function() { this.parsing = true; this.selected_option_count = null; this.results_data = SelectParser.select_to_array(this.form_field); if (this.is_multiple) { this.search_choices.find("li.search-choice").remove(); } else if (!this.is_multiple) { this.single_set_selected_text(null, true); if (this.disable_search || this.form_field.options.length <= this.disable_search_threshold) { this.search_field[0].readOnly = true; 
this.container.addClass("chosen-container-single-nosearch"); } else { this.search_field[0].readOnly = false; this.container.removeClass("chosen-container-single-nosearch"); } } this.update_results_content(this.results_option_build({ first: true })); this.search_field_disabled(); this.show_search_field_default(); this.search_field_scale(); return this.parsing = false; }; Chosen.prototype.result_do_highlight = function(el) { var high_bottom, high_top, maxHeight, visible_bottom, visible_top; if (el.length) { this.result_clear_highlight(); this.result_highlight = el; this.result_highlight.addClass("highlighted"); maxHeight = parseInt(this.search_results.css("maxHeight"), 10); visible_top = this.search_results.scrollTop(); visible_bottom = maxHeight + visible_top; high_top = this.result_highlight.position().top + this.search_results.scrollTop(); high_bottom = high_top + this.result_highlight.outerHeight(); if (high_bottom >= visible_bottom) { return this.search_results.scrollTop((high_bottom - maxHeight) > 0 ? 
high_bottom - maxHeight : 0); } else if (high_top < visible_top) { return this.search_results.scrollTop(high_top); } } }; Chosen.prototype.result_clear_highlight = function() { if (this.result_highlight) { this.result_highlight.removeClass("highlighted"); } return this.result_highlight = null; }; Chosen.prototype.results_realign = function() { return this.drop_container.css({ position: "absolute", left: this.main_container.offset().left, top: this.main_container.offset().top + this.main_container.height(), width: this.main_container.width() }); }; Chosen.prototype.results_show = function() { var new_width, self, _this = this; if (this.is_multiple && this.max_selected_options <= this.choices_count()) { this.form_field_jq.trigger("chosen:maxselected", { chosen: this }); return false; } this.container.addClass("chosen-with-drop"); new_width = this.container_width("expanded"); if (new_width) { this.container.css({ width: new_width }); } this.form_field_jq.trigger("chosen:showing_dropdown", { chosen: this }); this.results_realign(); this.results_showing = true; this.search_field.focus(); this.search_field.val(this.search_field.val()); self = this; $(window).resize(function() { if (self.results_showing) { return self.results_realign(); } }); return this.winnow_results(); }; Chosen.prototype.update_results_content = function(content) { return this.search_results.html(content); }; Chosen.prototype.results_hide = function() { var new_width; if (this.results_showing) { this.result_clear_highlight(); new_width = this.container_width("collapsed"); if (new_width) { this.container.css({ width: new_width }); } this.container.removeClass("chosen-with-drop"); this.form_field_jq.trigger("chosen:hiding_dropdown", { chosen: this }); } return this.results_showing = false; }; Chosen.prototype.set_tab_index = function(el) { var ti; if (this.form_field.tabIndex) { ti = this.form_field.tabIndex; this.form_field.tabIndex = -1; return this.search_field[0].tabIndex = ti; } }; 
Chosen.prototype.set_label_behavior = function() { var _this = this; this.form_field_label = this.form_field_jq.parents("label"); if (!this.form_field_label.length && this.form_field.id.length) { this.form_field_label = $("label[for='" + this.form_field.id + "']"); } if (this.form_field_label.length > 0) { return this.form_field_label.bind('click.chosen', function(evt) { if (_this.is_multiple) { return _this.container_mousedown(evt); } else { return _this.activate_field(); } }); } }; Chosen.prototype.show_search_field_default = function() { if (this.is_multiple && this.choices_count() < 1 && !this.active_field) { this.search_field.val(this.default_text); return this.search_field.addClass("default"); } else { this.search_field.val(""); return this.search_field.removeClass("default"); } }; Chosen.prototype.search_results_mouseup = function(evt) { var target; target = $(evt.target).hasClass("active-result") ? $(evt.target) : $(evt.target).parents(".active-result").first(); if (target.length) { this.result_highlight = target; this.result_select(evt); return this.search_field.focus(); } }; Chosen.prototype.search_results_mouseover = function(evt) { var target; target = $(evt.target).hasClass("active-result") ? 
$(evt.target) : $(evt.target).parents(".active-result").first(); if (target) { return this.result_do_highlight(target); } }; Chosen.prototype.search_results_mouseout = function(evt) { if ($(evt.target).hasClass("active-result" || $(evt.target).parents('.active-result').first())) { return this.result_clear_highlight(); } }; Chosen.prototype.choice_build = function(item) { var choice, close_link, _this = this; choice = $('<li></li>', { "class": "search-choice" }).html("<span></span>"); choice.find("span").text(item.text); if (item.disabled) { choice.addClass('search-choice-disabled'); } else { close_link = $('<a />', { "class": 'search-choice-close', 'data-option-array-index': item.array_index }); close_link.bind('click.chosen', function(evt) { return _this.choice_destroy_link_click(evt); }); choice.append(close_link); } return this.search_container.before(choice); }; Chosen.prototype.choice_destroy_link_click = function(evt) { evt.preventDefault(); evt.stopPropagation(); if (!this.is_disabled) { return this.choice_destroy($(evt.target)); } }; Chosen.prototype.choice_destroy = function(link) { if (this.result_deselect(link[0].getAttribute("data-option-array-index"))) { this.show_search_field_default(); if (this.is_multiple && this.choices_count() > 0 && this.search_field.val().length < 1) { this.results_hide(); } link.parents('li').first().remove(); return this.search_field_scale(); } }; Chosen.prototype.results_reset = function() { this.reset_single_select_options(); this.form_field.options[0].selected = true; this.single_set_selected_text(null, true); this.show_search_field_default(); this.results_reset_cleanup(); this.form_field_jq.trigger("change"); if (this.active_field) { return this.results_hide(); } }; Chosen.prototype.results_reset_cleanup = function() { this.current_selectedIndex = this.form_field.selectedIndex; return this.selected_item.find("abbr").remove(); }; Chosen.prototype.result_select = function(evt) { var high, item; if (this.result_highlight) { 
high = this.result_highlight; this.result_clear_highlight(); if (this.is_multiple && this.max_selected_options <= this.choices_count()) { this.form_field_jq.trigger("chosen:maxselected", { chosen: this }); return false; } if (this.is_multiple) { high.removeClass("active-result"); } else { this.reset_single_select_options(); } item = this.results_data[high[0].getAttribute("data-option-array-index")]; item.selected = true; this.form_field.options[item.options_index].selected = true; this.selected_option_count = null; if (this.is_multiple) { this.choice_build(item); } else { this.single_set_selected_text(this.selected_value(item)); } if (!((evt.metaKey || evt.ctrlKey) && this.is_multiple)) { this.results_hide(); } this.search_field.val(""); if (this.is_multiple || this.form_field.selectedIndex !== this.current_selectedIndex) { this.form_field_jq.trigger("change", { 'selected': this.form_field.options[item.options_index].value }); } this.current_selectedIndex = this.form_field.selectedIndex; return this.search_field_scale(); } }; Chosen.prototype.single_set_selected_text = function(value, set_default) { if (set_default) { this.selected_item.addClass("chosen-default"); if (value === null) { value = { text: this.default_text }; } } else { this.single_deselect_control_build(); this.selected_item.removeClass("chosen-default"); } if (value.text) { return this.selected_item.find("span").text(value.text); } else { return this.selected_item.find("span").html(value.html); } }; Chosen.prototype.result_deselect = function(pos) { var result_data; result_data = this.results_data[pos]; if (!this.form_field.options[result_data.options_index].disabled) { result_data.selected = false; this.form_field.options[result_data.options_index].selected = false; this.selected_option_count = null; this.result_clear_highlight(); if (this.results_showing) { this.winnow_results(); } this.form_field_jq.trigger("change", { deselected: this.form_field.options[result_data.options_index].value }); 
this.search_field_scale(); return true; } else { return false; } }; Chosen.prototype.single_deselect_control_build = function() { if (!this.allow_single_deselect) { return; } if (!this.selected_item.find("abbr").length) { this.selected_item.find("span").first().after("<abbr class=\"search-choice-close\"></abbr>"); } return this.selected_item.addClass("chosen-single-with-deselect"); }; Chosen.prototype.get_search_text = function() { if (this.search_field.val() === this.default_text) { return ""; } else { return $('<div/>').text($.trim(this.search_field.val())).html(); } }; Chosen.prototype.winnow_results_set_highlight = function() { var do_high, selected_results; selected_results = !this.is_multiple ? this.search_results.find(".result-selected.active-result") : []; do_high = selected_results.length ? selected_results.first() : this.search_results.find(".active-result").first(); if (do_high != null) { return this.result_do_highlight(do_high); } }; Chosen.prototype.no_results = function(terms) { var no_results_html; no_results_html = $('<li class="no-results">' + this.results_none_found + ' "<span></span>"</li>'); no_results_html.find("span").first().html(terms); return this.search_results.append(no_results_html); }; Chosen.prototype.no_results_clear = function() { return this.search_results.find(".no-results").remove(); }; Chosen.prototype.keydown_arrow = function() { var next_sib; if (this.results_showing && this.result_highlight) { next_sib = this.result_highlight.nextAll("li.active-result").first(); if (next_sib) { return this.result_do_highlight(next_sib); } } else { return this.results_show(); } }; Chosen.prototype.keyup_arrow = function() { var prev_sibs; if (!this.results_showing && !this.is_multiple) { return this.results_show(); } else if (this.result_highlight) { prev_sibs = this.result_highlight.prevAll("li.active-result"); if (prev_sibs.length) { return this.result_do_highlight(prev_sibs.first()); } else { if (this.choices_count() > 0) { 
this.results_hide(); } return this.result_clear_highlight(); } } }; Chosen.prototype.keydown_backstroke = function() { var next_available_destroy; if (this.pending_backstroke) { this.choice_destroy(this.pending_backstroke.find("a").first()); return this.clear_backstroke(); } else { next_available_destroy = this.search_container.siblings("li.search-choice").last(); if (next_available_destroy.length && !next_available_destroy.hasClass("search-choice-disabled")) { this.pending_backstroke = next_available_destroy; if (this.single_backstroke_delete) { return this.keydown_backstroke(); } else { return this.pending_backstroke.addClass("search-choice-focus"); } } } }; Chosen.prototype.clear_backstroke = function() { if (this.pending_backstroke) { this.pending_backstroke.removeClass("search-choice-focus"); } return this.pending_backstroke = null; }; Chosen.prototype.keydown_checker = function(evt) { var stroke, _ref1; stroke = (_ref1 = evt.which) != null ? _ref1 : evt.keyCode; this.search_field_scale(); if (stroke !== 8 && this.pending_backstroke) { this.clear_backstroke(); } switch (stroke) { case 8: this.backstroke_length = this.search_field.val().length; break; case 9: if (this.results_showing && !this.is_multiple) { this.result_select(evt); } this.mouse_on_container = false; break; case 13: evt.preventDefault(); break; case 38: evt.preventDefault(); this.keyup_arrow(); break; case 40: evt.preventDefault(); this.keydown_arrow(); break; } }; Chosen.prototype.search_field_scale = function() { var div, f_width, h, style, style_block, styles, w, _i, _len; if (this.is_multiple) { h = 0; w = 0; style_block = "position:absolute; left: -1000px; top: -1000px; display:none;"; styles = ['font-size', 'font-style', 'font-weight', 'font-family', 'line-height', 'text-transform', 'letter-spacing']; for (_i = 0, _len = styles.length; _i < _len; _i++) { style = styles[_i]; style_block += style + ":" + this.search_field.css(style) + ";"; } div = $('<div />', { 'style': style_block }); 
div.text(this.search_field.val()); $('body').append(div); w = div.width() + 25; div.remove(); f_width = this.container.outerWidth(); if (w > f_width - 10) { w = f_width - 10; } return this.search_field.css({ 'width': w + 'px' }); } }; Chosen.prototype.selected_value = function(item) { if (this.options.generate_selected_value) { return this.options.generate_selected_value(item); } else { return { text: item.text }; } }; return Chosen; })(AbstractChosen); }).call(this); ================================================ FILE: src/resources/third-party/jquery-ui-1.10.2.custom.css ================================================ /*! jQuery UI - v1.10.2 - 2013-03-22 * http://jqueryui.com * Includes: jquery.ui.core.css, jquery.ui.resizable.css, jquery.ui.selectable.css, jquery.ui.autocomplete.css, jquery.ui.button.css, jquery.ui.dialog.css, jquery.ui.menu.css, jquery.ui.tooltip.css * To view and modify this theme, visit http://jqueryui.com/themeroller/?ffDefault=Helvetica%2CArial%2Csans-serif&fwDefault=normal&fsDefault=1.1em&cornerRadius=6px&bgColorHeader=cb842e&bgTextureHeader=glass&bgImgOpacityHeader=25&borderColorHeader=d49768&fcHeader=ffffff&iconColorHeader=ffffff&bgColorContent=f4f0ec&bgTextureContent=inset_soft&bgImgOpacityContent=100&borderColorContent=e0cfc2&fcContent=1e1b1d&iconColorContent=c47a23&bgColorDefault=ede4d4&bgTextureDefault=glass&bgImgOpacityDefault=70&borderColorDefault=cdc3b7&fcDefault=3f3731&iconColorDefault=f08000&bgColorHover=f5f0e5&bgTextureHover=glass&bgImgOpacityHover=100&borderColorHover=f5ad66&fcHover=a46313&iconColorHover=f08000&bgColorActive=f4f0ec&bgTextureActive=highlight_hard&bgImgOpacityActive=100&borderColorActive=e0cfc2&fcActive=b85700&iconColorActive=f35f07&bgColorHighlight=f5f5b5&bgTextureHighlight=highlight_hard&bgImgOpacityHighlight=75&borderColorHighlight=d9bb73&fcHighlight=060200&iconColorHighlight=cb672b&bgColorError=fee4bd&bgTextureError=highlight_hard&bgImgOpacityError=65&borderColorError=f8893f&fcError=592003&iconColorError=f
f7519&bgColorOverlay=aaaaaa&bgTextureOverlay=flat&bgImgOpacityOverlay=75&opacityOverlay=30&bgColorShadow=aaaaaa&bgTextureShadow=flat&bgImgOpacityShadow=75&opacityShadow=30&thicknessShadow=8px&offsetTopShadow=-8px&offsetLeftShadow=-8px&cornerRadiusShadow=8px * Copyright 2013 jQuery Foundation and other contributors Licensed MIT */.ui-helper-hidden{display:none}.ui-helper-hidden-accessible{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.ui-helper-reset{margin:0;padding:0;border:0;outline:0;line-height:1.3;text-decoration:none;font-size:100%;list-style:none}.ui-helper-clearfix:before,.ui-helper-clearfix:after{content:"";display:table;border-collapse:collapse}.ui-helper-clearfix:after{clear:both}.ui-helper-clearfix{min-height:0}.ui-helper-zfix{width:100%;height:100%;top:0;left:0;position:absolute;opacity:0;filter:Alpha(Opacity=0)}.ui-front{z-index:100}.ui-state-disabled{cursor:default!important}.ui-icon{display:block;text-indent:-99999px;overflow:hidden;background-repeat:no-repeat}.ui-widget-overlay{position:fixed;top:0;left:0;width:100%;height:100%}.ui-resizable{position:relative}.ui-resizable-handle{position:absolute;font-size:.1px;display:block}.ui-resizable-disabled .ui-resizable-handle,.ui-resizable-autohide .ui-resizable-handle{display:none}.ui-resizable-n{cursor:n-resize;height:7px;width:100%;top:-5px;left:0}.ui-resizable-s{cursor:s-resize;height:7px;width:100%;bottom:-5px;left:0}.ui-resizable-e{cursor:e-resize;width:7px;right:-5px;top:0;height:100%}.ui-resizable-w{cursor:w-resize;width:7px;left:-5px;top:0;height:100%}.ui-resizable-se{cursor:se-resize;width:12px;height:12px;right:1px;bottom:1px}.ui-resizable-sw{cursor:sw-resize;width:9px;height:9px;left:-5px;bottom:-5px}.ui-resizable-nw{cursor:nw-resize;width:9px;height:9px;left:-5px;top:-5px}.ui-resizable-ne{cursor:ne-resize;width:9px;height:9px;right:-5px;top:-5px}.ui-selectable-helper{position:absolute;z-index:100;border:1px dotted 
#000}.ui-autocomplete{position:absolute;top:0;left:0;cursor:default}.ui-button{display:inline-block;position:relative;padding:0;line-height:normal;margin-right:.1em;cursor:pointer;vertical-align:middle;text-align:center;overflow:visible}.ui-button,.ui-button:link,.ui-button:visited,.ui-button:hover,.ui-button:active{text-decoration:none}.ui-button-icon-only{width:2.2em}button.ui-button-icon-only{width:2.4em}.ui-button-icons-only{width:3.4em}button.ui-button-icons-only{width:3.7em}.ui-button .ui-button-text{display:block;line-height:normal}.ui-button-text-only .ui-button-text{padding:.4em 1em}.ui-button-icon-only .ui-button-text,.ui-button-icons-only .ui-button-text{padding:.4em;text-indent:-9999999px}.ui-button-text-icon-primary .ui-button-text,.ui-button-text-icons .ui-button-text{padding:.4em 1em .4em 2.1em}.ui-button-text-icon-secondary .ui-button-text,.ui-button-text-icons .ui-button-text{padding:.4em 2.1em .4em 1em}.ui-button-text-icons .ui-button-text{padding-left:2.1em;padding-right:2.1em}input.ui-button{padding:.4em 1em}.ui-button-icon-only .ui-icon,.ui-button-text-icon-primary .ui-icon,.ui-button-text-icon-secondary .ui-icon,.ui-button-text-icons .ui-icon,.ui-button-icons-only .ui-icon{position:absolute;top:50%;margin-top:-8px}.ui-button-icon-only .ui-icon{left:50%;margin-left:-8px}.ui-button-text-icon-primary .ui-button-icon-primary,.ui-button-text-icons .ui-button-icon-primary,.ui-button-icons-only .ui-button-icon-primary{left:.5em}.ui-button-text-icon-secondary .ui-button-icon-secondary,.ui-button-text-icons .ui-button-icon-secondary,.ui-button-icons-only .ui-button-icon-secondary{right:.5em}.ui-buttonset{margin-right:7px}.ui-buttonset .ui-button{margin-left:0;margin-right:-.3em}input.ui-button::-moz-focus-inner,button.ui-button::-moz-focus-inner{border:0;padding:0}.ui-dialog{position:absolute;top:0;left:0;padding:.2em;outline:0}.ui-dialog .ui-dialog-titlebar{padding:.4em 1em;position:relative}.ui-dialog .ui-dialog-title{float:left;margin:.1em 
0;white-space:nowrap;width:90%;overflow:hidden;text-overflow:ellipsis}.ui-dialog .ui-dialog-titlebar-close{position:absolute;right:.3em;top:50%;width:21px;margin:-10px 0 0 0;padding:1px;height:20px}.ui-dialog .ui-dialog-content{position:relative;border:0;padding:.5em 1em;background:0;overflow:auto}.ui-dialog .ui-dialog-buttonpane{text-align:left;border-width:1px 0 0;background-image:none;margin-top:.5em;padding:.3em 1em .5em .4em}.ui-dialog .ui-dialog-buttonpane .ui-dialog-buttonset{float:right}.ui-dialog .ui-dialog-buttonpane button{margin:.5em .4em .5em 0;cursor:pointer}.ui-dialog .ui-resizable-se{width:12px;height:12px;right:-5px;bottom:-5px;background-position:16px 16px}.ui-draggable .ui-dialog-titlebar{cursor:move}.ui-menu{list-style:none;padding:2px;margin:0;display:block;outline:0}.ui-menu .ui-menu{margin-top:-3px;position:absolute}.ui-menu .ui-menu-item{margin:0;padding:0;width:100%}.ui-menu .ui-menu-divider{margin:5px -2px 5px -2px;height:0;font-size:0;line-height:0;border-width:1px 0 0}.ui-menu .ui-menu-item a{text-decoration:none;display:block;padding:2px .4em;line-height:1.5;min-height:0;font-weight:400}.ui-menu .ui-menu-item a.ui-state-focus,.ui-menu .ui-menu-item a.ui-state-active{font-weight:400;margin:-1px}.ui-menu .ui-state-disabled{font-weight:400;margin:.4em 0 .2em;line-height:1.5}.ui-menu .ui-state-disabled a{cursor:default}.ui-menu-icons{position:relative}.ui-menu-icons .ui-menu-item a{position:relative;padding-left:2em}.ui-menu .ui-icon{position:absolute;top:.2em;left:.2em}.ui-menu .ui-menu-icon{position:static;float:right}.ui-tooltip{padding:8px;position:absolute;z-index:9999;max-width:300px;-webkit-box-shadow:0 0 5px #aaa;box-shadow:0 0 5px #aaa}body .ui-tooltip{border-width:2px}.ui-widget{font-family:Helvetica,Arial,sans-serif;font-size:1.1em}.ui-widget .ui-widget{font-size:1em}.ui-widget input,.ui-widget select,.ui-widget textarea,.ui-widget button{font-family:Helvetica,Arial,sans-serif;font-size:1em}.ui-widget-content{border:1px solid 
#e0cfc2;background:#f4f0ec url(images/ui-bg_inset-soft_100_f4f0ec_1x100.png) 50% bottom repeat-x;color:#1e1b1d}.ui-widget-content a{color:#1e1b1d}.ui-widget-header{border:1px solid #d49768;background:#cb842e url(images/ui-bg_glass_25_cb842e_1x400.png) 50% 50% repeat-x;color:#fff;font-weight:bold}.ui-widget-header a{color:#fff}.ui-state-default,.ui-widget-content .ui-state-default,.ui-widget-header .ui-state-default{border:1px solid #cdc3b7;background:#ede4d4 url(images/ui-bg_glass_70_ede4d4_1x400.png) 50% 50% repeat-x;font-weight:normal;color:#3f3731}.ui-state-default a,.ui-state-default a:link,.ui-state-default a:visited{color:#3f3731;text-decoration:none}.ui-state-hover,.ui-widget-content .ui-state-hover,.ui-widget-header .ui-state-hover,.ui-state-focus,.ui-widget-content .ui-state-focus,.ui-widget-header .ui-state-focus{border:1px solid #f5ad66;background:#f5f0e5 url(images/ui-bg_glass_100_f5f0e5_1x400.png) 50% 50% repeat-x;font-weight:normal;color:#a46313}.ui-state-hover a,.ui-state-hover a:hover,.ui-state-hover a:link,.ui-state-hover a:visited{color:#a46313;text-decoration:none}.ui-state-active,.ui-widget-content .ui-state-active,.ui-widget-header .ui-state-active{border:1px solid #e0cfc2;background:#f4f0ec url(images/ui-bg_highlight-hard_100_f4f0ec_1x100.png) 50% 50% repeat-x;font-weight:normal;color:#b85700}.ui-state-active a,.ui-state-active a:link,.ui-state-active a:visited{color:#b85700;text-decoration:none}.ui-state-highlight,.ui-widget-content .ui-state-highlight,.ui-widget-header .ui-state-highlight{border:1px solid #d9bb73;background:#f5f5b5 url(images/ui-bg_highlight-hard_75_f5f5b5_1x100.png) 50% top repeat-x;color:#060200}.ui-state-highlight a,.ui-widget-content .ui-state-highlight a,.ui-widget-header .ui-state-highlight a{color:#060200}.ui-state-error,.ui-widget-content .ui-state-error,.ui-widget-header .ui-state-error{border:1px solid #f8893f;background:#fee4bd url(images/ui-bg_highlight-hard_65_fee4bd_1x100.png) 50% top 
repeat-x;color:#592003}.ui-state-error a,.ui-widget-content .ui-state-error a,.ui-widget-header .ui-state-error a{color:#592003}.ui-state-error-text,.ui-widget-content .ui-state-error-text,.ui-widget-header .ui-state-error-text{color:#592003}.ui-priority-primary,.ui-widget-content .ui-priority-primary,.ui-widget-header .ui-priority-primary{font-weight:bold}.ui-priority-secondary,.ui-widget-content .ui-priority-secondary,.ui-widget-header .ui-priority-secondary{opacity:.7;filter:Alpha(Opacity=70);font-weight:normal}.ui-state-disabled,.ui-widget-content .ui-state-disabled,.ui-widget-header .ui-state-disabled{opacity:.35;filter:Alpha(Opacity=35);background-image:none}.ui-state-disabled .ui-icon{filter:Alpha(Opacity=35)}.ui-icon{width:16px;height:16px}.ui-icon,.ui-widget-content .ui-icon{background-image:url(images/ui-icons_c47a23_256x240.png)}.ui-widget-header .ui-icon{background-image:url(images/ui-icons_ffffff_256x240.png)}.ui-state-default .ui-icon{background-image:url(images/ui-icons_f08000_256x240.png)}.ui-state-hover .ui-icon,.ui-state-focus .ui-icon{background-image:url(images/ui-icons_f08000_256x240.png)}.ui-state-active .ui-icon{background-image:url(images/ui-icons_f35f07_256x240.png)}.ui-state-highlight .ui-icon{background-image:url(images/ui-icons_cb672b_256x240.png)}.ui-state-error .ui-icon,.ui-state-error-text .ui-icon{background-image:url(images/ui-icons_ff7519_256x240.png)}.ui-icon-blank{background-position:16px 16px}.ui-icon-carat-1-n{background-position:0 0}.ui-icon-carat-1-ne{background-position:-16px 0}.ui-icon-carat-1-e{background-position:-32px 0}.ui-icon-carat-1-se{background-position:-48px 0}.ui-icon-carat-1-s{background-position:-64px 0}.ui-icon-carat-1-sw{background-position:-80px 0}.ui-icon-carat-1-w{background-position:-96px 0}.ui-icon-carat-1-nw{background-position:-112px 0}.ui-icon-carat-2-n-s{background-position:-128px 0}.ui-icon-carat-2-e-w{background-position:-144px 0}.ui-icon-triangle-1-n{background-position:0 
-16px}.ui-icon-triangle-1-ne{background-position:-16px -16px}.ui-icon-triangle-1-e{background-position:-32px -16px}.ui-icon-triangle-1-se{background-position:-48px -16px}.ui-icon-triangle-1-s{background-position:-64px -16px}.ui-icon-triangle-1-sw{background-position:-80px -16px}.ui-icon-triangle-1-w{background-position:-96px -16px}.ui-icon-triangle-1-nw{background-position:-112px -16px}.ui-icon-triangle-2-n-s{background-position:-128px -16px}.ui-icon-triangle-2-e-w{background-position:-144px -16px}.ui-icon-arrow-1-n{background-position:0 -32px}.ui-icon-arrow-1-ne{background-position:-16px -32px}.ui-icon-arrow-1-e{background-position:-32px -32px}.ui-icon-arrow-1-se{background-position:-48px -32px}.ui-icon-arrow-1-s{background-position:-64px -32px}.ui-icon-arrow-1-sw{background-position:-80px -32px}.ui-icon-arrow-1-w{background-position:-96px -32px}.ui-icon-arrow-1-nw{background-position:-112px -32px}.ui-icon-arrow-2-n-s{background-position:-128px -32px}.ui-icon-arrow-2-ne-sw{background-position:-144px -32px}.ui-icon-arrow-2-e-w{background-position:-160px -32px}.ui-icon-arrow-2-se-nw{background-position:-176px -32px}.ui-icon-arrowstop-1-n{background-position:-192px -32px}.ui-icon-arrowstop-1-e{background-position:-208px -32px}.ui-icon-arrowstop-1-s{background-position:-224px -32px}.ui-icon-arrowstop-1-w{background-position:-240px -32px}.ui-icon-arrowthick-1-n{background-position:0 -48px}.ui-icon-arrowthick-1-ne{background-position:-16px -48px}.ui-icon-arrowthick-1-e{background-position:-32px -48px}.ui-icon-arrowthick-1-se{background-position:-48px -48px}.ui-icon-arrowthick-1-s{background-position:-64px -48px}.ui-icon-arrowthick-1-sw{background-position:-80px -48px}.ui-icon-arrowthick-1-w{background-position:-96px -48px}.ui-icon-arrowthick-1-nw{background-position:-112px -48px}.ui-icon-arrowthick-2-n-s{background-position:-128px -48px}.ui-icon-arrowthick-2-ne-sw{background-position:-144px -48px}.ui-icon-arrowthick-2-e-w{background-position:-160px 
-48px}.ui-icon-arrowthick-2-se-nw{background-position:-176px -48px}.ui-icon-arrowthickstop-1-n{background-position:-192px -48px}.ui-icon-arrowthickstop-1-e{background-position:-208px -48px}.ui-icon-arrowthickstop-1-s{background-position:-224px -48px}.ui-icon-arrowthickstop-1-w{background-position:-240px -48px}.ui-icon-arrowreturnthick-1-w{background-position:0 -64px}.ui-icon-arrowreturnthick-1-n{background-position:-16px -64px}.ui-icon-arrowreturnthick-1-e{background-position:-32px -64px}.ui-icon-arrowreturnthick-1-s{background-position:-48px -64px}.ui-icon-arrowreturn-1-w{background-position:-64px -64px}.ui-icon-arrowreturn-1-n{background-position:-80px -64px}.ui-icon-arrowreturn-1-e{background-position:-96px -64px}.ui-icon-arrowreturn-1-s{background-position:-112px -64px}.ui-icon-arrowrefresh-1-w{background-position:-128px -64px}.ui-icon-arrowrefresh-1-n{background-position:-144px -64px}.ui-icon-arrowrefresh-1-e{background-position:-160px -64px}.ui-icon-arrowrefresh-1-s{background-position:-176px -64px}.ui-icon-arrow-4{background-position:0 -80px}.ui-icon-arrow-4-diag{background-position:-16px -80px}.ui-icon-extlink{background-position:-32px -80px}.ui-icon-newwin{background-position:-48px -80px}.ui-icon-refresh{background-position:-64px -80px}.ui-icon-shuffle{background-position:-80px -80px}.ui-icon-transfer-e-w{background-position:-96px -80px}.ui-icon-transferthick-e-w{background-position:-112px -80px}.ui-icon-folder-collapsed{background-position:0 -96px}.ui-icon-folder-open{background-position:-16px -96px}.ui-icon-document{background-position:-32px -96px}.ui-icon-document-b{background-position:-48px -96px}.ui-icon-note{background-position:-64px -96px}.ui-icon-mail-closed{background-position:-80px -96px}.ui-icon-mail-open{background-position:-96px -96px}.ui-icon-suitcase{background-position:-112px -96px}.ui-icon-comment{background-position:-128px -96px}.ui-icon-person{background-position:-144px -96px}.ui-icon-print{background-position:-160px 
-96px}.ui-icon-trash{background-position:-176px -96px}.ui-icon-locked{background-position:-192px -96px}.ui-icon-unlocked{background-position:-208px -96px}.ui-icon-bookmark{background-position:-224px -96px}.ui-icon-tag{background-position:-240px -96px}.ui-icon-home{background-position:0 -112px}.ui-icon-flag{background-position:-16px -112px}.ui-icon-calendar{background-position:-32px -112px}.ui-icon-cart{background-position:-48px -112px}.ui-icon-pencil{background-position:-64px -112px}.ui-icon-clock{background-position:-80px -112px}.ui-icon-disk{background-position:-96px -112px}.ui-icon-calculator{background-position:-112px -112px}.ui-icon-zoomin{background-position:-128px -112px}.ui-icon-zoomout{background-position:-144px -112px}.ui-icon-search{background-position:-160px -112px}.ui-icon-wrench{background-position:-176px -112px}.ui-icon-gear{background-position:-192px -112px}.ui-icon-heart{background-position:-208px -112px}.ui-icon-star{background-position:-224px -112px}.ui-icon-link{background-position:-240px -112px}.ui-icon-cancel{background-position:0 -128px}.ui-icon-plus{background-position:-16px -128px}.ui-icon-plusthick{background-position:-32px -128px}.ui-icon-minus{background-position:-48px -128px}.ui-icon-minusthick{background-position:-64px -128px}.ui-icon-close{background-position:-80px -128px}.ui-icon-closethick{background-position:-96px -128px}.ui-icon-key{background-position:-112px -128px}.ui-icon-lightbulb{background-position:-128px -128px}.ui-icon-scissors{background-position:-144px -128px}.ui-icon-clipboard{background-position:-160px -128px}.ui-icon-copy{background-position:-176px -128px}.ui-icon-contact{background-position:-192px -128px}.ui-icon-image{background-position:-208px -128px}.ui-icon-video{background-position:-224px -128px}.ui-icon-script{background-position:-240px -128px}.ui-icon-alert{background-position:0 -144px}.ui-icon-info{background-position:-16px -144px}.ui-icon-notice{background-position:-32px 
-144px}.ui-icon-help{background-position:-48px -144px}.ui-icon-check{background-position:-64px -144px}.ui-icon-bullet{background-position:-80px -144px}.ui-icon-radio-on{background-position:-96px -144px}.ui-icon-radio-off{background-position:-112px -144px}.ui-icon-pin-w{background-position:-128px -144px}.ui-icon-pin-s{background-position:-144px -144px}.ui-icon-play{background-position:0 -160px}.ui-icon-pause{background-position:-16px -160px}.ui-icon-seek-next{background-position:-32px -160px}.ui-icon-seek-prev{background-position:-48px -160px}.ui-icon-seek-end{background-position:-64px -160px}.ui-icon-seek-start{background-position:-80px -160px}.ui-icon-seek-first{background-position:-80px -160px}.ui-icon-stop{background-position:-96px -160px}.ui-icon-eject{background-position:-112px -160px}.ui-icon-volume-off{background-position:-128px -160px}.ui-icon-volume-on{background-position:-144px -160px}.ui-icon-power{background-position:0 -176px}.ui-icon-signal-diag{background-position:-16px -176px}.ui-icon-signal{background-position:-32px -176px}.ui-icon-battery-0{background-position:-48px -176px}.ui-icon-battery-1{background-position:-64px -176px}.ui-icon-battery-2{background-position:-80px -176px}.ui-icon-battery-3{background-position:-96px -176px}.ui-icon-circle-plus{background-position:0 -192px}.ui-icon-circle-minus{background-position:-16px -192px}.ui-icon-circle-close{background-position:-32px -192px}.ui-icon-circle-triangle-e{background-position:-48px -192px}.ui-icon-circle-triangle-s{background-position:-64px -192px}.ui-icon-circle-triangle-w{background-position:-80px -192px}.ui-icon-circle-triangle-n{background-position:-96px -192px}.ui-icon-circle-arrow-e{background-position:-112px -192px}.ui-icon-circle-arrow-s{background-position:-128px -192px}.ui-icon-circle-arrow-w{background-position:-144px -192px}.ui-icon-circle-arrow-n{background-position:-160px -192px}.ui-icon-circle-zoomin{background-position:-176px 
-192px}.ui-icon-circle-zoomout{background-position:-192px -192px}.ui-icon-circle-check{background-position:-208px -192px}.ui-icon-circlesmall-plus{background-position:0 -208px}.ui-icon-circlesmall-minus{background-position:-16px -208px}.ui-icon-circlesmall-close{background-position:-32px -208px}.ui-icon-squaresmall-plus{background-position:-48px -208px}.ui-icon-squaresmall-minus{background-position:-64px -208px}.ui-icon-squaresmall-close{background-position:-80px -208px}.ui-icon-grip-dotted-vertical{background-position:0 -224px}.ui-icon-grip-dotted-horizontal{background-position:-16px -224px}.ui-icon-grip-solid-vertical{background-position:-32px -224px}.ui-icon-grip-solid-horizontal{background-position:-48px -224px}.ui-icon-gripsmall-diagonal-se{background-position:-64px -224px}.ui-icon-grip-diagonal-se{background-position:-80px -224px}.ui-corner-all,.ui-corner-top,.ui-corner-left,.ui-corner-tl{border-top-left-radius:6px}.ui-corner-all,.ui-corner-top,.ui-corner-right,.ui-corner-tr{border-top-right-radius:6px}.ui-corner-all,.ui-corner-bottom,.ui-corner-left,.ui-corner-bl{border-bottom-left-radius:6px}.ui-corner-all,.ui-corner-bottom,.ui-corner-right,.ui-corner-br{border-bottom-right-radius:6px}.ui-widget-overlay{background:#aaa url(images/ui-bg_flat_75_aaaaaa_40x100.png) 50% 50% repeat-x;opacity:.3;filter:Alpha(Opacity=30)}.ui-widget-shadow{margin:-8px 0 0 -8px;padding:8px;background:#aaa url(images/ui-bg_flat_75_aaaaaa_40x100.png) 50% 50% repeat-x;opacity:.3;filter:Alpha(Opacity=30);border-radius:8px} ================================================ FILE: src/resources/third-party/jquery-ui-autocomplete-html.js ================================================ /* * jQuery UI Autocomplete HTML Extension * * Copyright 2010, Scott González (http://scottgonzalez.com) * Dual licensed under the MIT or GPL Version 2 licenses. 
* * http://github.com/scottgonzalez/jquery-ui-extensions */ (function( $ ) { var proto = $.ui.autocomplete.prototype, initSource = proto._initSource; function filter( array, term ) { var matcher = new RegExp( $.ui.autocomplete.escapeRegex(term), "i" ); return $.grep( array, function(value) { return matcher.test( $( "<div>" ).html( value.label || value.value || value ).text() ); }); } $.extend( proto, { _initSource: function() { if ( this.options.html && $.isArray(this.options.source) ) { this.source = function( request, response ) { response( filter( this.options.source, request.term ) ); }; } else { initSource.call( this ); } }, _renderItem: function( ul, item) { return $( "<li></li>" ) .data( "item.autocomplete", item ) .append( $( "<a></a>" )[ this.options.html ? "html" : "text" ]( item.label ) ) .appendTo( ul ); } }); })( jQuery ); ================================================ FILE: src/resources/tutorial.css ================================================ /* -*- mode: css; indent-tabs-mode: nil -*- Copyright 2012 Jens Lindström, Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/

/* Main tutorial layout: each tutorial page is one wide table. */
div.main > table { margin-top: 1em; padding: 1em }

/* Section headings. */
div.main table td.h2 { border-bottom: 1px solid #cca }
div.main table td.h2 h2 { font-size: 1rem; margin: 1.5rem 0 0.75rem; }
div.main table td.h2 div { margin: 0 auto; width: 50em }

/* Table-of-contents cell. */
div.main table td.toc div { margin: 0 auto 0.5em auto; padding-top: 1em; width: 40em }
div.main table td.short { padding-top: 1em }

/* Body text cells: centered fixed-width column of justified text. */
div.main table td.text { text-align: center; padding-top: 1em }
div.main table td.text div { text-align: justify; margin-left: auto; margin-right: auto; width: 50em; line-height: 140%; margin-top: 0.5em; margin-bottom: 0.5em }
div.main table td.text div div.hint { font-style: italic; margin-left: 2em; width: 90% }
div.main table td.text div div.example { padding-top: 0.1em; padding-bottom: 0.1em; margin-left: 2em; width: 90% }
div.main table td.text div h3 { border-bottom: 1px solid #cca }
div.main table td.text div.link { padding-left: 2em; font-weight: bold; font-family: monospace }
div.main table td.text a { text-decoration: underline; color: #222; white-space: nowrap }
div.main table td.text ol, div.main table td.text ul, div.main table td.text dl { width: 90% }
div.main table td.text ol li, div.main table td.text ul li { margin-top: 0.5em; margin-bottom: 0.5em }
div.main table td.text dl { margin-left: 1em }
div.main table td.text dl dt { font-family: monospace; margin-top: 0.5em }
div.main table td.text dl dd { margin-bottom: 0.5em }
div.main table td.text code { white-space: nowrap; font-style: normal; background-color: #eec; padding: 0 2px }
div.main table td.text code.bold { font-family: monospace; font-weight: bold }

/* Navigation ("go to" / "back") cells. */
div.main table td.goto div { text-align: right; margin: 0 auto 0.5em auto; width: 50em }
div.main table td.back { text-align: center; padding-left: 0; padding-top: 2em }
div.main table td.back div { padding-top: 2em; border-top: 1px solid #cca }

table.repositories { font-family: monospace; }

/* Preformatted (code-like) tables. */
table.pre { margin: 1em; margin-right: auto; font-family: monospace }
table.pre tr td { white-space: pre; margin: 0; padding: 0 }

/* Table of contents. */
table.toc { width: 100%; }
table.toc th { text-align: left }
table.toc tr.h2 td { padding-top: 1em }
table.toc tr.h3 td { padding-left: 2em }
table.toc a { color: #222 }

================================================
FILE: src/resources/tutorial.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*-

 Copyright 2012 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License.  You may obtain a copy of
 the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 License for the specific language governing permissions and limitations under
 the License.

*/

/* Render tutorial navigation links as jQuery UI buttons. */
$(document).ready(function () { $("td.goto a, td.back a").button(); });

================================================
FILE: src/resources/whitespace.css
================================================
/* -*- mode: css; indent-tabs-mode: nil -*-

 Copyright 2012 Jens Lindström, Opera Software ASA

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License.  You may obtain a copy of
 the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 License for the specific language governing permissions and limitations under
 the License.
*/ tr.line td > i.tailws, tr.line.inserted td i.tailws { background: red !important } tr.line td i.cr { color: #888 } tr.line td i.cr:before { content: "^M" } tr.line td i.bom { color: #888 } tr.line td i.bom:before { content: "BOM" } tr.line td > i.tailws i.cr { color: white } tr.line td > i.tailws > b.t { color: white } tr.line i.i i.tailws { outline: 2px solid red } tr.line.inserted > td.line.new b.t.ill, tr.line.replaced > td.line.new b.t.ill, tr.line.modified > td.line b.t.ill { background-color: red; color: white } ================================================ FILE: src/reviewing/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. ================================================ FILE: src/reviewing/comment/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations under # the License. import dbutils import diff import diff.parse import gitutils import itertools import changeset.load as changeset_load import htmlutils import re import page.utils from htmlutils import jsify from time import strftime from reviewing.filters import Filters from operation import OperationFailure class Comment: def __init__(self, chain, batch_id, id, state, user, time, when, comment, code, unread): self.chain = chain self.batch_id = batch_id self.id = id self.state = state self.user = user self.time = time self.when = when self.comment = comment self.code = code self.unread = unread def __repr__(self): return "Comment(%r)" % self.comment def getJSConstructor(self): return "new Comment(%d, %s, %s, %s, %s)" % (self.id, self.user.getJSConstructor(), jsify(strftime("%Y-%m-%d %H:%M", self.time.timetuple())), jsify(self.state), jsify(self.comment)) @staticmethod def fromId(db, id, user): cursor = db.cursor() cursor.execute("SELECT chain, batch, uid, time, state, comment, code FROM comments WHERE id=%s", (id,)) row = cursor.fetchone() if not row: return None else: chain_id, batch_id, author_id, time, state, comment, code = row author = dbutils.User.fromId(db, author_id) adjusted_time = user.adjustTimestamp(db, time) when = user.formatTimestamp(db, time) cursor.execute("SELECT 1 FROM commentstoread WHERE uid=%s AND comment=%s", (user.id, id)) return Comment(CommentChain.fromId(db, chain_id, user), batch_id, id, state, author, adjusted_time, when, comment, code, cursor.fetchone() is not None) class CommentChain: def __init__(self, id, user, review, batch_id, type, state, origin=None, file_id=None, first_commit=None, last_commit=None, closed_by=None, addressed_by=None, type_is_draft=False, state_is_draft=False, last_commit_is_draft=False, addressed_by_is_draft=False, leader=None, count=None, unread=None): self.id = id self.user = user self.review = review self.batch_id = 
batch_id self.type = type self.type_is_draft = type_is_draft self.state = state self.state_is_draft = state_is_draft self.origin = origin self.file_id = file_id self.first_commit = first_commit self.last_commit = last_commit self.last_commit_is_draft = last_commit_is_draft self.closed_by = closed_by self.addressed_by = addressed_by self.addressed_by_is_draft = addressed_by_is_draft self.lines = None self.lines_by_sha1 = None self.__leader = leader self.__count = count self.__unread = unread self.comments = [] def setLines(self, sha1, offset, count): if not self.lines: self.lines = [] self.lines_by_sha1 = {} assert sha1 not in self.lines self.lines.append((sha1, offset, count)) self.lines_by_sha1[sha1] = (offset, count) return self def loadComments(self, db, user, include_draft_comments=True): if include_draft_comments: if self.state == "draft": draft_user_id = self.user.id else: draft_user_id = user.id else: draft_user_id = None cursor = db.cursor() cursor.execute("""SELECT comments.id, comments.batch, comments.state, comments.uid, comments.time, comments.comment, comments.code, commentstoread.uid IS NOT NULL AS unread FROM comments LEFT OUTER JOIN commentstoread ON (comments.id=commentstoread.comment AND commentstoread.uid=%s) WHERE comments.chain=%s AND ((comments.state='draft' AND comments.uid=%s) OR comments.state='current') ORDER BY comments.batch ASC""", (user.id, self.id, draft_user_id)) last = None for comment_id, batch_id, comment_state, author_id, time, comment, code, unread in cursor.fetchall(): author = dbutils.User.fromId(db, author_id) adjusted_time = user.adjustTimestamp(db, time) when = user.formatTimestamp(db, time) comment = Comment(self, batch_id, comment_id, comment_state, author, adjusted_time, when, comment, code, unread) if comment_state == 'draft': last = comment else: self.comments.append(comment) if last: self.comments.append(last) def when(self): return self.comments[0].when def countComments(self): if self.__count is None: self.__count = 
len(self.comments) return self.__count def countUnread(self): if self.__unread is None: self.__unread = len(filter(lambda comment: comment.unread, self.comments)) return self.__unread def title(self, include_time=True): if self.type == "issue": result = "Issue raised by %s" % (self.user.fullname) else: result = "Note by %s" % (self.user.fullname) if include_time: result += " at %s" % self.when() return result def leader(self, max_length=80, text=False): if self.__leader is None: self.__leader = self.comments[0].comment.split("\n", 1)[0] if len(self.__leader) > max_length: if text: return self.__leader[:max_length - 5] + "[...]" else: return htmlutils.htmlify(self.__leader[:max_length - 3]) + "[…]" else: if text: return self.__leader else: return htmlutils.htmlify(self.__leader) def getJSConstructor(self, sha1=None): if self.closed_by: closed_by = self.closed_by.getJSConstructor() else: closed_by = "null" if self.addressed_by: addressed_by = jsify(self.addressed_by.sha1) else: addressed_by = "null" comments = ", ".join(map(Comment.getJSConstructor, self.comments)) if sha1: offset, count = self.lines_by_sha1[sha1] if self.file_id: lines = "new CommentLines(%d, %s, %d, %d)" % (self.file_id, jsify(sha1), offset, offset + count - 1) else: lines = "new CommentLines(null, %s, %d, %d)" % (jsify(sha1), offset, offset + count - 1) else: lines = "null" return "new CommentChain(%d, %s, %s, %s, %s, %s, %s, [%s], %s)" % (self.id, self.user.getJSConstructor(), jsify(self.type), "true" if self.type_is_draft else "false", jsify(self.state), closed_by, addressed_by, comments, lines) def __nonzero__(self): return bool(self.comments) def __eq__(self, other): return other is not None and self.id == other.id def __ne__(self, other): return other is None or self.id != other.id def __repr__(self): return "CommentChain(%d)" % self.id def __len__(self): return len(self.comments) def __getitem__(self, index): return self.comments[index] @staticmethod def fromReview(db, review, user): cursor 
= db.cursor() cursor.execute("""SELECT commentchains.id, commentchains.batch, users.id, users.name, users.fullname, users.status, useremails.email, useremails.verified, commentchains.type, drafttype.to_type, commentchains.state, draftstate.to_state, SUBSTR(comments.comment, 1, 81), chaincomments(commentchains.id), chainunread(commentchains.id, %s) FROM commentchains JOIN users ON (users.id=commentchains.uid) JOIN useremails ON (useremails.id=users.email) JOIN comments ON (comments.id=commentchains.first_comment) LEFT OUTER JOIN commentchainchanges AS drafttype ON (drafttype.chain=commentchains.id AND drafttype.uid=%s AND drafttype.to_type IS NOT NULL AND drafttype.state='draft') LEFT OUTER JOIN commentchainchanges AS draftstate ON (draftstate.chain=commentchains.id AND draftstate.uid=%s AND draftstate.to_state IS NOT NULL AND draftstate.state='draft') WHERE commentchains.review=%s AND (commentchains.state!='draft' or commentchains.uid=%s) ORDER BY commentchains.id ASC""", (user.id, user.id, user.id, review.id, user.id,)) chains = [] for chain_id, batch_id, user_id, user_name, user_fullname, user_status, user_email, user_email_verified, chain_type, draft_type, chain_state, draft_state, leader, count, unread in cursor: if draft_type is not None: chain_type = draft_type if draft_state is not None: chain_state = draft_state if "\n" in leader: leader = leader[:leader.index("\n")] chains.append(CommentChain(chain_id, dbutils.User(user_id, user_name, user_fullname, user_status, user_email, user_email_verified), review, batch_id, chain_type, chain_state, leader=leader, count=count, unread=unread)) return chains @staticmethod def fromId(db, id, user, review=None, skip=None): cursor = db.cursor() cursor.execute("SELECT review, batch, uid, type, state, origin, file, first_commit, last_commit, closed_by, addressed_by FROM commentchains WHERE id=%s", [id]) row = cursor.fetchone() if not row: return None else: review_id, batch_id, user_id, type, state, origin, file_id, 
first_commit_id, last_commit_id, closed_by_id, addressed_by_id = row type_is_draft = False state_is_draft = False last_commit_is_draft = False addressed_by_is_draft = False if user is not None: cursor.execute("""SELECT from_type, to_type, from_state, to_state, from_last_commit, to_last_commit, from_addressed_by, to_addressed_by FROM commentchainchanges WHERE chain=%s AND uid=%s AND state='draft'""", [id, user.id]) for from_type, to_type, from_state, to_state, from_last_commit_id, to_last_commit_id, from_addressed_by_id, to_addressed_by_id in cursor: if from_state == state: state = to_state state_is_draft = True if to_state != "open": closed_by_id = user.id if from_type == type: type = to_type type_is_draft = True if from_last_commit_id == last_commit_id: last_commit_id = from_last_commit_id last_commit_is_draft = True if from_addressed_by_id == addressed_by_id: addressed_by_id = to_addressed_by_id addressed_by_is_draft = True if review is None: review = dbutils.Review.fromId(db, review_id) else: assert review.id == review_id first_commit = last_commit = addressed_by = None if not skip or 'commits' not in skip: if first_commit_id: first_commit = gitutils.Commit.fromId(db, review.repository, first_commit_id) if last_commit_id: last_commit = gitutils.Commit.fromId(db, review.repository, last_commit_id) if addressed_by_id: addressed_by = gitutils.Commit.fromId(db, review.repository, addressed_by_id) if closed_by_id: closed_by = dbutils.User.fromId(db, closed_by_id) else: closed_by = None chain = CommentChain(id, dbutils.User.fromId(db, user_id), review, batch_id, type, state, origin, file_id, first_commit, last_commit, closed_by, addressed_by, type_is_draft=type_is_draft, state_is_draft=state_is_draft, last_commit_is_draft=last_commit_is_draft, addressed_by_is_draft=addressed_by_is_draft) if not skip or 'lines' not in skip: if chain.state == "draft": draft_user_id = chain.user.id elif user is not None: draft_user_id = user.id else: draft_user_id = None 
            # Tail of CommentChain.fromId (definition starts above this chunk):
            # fetch the line anchors for this chain, including the requesting
            # user's drafts.
            cursor.execute("""SELECT sha1, first_line, last_line
                                FROM commentchainlines
                               WHERE chain=%s
                                 AND (state='current' OR uid=%s)""",
                           (id, draft_user_id))

            for sha1, first_line, last_line in cursor.fetchall():
                # setLines() takes (sha1, offset, count), hence the +1.
                chain.setLines(sha1, first_line, last_line - first_line + 1)

        return chain

def loadCommentChains(db, review, user, file=None, changeset=None, commit=None, local_comments_only=False):
    """Load the comment chains in a review that are relevant to a given
       context.

       The context is selected by the optional arguments:
         - no file/changeset/commit: chains not attached to any file,
         - commit: file-less chains anchored at that commit,
         - local_comments_only (with file+changeset): chains whose lines touch
           exactly this changeset's versions of the file,
         - otherwise: chains attached to 'file' (or every file in 'changeset')
           whose lines match either side of the file's change.

       Draft chains are only included for 'user'.  Returns a list of
       CommentChain objects with their comments loaded, ordered by chain id."""
    result = []
    cursor = db.cursor()

    chain_ids = None

    if file is None and changeset is None and commit is None:
        cursor.execute("SELECT id FROM commentchains WHERE review=%s AND file IS NULL", [review.id])
    elif commit is not None:
        cursor.execute("""SELECT DISTINCT id
                            FROM commentchains
                           WHERE review=%s
                             AND file IS NULL
                             AND first_commit=%s
                             AND ((state!='draft' OR uid=%s) AND state!='empty')
                        GROUP BY id""",
                       [review.id, commit.getId(db), user.id])
    elif local_comments_only:
        cursor.execute("""SELECT DISTINCT commentchains.id
                            FROM commentchains
                            JOIN commentchainlines ON (commentchainlines.chain=commentchains.id)
                            JOIN fileversions ON (fileversions.file=commentchains.file)
                           WHERE commentchains.review=%s
                             AND commentchains.file=%s
                             AND commentchains.state!='empty'
                             AND ((commentchains.first_commit=%s AND commentchains.last_commit=%s)
                               OR commentchains.addressed_by=%s)
                             AND fileversions.changeset=%s
                             AND (commentchainlines.sha1=fileversions.old_sha1
                               OR commentchainlines.sha1=fileversions.new_sha1)
                             AND (commentchainlines.state='current' OR commentchainlines.uid=%s)
                        ORDER BY commentchains.id ASC""",
                       (review.id, file.id, changeset.parent.getId(db), changeset.child.getId(db),
                        changeset.child.getId(db), changeset.id, user.id))
    else:
        # One query per file; collect the union of matching chain ids.
        chain_ids = set()

        if file is not None:
            files = [file]
        else:
            files = changeset.files

        for file in files:
            cursor.execute("""SELECT id
                                FROM commentchains
                                JOIN commentchainlines ON (commentchainlines.chain=commentchains.id)
                               WHERE commentchains.review=%s
                                 AND commentchains.file=%s
                                 AND commentchains.state!='empty'
                                 AND (commentchains.state!='draft' OR commentchains.uid=%s)
                                 AND (commentchainlines.sha1=%s OR commentchainlines.sha1=%s)
                                 AND (commentchainlines.state='current' OR commentchainlines.uid=%s)""",
                           (review.id, file.id, user.id, file.old_sha1, file.new_sha1, user.id))
            for (chain_id,) in cursor.fetchall():
                chain_ids.add(chain_id)

    if chain_ids is None:
        # The single-query branches above left their result in the cursor.
        chain_ids = set()
        for (chain_id,) in cursor.fetchall():
            chain_ids.add(chain_id)

    for chain_id in sorted(chain_ids):
        chain = CommentChain.fromId(db, chain_id, user, review=review)
        chain.loadComments(db, user)
        result.append(chain)

    return result

def createCommentChain(db, user, review, chain_type, commit=None, origin=None, file=None, parent=None, child=None, offset=None, count=None):
    """Create a new comment chain in 'review' and return its id.

       Three anchoring modes, selected by the arguments:
         - file is not None: a code comment against 'file' in the diff
           parent..child (origin selects which side 'offset'/'count' refer
           to); the location is propagated to all relevant file versions.
         - commit is not None: a comment against lines of the commit message.
         - neither: a general review comment with no anchor.

       Raises OperationFailure if the review is closed (for issues) or the
       requested location is invalid.  Does not commit the transaction."""
    import reviewing.comment.propagate

    if chain_type == "issue" and review.state != "open":
        raise OperationFailure(code="reviewclosed",
                               title="Review is closed!",
                               message="You need to reopen the review before you can raise new issues.")

    cursor = db.cursor()

    if file is not None:
        # 'origin' selects whether offset/count address the old or the new
        # side of the diff.
        if origin == "old":
            commit = parent
        else:
            commit = child

        propagation = reviewing.comment.propagate.Propagation(db)

        if not propagation.setCustom(review, commit, file.id, offset, offset + count - 1):
            raise OperationFailure(code="invalidoperation",
                                   title="Invalid operation",
                                   message="It's not possible to create a comment here.")

        # Map the location onto every file version in the review.
        propagation.calculateInitialLines()

        cursor.execute("""INSERT INTO commentchains (review, uid, type, origin, file, first_commit, last_commit)
                               VALUES (%s, %s, %s, %s, %s, %s, %s)
                            RETURNING id""",
                       (review.id, user.id, chain_type, origin, file.id,
                        parent.getId(db) if parent else None,
                        child.getId(db) if child else None))

        chain_id = cursor.fetchone()[0]

        commentchainlines_values = []

        for sha1, (first_line, last_line) in propagation.new_lines.items():
            commentchainlines_values.append((chain_id, user.id, sha1, first_line, last_line))

        cursor.executemany("""INSERT INTO commentchainlines (chain, uid, sha1, first_line, last_line)
                                   VALUES (%s, %s, %s, %s, %s)""",
                           commentchainlines_values)
    elif commit is not None:
        # Comment against the commit message itself; lines are addressed by
        # the commit's SHA-1.
        if offset + count > len(commit.message.splitlines()):
            raise OperationFailure(code="invalidoperation",
                                   title="Invalid operation",
                                   message="It's not possible to create a comment here.")

        cursor.execute("""INSERT INTO commentchains (review, uid, type, first_commit, last_commit)
                               VALUES (%s, %s, %s, %s, %s)
                            RETURNING id""",
                       (review.id, user.id, chain_type, commit.getId(db), commit.getId(db)))

        chain_id = cursor.fetchone()[0]

        cursor.execute("""INSERT INTO commentchainlines (chain, uid, sha1, first_line, last_line)
                               VALUES (%s, %s, %s, %s, %s)""",
                       (chain_id, user.id, commit.sha1, offset, offset + count - 1))
    else:
        cursor.execute("""INSERT INTO commentchains (review, uid, type)
                               VALUES (%s, %s, %s)
                            RETURNING id""",
                       (review.id, user.id, chain_type))
        chain_id = cursor.fetchone()[0]

    # The author and all review owners are associated with the chain from
    # the start.
    commentchainusers = set([user.id] + map(int, review.owners))

    cursor.executemany("INSERT INTO commentchainusers (chain, uid) VALUES (%s, %s)",
                       [(chain_id, user_id) for user_id in commentchainusers])

    return chain_id

def createComment(db, user, chain_id, comment, first=False):
    """Insert a draft comment into an existing chain and return its id.

       If 'first' is true, the comment is also recorded as the chain's
       initial comment.  Does not commit the transaction."""
    cursor = db.cursor()

    cursor.execute("INSERT INTO comments (chain, uid, time, state, comment) VALUES (%s, %s, now(), 'draft', %s) RETURNING id",
                   (chain_id, user.id, comment))

    comment_id = cursor.fetchone()[0]

    if first:
        cursor.execute("UPDATE commentchains SET first_comment=%s WHERE id=%s",
                       (comment_id, chain_id))

    return comment_id

def validateCommentChain(db, review, origin, parent, child, file, offset, count):
    """Check whether the commented lines are changed by later commits in the
       review.

       Returns a (status, extra) pair:
         - ("invalid", {})      the location cannot hold a comment,
         - ("transferred", {})  the lines survive but the file differs at the
                                review branch head,
         - ("clean", {})        the lines are untouched at the branch head,
         - ("modified", {...})  the lines are changed; extra carries the
                                parent/child SHA-1s of the first modifying
                                changeset and the new line offset.

       NOTE(review): an earlier docstring claimed a diff.Changeset or None
       was returned; that did not match the code."""
    import reviewing.comment.propagate

    if origin == "old":
        commit = parent
    else:
        commit = child

    propagation = reviewing.comment.propagate.Propagation(db)

    if not propagation.setCustom(review, commit, file.id, offset, offset + count - 1):
        return "invalid", {}

    propagation.calculateInitialLines()

    if propagation.active:
        if commit.getFileSHA1(file.path) != review.branch.getHead(db).getFileSHA1(file.path):
            return "transferred", {}
        else:
            return "clean", {}
    else:
        addressed_by = propagation.addressed_by[0]

        return "modified", { "parent_sha1": addressed_by.parent.sha1,
                             "child_sha1": addressed_by.child.sha1,
                             "offset": addressed_by.location.first_line }

def propagateCommentChains(db, user, review, commits, replayed_rebases={}):
    """Propagate every file-anchored comment chain in 'review' across the
       newly added 'commits'.

       New line anchors are inserted for each chain; open/draft issues whose
       lines are modified by the new commits are marked addressed.  Prints a
       summary of addressed issues to stdout (this runs in a service/CLI
       context).

       NOTE(review): 'replayed_rebases' is a mutable default argument; it is
       only read here, but callers should not rely on mutating it."""
    import reviewing.comment.propagate

    cursor = db.cursor()
    cursor.execute("""SELECT id, uid, type, state, file
                        FROM commentchains
                       WHERE review=%s
                         AND file IS NOT NULL""",
                   (review.id,))

    chains_by_file = {}

    for chain_id, chain_user_id, chain_type, chain_state, file_id in cursor:
        chains_by_file.setdefault(file_id, {})[chain_id] = (chain_user_id, chain_type, chain_state)

    commentchainlines_values = []
    addressed_values = []

    for file_id, chains in chains_by_file.items():
        file_path = dbutils.describe_file(db, file_id)
        file_sha1 = review.branch.getHead(db).getFileSHA1(file_path)

        # Only chains currently anchored at the branch head's version of the
        # file need propagation.
        cursor.execute("""SELECT chain, first_line, last_line
                            FROM commentchainlines
                           WHERE chain=ANY (%s)
                             AND sha1=%s""",
                       (chains.keys(), file_sha1))

        for chain_id, first_line, last_line in cursor:
            assert len(commits.getHeads()) == 1

            head = commits.getHeads().pop()
            if head in replayed_rebases:
                # Propagate through the replayed (conflict-free) version of a
                # rebased branch instead of the actual new head.
                head = replayed_rebases[head]

            propagation = reviewing.comment.propagate.Propagation(db)
            propagation.setExisting(review, chain_id, review.branch.getHead(db), file_id, first_line, last_line)
            propagation.calculateAdditionalLines(commits, head)

            chain_user_id, chain_type, chain_state = chains[chain_id]
            lines_state = "draft" if chain_state == "draft" else "current"

            for sha1, (first_line, last_line) in propagation.new_lines.items():
                commentchainlines_values.append((chain_id, chain_user_id, lines_state, sha1, first_line, last_line))

            if chain_type == "issue" and chain_state in ("open", "draft") and not propagation.active:
                # The issue's lines were modified; record the commit that
                # addressed it.
                addressed_values.append((propagation.addressed_by[0].child.getId(db), chain_id))

    cursor.executemany("""INSERT INTO commentchainlines (chain, uid, state, sha1, first_line, last_line)
                               VALUES (%s, %s, %s, %s, %s, %s)""",
                       commentchainlines_values)

    if addressed_values:
        cursor.executemany("UPDATE commentchains SET state='addressed', addressed_by=%s WHERE id=%s AND state='open'", addressed_values)
        cursor.executemany("UPDATE commentchains SET addressed_by=%s WHERE id=%s AND state='draft'", addressed_values)

        print "Addressed issues:"

        for commit_id, chain_id in addressed_values:
            chain = CommentChain.fromId(db, chain_id, user, review=review)
            if chain.state == 'addressed':
                chain.loadComments(db, user)

                title = " %s: " % chain.title(False)
                print "%s%s" % (title, chain.leader(max_length=80 - len(title), text=True))



================================================
FILE: src/reviewing/comment/propagate.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import dbutils
import diff

from changeset.utils import createChangeset

# Directions for Location.apply(): FORWARD maps an old-version location onto
# the new version, BACKWARD does the reverse.
FORWARD = 1
BACKWARD = 2

class Location:
    """A range of lines [first_line, last_line] in one version of a file.

       Behaves as a 2-tuple (first_line, last_line) via __len__/__getitem__;
       'active' is cleared once a change overlaps the range."""

    def __init__(self, first_line, last_line, active=True):
        self.first_line = first_line
        self.last_line = last_line
        self.active = active

    def copy(self):
        # Independent copy; apply() mutates in place.
        return Location(self.first_line, self.last_line, self.active)

    def __iadd__(self, delta):
        # Shift the whole range by 'delta' lines.
        self.first_line += delta
        self.last_line += delta
        return self

    def __len__(self):
        return 2

    def __getitem__(self, index):
        if index == 0:
            return self.first_line
        elif index == 1:
            return self.last_line
        else:
            raise IndexError

    def __eq__(self, other):
        return tuple(self) == tuple(other)

    def apply(self, changes, direction):
        """
        Apply a set of changes and adjust the location accordingly.

        Process a set of changes in the form of a list of objects with the
        attributes delete_offset, delete_count, insert_offset and insert_count
        (such as diff.Chunk objects) sorted on ascending offsets.  If any of
        the changes overlap this location, the location's 'active' attribute
        is set to False, otherwise the 'first_line' and 'last_line' attributes
        are adjusted to keep the location referencing the same lines.

        If the 'direction' argument is FORWARD, this location is interpreted
        as a location in the old version (before the changes) and is adjusted
        to a location in the new version (after the changes.)  If the argument
        is BACKWARD, this location is interpreted as a location in the new
        version (after the changes) and is adjusted to a location in the old
        version (before the changes.)

        Returns True if the location is still active.
        """
        delta = 0

        # The only difference between the two loops is that uses of
        # delete_offset/delete_count and insert_offset/insert_count are
        # mirrored.

        if direction == FORWARD:
            for change in changes:
                if change.delete_offset + change.delete_count <= self.first_line:
                    # Change is before (and does not overlap) the location.
                    delta += change.insert_count - change.delete_count
                elif change.delete_offset <= self.last_line:
                    # Change overlaps the location.
                    self.active = False
                    break
                else:
                    # Change is after the location, meaning, since changes come
                    # in ascending offset order, that all other changes are also
                    # after the location.
                    break
        else:
            for change in changes:
                if change.insert_offset + change.insert_count <= self.first_line:
                    # Change is before (and does not overlap) the location.
                    delta += change.delete_count - change.insert_count
                elif change.insert_offset <= self.last_line:
                    # Change overlaps the comment chain.
                    self.active = False
                    break
                else:
                    # Change is after the location, meaning, since changes come
                    # in ascending offset order, that all other changes are also
                    # after the location.
                    break

        # Apply 'delta' to the location if it's still active.
        if self.active:
            self += delta

        return self.active

class AddressedBy(object):
    """Records that the change parent->child overlapped 'location'."""

    def __init__(self, parent, child, location):
        self.parent = parent
        self.child = child
        self.location = location

class Propagation:
    """Propagates a commented line range through a review's commit graph.

       Use setCustom() (new chain) or setExisting() (stored chain) to
       initialize, then calculateInitialLines() or
       calculateAdditionalLines().  Results: 'all_lines'/'new_lines' map file
       SHA-1 -> (first_line, last_line); 'active' tells whether the range
       survives at the head; 'addressed_by' lists overlapping changes."""

    def __init__(self, db):
        self.db = db
        self.review = None
        self.head = None
        self.rebases = None
        self.initial_commit = None
        self.file_path = None
        self.file_id = None
        self.location = None
        self.active = None
        self.all_lines = None
        self.new_lines = None
        # NOTE(review): 'addressed_by' is only initialized by
        # setCustom()/setExisting(), not here.

    def setCustom(self, review, commit, file_id, first_line, last_line):
        """
        Initialize for propagation of a custom location.

        This mode of operation is used to propagate a new comment chain to
        all relevant commits current part of the review.

        Returns false if the creating a comment at the specified location is
        not supported, typically because the commit is not being reviewed in
        the review.
        """
        assert first_line > 0
        assert last_line >= first_line

        if not review.containsCommit(self.db, commit, True):
            return False

        self.review = review
        self.rebases = review.getReviewRebases(self.db)
        self.initial_commit = commit
        self.addressed_by = []
        self.file_path = dbutils.describe_file(self.db, file_id)
        self.file_id = file_id
        self.location = Location(first_line, last_line)
        self.active = True

        file_entry = commit.getFileEntry(self.file_path)

        if file_entry is None:
            # File doesn't exist (in the given commit.)
            return False

        diff_file = diff.File(new_sha1=file_entry.sha1, new_mode=file_entry.mode,
                              repository=review.repository)
        diff_file.loadNewLines()

        if last_line > diff_file.newCount():
            # Range of lines is out of bounds.
            return False

        self.all_lines = { file_entry.sha1: (first_line, last_line) }
        self.new_lines = { file_entry.sha1: (first_line, last_line) }

        return True

    def setExisting(self, review, chain_id, commit, file_id, first_line, last_line, reopening=False):
        """
        Initialize for propagation of existing comment chain.

        This initializes the location to where the comment chain is located
        in the most recent commit in the review.  If the comment chain is not
        present in the most recent commit in the review, this function
        returns False.

        This mode of operation is used to update existing comment chains when
        adding new commits to a review.
        """
        self.review = review
        self.rebases = review.getReviewRebases(self.db)
        self.initial_commit = commit
        self.addressed_by = []
        self.file_path = dbutils.describe_file(self.db, file_id)
        self.file_id = file_id
        self.location = Location(first_line, last_line)
        self.active = True
        self.all_lines = {}
        self.new_lines = {}

        # Seed 'all_lines' with the anchors already stored for the chain so
        # __setLines() can tell new file versions from known ones.
        cursor = self.db.cursor()
        cursor.execute("""SELECT sha1, first_line, last_line
                            FROM commentchainlines
                           WHERE chain=%s""",
                       (chain_id,))

        for file_sha1, first_line, last_line in cursor:
            self.all_lines[file_sha1] = (first_line, last_line)

        if reopening:
            self.__setLines(commit.getFileSHA1(self.file_path), self.location)

        return True

    def calculateInitialLines(self):
        """
        Calculate the initial set of line mappings for a comment chain.

        Propagates the initial location both backward and forward through all
        current commits in the review.  If, through forward propagation, the
        location becomes inactive, the 'active' attribute is set to False.
        In any case, the 'lines' attribute will map each file SHA-1 to a pair
        of line numbers (first_line, last_line) for each location found
        during the propagation.

        Returns the value of the 'active' attribute.
        """
        self.head = self.review.branch.getHead(self.db)
        self.__propagate(self.review.getCommitSet(self.db))
        return self.active

    def calculateAdditionalLines(self, commits, head):
        """
        Calculate additional set of line mappings when adding new commits.

        If this propagation object is not active (because the comment chain
        it represents is not present in the most recent commit in the review)
        then nothing happens.

        Returns the value of the 'active' attribute.
        """
        self.head = head
        self.__propagate(commits)
        return self.active

    def __propagate(self, commits):
        # Walk the commit graph from 'initial_commit', mapping the location
        # onto every reachable file version.  Rebase records are used to
        # bridge gaps where the commit-set lacks parent/child edges.
        cursor = self.db.cursor()

        def propagateBackward(commit, location, processed):
            parents = commits.getParents(commit)
            recurse = []

            if not parents:
                # No parents in the commit-set: follow a rebase edge, if any.
                rebase = self.rebases.fromNewHead(commit)
                if rebase:
                    parents.add(rebase.old_head)
            else:
                for parent_sha1 in commit.parents:
                    rebase = self.rebases.fromNewHead(parent_sha1)
                    if rebase:
                        parents.add(rebase.old_head)

            for parent in parents - processed:
                changes, removed, added = self.__getChanges(parent, commit)
                if added:
                    # File was added by this commit; nothing to map backward.
                    pass
                elif changes:
                    parent_location = location.copy()
                    if parent_location.apply(changes, BACKWARD):
                        file_sha1 = parent.getFileSHA1(self.file_path)
                        assert file_sha1
                        self.__setLines(file_sha1, parent_location)
                        recurse.append((parent, parent_location))
                else:
                    # File untouched between parent and commit.
                    recurse.append((parent, location))

            processed.add(commit)

            for parent, parent_location in recurse:
                propagateBackward(parent, parent_location, processed)

        def propagateForward(commit, location, processed):
            if commit == self.head:
                # The location survived all the way to the head.
                self.active = True

            children = commits.getChildren(commit)
            recurse = []

            if not children:
                rebase = self.rebases.fromOldHead(commit)
                if rebase:
                    children.update([rebase.new_head])

            if not children:
                assert not commits or commit in commits.getHeads() or self.rebases.fromNewHead(commit)

            for child in children - processed:
                changes, removed, added = self.__getChanges(commit, child)
                if removed:
                    # File deleted by 'child': the chain is addressed.
                    self.addressed_by.append(AddressedBy(commit, child, location))
                elif changes:
                    child_location = location.copy()
                    if child_location.apply(changes, FORWARD):
                        file_sha1 = child.getFileSHA1(self.file_path)
                        assert file_sha1
                        self.__setLines(file_sha1, child_location)
                        recurse.append((child, child_location))
                    else:
                        # The change overlapped the location.
                        self.addressed_by.append(AddressedBy(commit, child, location))
                else:
                    recurse.append((child, location))

            processed.add(commit)

            for child, child_location in recurse:
                propagateForward(child, child_location, processed)

            # If we started propagation in the middle of, or at the end of, the
            # commit-set, this call does the main backward propagation.  After
            # that, it will do extra backward propagation via other parents of
            # merge commits encountered during forward propagation.
            #
            # For non-merge commits, 'processed' will always contain the single
            # parent of 'commit', and propagateBackward() will find no parent
            # commits to process, leaving this call a no-op.
            propagateBackward(commit, location, processed)

        # Will be set to True again if propagation reaches the head of the
        # commit-set.
        self.active = False

        propagateForward(self.initial_commit, self.location, set())

    def __getChanges(self, from_commit, to_commit):
        # Returns (chunks, removed, added) for this propagation's file in the
        # diff from_commit..to_commit; (None, False, False) if untouched.
        changesets = createChangeset(self.db, user=None,
                                     repository=self.review.repository,
                                     from_commit=from_commit, to_commit=to_commit,
                                     filtered_file_ids=set([self.file_id]),
                                     do_highlight=False)

        assert len(changesets) == 1

        if changesets[0].files:
            changed_file = changesets[0].files[0]
            assert changed_file.id == self.file_id
            # All-zero SHA-1 marks a missing side (file added/removed).
            removed = changed_file.new_sha1 == "0" * 40
            added = changed_file.old_sha1 == "0" * 40
            return changesets[0].files[0].chunks, removed, added
        else:
            return None, False, False

    def __setLines(self, file_sha1, lines):
        # Record the range for a file version; a version already known must
        # map to the same range.
        if file_sha1 not in self.all_lines:
            self.all_lines[file_sha1] = self.new_lines[file_sha1] = tuple(lines)
        else:
            assert self.all_lines[file_sha1] == tuple(lines)



================================================
FILE: src/reviewing/filters.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the
# License for the specific language governing permissions and limitations under
# the License.

import dbutils
import time
import re

class PatternError(Exception):
    """Raised by validatePattern() for a malformed filter path pattern."""

    def __init__(self, pattern, message):
        self.pattern = pattern
        self.message = message

    def __str__(self):
        return "%s: %s" % (self.pattern, self.message)

def sanitizePath(path):
    # Collapse duplicate slashes and strip leading slashes/whitespace; an
    # empty result means the repository root.
    return re.sub("//+", "/", path.strip().lstrip("/")) or "/"

def validatePattern(pattern):
    """Raise PatternError unless every '**' in 'pattern' occupies a whole
       path component (optionally followed by '/')."""
    if re.search(r"[^/]\*\*", pattern):
        raise PatternError(pattern, "** not at beginning of path or path component")
    elif re.search(r"\*\*$", pattern):
        raise PatternError(pattern, "** at end of path")
    elif re.search(r"\*\*[^/]", pattern):
        raise PatternError(pattern, "** not at end of path component")

def validPattern(pattern):
    """Return True if validatePattern() accepts 'pattern'."""
    try:
        validatePattern(pattern)
        return True
    except PatternError:
        return False

def compilePattern(pattern):
    """Compile a filter path pattern into an anchored regexp.

       '**/' matches any number of directories, '*' any run of non-slash
       characters, '?' a single non-slash character."""
    wildcards = { "**/": "(?:[^/]+/)*",
                  "**": "(?:[^/]+(?:/|$))*",
                  "*": "[^/]*",
                  "?": "[^/]" }

    def escape(match):
        return "\\" + match.group(0)
    def replacement(match):
        return wildcards[match.group(0)]

    # Escape regexp metacharacters before expanding the wildcards.
    pattern = re.sub(r"[[{()+^$.\\|]", escape, pattern)

    return re.compile("^" + re.sub("\\*\\*(?:/|$)|\\*|\\?", replacement, pattern) + "$")

def hasWildcard(string):
    return "*" in string or "?" in string

class Path(object):
    """A parsed filter path: pattern regexp plus dirname/filename split.

       'fixedDirname' is the wildcard-free directory prefix; 'wildDirname'
       the first wildcard-containing component onward (None if the dirname
       has no wildcards)."""

    def __init__(self, path):
        path = path.lstrip("/")

        self.path = path

        if hasWildcard(path):
            validatePattern(path)

        if path.endswith("/"):
            # A directory filter matches everything beneath it.
            self.regexp = compilePattern(path + "**/*")
        else:
            self.regexp = compilePattern(path)

        if not path:
            self.dirname, self.filename = "", None
        elif "/" in path:
            self.dirname, self.filename = path.rsplit("/", 1)
            if not self.filename:
                self.filename = None
        else:
            self.dirname, self.filename = "", path

        if hasWildcard(self.dirname):
            components = self.dirname.split("/")
            for index, component in enumerate(components):
                if hasWildcard(component):
                    self.fixedDirname = "/".join(components[:index])
                    self.wildDirname = "/".join(components[index:])
                    self.wildDirnameRegExp = compilePattern(self.wildDirname)
                    break
        else:
            self.fixedDirname = self.dirname
            self.wildDirname = None

        if self.filename and hasWildcard(self.filename):
            self.filenameRegExp = compilePattern(self.filename)
        else:
            self.filenameRegExp = None

    def __repr__(self):
        return "Path(%r)" % self.path

    def match(self, path):
        return bool(self.regexp.match(path))

    @staticmethod
    def cmp(pathA, pathB):
        """Order two filter path strings by precedence (lowest first)."""
        # Filters that select individual files rank above filters that
        # select directories (even if the actual name of the file contains
        # wildcards.)
        if pathA.endswith("/") and not pathB.endswith("/"):
            return -1
        elif not pathA.endswith("/") and pathB.endswith("/"):
            return 1

        # Filters with more slashes in them rank higher than filters with fewer
        # slashes (but "**/" doesn't count as a slash, since it might match zero
        # slashes in practice.)
        specificityA = pathA.count("/") - len(re.findall(r"\*\*/", pathA))
        specificityB = pathB.count("/") - len(re.findall(r"\*\*/", pathB))

        if specificityA < specificityB:
            return -1
        elif specificityA > specificityB:
            return 1

        # Filters with fewer wildcards in them rank higher than filters with
        # more wildcards.
        wildcardsA = len(re.findall("\\*\\*|\\*|\\?", pathA))
        wildcardsB = len(re.findall("\\*\\*|\\*|\\?", pathB))

        if wildcardsA < wildcardsB:
            return 1
        elif wildcardsA > wildcardsB:
            return -1

        # Fall back to lexicographical ordering. The filters probably won't
        # match the same files anyway, and if they do, well, at least this
        # way it's stable and predictable.
        return cmp(pathA, pathB)

class Filters:
    """Resolves which users review/watch/ignore which files.

       Built from a set of file paths (setFiles) and a precedence-ordered
       list of filters (addFilters); later-applied (higher precedence)
       filters overwrite earlier ones per (user, file)."""

    def __init__(self):
        # Pseudo-types:
        #   data: dict(user_id -> tuple(filter_type, delegate))
        #   file: tuple(file_id, data)
        #   tree: tuple(dict(dirname -> tree), dict(filename -> file))

        self.files = {}          # dict(path -> file)
        self.directories = {}    # dict(dirname -> tree)
        self.root = ({}, {})     # tree
        self.data = {}           # dict(file_id -> data)
        self.active_filters = {} # dict(user_id -> set(filter_id))
        self.matched_files = {}  # dict(filter_id -> set(file_id))

        # Note: The same per-file 'data' objects are referenced by all of
        # 'self.files', 'self.tree' and 'self.data'.

        self.directories[""] = self.root

    def setFiles(self, db, file_ids=None, review=None):
        """Populate the path tree from explicit file ids or a review's files.

           Exactly one of 'file_ids' and 'review' must be given."""
        assert (file_ids is None) != (review is None)

        cursor = db.cursor()

        if file_ids is None:
            cursor.execute("SELECT DISTINCT file FROM reviewfiles WHERE review=%s", (review.id,))
            file_ids = [file_id for (file_id,) in cursor]

        cursor.execute("SELECT id, path FROM files WHERE id=ANY (%s)", (file_ids,))

        for file_id, path in cursor:
            data = {}

            self.files[path] = (file_id, data)
            self.data[file_id] = data

            if "/" in path:
                dirname, filename = path.rsplit("/", 1)

                # Create (or find) the tree node for 'dirname', creating all
                # missing ancestors along the way.
                def find_tree(dirname):
                    tree = self.directories.get(dirname)
                    if tree:
                        return tree
                    tree = self.directories[dirname] = ({}, {})
                    if "/" in dirname:
                        dirname, basename = dirname.rsplit("/", 1)
                        find_tree(dirname)[0][basename] = tree
                    else:
                        self.root[0][dirname] = tree
                    return tree

                tree = find_tree(dirname)
            else:
                filename = path
                tree = self.root

            tree[1][filename] = self.files[path]

    def addFilter(self, user_id, path, filter_type, delegate, filter_id):
        """Apply one filter to every file it matches.

           'ignored' removes the user's association; 'reviewer'/'watcher'
           (over)writes it.  Matched file ids are recorded per filter_id."""
        def files_in_tree(components, tree):
            # Yield (dirname, filename, file_id, data) for every file at or
            # below 'tree'.
            for dirname, child_tree in tree[0].items():
                for f in files_in_tree(components + [dirname], child_tree):
                    yield f
            dirname = "/".join(components) + "/" if components else ""
            for filename, (file_id, data) in tree[1].items():
                yield dirname, filename, file_id, data

        if not path:
            dirname, filename = "", None
            components = []
        elif "/" in path:
            dirname, filename = path.rsplit("/", 1)
            if not dirname:
                dirname = ""
                components = []
            else:
                components = dirname.split("/")
            if not filename:
                filename = None
        else:
            dirname, filename = "", path
            components = []

        # Shadows the module-level helper of the same name (identical body).
        def hasWildcard(string):
            return "*" in string or "?" in string

        file_ids = []
        files = []

        if hasWildcard(path):
            tree = self.root

            # Descend through the wildcard-free prefix of the dirname.
            for index, component in enumerate(components):
                if hasWildcard(component):
                    wild_dirname = "/".join(components[index:]) + "/"
                    break
                else:
                    tree = tree[0].get(component)
                    if not tree:
                        return
            else:
                wild_dirname = None

            re_filename = compilePattern(filename or "*")

            if wild_dirname:
                re_dirname = compilePattern(wild_dirname)
                for dirname, filename, file_id, data in files_in_tree([], tree):
                    if re_dirname.match(dirname) and re_filename.match(filename):
                        file_ids.append(file_id)
                        files.append(data)
            else:
                for filename, (file_id, data) in tree[1].items():
                    if re_filename.match(filename):
                        file_ids.append(file_id)
                        files.append(data)
        else:
            if filename:
                if path in self.files:
                    file_id, data = self.files[path]
                    file_ids.append(file_id)
                    files.append(data)
                else:
                    return
            else:
                if dirname in self.directories:
                    for _, _, file_id, data in files_in_tree([dirname], self.directories[dirname]):
                        file_ids.append(file_id)
                        files.append(data)
                else:
                    return

        self.matched_files[filter_id] = file_ids

        if filter_type == "ignored":
            for data in files:
                if user_id in data:
                    del data[user_id]
        elif filter_type in ("reviewer", "watcher"):
            if files:
                self.active_filters.setdefault(user_id, set()).add(filter_id)
            for data in files:
                data[user_id] = (filter_type, delegate)

    def addFilters(self, filters):
        """Apply an iterable of (user_id, path, type, delegate[, filter_id])
           tuples, lowest-precedence first (ordered by Path.cmp)."""
        def compareFilters(filterA, filterB):
            return Path.cmp(filterA[1], filterB[1])

        def add_filter_id(filter_data):
            # 4-tuples (no filter id) are padded with None.
            if len(filter_data) == 4:
                return tuple(filter_data) + (None,)
            return filter_data

        sorted_filters = sorted(map(add_filter_id, filters), cmp=compareFilters)

        for user_id, path, filter_type, delegate, filter_id in sorted_filters:
            self.addFilter(user_id, path, filter_type, delegate, filter_id)

    # NOTE(review): reconstructed as a class nested inside Filters (a
    # lightweight stand-in accepted by load()); verify against upstream.
    class Review:
        def __init__(self, review_id, applyfilters, applyparentfilters, repository):
            self.id = review_id
            self.applyfilters = applyfilters
            self.applyparentfilters = applyparentfilters
            self.repository = repository

    def load(self, db, repository=None, review=None, recursive=False, user=None, added_review_filters=[], removed_review_filters=[]):
        """Load filters from the database and apply them.

           Either 'repository' (global filters, optionally from parent
           repositories too) or 'review' (global + per-review filters) must
           be given.  'user' restricts loading to one user's filters;
           added/removed_review_filters let callers preview edits.

           NOTE(review): the two list defaults are mutable default arguments;
           they are only read here."""
        assert (repository is None) != (review is None)

        cursor = db.cursor()

        if user is not None:
            # Injected directly into the SQL; user.id is an integer.
            user_filter = " AND uid=%d" % user.id
        else:
            user_filter = ""

        def loadGlobal(repository, recursive):
            if recursive and repository.parent:
                loadGlobal(repository.parent, recursive)

            cursor.execute("""SELECT filters.uid, filters.path, filters.type, filters.delegate, filters.id
                                FROM filters
                                JOIN users ON (users.id=filters.uid)
                               WHERE filters.repository=%%s
                                 AND users.status!='retired'
                                     %s""" % user_filter,
                           (repository.id,))
            self.addFilters(cursor)

        def loadReview(review):
            cursor.execute("""SELECT reviewfilters.uid, reviewfilters.path, reviewfilters.type, NULL
                                FROM reviewfilters
                                JOIN users ON (users.id=reviewfilters.uid)
                               WHERE reviewfilters.review=%%s
                                 AND users.status!='retired'
                                     %s""" % user_filter,
                           (review.id,))

            if added_review_filters or removed_review_filters:
                review_filters = set(cursor.fetchall())
                review_filters -= set(map(tuple, removed_review_filters))
                review_filters |= set(map(tuple, added_review_filters))
                self.addFilters(list(review_filters))
            else:
                self.addFilters(cursor)

        if review:
            if review.applyfilters:
                loadGlobal(review.repository, review.applyparentfilters)
            loadReview(review)
        else:
            loadGlobal(repository, recursive)

    def getUserFileAssociation(self, user_id, file_id):
        """Return 'reviewer', 'watcher' or None for (user, file)."""
        user_id = int(user_id)
        file_id = int(file_id)

        data = self.data.get(file_id)
        if not data:
            return None

        data = data.get(user_id)
        if not data:
            return None

        return data[0]

    def isReviewer(self, user_id, file_id):
        return self.getUserFileAssociation(user_id, file_id) == 'reviewer'

    def isWatcher(self, user_id, file_id):
        return self.getUserFileAssociation(user_id, file_id) == 'watcher'

    def isRelevant(self, user_id, file_id):
        return self.getUserFileAssociation(user_id, file_id) in ('reviewer', 'watcher')

    def listUsers(self, file_id):
        # dict(user_id -> (filter_type, delegate)); empty if file unknown.
        return self.data.get(file_id, {})

    def getRelevantFiles(self):
        """Return dict(user_id -> set(file_id)) of reviewed/watched files."""
        relevant = {}
        for file_id, data in self.data.items():
            for user_id, (filter_type, _) in data.items():
                if filter_type in ('reviewer', 'watcher'):
                    relevant.setdefault(user_id, set()).add(file_id)
        return relevant

    def getActiveFilters(self, user):
        return self.active_filters.get(user.id, set())

def getMatchedFiles(repository, paths):
    """Return dict(path -> [matching filenames]) against repository HEAD.

       Paths are matched highest-precedence first, each file counted for at
       most one path."""
    paths = [Path(path) for path in sorted(paths, cmp=Path.cmp, reverse=True)]

    # Compute the longest wildcard-free directory prefix shared by all paths,
    # to limit the ls-tree scan.
    common_fixedDirname = None
    for path in paths:
        if path.fixedDirname is None:
            common_fixedDirname = []
            break
        elif common_fixedDirname is None:
            common_fixedDirname = path.fixedDirname.split("/")
        else:
            for index, component in enumerate(path.fixedDirname.split("/")):
                if index == len(common_fixedDirname):
                    break
                elif common_fixedDirname[index] != component:
                    del common_fixedDirname[index:]
                    break
            else:
                # NOTE(review): truncating at 'index' (not index + 1) drops
                # the last matching component; conservative (scans more) but
                # looks like an off-by-one — verify against upstream.
                del common_fixedDirname[index:]
    common_fixedDirname = "/".join(common_fixedDirname)

    args = ["ls-tree", "-r", "--name-only", "HEAD"]

    if common_fixedDirname:
        args.append(common_fixedDirname + "/")

    matched = dict((path.path, []) for path in paths)

    if repository.isEmpty():
        return matched

    filenames = repository.run(*args).splitlines()

    if len(paths) == 1 and not paths[0].wildDirname and not paths[0].filename:
        # Single plain directory filter: everything listed matches.
        return { paths[0].path: filenames }

    for filename in filenames:
        for path in paths:
            if path.match(filename):
                matched[path.path].append(filename)
                break

    return matched

def countMatchedFiles(repository, paths):
    """Return dict(path -> number of files matched at repository HEAD)."""
    matched = getMatchedFiles(repository, paths)
    return dict((path, len(filenames)) for path, filenames in matched.items())



================================================
FILE: src/reviewing/html.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import htmlutils
import gitutils
import dbutils
import log.html
import diff
import diff.context
import changeset.html as changeset_html
import changeset.utils as changeset_utils
import page.utils
import page.showcommit
import linkify

def renderComments(db, target, user, chain, position, linkify):
    """Render a comment chain's comments, resolution note and action buttons
       into the 'target' HTML fragment.

       NOTE(review): the 'linkify' parameter shadows the module import of the
       same name; it is forwarded to div_text.text()."""
    repository = chain.review.repository

    div_chain = target.div("comments %s" % position)

    for comment in chain.comments:
        div_comment = div_chain.div("comment%s" % (comment.state == "draft" and " draft" or ""),
                                    id="c%dc%d" % (chain.id, comment.id))
        div_header = div_comment.div("header")
        div_header.span("author").text("%s <%s>" % (comment.user.fullname, comment.user.email))
        div_header.text(" posted ")
        div_header.span("time").text(comment.when)
        div_text = div_comment.div("text", id="c%dtext" % comment.id).preformatted()
        div_text.text(comment.comment, linkify=linkify, repository=repository)

    if chain.type == "issue" and chain.state not in ("draft", "open"):
        # Closed or addressed issue: show how it was resolved.
        div_resolution = div_chain.div("resolution")
        if chain.state == "addressed":
            div_resolution.text("Addressed by ").a(href="showcommit?review=%d&sha1=%s" % (chain.review.id, chain.addressed_by.sha1)).text(chain.addressed_by.sha1[:8])
            if chain.closed_by:
                div_resolution.text(" (by %s)" % chain.closed_by.fullname)
        else:
            div_resolution.text("Resolved by " + chain.closed_by.fullname)

    div_buttons = div_chain.div("buttons")

    if (chain.state == "closed" or chain.addressed_by) and chain.type == "issue":
        div_buttons.button("reopen", onclick="commentChainById[%d].reopen(true);" % chain.id).text("Reopen Issue")

    # NOTE(review): nesting reconstructed from a flattened dump — the final
    # else pairs with the type check (morph note -> issue); verify upstream.
    if chain.type == "issue":
        if chain.state == "open":
            if not chain.type_is_draft:
                div_buttons.button("resolve", onclick="commentChainById[%d].resolve(null);" % chain.id).text("Resolve Issue")
            if chain.type_is_draft or user.getPreference(db, "ui.convertIssueToNote"):
                div_buttons.button("morph", onclick="commentChainById[%d].morph(null, this);" % chain.id).text("Convert %sto Note" % ("back " if chain.type_is_draft else ""))
    else:
        div_buttons.button("morph", onclick="commentChainById[%d].morph(null, this);" % chain.id).text("Convert %sto Issue" % ("back " if chain.type_is_draft else ""))

    if chain.comments[-1].state == "draft":
        # The user's own unsubmitted comment can still be edited or deleted.
        div_buttons.button("edit", onclick="commentChainById[%d].editComment(commentChainById[%d].comments[%d], null);" % (chain.id, chain.id, len(chain.comments) - 1)).text("Edit")
        div_buttons.button("delete", onclick="commentChainById[%d].deleteComment(commentChainById[%d].comments[%d], null);" % (chain.id, chain.id, len(chain.comments) - 1)).text("Delete")
        reply_hidden = " hidden"
    else:
        reply_hidden = ""

    div_buttons.button("reply" + reply_hidden, onclick="commentChainById[%d].reply(null);" % chain.id).text("Reply")
    div_buttons.span("buttonscope buttonscope-comment")

def getCodeCommentChainChangeset(db, chain, original=False):
    # Returns the (parent, child) commit pair whose diff the chain should be
    # displayed against; continues past this chunk.
    if (chain.state != "addressed" or original) and chain.first_commit == chain.last_commit:
        # Comment against a single version of the file, not against a diff.
def renderCodeCommentChain(db, target, user, review, chain, context_lines=3, compact=False, tabify=False, original=False, changeset=None, linkify=False):
    """Render a comment chain that is attached to lines in a file, including
    the surrounding code context (as a single version or as a diff).

    context_lines -- number of context lines around the commented lines
    original      -- render against the commits the chain was created
                     against, even if the chain has been addressed since
    changeset     -- optional pre-loaded changeset to render from
    """
    repository = review.repository

    old_sha1 = None
    new_sha1 = None

    # Sentinels marking which side of the diff the chain's lines refer to.
    old = 1
    new = 2

    cursor = db.cursor()

    file_id = chain.file_id
    file_path = dbutils.describe_file(db, file_id)

    if (chain.state != "addressed" or original) and chain.first_commit == chain.last_commit:
        # Single-version display: fabricate one all-CONTEXT macro chunk
        # around the commented lines instead of computing a diff.
        sha1 = chain.first_commit.getFileSHA1(file_path)

        cursor.execute("SELECT first_line, last_line FROM commentchainlines WHERE chain=%s AND sha1=%s", (chain.id, sha1))
        first_line, last_line = cursor.fetchone()

        file = diff.File(file_id, file_path, sha1, sha1, review.repository, chunks=[])
        file.loadNewLines(True)

        start = max(1, first_line - context_lines)
        end = min(file.newCount(), last_line + context_lines)
        count = end + 1 - start

        lines = file.newLines(True)
        lines = [diff.Line(diff.Line.CONTEXT, start + index, lines[start + index - 1], start + index, lines[start + index - 1]) for index in range(count)]

        file.macro_chunks = [diff.MacroChunk([], lines)]

        use = new
        display_type = "new"
        commit_url_component = "sha1=%s" % chain.first_commit.sha1
    else:
        # Diff display: figure out the parent/child commit pair first.
        if chain.state == "addressed" and not original and review.containsCommit(db, chain.addressed_by):
            parent = gitutils.Commit.fromSHA1(db, review.repository, chain.addressed_by.parents[0])
            child = chain.addressed_by
            use = old
        else:
            parent = chain.first_commit
            child = chain.last_commit

            if parent == child:
                if chain.origin == "old":
                    # Chain was created against the old side; look up the
                    # review changeset whose parent is this commit.
                    cursor.execute("""SELECT changesets.child
                                        FROM changesets, reviewchangesets
                                       WHERE changesets.parent=%s
                                         AND reviewchangesets.changeset=changesets.id
                                         AND reviewchangesets.review=%s""",
                                   [child.getId(db), review.id])
                    # NOTE(review): bare except silently falls back to the
                    # commit's first parent; consider narrowing to TypeError
                    # (fetchone() returning None).
                    try:
                        child = gitutils.Commit.fromId(db, repository, cursor.fetchone()[0])
                    except:
                        parent = gitutils.Commit.fromSHA1(db, repository, child.parents[0])
                else:
                    parent = gitutils.Commit.fromSHA1(db, repository, child.parents[0])

            if chain.origin == "old":
                use = old
            else:
                use = new

        # A single-parent direct child can be loaded as a plain commit
        # changeset; otherwise request a custom from/to changeset.
        if parent.sha1 in child.parents and len(child.parents) == 1:
            commit = child
            from_commit = None
            to_commit = None
        else:
            commit = None
            from_commit = parent
            to_commit = child

        if changeset:
            assert ((changeset.parent == from_commit and changeset.child == to_commit) if commit is None else (changeset.parent.sha1 == commit.parents[0] and changeset.child == commit))
            assert changeset.getFile(file_id)
        else:
            changeset = changeset_utils.createChangeset(db, user, repository, commit=commit, from_commit=from_commit, to_commit=to_commit, filtered_file_ids=set((file_id,)))[0]

        file = changeset.getFile(file_id)
        if not file:
            if chain.state == "addressed" and not original:
                # The addressing commit doesn't touch the file; fall back to
                # rendering the chain against its original commits.
                renderCodeCommentChain(db, target, user, review, chain, context_lines, compact, tabify, original=True)
                return
            else:
                # NOTE(review): bare `raise` with no active exception is a
                # runtime error itself; this should raise a real exception.
                raise

        # Commit so that the diff and its analysis, written to the database by createChangeset(),
        # can be reused later.
        db.commit()

        old_sha1 = file.old_sha1
        new_sha1 = file.new_sha1

        # A null SHA-1 means the file doesn't exist on that side; flip to the
        # side that does exist.
        if use == old and old_sha1 == '0' * 40:
            use = new
        elif use == new and new_sha1 == '0' * 40:
            use = old

        if use == old:
            sha1 = old_sha1
        else:
            sha1 = new_sha1

        cursor.execute("SELECT first_line, last_line FROM commentchainlines WHERE chain=%s AND sha1=%s", [chain.id, sha1])
        first_line, last_line = cursor.fetchone()

        # NOTE(review): readChunks() and includeChunk() appear unused in this
        # function; candidates for removal.
        def readChunks():
            return [diff.Chunk(delete_offset, delete_count, insert_offset, insert_count, analysis=analysis, is_whitespace=is_whitespace)
                    for delete_offset, delete_count, insert_offset, insert_count, analysis, is_whitespace
                    in cursor.fetchall()]

        first_context_line = first_line - context_lines
        last_context_line = last_line + context_lines

        def includeChunk(chunk):
            # True if the chunk overlaps the commented region plus context.
            if use == old:
                chunk_first_line, chunk_last_line = chunk.delete_offset, chunk.delete_offset + chunk.delete_count - 1
            else:
                chunk_first_line, chunk_last_line = chunk.insert_offset, chunk.insert_offset + chunk.insert_count - 1
            return chunk_last_line >= first_context_line and chunk_first_line <= last_context_line

        def lineFilter(line):
            # Keep only lines within the context window, excluding lines from
            # the opposite side at the window's top edge.
            if use == old:
                linenr = line.old_offset
                if linenr == first_context_line and line.type == diff.Line.INSERTED:
                    return False
            else:
                linenr = line.new_offset
                if linenr == first_context_line and line.type == diff.Line.DELETED:
                    return False
            return first_context_line <= linenr <= last_context_line

        file.loadOldLines(True)
        file.loadNewLines(True)

        context = diff.context.ContextLines(file, file.chunks, [(chain, use == old)])
        file.macro_chunks = context.getMacroChunks(context_lines, highlight=True, lineFilter=lineFilter)

        try:
            macro_chunk = file.macro_chunks[0]
        except:
            # NOTE(review): raising a string is invalid (TypeError on Python
            # 2.6+); should be e.g. `raise Exception(repr(...))`.
            raise repr((parent.sha1, child.sha1))

        display_type = "both"

        if chain.state != "addressed":
            # If every line in the chunk is of the same (single-sided or
            # context) type, collapse the display to one side only.
            first_line_type = macro_chunk.lines[0].type
            if first_line_type == diff.Line.CONTEXT or (use == old and first_line_type == diff.Line.DELETED) or (use == new and first_line_type == diff.Line.INSERTED):
                for line in macro_chunk.lines[1:]:
                    if first_line_type != line.type:
                        break
                else:
                    display_type = "old" if use == old else "new"

        commit_url_component = "from=%s&to=%s" % (parent.sha1, child.sha1)

    def renderHeaderLeft(db, target, file):
        # Chain title linking to the stand-alone comment page.
        target.span("comment-chain-title").a(href="/showcomment?chain=%d" % chain.id).text(chain.title())

    def renderHeaderRight(db, target, file):
        # File path linking into showcommit at the commented line.
        side = use == old and "o" or "n"
        uri = "showcommit?%s&review=%d&file=%d#f%d%s%d" % (commit_url_component, review.id, file.id, file.id, side, first_line)
        target.span("filename").a(href=uri).text(file.path)

    def renderCommentsLocal(db, target, **kwargs):
        # Position the comments under the side of the diff they refer to.
        if display_type == "both":
            if use == old:
                position = "left"
            else:
                position = "right"
        else:
            position = "center"

        renderComments(db, target, user, chain, position, linkify)

    def lineId(base):
        return "c%d%s" % (chain.id, base)

    def lineCellId(base):
        return "c%d%s" % (chain.id, base)

    target.addInternalScript("commentChainById[%d] = %s;" % (chain.id, chain.getJSConstructor(sha1)), here=True)

    changeset_html.renderFile(db, target, user, review, file,
                              options={ "support_expand": False,
                                        "display_type": display_type,
                                        "header_left": renderHeaderLeft,
                                        "header_right": renderHeaderRight,
                                        "content_after": renderCommentsLocal,
                                        "show": True,
                                        "expand": True,
                                        "line_id": lineId,
                                        "line_cell_id": lineCellId,
                                        "compact": compact,
                                        "tabify": tabify,
                                        "include_deleted": True })

    data = (chain.id, file_id, use == old and "o" or "n", first_line,
            chain.id, file_id, use == old and "o" or "n", last_line,
            htmlutils.jsify(chain.type), htmlutils.jsify(chain.state),
            chain.id)

    # Client-side markers highlighting the commented line range.
    target.addInternalScript("""$(document).ready(function ()
  {
    var markers = new CommentMarkers(null);
    markers.setLines(document.getElementById('c%df%d%s%d'), document.getElementById('c%df%d%s%d'));
    markers.setType(%s, %s);
    commentChainById[%d].markers = markers;
  });""" % data, here=True)
def renderReviewCommentChain(db, target, user, review, chain, linkify=False, message=None):
    """Render a review-wide comment chain (not attached to code or to a
    commit) as a centered table, optionally preceded by an "excuse" message
    rendered as raw HTML."""
    # Register the chain's client-side object before emitting markup that
    # refers to it via onclick handlers.
    target.addInternalScript("commentChainById[%d] = %s;" % (chain.id, chain.getJSConstructor()), here=True)

    layout = target.table("file show expanded first", width="60%", align="center", cellspacing=0, cellpadding=0)

    # Eight columns mirrored around the center, matching the diff-table layout.
    column_group = layout.colgroup()
    for column_class in ("edge", "linenr", "line", "middle", "middle", "line", "linenr", "edge"):
        column_group.col(column_class)

    header_cell = layout.thead().tr().td("left", colspan=8, align="left")
    header_cell.span("comment-chain-title").a(href="/showcomment?chain=%d" % chain.id).text(chain.title())

    layout.tbody('spacer').tr('spacer').td(colspan='8').text()

    if message:
        excuse_row = layout.tbody("content").tr("content")
        excuse_row.td(colspan=2).text()
        excuse_row.td("excuse", colspan=4).innerHTML(message)
        excuse_row.td(colspan=2).text()
        layout.tbody('spacer').tr('spacer').td(colspan='8').text()

    comments_row = layout.tbody("content").tr("content")
    comments_row.td(colspan=2).text()
    renderComments(db, comments_row.td(colspan=4), user, chain, "center", linkify)
    comments_row.td(colspan=2).text()

    layout.tbody('spacer').tr('spacer').td(colspan='8').text()
    layout.tfoot().tr().td("left", colspan=8, align="left").text()
def renderCommitCommentChain(db, target, user, review, chain, linkify=False):
    """Render a comment chain attached to a commit message: the commit info
    followed by the chain's comments, in a centered table."""
    # Register the chain's client-side object before emitting markup that
    # refers to it via onclick handlers.
    target.addInternalScript("commentChainById[%d] = %s;" % (chain.id, chain.getJSConstructor()), here=True)

    table = target.table("file show expanded first", width="60%", align="center", cellspacing=0, cellpadding=0)

    # Eight columns mirrored around the center, matching the diff-table layout.
    columns = table.colgroup()
    columns.col("edge")
    columns.col("linenr")
    columns.col("line")
    columns.col("middle")
    columns.col("middle")
    columns.col("line")
    columns.col("linenr")
    columns.col("edge")

    table.thead().tr().td("left", colspan=8, align="left").span("comment-chain-title").a(href="/showcomment?chain=%d" % chain.id).text(chain.title())
    table.tbody('spacer').tr('spacer').td(colspan='8').text()

    # Minimal commit info (author, message) for the commented commit.
    row = table.tbody("content").tr("content")
    row.td(colspan=2).text()
    page.showcommit.renderCommitInfo(db, row.td("content", colspan=4), user, review.repository, review, chain.first_commit, minimal=True)
    row.td(colspan=2).text()

    table.tbody('spacer').tr('spacer').td(colspan='8').text()

    row = table.tbody("content").tr("content")
    row.td(colspan=2).text()
    renderComments(db, row.td(colspan=4), user, chain, "center", linkify)
    row.td(colspan=2).text()

    table.tbody('spacer').tr('spacer').td(colspan='8').text()
    table.tfoot().tr().td("left", colspan=8, align="left").text()
def renderCommentChain(db, target, user, review, chain, context_lines=3, compact=False, tabify=False, original=False, changeset=None, linkify=False):
    """Render a single comment chain, dispatching on what the chain is
    attached to: file lines, a commit message, or the review as a whole."""
    chain.loadComments(db, user)

    # Stylesheets first, then scripts, in the order the page expects them.
    for stylesheet in ("resource/changeset.css",
                       "resource/comment.css",
                       "resource/review.css"):
        target.addExternalStylesheet(stylesheet)
    for script in ("resource/changeset.js",
                   "resource/comment.js",
                   "resource/review.js"):
        target.addExternalScript(script)

    container = target.div("comment-chain", id="c%d" % chain.id)

    if chain.file_id:
        # Attached to lines in a file: render with surrounding code.
        renderCodeCommentChain(db, container, user, review, chain, context_lines, compact, tabify, original, changeset, linkify)
    elif chain.first_commit:
        # Attached to a commit message.
        renderCommitCommentChain(db, container, user, review, chain, linkify)
    else:
        # Attached to the review itself.
        renderReviewCommentChain(db, container, user, review, chain, linkify)
def sendMail(db, review, message_id, from_user, to_user, recipients, subject, body, parent_message_id=None, headers=None):
    """Queue a review-related email via mailutils.queueMail().

    Adds the OperaCritic-* diagnostic headers and an optional List-Id, and —
    if the user has enabled it — extra "is-<association>" recipient addresses
    that allow client-side filtering by the user's association with the
    review.  Returns whatever mailutils.queueMail() returns (a pending-mail
    filename)."""
    # Never mutate the caller's headers dict.
    if headers is None:
        headers = {}
    else:
        headers = headers.copy()

    headers["OperaCritic-URL"] = review.getURL(db, to_user, separator=", ")
    headers["OperaCritic-Association"] = review.getUserAssociation(db, to_user)
    headers["OperaCritic-Repository"] = review.repository.getURL(db, to_user)

    list_id = to_user.getPreference(db, "email.listId", repository=review.repository)
    if list_id:
        headers["List-Id"] = "<%s.%s>" % (list_id, configuration.base.HOSTNAME)

    # Copy before appending so the caller's recipients list is untouched.
    recipients = list(recipients)
    review_association = review.getUserAssociation(db, to_user)

    if to_user.getPreference(db, "email.enableAssociationRecipients", repository=review.repository):
        # Add "is-<association>" plus-addressed system recipients, one per
        # association (e.g. "owner", "reviewer"), for mail filtering.
        system_user, _, system_domain = configuration.base.SYSTEM_USER_EMAIL.partition("@")
        for association in review_association.split(", "):
            recipients.append(mailutils.User(
                "is-%(association)s <%(user)s+%(association)s@%(hostname)s>" % {
                    "association": association,
                    "user": system_user,
                    "hostname": system_domain }))

    return mailutils.queueMail(from_user, to_user, recipients, subject, body, message_id=message_id, parent_message_id=parent_message_id, headers=headers)

class MailDisabled(Exception):
    # Raised (internally) when mail should not be sent to a user at all,
    # e.g. unverified address or a disabling preference.
    pass

def generateSubjectLine(db, user, review, item):
    """Format the subject line for mail type `item` using the user's
    "email.subjectLine.<item>" preference as a %-format template.

    Raises MailDisabled when the template is empty (the user's way of
    disabling this mail type).  A malformed template does not raise: the
    formatting error is returned as the subject so the user notices."""
    subject_format = user.getPreference(db, "email.subjectLine.%s" % item)

    if not subject_format.strip():
        raise MailDisabled

    data = { "id": "r/%d" % review.id,
             "summary": review.summary,
             "progress": str(review.getReviewState(db)),
             "branch": review.branch.name }

    try:
        return subject_format % data
    except Exception as exception:
        return "%s (format: %r)" % (str(exception), subject_format)
def getReviewMessageId(db, to_user, review, files):
    """Return the root message id used for threading this user's mails about
    `review`, creating a placeholder mail (appended to `files`) if none has
    been recorded yet."""
    cursor = db.cursor()
    cursor.execute("""SELECT messageid
                        FROM reviewmessageids
                       WHERE uid=%s
                         AND review=%s""",
                   (to_user.id, review.id))
    # for/else: the else-branch runs only when the query returned no rows
    # (the loop body returns on the first row).
    for (message_id,) in cursor:
        return message_id
    else:
        filename, message_id = sendReviewPlaceholder(
            db, to_user, review, generateMessageId(len(files) + 1))
        if filename:
            files.append(filename)
        return message_id

def renderChainInMail(db, to_user, chain, focus_comment, new_state, new_type, line_length, context_lines):
    """Render one comment chain as plain text for inclusion in an email.

    focus_comment -- the comment from the current batch (must be the chain's
                     last comment) rendered unquoted; earlier comments are
                     quoted according to the user's quotedComments preference
    new_state/new_type -- state/type changes made by the batch, announced as
                     e.g. "ISSUE RESOLVED!" banners
    """
    result = ""
    hr = "-" * line_length

    urls = to_user.getCriticURLs(db)
    url = "\n".join([" %s/showcomment?chain=%d" % (url, chain.id) for url in urls])

    cursor = db.cursor()

    if chain.file_id:
        # Code comment: header plus an excerpt of the commented file lines
        # with line numbers and `context_lines` lines of context.
        path = dbutils.describe_file(db, chain.file_id)

        if chain.first_commit == chain.last_commit or chain.origin == 'old':
            entry = chain.first_commit.getFileEntry(path)
        else:
            entry = chain.last_commit.getFileEntry(path)

        sha1 = entry.sha1
        mode = entry.mode

        first_line, count = chain.lines_by_sha1[sha1]
        context = changeset_utils.getCodeContext(db, sha1, first_line, minimized=True)

        if context:
            result += "%s in %s, %s:\n%s\n%s\n" % (chain.type.capitalize(), path, context, url, hr)
        else:
            result += "%s in %s:\n%s\n%s\n" % (chain.type.capitalize(), path, url, hr)

        file = diff.File(id=chain.file_id, path=path, new_mode=mode, new_sha1=sha1, repository=chain.review.repository)
        file.loadNewLines()

        lines = file.newLines(False)

        last_line = first_line + count - 1
        first_line = max(1, first_line - context_lines)
        last_line = min(last_line + context_lines, len(lines))

        # Right-align line numbers to the width of the largest one.
        width = len(str(last_line))

        for offset, line in enumerate(lines[first_line - 1:last_line]):
            result += "%s|%s\n" % (str(first_line + offset).rjust(width), line)

        result += hr + "\n"
    elif chain.first_commit:
        # Commit-message comment: quote the commented message lines.
        result += "%s in commit %s by %s:\n%s\n%s\n" % (chain.type.capitalize(), chain.first_commit.sha1[:8], chain.first_commit.author.name, url, hr)

        first_line, count = chain.lines_by_sha1[chain.first_commit.sha1]
        last_line = first_line + count - 1

        lines = chain.first_commit.message.splitlines()

        for line in lines[first_line:last_line + 1]:
            result += " %s\n" % line

        result += hr + "\n"
    else:
        result += "General %s:\n%s\n%s\n" % (chain.type, url, hr)

    # Quoting mode: "first", "firstlast", "last" or all comments.
    mode = to_user.getPreference(db, "email.updatedReview.quotedComments")

    def formatComment(comment):
        return "%s at %s:\n%s\n" % (comment.user.fullname, comment.when, textutils.reflow(comment.comment, line_length, indent=2))

    assert not focus_comment or focus_comment == chain.comments[-1], "focus comment (#%d) is not last in chain (#%d) as expected" % (focus_comment.id, chain.id)

    if not focus_comment or len(chain.comments) > 1:
        if focus_comment:
            comments = chain.comments[:-1]
        else:
            comments = chain.comments

        # Quote the header/excerpt rendered above with "> ".
        result = "\n".join(["> " + line for line in result.splitlines()]) + "\n"

        quote1 = ""
        notshown = ""
        quote2 = ""

        if mode == "first":
            quote1 = formatComment(comments[0])
            if len(comments) > 1:
                notshown = "[%d comment%s not shown]" % (len(comments) - 1, "s" if len(comments) > 2 else "")
        elif mode == "firstlast":
            quote1 = formatComment(comments[0])
            if len(comments) > 2:
                notshown = "[%d comment%s not shown]" % (len(comments) - 2, "s" if len(comments) > 3 else "")
            if len(comments) > 1:
                quote2 = formatComment(comments[-1])
        elif mode == "last":
            if len(comments) > 1:
                notshown = "[%d comment%s not shown]" % (len(comments) - 1, "s" if len(comments) > 2 else "")
            quote2 = formatComment(comments[-1])
        else:
            # Any other mode value: quote every comment.
            for comment in comments:
                quote1 += formatComment(comment)

        if quote1:
            result += "\n".join(["> " + line for line in quote1.splitlines()]) + "\n"
        if notshown:
            result += notshown + "\n"
        if quote2:
            result += "\n".join(["> " + line for line in quote2.splitlines()]) + "\n"

        if focus_comment:
            result += "\n"

    if focus_comment:
        # The batch's own comment is rendered unquoted.
        result += formatComment(focus_comment)

    if new_type == "issue":
        result += "\nCONVERTED TO ISSUE!\n"
    elif new_type == "note":
        result += "\nCONVERTED TO NOTE!\n"

    if new_state == "closed":
        result += "\nISSUE RESOLVED!\n"
    elif new_state == "addressed":
        result += "\nISSUE ADDRESSED!\n"
    elif new_state == "open":
        result += "\nISSUE REOPENED!\n"
    elif chain.state == "closed":
        result += "\n(This issue is resolved.)\n"
    elif chain.state == "addressed":
        result += "\n(This issue is addressed.)\n"

    return result
def checkEmailEnabled(db, to_user):
    """Check whether we should send emails to the user."""
    if to_user.email_verified is False:
        # Email address needs verification before use.
        raise MailDisabled
    if not to_user.getPreference(db, "email.activated"):
        # User has requested that no emails be sent.
        raise MailDisabled

def sendReviewCreated(db, from_user, to_user, recipients, review):
    """Compose and queue the "new review" email for one recipient.

    Returns a list with the queued mail's filename, or an empty list when
    mail to this user is disabled.  Depending on the user's preferences the
    mail lists reviewers/watchers, the changes assigned to the user, and the
    commits with optional per-commit stats and unified diffs (each capped by
    a max-lines preference)."""
    # First check if we can/should send emails to the user at all.
    try:
        checkEmailEnabled(db, to_user)
        subject = generateSubjectLine(db, to_user, review, 'newReview')
    except MailDisabled:
        return []

    line_length = to_user.getPreference(db, "email.lineLength")
    hr = "-" * line_length

    data = { 'review.id': review.id,
             'review.url': review.getURL(db, to_user, 2),
             'review.owner.fullname': review.owners[0].fullname,
             'review.branch.name': review.branch.name,
             'review.branch.repository': review.repository.getURL(db, to_user),
             'hr': hr }

    # NOTE(review): the exact line breaks inside these message templates were
    # reconstructed from a collapsed source — verify against VCS.
    body = """%(hr)s
This is an automatic message generated by the review at:
%(review.url)s
%(hr)s

""" % data

    body += """%(review.owner.fullname)s has requested a review of the changes on the branch
%(review.branch.name)s in the repository
%(review.branch.repository)s

""" % data

    all_reviewers = to_user.getPreference(db, "email.newReview.displayReviewers")
    all_watchers = to_user.getPreference(db, "email.newReview.displayWatchers")

    if all_reviewers or all_watchers:
        if all_reviewers:
            if review.reviewers:
                body += "The users assigned to review the changes on the review branch are:\n"
                for reviewer in review.reviewers:
                    body += " " + reviewer.fullname + "\n"
                body += "\n"
            else:
                body += """No reviewers have been identified for the changes in this review.
This means the review is currently stuck; it cannot finish unless there
are reviewers.

"""
        if all_watchers and review.watchers:
            body += "The following additional users are following the review:\n"
            for watcher in review.watchers:
                body += " " + watcher.fullname + "\n"
            body += "\n"
        body += "\n"

    if review.description:
        body += """Description:
%s

""" % textutils.reflow(review.description, line_length, indent=2)

    cursor = db.cursor()
    cursor.execute("""SELECT file, SUM(deleted), SUM(inserted)
                        FROM fullreviewuserfiles
                       WHERE review=%s
                         AND assignee=%s
                    GROUP BY file""",
                   (review.id, to_user.id))

    pending_files_lines = cursor.fetchall()

    if pending_files_lines:
        body += renderFiles(db, to_user, review, "These changes were assigned to you:", pending_files_lines, showcommit_link=True)

    all_commits = to_user.getPreference(db, "email.newReview.displayCommits")

    if all_commits:
        body += "The commits requested to be reviewed are:\n\n"

        contextLines = to_user.getPreference(db, "email.newReview.diff.contextLines")
        diffMaxLines = to_user.getPreference(db, "email.newReview.diff.maxLines")
        displayStats = to_user.getPreference(db, "email.newReview.displayStats")
        statsMaxLines = to_user.getPreference(db, "email.newReview.stats.maxLines")

        if contextLines < 0:
            contextLines = 0

        # FIXME: The order here is essentially random. We shouldn't depend on
        # it, and reversing it doesn't make much sense...
        commits = list(reversed(review.branch.getCommits(db)))

        # Collect per-commit unified diffs until the total exceeds
        # diffMaxLines, in which case diffs are dropped entirely.
        if diffMaxLines == 0:
            diffs = None
        else:
            diffs = {}
            lines = 0

            for commit in commits:
                if len(commit.parents) == 1:
                    cursor.execute("""SELECT id
                                        FROM reviewchangesets
                                        JOIN changesets ON (id=changeset)
                                       WHERE review=%s
                                         AND child=%s""",
                                   (review.id, commit.getId(db)))
                    (changeset_id,) = cursor.fetchone()
                    # NOTE(review): this local `diff` shadows the module-level
                    # `import diff`; harmless here but worth renaming.
                    diff = changeset_text.unified(db, changeset_load.loadChangeset(db, review.repository, changeset_id), contextLines)
                    diffs[commit] = diff
                    lines += diff.count("\n")

                if lines > diffMaxLines:
                    diffs = None
                    break

        # Likewise for `git show --stat` output, capped by statsMaxLines.
        if not displayStats or statsMaxLines == 0:
            stats = None
        else:
            stats = {}
            lines = 0

            for commit in commits:
                commit_stats = review.repository.run("show", "--oneline", "--stat", commit.sha1).split('\n', 1)[1]
                stats[commit] = commit_stats
                lines += commit_stats.count('\n')

                if lines > statsMaxLines:
                    stats = None
                    break

        for index, commit in enumerate(commits):
            if index > 0:
                body += "\n\n\n"

            body += """Commit: %(sha1)s
Author: %(author.fullname)s <%(author.email)s> at %(author.time)s

%(message)s
""" % { 'sha1': commit.sha1,
        'author.fullname': commit.author.getFullname(db),
        'author.email': commit.author.email,
        'author.time': time.strftime("%Y-%m-%d %H:%M:%S", commit.author.time),
        'message': textutils.reflow(commit.message.strip(), line_length, indent=2) }

            if stats and commit in stats:
                body += "---\n" + stats[commit]

            if diffs and commit in diffs:
                body += "\n" + diffs[commit]

    message_id = generateMessageId()

    # Record the message id so later mails about this review can thread off it.
    cursor.execute("INSERT INTO reviewmessageids (uid, review, messageid) VALUES (%s, %s, %s)", [to_user.id, review.id, message_id])

    return [sendMail(db, review, message_id, from_user, to_user, recipients, subject, body)]
def renderFiles(db, to_user, review, title, files_lines, commits=None, relevant_only=False, relevant_files=None, showcommit_link=False):
    """Render a plain-text table of changed files for inclusion in an email.

    files_lines    -- sequence of (file_id, deleted, inserted) tuples
    commits        -- optional commit ids the changes came from, listed below
                      the table
    relevant_only  -- if true, only include files in `relevant_files`
    showcommit_link -- if truthy, append /showcommit links; may be a
                      (from_sha1, to_sha1) pair to narrow the link to a
                      commit range, or simply True for all pending changes

    Returns "" when there is nothing to show (empty input, or everything
    filtered out by relevant_only)."""
    result = ""

    if files_lines:
        files = []

        for file_id, delete_count, insert_count in files_lines:
            if not relevant_only or file_id in relevant_files:
                files.append((dbutils.describe_file(db, file_id), delete_count, insert_count))

        if files:
            paths = []
            deleted = []
            inserted = []

            for path, delete_count, insert_count in sorted(files):
                paths.append(path)
                deleted.append(delete_count)
                inserted.append(insert_count)

            paths = diff.File.eliminateCommonPrefixes(paths, text=True)

            # Column widths for aligned output.
            len_paths = max(map(len, paths))
            len_deleted = max(map(len, map(str, deleted)))
            len_inserted = max(map(len, map(str, inserted)))

            result += title + "\n"

            for path, delete_count, insert_count in zip(paths, deleted, inserted):
                if delete_count == 0 and insert_count == 0:
                    # No line counts at all: binary file.
                    result += " %s binary file\n" % path.ljust(len_paths)
                else:
                    delete_field = delete_count > 0 and "-%d" % delete_count or ""
                    insert_field = insert_count > 0 and "+%d" % insert_count or ""
                    result += " %s %s %s\n" % (path.ljust(len_paths), delete_field.rjust(len_deleted + 1), insert_field.rjust(len_inserted + 1))

            if commits:
                if len(commits) == 1:
                    result += "from this commit:\n"
                else:
                    result += "from these commits:\n"
                for commit_id in commits:
                    commit = gitutils.Commit.fromId(db, review.repository, commit_id)
                    result += " %s %s\n" % (commit.sha1[:8], commit.niceSummary())

            if showcommit_link:
                urls = to_user.getCriticURLs(db)

                # showcommit_link is either a (from_sha1, to_sha1) pair or a
                # plain truthy flag; a non-unpackable value raises TypeError
                # (or ValueError for a wrong-sized sequence), in which case we
                # fall back to the unfiltered link.  (Was a bare `except:`,
                # which also swallowed KeyboardInterrupt/SystemExit.)
                try:
                    from_sha1, to_sha1 = showcommit_link
                    url_format = " %%s/showcommit?review=%%d&from=%s&to=%s&filter=pending\n" % (from_sha1, to_sha1)
                except (TypeError, ValueError):
                    url_format = " %s/showcommit?review=%d&filter=pending\n"

                result += "\nTo review all these changes:\n"

                for url in urls:
                    result += url_format % (url, review.id)

            result += "\n\n"

    return result
def sendReviewPlaceholder(db, to_user, review, message_id=None):
    """Compose and queue a placeholder "newish review" email for a user who
    became associated with the review after it was requested.

    The mail's message id is recorded in reviewmessageids so later review
    mails to this user can thread off it.  Returns a (filename, message_id)
    pair on success.

    NOTE(review): when mail is disabled this returns [] rather than a
    2-tuple; the caller getReviewMessageId() unpacks two values and would
    raise ValueError in that case — confirm whether the disabled path is
    actually reachable from there."""
    # First check if we can/should send emails to the user at all.
    try:
        checkEmailEnabled(db, to_user)
        subject = generateSubjectLine(db, to_user, review, 'newishReview')
    except MailDisabled:
        return []

    line_length = to_user.getPreference(db, "email.lineLength")
    hr = "-" * line_length

    why = "This message is sent to you when you become associated with a review after the review was initially requested. It is then sent instead of the regular \"New Review\" message, for the purpose of using as the reference/in-reply-to message for other messages sent about this review."

    data = { 'review.id': review.id,
             'review.url': review.getURL(db, to_user, 2),
             'review.owner.fullname': review.owners[0].fullname,
             'review.branch.name': review.branch.name,
             'review.branch.repository': review.repository.getURL(db, to_user),
             'hr': hr,
             'why': textutils.reflow(why, line_length) }

    # NOTE(review): the exact line breaks inside these message templates were
    # reconstructed from a collapsed source — verify against VCS.
    body = """%(hr)s
This is an automatic message generated by the review at:
%(review.url)s
%(hr)s

%(why)s
%(hr)s

""" % data

    body += """%(review.owner.fullname)s has requested a review of the changes on the branch
%(review.branch.name)s in the repository
%(review.branch.repository)s

""" % data

    all_reviewers = to_user.getPreference(db, "email.newReview.displayReviewers")
    all_watchers = to_user.getPreference(db, "email.newReview.displayWatchers")

    if all_reviewers or all_watchers:
        if all_reviewers:
            if review.reviewers:
                body += "The users assigned to review the changes on the review branch are:\n"
                for reviewer in review.reviewers:
                    body += " " + reviewer.fullname + "\n"
                body += "\n"
            else:
                body += """No reviewers have been identified for the changes in this review.
This means the review is currently stuck; it cannot finish unless there
are reviewers.

"""
        if all_watchers and review.watchers:
            body += "The following additional users are following the review:\n"
            for watcher in review.watchers:
                body += " " + watcher.fullname + "\n"
            body += "\n"
        body += "\n"

    if review.description:
        body += """Description:
%s

""" % textutils.reflow(review.description, line_length, indent=2)

    if message_id is None:
        message_id = generateMessageId()

    cursor = db.cursor()
    # Record the message id for future threading of review mails.
    cursor.execute("""INSERT INTO reviewmessageids (uid, review, messageid)
                           VALUES (%s, %s, %s)""",
                   (to_user.id, review.id, message_id))

    return (sendMail(db, review, message_id, review.owners[0], to_user, [to_user], subject, body), message_id)
""" if all_watchers and review.watchers: body += "The following additional users are following the review:\n" for watcher in review.watchers: body += " " + watcher.fullname + "\n" body += "\n" body += "\n" if review.description: body += """Description: %s """ % textutils.reflow(review.description, line_length, indent=2) if message_id is None: message_id = generateMessageId() cursor = db.cursor() cursor.execute("""INSERT INTO reviewmessageids (uid, review, messageid) VALUES (%s, %s, %s)""", (to_user.id, review.id, message_id)) return (sendMail(db, review, message_id, review.owners[0], to_user, [to_user], subject, body), message_id) def sendReviewBatch(db, from_user, to_user, recipients, review, batch_id, was_accepted, is_accepted, profiler=None): if profiler: profiler.check("generate mail: start") # First check if we can/should send emails to the user at all. try: checkEmailEnabled(db, to_user) subject = generateSubjectLine(db, to_user, review, "updatedReview.submittedChanges") except MailDisabled: return [] if from_user == to_user and to_user.getPreference(db, "email.ignoreOwnChanges"): return [] cursor = db.cursor() line_length = to_user.getPreference(db, "email.lineLength") relevant_only = to_user not in review.owners and to_user != from_user and to_user.getPreference(db, "email.updatedReview.relevantChangesOnly") if relevant_only: cursor.execute("SELECT type FROM reviewusers WHERE review=%s AND uid=%s", (review.id, to_user.id)) if cursor.fetchone()[0] == 'manual': relevant_only = False if profiler: profiler.check("generate mail: prologue") if relevant_only: relevant_files = review.getRelevantFiles(db, to_user) else: relevant_files = None if profiler: profiler.check("generate mail: get relevant files") cursor.execute("SELECT comment FROM batches WHERE id=%s", [batch_id]) batch_chain_id = cursor.fetchone()[0] if profiler: profiler.check("generate mail: batch chain") cursor.execute("""SELECT reviewfiles.file, SUM(reviewfiles.deleted), SUM(reviewfiles.inserted) FROM 
reviewfiles JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id) WHERE reviewfilechanges.batch=%s AND reviewfilechanges.to_state='reviewed' GROUP BY reviewfiles.file""", (batch_id,)) reviewed_files_lines = cursor.fetchall() if profiler: profiler.check("generate mail: reviewed files/lines") cursor.execute("""SELECT DISTINCT changesets.child FROM reviewfiles JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id) JOIN changesets ON (changesets.id=reviewfiles.changeset) WHERE reviewfilechanges.batch=%s AND reviewfilechanges.to_state='reviewed'""", (batch_id,)) reviewed_commits = [commit_id for (commit_id,) in cursor] if profiler: profiler.check("generate mail: reviewed commits") cursor.execute("""SELECT reviewfiles.file, SUM(reviewfiles.deleted), SUM(reviewfiles.inserted) FROM reviewfiles JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id) WHERE reviewfilechanges.batch=%s AND reviewfilechanges.to_state='pending' GROUP BY reviewfiles.file""", (batch_id,)) unreviewed_files_lines = cursor.fetchall() if profiler: profiler.check("generate mail: unreviewed files/lines") cursor.execute("""SELECT DISTINCT changesets.child FROM reviewfiles JOIN reviewfilechanges ON (reviewfilechanges.file=reviewfiles.id) JOIN changesets ON (changesets.id=reviewfiles.changeset) WHERE reviewfilechanges.batch=%s AND reviewfilechanges.to_state='pending'""", (batch_id,)) unreviewed_commits = [commit_id for (commit_id,) in cursor] if profiler: profiler.check("generate mail: unreviewed commits") reviewed_files = renderFiles(db, to_user, review, "Reviewed Files:", reviewed_files_lines, reviewed_commits, relevant_only, relevant_files) unreviewed_files = renderFiles(db, to_user, review, "Unreviewed Files:", unreviewed_files_lines, unreviewed_commits, relevant_only, relevant_files) if profiler: profiler.check("generate mail: render files") context_lines = to_user.getPreference(db, "email.comment.contextLines") comment_ids = set() def isRelevantComment(chain): if 
chain.file_id is None or chain.file_id in relevant_files: return True cursor.execute("SELECT 1 FROM commentchainusers WHERE chain=%s AND uid=%s", (chain.id, to_user.id)) return cursor.fetchone() is not None def fetchNewCommentChains(): chains = [] for (chain_id,) in cursor.fetchall(): if chain_id != batch_chain_id: chain = review_comment.CommentChain.fromId(db, chain_id, from_user, review=review) if not relevant_only or isRelevantComment(chain): chain.loadComments(db, from_user) chains.append((chain, None, None)) return chains def fetchAdditionalCommentChains(): chains = [] for chain_id, comment_id, new_state, new_type in cursor.fetchall(): if comment_id is not None or new_state is not None or new_type is not None: chain = review_comment.CommentChain.fromId(db, chain_id, from_user, review=review) if not relevant_only or isRelevantComment(chain): chain.loadComments(db, from_user) chains.append((chain, new_state, new_type)) return chains cursor.execute("SELECT id FROM commentchains WHERE batch=%s AND type='issue' ORDER BY id ASC", [batch_id]) new_issues = fetchNewCommentChains() if profiler: profiler.check("generate mail: new issues") cursor.execute("SELECT id FROM commentchains WHERE batch=%s AND type='note' ORDER BY id ASC", [batch_id]) new_notes = fetchNewCommentChains() if profiler: profiler.check("generate mail: new notes") cursor.execute("""SELECT commentchains.id, comments.id, commentchainchanges.to_state, commentchainchanges.to_type FROM commentchains LEFT OUTER JOIN comments ON (commentchains.id=comments.chain AND comments.batch=%s) LEFT OUTER JOIN commentchainchanges ON (commentchains.id=commentchainchanges.chain AND commentchainchanges.batch=%s) WHERE commentchains.review=%s AND commentchains.batch!=%s""", [batch_id, batch_id, review.id, batch_id]) additional_comments = fetchAdditionalCommentChains() if profiler: profiler.check("generate mail: additional comments") if is_accepted != was_accepted and not reviewed_files and not unreviewed_files and not 
new_issues and not new_notes and not additional_comments: return [] data = { 'review.id': review.id, 'review.url': review.getURL(db, to_user, 2), 'review.branch.name': review.branch.name, 'review.branch.repository': review.repository.getURL(db, to_user), 'hr': "-" * line_length } header = """%(hr)s This is an automatic message generated by the review at: %(review.url)s %(hr)s """ % data if batch_chain_id is not None: batch_chain = review_comment.CommentChain.fromId(db, batch_chain_id, from_user, review=review) else: batch_chain = None data["batch.author.fullname"] = from_user.fullname first_name = from_user.getFirstName() if batch_chain is not None: batch_chain.loadComments(db, from_user) comment_ids.add(batch_chain.comments[0].id) remark = """%s'%s comment: %s """ % (first_name, first_name[-1] != 's' and 's' or '', textutils.reflow(batch_chain.comments[0].comment, line_length, indent=2)) else: remark = "" body = header body += textutils.reflow("%(batch.author.fullname)s has submitted a batch of changes to the review." 
% data, line_length) body += "\n\n\n" body += remark if not was_accepted and is_accepted: state_change = textutils.reflow("The review is now ACCEPTED!", line_length) + "\n\n\n" elif was_accepted and not is_accepted: state_change = textutils.reflow("The review is NO LONGER ACCEPTED!", line_length) + "\n\n\n" else: state_change = "" body += state_change body += reviewed_files body += unreviewed_files def renderCommentChains(chains): result = "" if chains: for chain, new_state, new_type in chains: for focus_comment in chain.comments: if focus_comment.batch_id == batch_id: break else: focus_comment = None if focus_comment is not None or new_state is not None or new_type is not None: result += renderChainInMail(db, to_user, chain, focus_comment, new_state, new_type, line_length, context_lines) + "\n\n" if focus_comment is not None: comment_ids.add(focus_comment.id) return result body += renderCommentChains(new_issues) body += renderCommentChains(new_notes) if profiler: profiler.check("generate mail: render new comment chains") comment_threading = to_user.getPreference(db, "email.updatedReview.commentThreading") send_main_mail = state_change or reviewed_files or unreviewed_files or new_issues or new_notes if not comment_threading: send_main_mail = send_main_mail or additional_comments body += renderCommentChains(additional_comments) if profiler: profiler.check("generate mail: render additional comments") review_message_id = [None] files = [] def localGenerateMessageId(): return generateMessageId(len(files) + 1) def localGetReviewMessageId(): if review_message_id[0] is None: review_message_id[0] = getReviewMessageId(db, to_user, review, files) return review_message_id[0] if send_main_mail: message_id = localGenerateMessageId() cursor.executemany("INSERT INTO commentmessageids (uid, comment, messageid) VALUES (%s, %s, %s)", [(to_user.id, comment_id, message_id) for comment_id in comment_ids]) files.append(sendMail( db, review, message_id, from_user, to_user, recipients, 
def sendReviewAddedCommits(db, from_user, to_user, recipients, review, commits,
                           changesets, tracked_branch=False):
    """Generate "commits pushed" notification mail(s) for |to_user|.

    |commits| are the commits added to the review and |changesets| the
    corresponding changesets.  Returns a list of queued mail files, or an
    empty list if mail is disabled for the user, the user ignores their own
    changes, or (in relevant-only mode) none of the changes are relevant.

    FIX: the addressed-comment-chain query below used
    `for chain_id in cursor.fetchall()`, passing the whole 1-tuple row as
    the `chain=%s` parameter of the follow-up query; it now unpacks the row
    as done elsewhere in this file."""
    # First check if we can/should send emails to the user at all.
    try:
        checkEmailEnabled(db, to_user)
        subject = generateSubjectLine(db, to_user, review, "updatedReview.commitsPushed")
    except MailDisabled:
        return []

    if from_user == to_user and to_user.getPreference(db, "email.ignoreOwnChanges"):
        return []

    line_length = to_user.getPreference(db, "email.lineLength")
    hr = "-" * line_length

    # Users who are neither owners nor the pusher can opt in to only being
    # told about changes relevant to them.
    relevant_only = (to_user not in review.owners
                     and to_user != from_user
                     and to_user.getPreference(db, "email.updatedReview.relevantChangesOnly"))

    cursor = db.cursor()

    if relevant_only:
        # Manually associated users always get everything.
        cursor.execute("SELECT type FROM reviewusers WHERE review=%s AND uid=%s",
                       (review.id, to_user.id))
        if cursor.fetchone()[0] == 'manual':
            relevant_only = False

    all_commits = dict((commit.sha1, commit) for commit in commits)
    changeset_for_commit = {}

    for changeset in changesets:
        # We don't include diffs for merge commits in mails.
        if len(changeset.child.parents) == 1:
            if changeset.child in all_commits:
                changeset_for_commit[changeset.child] = changeset
            else:
                # An added changeset where the child isn't part of the added
                # commits will be a changeset between a "replayed rebase" commit
                # and the new head commit, generated when doing a non-fast-
                # forward rebase. The relevant commit from such a changeset is
                # the first (and only) parent.
                changeset_for_commit[changeset.parent] = changeset

    if relevant_only:
        relevant_files = review.getRelevantFiles(db, to_user)
        relevant_commits = set()
        for changeset in changesets:
            for file in changeset.files:
                if file.id in relevant_files:
                    if changeset.child in all_commits:
                        relevant_commits.add(changeset.child)
                    else:
                        # "Replayed rebase" commit; see comment above.
                        relevant_commits.add(all_commits[changeset.parent])
                    break
            else:
                # No file matched; the commit is still relevant if it
                # addresses a comment chain the user is associated with.
                cursor.execute("SELECT id FROM commentchains WHERE review=%s AND state='addressed' AND addressed_by=%s",
                               (review.id, changeset.child.getId(db)))
                # Unpack the single-column rows (was: passing the tuple).
                for (chain_id,) in cursor.fetchall():
                    cursor.execute("SELECT 1 FROM commentchainusers WHERE chain=%s AND uid=%s",
                                   (chain_id, to_user.id))
                    if cursor.fetchone():
                        relevant_commits.add(changeset.child)
                        break
        if not relevant_commits:
            # Nothing relevant to this user; send no mail at all.
            return []
    else:
        relevant_commits = None

    data = { 'review.id': review.id,
             'review.url': review.getURL(db, to_user, 2),
             'review.branch.name': review.branch.name,
             'review.branch.repository': review.repository.getURL(db, to_user),
             'hr': hr }

    body = """%(hr)s
This is an automatic message generated by the review at:
%(review.url)s
%(hr)s

""" % data

    commitset = log_commitset.CommitSet(commits)

    if tracked_branch:
        body += "The automatic tracking of\n %s\n" % tracked_branch
        body += textutils.reflow("has updated the review by pushing %sadditional commit%s to the branch"
                                 % ("an " if len(commits) == 1 else "",
                                    "s" if len(commits) > 1 else ""),
                                 line_length)
    else:
        body += textutils.reflow("%s has updated the review by pushing %sadditional commit%s to the branch"
                                 % (from_user.fullname,
                                    "an " if len(commits) == 1 else "",
                                    "s" if len(commits) > 1 else ""),
                                 line_length)
    body += "\n %s\n" % review.branch.name
    body += textutils.reflow("in the repository", line_length)
    body += "\n %s\n\n\n" % review.repository.getURL(db, to_user)

    # Changes assigned to this user among the pushed changesets.
    cursor.execute("""SELECT file, SUM(deleted), SUM(inserted)
                        FROM fullreviewuserfiles
                       WHERE review=%%s
                         AND changeset IN (%s)
                         AND state='pending'
                         AND assignee=%%s
                    GROUP BY file""" % ",".join(["%s"] * len(changesets)),
                   [review.id] + [changeset.id for changeset in changesets] + [to_user.id])

    pending_files_lines = cursor.fetchall()

    if pending_files_lines:
        heads = commitset.getHeads()
        tails = commitset.getFilteredTails(review.repository)
        if len(heads) == 1 and len(tails) == 1:
            showcommit_link = (tails.pop()[:8], heads.pop().sha1[:8])
        else:
            showcommit_link = False
        body += renderFiles(db, to_user, review, "These changes were assigned to you:",
                            pending_files_lines, showcommit_link=showcommit_link)

    # Renamed local (was `all_commits`, shadowing the sha1->commit dict).
    display_commits = to_user.getPreference(db, "email.updatedReview.displayCommits")
    context_lines = to_user.getPreference(db, "email.comment.contextLines")

    if display_commits:
        body += "The additional commit%s requested to be reviewed are:\n\n" % ("s" if len(commits) > 1 else "")

        contextLines = to_user.getPreference(db, "email.updatedReview.diff.contextLines")
        diffMaxLines = to_user.getPreference(db, "email.updatedReview.diff.maxLines")
        displayStats = to_user.getPreference(db, "email.updatedReview.displayStats")
        statsMaxLines = to_user.getPreference(db, "email.updatedReview.stats.maxLines")

        if contextLines < 0:
            contextLines = 0

        # Collect per-commit diffs, unless their total size exceeds the
        # user's limit, in which case skip diffs entirely.
        if diffMaxLines == 0:
            diffs = None
        else:
            diffs = {}
            lines = 0
            for commit in commits:
                if commit in changeset_for_commit:
                    diff = changeset_text.unified(db, changeset_for_commit[commit], contextLines)
                    diffs[commit] = diff
                    lines += diff.count("\n")
                    if lines > diffMaxLines:
                        diffs = None
                        break

        # Same treatment for "git show --stat" summaries.
        if not displayStats or statsMaxLines == 0:
            stats = None
        else:
            stats = {}
            lines = 0
            for commit in commits:
                commit_stats = review.repository.run("show", "--oneline", "--stat", commit.sha1).split('\n', 1)[1]
                stats[commit] = commit_stats
                lines += commit_stats.count('\n')
                if lines > statsMaxLines:
                    stats = None
                    break

        for index, commit in enumerate(commits):
            if index > 0:
                body += "\n\n\n"
            body += """Commit: %(sha1)s
Author: %(author.fullname)s <%(author.email)s> at %(author.time)s

%(message)s
""" % { 'sha1': commit.sha1,
        'author.fullname': commit.author.getFullname(db),
        'author.email': commit.author.email,
        'author.time': time.strftime("%Y-%m-%d %H:%M:%S", commit.author.time),
        'message': textutils.reflow(commit.message.strip(), line_length, indent=2) }
            if stats and commit in stats:
                body += "---\n" + stats[commit]
            if diffs and commit in diffs:
                body += "\n" + diffs[commit]

            # Append any comment chains addressed by this commit.
            cursor.execute("SELECT id FROM commentchains WHERE review=%s AND state='addressed' AND addressed_by=%s",
                           (review.id, commit.getId(db)))
            rows = cursor.fetchall()
            if rows:
                for (chain_id,) in rows:
                    chain = review_comment.CommentChain.fromId(db, chain_id, to_user, review=review)
                    chain.loadComments(db, to_user, include_draft_comments=False)
                    body += "\n\n" + renderChainInMail(db, to_user, chain, None, "addressed", None,
                                                       line_length, context_lines)

    files = []
    parent_message_id = getReviewMessageId(db, to_user, review, files)
    message_id = generateMessageId(len(files) + 1)

    files.append(sendMail(
        db, review, message_id, from_user, to_user, recipients, subject, body,
        parent_message_id=parent_message_id))

    return files
def sendPing(db, from_user, to_user, recipients, review, note):
    """Generate a "review pinged" notification mail for |to_user|.

    |note| is an optional free-text message from the pinging user; it is
    reflowed and included in the mail body.  Returns the list of queued
    mail files (empty if mail is disabled for the user).

    FIX: removed dead code — a `reviewmessageids` lookup computed
    `parent_message_id` only for it to be unconditionally overwritten by
    `getReviewMessageId()` right after."""
    # First check if we can/should send emails to the user at all.
    try:
        checkEmailEnabled(db, to_user)
        subject = generateSubjectLine(db, to_user, review, "pingedReview")
    except MailDisabled:
        return []

    line_length = to_user.getPreference(db, "email.lineLength")
    hr = "-" * line_length

    data = { 'review.id': review.id,
             'review.url': review.getURL(db, to_user, 2),
             'review.branch.name': review.branch.name,
             'review.branch.repository': review.repository.getURL(db, to_user),
             'from.fullname': from_user.fullname,
             'hr': hr }

    body = """%(hr)s
This is an automatic message generated by the review at:
%(review.url)s
%(hr)s

""" % data

    body += """%(from.fullname)s has pinged the review!

""" % data

    if note:
        body += """Additional information from %s:

%s

""" % (from_user.getFirstName(), textutils.reflow(note, line_length, indent=2))

    cursor = db.cursor()

    # Per-file pending-change counts assigned to this user.
    cursor.execute("""SELECT reviewfiles.file, SUM(reviewfiles.deleted), SUM(reviewfiles.inserted)
                        FROM reviewfiles
                        JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                       WHERE reviewfiles.review=%s
                         AND reviewfiles.state='pending'
                         AND reviewuserfiles.uid=%s
                    GROUP BY reviewfiles.file""",
                   (review.id, to_user.id))
    pending_files_lines = cursor.fetchall()

    # Commits contributing pending changes assigned to this user.
    cursor.execute("""SELECT DISTINCT changesets.child
                        FROM reviewfiles
                        JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                        JOIN changesets ON (changesets.id=reviewfiles.changeset)
                       WHERE reviewfiles.review=%s
                         AND reviewfiles.state='pending'
                         AND reviewuserfiles.uid=%s""",
                   (review.id, to_user.id))
    pending_commits = cursor.fetchall()

    body += renderFiles(db, to_user, review, "These pending changes are assigned to you:",
                        pending_files_lines, pending_commits, showcommit_link=True)

    files = []
    parent_message_id = getReviewMessageId(db, to_user, review, files)
    message_id = generateMessageId(len(files) + 1)

    files.append(sendMail(
        db, review, message_id, from_user, to_user, recipients, subject, body,
        parent_message_id=parent_message_id))

    return files
try: checkEmailEnabled(db, to_user) subject = generateSubjectLine(db, to_user, review, "updatedReview.assignmentsChanged") except MailDisabled: return [] line_length = to_user.getPreference(db, "email.lineLength") hr = "-" * line_length data = { 'review.id': review.id, 'review.url': review.getURL(db, to_user, 2), 'review.branch.name': review.branch.name, 'review.branch.repository': review.repository.getURL(db, to_user), 'from.fullname': from_user.fullname, 'hr': hr } body = """%(hr)s This is an automatic message generated by the review at: %(review.url)s %(hr)s """ % data body += """%(from.fullname)s has modified the assignments in the review. """ % data def renderPaths(items): return " \n".join(diff.File.eliminateCommonPrefixes(sorted(map(lambda item: item[1], items)), text=True)) + "\n" if added_filters or removed_filters: if added_filters: added_reviewer = filter(lambda item: item[0] == "reviewer", added_filters) added_watcher = filter(lambda item: item[0] == "watcher", added_filters) else: added_reviewer = None added_watcher = None if removed_filters: removed_reviewer = filter(lambda item: item[0] == "reviewer", removed_filters) removed_watcher = filter(lambda item: item[0] == "watcher", removed_filters) else: removed_reviewer = None removed_watcher = None if added_reviewer: body += "You are now reviewing the following paths:\n %s\n" % renderPaths(added_reviewer) if added_watcher: body += "You are now watching the following paths:\n %s\n" % renderPaths(added_watcher) if removed_reviewer: body += "You are no longer reviewing the following paths:\n %s\n" % renderPaths(removed_reviewer) if removed_watcher: body += "You are no longer watching the following paths:\n %s\n" % renderPaths(removed_watcher) body += "\n" if unassigned: body += renderFiles(db, to_user, review, "The following changes are no longer assigned to you:", unassigned) if assigned: body += renderFiles(db, to_user, review, "The following changes are now assigned to you:",assigned) files = [] 
parent_message_id = getReviewMessageId(db, to_user, review, files) message_id = generateMessageId(len(files) + 1) files.append(sendMail( db, review, message_id, from_user, to_user, [to_user], subject, body, parent_message_id=parent_message_id)) return files def sendFiltersApplied(db, from_user, to_user, review, globalfilters, parentfilters, assigned): # First check if we can/should send emails to the user at all. try: checkEmailEnabled(db, to_user) subject = generateSubjectLine(db, to_user, review, "updatedReview.parentFiltersApplied") except MailDisabled: return [] line_length = to_user.getPreference(db, "email.lineLength") hr = "-" * line_length data = { 'review.id': review.id, 'review.url': review.getURL(db, to_user, 2), 'review.branch.name': review.branch.name, 'review.branch.repository': review.repository.getURL(db, to_user), 'from.fullname': from_user.fullname, 'hr': hr } body = """%(hr)s This is an automatic message generated by the review at: %(review.url)s %(hr)s """ % data if globalfilters: what = "global filters" else: what = "global filters from upstream repositories" text = ("%s has modified the assignments in the review by making %s apply, " "which they previously did not. This had the effect that you are " "now a %s the review." % (from_user.fullname, what, "reviewer of changes in" if assigned else "watcher of")) body += """%s """ % textutils.reflow(text, line_length) if assigned: body += renderFiles(db, to_user, review, "The following changes are now assigned to you:", assigned) files = [] parent_message_id = getReviewMessageId(db, to_user, review, files) message_id = generateMessageId(len(files) + 1) files.append(sendMail( db, review, message_id, from_user, to_user, [to_user], subject, body, parent_message_id=parent_message_id)) return files def sendReviewRebased(db, from_user, to_user, recipients, review, new_upstream, rebased_commits, onto_branch=None): # First check if we can/should send emails to the user at all. 
try: checkEmailEnabled(db, to_user) subject = generateSubjectLine(db, to_user, review, "updatedReview.reviewRebased") except MailDisabled: return [] if from_user == to_user and to_user.getPreference(db, "email.ignoreOwnChanges"): return [] line_length = to_user.getPreference(db, "email.lineLength") hr = "-" * line_length data = { 'review.id': review.id, 'review.url': review.getURL(db, to_user, 2), 'review.branch.name': review.branch.name, 'review.branch.repository': review.repository.getURL(db, to_user), 'from.fullname': from_user.fullname, 'hr': hr } body = """%(hr)s This is an automatic message generated by the review at: %(review.url)s %(hr)s """ % data if new_upstream: data["new_upstream"] = new_upstream.oneline(db, decorate=True) text = """\ %(from.fullname)s has rebased the review branch onto: %(new_upstream)s""" % data else: text = "%(from.fullname)s has rewritten the history on the review branch." % data body += """%s """ % textutils.reflow(text, line_length) body += """The new branch log is: """ for commit in rebased_commits: body += commit.oneline(db) + "\n" files = [] parent_message_id = getReviewMessageId(db, to_user, review, files) message_id = generateMessageId(len(files) + 1) files.append(sendMail( db, review, message_id, from_user, to_user, recipients, subject, body, parent_message_id=parent_message_id)) return files def sendExtensionOutput(db, user_id, batch_id, output): to_user = dbutils.User.fromId(db, user_id) cursor = db.cursor() cursor.execute("SELECT review, uid FROM batches WHERE id=%s", (batch_id,)) review_id, batch_user_id = cursor.fetchone() review = dbutils.Review.fromId(db, review_id) batch_user = dbutils.User.fromId(db, batch_user_id) # First check if we can/should send emails to the user at all. 
try: checkEmailEnabled(db, to_user) subject = generateSubjectLine(db, to_user, review, "extensionOutput") except MailDisabled: return [] line_length = to_user.getPreference(db, "email.lineLength") hr = "-" * line_length data = { 'review.id': review.id, 'review.url': review.getURL(db, to_user, 2), 'batch.user.fullname': batch_user.fullname, 'hr': hr } body = """%(hr)s This is an automatic message generated by the review at: %(review.url)s %(hr)s """ % data text = "A batch of changes submitted by %(batch.user.fullname)s has been processed by your installed extensions." % data body += """%s """ % textutils.reflow(text, line_length) body += "The extensions generated the following output:\n%s" % output files = [] parent_message_id = getReviewMessageId(db, to_user, review, files) message_id = generateMessageId(len(files) + 1) files.append(sendMail( db, review, message_id, to_user, to_user, [to_user], subject, body, parent_message_id=parent_message_id)) return files ================================================ FILE: src/reviewing/rebase.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import time import configuration import gitutils def timestamp(ts): return time.strftime("%Y-%m-%d %H:%M:%S", ts) def createEquivalentMergeCommit(db, review, user, old_head, old_upstream, new_head, new_upstream, onto_branch=None): repository = review.repository old_upstream_name = repository.findInterestingTag(db, old_upstream.sha1) or old_upstream.sha1 new_upstream_name = repository.findInterestingTag(db, new_upstream.sha1) or new_upstream.sha1 if onto_branch: merged_thing = "branch '%s'" % onto_branch else: merged_thing = "commit '%s'" % new_upstream_name commit_message = """\ Merge %(merged_thing)s into %(review.branch.name)s This commit was generated automatically by Critic as an equivalent merge to the rebase of the commits %(old_upstream_name)s..%(old_head.sha1)s onto the %(merged_thing)s.""" % { "merged_thing": merged_thing, "review.branch.name": review.branch.name, "old_upstream_name": old_upstream_name, "old_head.sha1": old_head.sha1 } merge_sha1 = repository.run('commit-tree', new_head.tree, '-p', old_head.sha1, '-p', new_upstream.sha1, input=commit_message, env=gitutils.getGitEnvironment()).strip() merge = gitutils.Commit.fromSHA1(db, repository, merge_sha1) gituser_id = merge.author.getGitUserId(db) cursor = db.cursor() cursor.execute("""INSERT INTO commits (sha1, author_gituser, commit_gituser, author_time, commit_time) VALUES (%s, %s, %s, %s, %s) RETURNING id""", (merge.sha1, gituser_id, gituser_id, timestamp(merge.author.time), timestamp(merge.committer.time))) merge.id = cursor.fetchone()[0] cursor.executemany("INSERT INTO edges (parent, child) VALUES (%s, %s)", [(old_head.getId(db), merge.id), (new_upstream.getId(db), merge.id)]) # Need to commit the transaction to make the new commit available # to other database sessions right away, specifically so that the # changeset service can see it. 
def replayRebase(db, review, user, old_head, old_upstream, new_head,
                 new_upstream, onto_branch=None):
    """Mechanically "replay" a rebase so the result can be diffed.

    Creates a synthetic commit with |old_head|'s tree on top of
    |old_upstream|, then cherry-picks it onto |new_upstream| in a temporary
    work copy.  If the cherry-pick conflicts, the conflict markers are
    committed as-is so the conflicts are visible in the resulting diff.  The
    replayed commit is pushed to a refs/keepalive/ ref (to keep it reachable)
    and returned as a gitutils.Commit."""
    repository = review.repository

    # Prefer a human-readable tag name over a raw SHA-1 when available.
    old_upstream_name = repository.findInterestingTag(db, old_upstream.sha1) or old_upstream.sha1

    if onto_branch:
        new_upstream_name = "branch '%s'" % onto_branch
    else:
        new_upstream_name = "commit '%s'" % (repository.findInterestingTag(db, new_upstream.sha1) or new_upstream.sha1)

    commit_message = """\
Rebased %(review.branch.name)s onto %(new_upstream_name)s

This commit was generated automatically by Critic to "replay" the rebase of
the commits

  %(old_upstream_name)s..%(old_head.sha1)s

onto the %(new_upstream_name)s.""" % { "review.branch.name": review.branch.name,
                                       "old_head.sha1": old_head.sha1,
                                       "old_upstream_name": old_upstream_name,
                                       "new_upstream_name": new_upstream_name }

    # Squash the whole old branch into one synthetic commit on the old
    # upstream; cherry-picking this single commit replays all its changes.
    original_sha1 = repository.run(
        'commit-tree', old_head.tree, '-p', old_upstream.sha1,
        env=gitutils.getGitEnvironment(), input=commit_message).strip()

    with repository.workcopy(original_sha1) as workcopy:
        # Temporary refs keep the commits fetchable from the main repository.
        with repository.temporaryref(new_upstream) as new_upstream_ref, \
                repository.temporaryref(original_sha1) as original_ref:
            workcopy.run("fetch", "--quiet", "origin",
                         "%s:refs/heads/temporary" % new_upstream_ref,
                         "%s:refs/heads/original" % original_ref)

        workcopy.run("checkout", "refs/heads/temporary")

        returncode, stdout, stderr = workcopy.run(
            "cherry-pick", "refs/heads/original",
            env=gitutils.getGitEnvironment(), check_errors=False)

        # If the rebase produced conflicts, just stage and commit them:
        if returncode != 0:
            # Reset any submodule gitlinks with conflicts: since we don't
            # have the submodules checked out, "git commit --all" below
            # may fail to index them.
            for line in stdout.splitlines():
                if line.startswith("CONFLICT (submodule):"):
                    submodule_path = line.split()[-1]
                    workcopy.run("reset", "--", submodule_path, check_errors=False)

            # Then stage and commit the result, with conflict markers and all.
            workcopy.run("commit", "--all", "--reuse-message=%s" % original_sha1,
                         env=gitutils.getGitEnvironment())

        rebased_sha1 = workcopy.run("rev-parse", "HEAD").strip()

        # Keepalive ref prevents the replayed commit from being GC'd.
        workcopy.run("push", "origin", "HEAD:refs/keepalive/" + rebased_sha1)

        return gitutils.Commit.fromSHA1(db, repository, rebased_sha1)
import dbutils import gitutils from dbutils import * from itertools import izip, repeat, chain import htmlutils import configuration import mail import diff import changeset.utils as changeset_utils import changeset.load as changeset_load import reviewing.comment import reviewing.filters import log.commitset as log_commitset import extensions.role.filterhook from operation import OperationError, OperationFailure from filters import Filters def getFileIdsFromChangesets(changesets): file_ids = set() for changeset in changesets: file_ids.update(changed_file.id for changed_file in changeset.files) return file_ids def getReviewersAndWatchers(db, repository, commits=None, changesets=None, reviewfilters=None, applyfilters=True, applyparentfilters=False): """getReviewersAndWatchers(db, commits=None, changesets=None) -> tuple Returns a tuple containing two dictionaries, each mapping file IDs to dictionaries mapping user IDs to sets of changeset IDs. The first dictionary defines the reviwers of each file, the second dictionary defines the watchers of each file. 
For any changes in a file for which no reviewer is identified, None is used as a key in the dictionary instead of a real user ID.""" if changesets is None: changesets = [] changeset_utils.createChangesets(db, repository, commits) for commit in commits: changesets.extend(changeset_utils.createChangeset(db, None, repository, commit, do_highlight=False)) cursor = db.cursor() filters = Filters() filters.setFiles(db, list(getFileIdsFromChangesets(changesets))) if applyfilters: filters.load(db, repository=repository, recursive=applyparentfilters) if reviewfilters: filters.addFilters(reviewfilters) reviewers = {} watchers = {} for changeset in changesets: author_user_ids = changeset.child.author.getUserIds(db) if changeset.child else set() cursor.execute("SELECT DISTINCT file FROM fileversions WHERE changeset=%s", (changeset.id,)) for (file_id,) in cursor: reviewers_found = False for user_id, (filter_type, delegate) in filters.listUsers(file_id).items(): if filter_type == 'reviewer': if user_id not in author_user_ids: reviewer_user_ids = [user_id] elif delegate: reviewer_user_ids = [] for delegate_user_name in delegate.split(","): delegate_user = dbutils.User.fromName(db, delegate_user_name) reviewer_user_ids.append(delegate_user.id) else: reviewer_user_ids = [] for reviewer_user_id in reviewer_user_ids: reviewers.setdefault(file_id, {}).setdefault(reviewer_user_id, set()).add(changeset.id) reviewers_found = True else: watchers.setdefault(file_id, {}).setdefault(user_id, set()).add(changeset.id) if not reviewers_found: reviewers.setdefault(file_id, {}).setdefault(None, set()).add(changeset.id) return reviewers, watchers def getReviewedReviewers(db, review): """getReviewedReviewers(db, review) -> dictionary Returns a dictionary, like the ones returned by getReviewersAndWatchers(), but with details about all reviewed changes in the review.""" cursor = db.cursor() cursor.execute("""SELECT reviewfiles.reviewer, reviewfiles.changeset, reviewfiles.file FROM reviewfiles WHERE 
reviewfiles.review=%s AND reviewfiles.state='reviewed'""", (review.id,)) reviewers = {} for user_id, changeset_id, file_id in cursor.fetchall(): reviewers.setdefault(file_id, {}).setdefault(user_id, set()).add(changeset_id) return reviewers def getPendingReviewers(db, review): """getPendingReviewers(db, review) -> dictionary Returns a dictionary, like the ones returned by getReviewersAndWatchers(), but with details about remaining unreviewed changes in the review. Changes not assigned to a reviewer are handled the same way.""" cursor = db.cursor() cursor.execute("""SELECT reviewuserfiles.uid, reviewfiles.changeset, reviewfiles.file FROM reviewfiles LEFT OUTER JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id) WHERE reviewfiles.review=%s AND reviewfiles.state='pending'""", (review.id,)) reviewers = {} for user_id, changeset_id, file_id in cursor.fetchall(): reviewers.setdefault(file_id, {}).setdefault(user_id, set()).add(changeset_id) return reviewers def collectReviewTeams(reviewers): """collectReviewTeams(reviewers) -> dictionary Takes a dictionary as returned by getReviewersAndWatchers() or getPendingReviewers() and transform into a dictionary mapping sets of users to sets of files that those groups of users share review responsibilities for. The same user may appear in number of sets, as may the same file. 
If None appears as a key in the returned dictionary, the set of files it
is mapped to have changes in them with no assigned reviewers."""
    # Map frozenset-of-reviewer-ids ("team") => set of file ids reviewed by
    # exactly that team.  The special key None collects files with at least
    # one change that has no assigned reviewer at all.
    teams = {}

    for file_id, file_reviewers in reviewers.items():
        if None in file_reviewers:
            teams.setdefault(None, set()).add(file_id)

        # filter(None, ...) drops the None placeholder; the remaining user
        # ids form the file's reviewer team.
        team = frozenset(filter(None, file_reviewers.keys()))

        if team:
            teams.setdefault(team, set()).add(file_id)

    return teams

def assignChanges(db, user, review, commits=None, changesets=None, update=False):
    """Assign the changes in |commits|/|changesets| to reviewers of |review|.

    If |changesets| is None, changesets are created from |commits| first.
    The review's filters are evaluated via getReviewersAndWatchers(), and the
    resulting associations are inserted into the 'reviewusers' and
    'reviewuserfiles' tables.  With update=True, (user, changeset, file)
    assignments that already exist are not re-inserted.

    Returns a tuple (new_reviewers, new_watchers): sets of user ids newly
    associated with the review in each role.  Does not commit the database
    transaction; the caller is responsible for that."""
    cursor = db.cursor()

    if changesets is None:
        assert commits is not None
        changesets = []
        for commit in commits:
            changesets.extend(changeset_utils.createChangeset(db, user, review.repository, commit))

    applyfilters = review.applyfilters
    applyparentfilters = review.applyparentfilters

    reviewers, watchers = getReviewersAndWatchers(db, review.repository, changesets=changesets, reviewfilters=review.getReviewFilters(db), applyfilters=applyfilters, applyparentfilters=applyparentfilters)

    # All users currently associated with the review, in any role.
    cursor.execute("SELECT uid FROM reviewusers WHERE review=%s", (review.id,))
    reviewusers = set([user_id for (user_id,) in cursor])

    reviewusers_values = set()
    reviewuserfiles_values = set()

    # (user, changeset, file) triples that are already assigned; only
    # populated in update mode, where re-inserting them must be avoided.
    reviewuserfiles_existing = {}

    if update:
        cursor.execute("""SELECT reviewuserfiles.uid, reviewfiles.changeset, reviewfiles.file
                            FROM reviewfiles
                            JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                           WHERE reviewfiles.review=%s""",
                       (review.id,))
        for user_id, changeset_id, file_id in cursor:
            reviewuserfiles_existing[(user_id, changeset_id, file_id)] = True

    new_reviewers = set()
    new_watchers = set()

    # Users that already have at least one file assignment in the review;
    # they are not reported as "new" reviewers/watchers below.
    cursor.execute("""SELECT DISTINCT uid
                        FROM reviewfiles
                        JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                       WHERE review=%s""",
                   (review.id,))
    old_reviewers = set([user_id for (user_id,) in cursor])

    for file_id, file_users in reviewers.items():
        for user_id, user_changesets in file_users.items():
            if user_id:
                new_reviewers.add(user_id)
                if user_id not in reviewusers:
                    reviewusers.add(user_id)
                    reviewusers_values.add((review.id, user_id))
                for changeset_id in user_changesets:
                    if (user_id, changeset_id, file_id) not in reviewuserfiles_existing:
                        reviewuserfiles_values.add((user_id, review.id, changeset_id, file_id))

    for file_id, file_users in watchers.items():
        for user_id, user_changesets in file_users.items():
            if user_id:
                if user_id not in reviewusers:
                    new_watchers.add(user_id)
                    reviewusers.add(user_id)
                    reviewusers_values.add((review.id, user_id))

    new_reviewers -= old_reviewers
    new_watchers -= old_reviewers | new_reviewers

    cursor.executemany("INSERT INTO reviewusers (review, uid) VALUES (%s, %s)", reviewusers_values)
    # The SELECT translates each (review, changeset, file) triple into the
    # corresponding reviewfiles.id for the reviewuserfiles insertion.
    cursor.executemany("INSERT INTO reviewuserfiles (file, uid) SELECT id, %s FROM reviewfiles WHERE review=%s AND changeset=%s AND file=%s", reviewuserfiles_values)

    if configuration.extensions.ENABLED:
        # Queue extension filter hook events for any installed extension
        # filters matching files touched by these changesets.
        cursor.execute("""SELECT id, uid, extension, path
                            FROM extensionhookfilters
                           WHERE repository=%s""",
                       (review.repository.id,))

        rows = cursor.fetchall()

        if rows:
            if commits is None:
                commits = set()
                for changeset in changesets:
                    commits.add(changeset.child)
                commits = list(commits)

            filters = Filters()
            filters.setFiles(db, list(getFileIdsFromChangesets(changesets)))

            for filter_id, user_id, extension_id, path in rows:
                filters.addFilter(user_id, path, None, None, filter_id)

            for filter_id, file_ids in filters.matched_files.items():
                extensions.role.filterhook.queueFilterHookEvent(
                    db, filter_id, review, user, commits, file_ids)

    return new_reviewers, new_watchers

def createChangesetsForCommits(db, commits, silent_if_empty=set(), full_merges=set(), replayed_rebases={}):
    """Create changeset records for each commit in |commits|.

    Commits in |full_merges| get full merge changesets; commits that are keys
    in |replayed_rebases| are diffed against their replayed counterpart (with
    conflicts enabled); all other commits get plain changesets, pre-created
    in one batch for efficiency.

    Returns (changesets, silent_commits, silent_changesets) where the
    "silent" sets contain the members of |silent_if_empty| (and their
    changesets) whose changesets touch no files, so that callers can skip
    notifications for them."""
    repository = commits[0].repository
    changesets = []
    silent_commits = set()
    silent_changesets = set()

    simple_commits = []
    for commit in commits:
        if commit not in full_merges and commit not in replayed_rebases:
            simple_commits.append(commit)
    if simple_commits:
        changeset_utils.createChangesets(db, repository, simple_commits)

    for commit in commits:
        # NOTE(review): 'user' below is neither a parameter nor a local of
        # this function; unless it is provided at module level, this raises
        # NameError at runtime -- verify against changeset_utils signatures.
        if commit in full_merges:
            commit_changesets = changeset_utils.createFullMergeChangeset(
                db, user, repository, commit, do_highlight=False)
        elif commit in replayed_rebases:
            commit_changesets = changeset_utils.createChangeset(
                db, user, repository, from_commit=commit,
                to_commit=replayed_rebases[commit], conflicts=True,
                do_highlight=False)
        else:
            commit_changesets = changeset_utils.createChangeset(
                db, user, repository, commit, do_highlight=False)

        if commit in silent_if_empty:
            for commit_changeset in commit_changesets:
                if commit_changeset.files:
                    break
            else:
                # No changeset touched any file: mark this commit (and its
                # changesets) as silent.
                silent_commits.add(commit)
                silent_changesets.update(commit_changesets)

        changesets.extend(commit_changesets)

    return changesets, silent_commits, silent_changesets

def addCommitsToReview(db, user, review, commits, new_review=False, commitset=None, pending_mails=None, silent_if_empty=set(), full_merges=set(), replayed_rebases={}, tracked_branch=False):
    """Add |commits| to |review|: create changesets, assign changes and mail.

    For an existing review (new_review=False), merges that pull in commits
    not already part of the review require an explicit confirmation from the
    pushing user (stored in 'reviewmergeconfirmations'); an IndexException
    carrying instructions is raised until that confirmation exists.  Commits
    contributed via a confirmed merge are excluded from |new_commits|.

    Updates the reviewchangesets/reviewfiles tables, assigns changes via
    assignChanges(), prints progress for the pusher, propagates comment
    chains and sends notification mails.  Returns True."""
    cursor = db.cursor()

    if not new_review:
        import index

        new_commits = log_commitset.CommitSet(commits)
        old_commits = log_commitset.CommitSet(review.branch.getCommits(db))
        merges = new_commits.getMerges()

        for merge in merges:
            # We might have stripped it in a previous pass.
            if not merge in new_commits:
                continue

            # Tails reachable from the merge that are neither already in the
            # review nor direct parents of the merge => the merge brings in
            # previously unreviewed commits.
            tails = filter(lambda sha1: sha1 not in old_commits and sha1 not in merge.parents, new_commits.getTailsFrom(merge))

            if tails:
                if tracked_branch:
                    raise index.IndexException("""\
Merge %s adds merged-in commits.  Please push the merge manually
and follow the instructions.""" % merge.sha1[:8])

                cursor.execute("SELECT id, confirmed, tail FROM reviewmergeconfirmations WHERE review=%s AND uid=%s AND merge=%s", (review.id, user.id, merge.getId(db)))

                row = cursor.fetchone()

                if not row or not row[1]:
                    # No confirmation, or an unconfirmed one: (re)create the
                    # confirmation record and ask the user to confirm.
                    if not row:
                        cursor.execute("INSERT INTO reviewmergeconfirmations (review, uid, merge) VALUES (%s, %s, %s) RETURNING id", (review.id, user.id, merge.getId(db)))
                        confirmation_id = cursor.fetchone()[0]

                        merged = set()

                        # Collect every commit reachable from a tail that is
                        # an ancestor of the merge: the merge's contribution.
                        for tail_sha1 in tails:
                            children = new_commits.getChildren(tail_sha1)

                            while children:
                                child = children.pop()

                                if child not in merged and new_commits.isAncestorOf(child, merge):
                                    merged.add(child)
                                    children.update(new_commits.getChildren(child) - merged)

                        merged_values = [(confirmation_id, commit.getId(db)) for commit in merged]

                        cursor.executemany("INSERT INTO reviewmergecontributions (id, merged) VALUES (%s, %s)", merged_values)
                        # Commit so the confirmation survives the exception
                        # raised below.
                        db.commit()
                    else:
                        confirmation_id = row[0]

                    message = "Merge %s adds merged-in commits:" % merge.sha1[:8]

                    for tail_sha1 in tails:
                        for parent_sha1 in merge.parents:
                            if parent_sha1 in new_commits:
                                parent = new_commits.get(parent_sha1)
                                if tail_sha1 in new_commits.getTailsFrom(parent):
                                    message += "\n %s..%s" % (tail_sha1[:8], parent_sha1[:8])

                    message += """

Please confirm that this is intended by loading:

%s/confirmmerge?id=%d""" % (dbutils.getURLPrefix(db, user), confirmation_id)

                    raise index.IndexException(message)

                elif row[2] is not None:
                    # Confirmed merge with a recorded tail: drop the
                    # merged-in commits from the set to be added.
                    if row[2] == merge.getId(db):
                        cursor.execute("SELECT merged FROM reviewmergecontributions WHERE id=%s", (row[0],))

                        for (merged_id,) in cursor:
                            merged = gitutils.Commit.fromId(db, review.repository, merged_id)
                            if merged.sha1 in merge.parents:
                                new_commits = new_commits.without([merged])
                                break
                    else:
                        tail = gitutils.Commit.fromId(db, review.repository, row[2])
                        cut = [gitutils.Commit.fromSHA1(db, review.repository, sha1)
                               for sha1 in tail.parents
                               if sha1 in new_commits]
                        new_commits = new_commits.without(cut)

        if commitset:
            commitset &= set(new_commits)
            commits = [commit for commit in commits if commit in commitset]

    changesets, silent_commits, silent_changesets = \
        createChangesetsForCommits(db, commits, silent_if_empty, full_merges, replayed_rebases)

    if not new_review:
        # Progress output shown to the user pushing to the review branch.
        print "Adding %d commit%s to the review at:\n %s" % (len(commits), len(commits) > 1 and "s" or "", review.getURL(db))

    reviewchangesets_values = [(review.id, changeset.id) for changeset in changesets]

    cursor.executemany("""INSERT INTO reviewchangesets (review, changeset)
                          VALUES (%s, %s)""",
                       reviewchangesets_values)

    # Per-file deleted/inserted line counts, aggregated from the chunks
    # table (zero for files with no chunks, e.g. binary files).
    cursor.executemany("""INSERT INTO reviewfiles (review, changeset, file, deleted, inserted)
                          SELECT reviewchangesets.review, reviewchangesets.changeset,
                                 fileversions.file,
                                 COALESCE(SUM(chunks.deleteCount), 0),
                                 COALESCE(SUM(chunks.insertCount), 0)
                            FROM reviewchangesets
                            JOIN fileversions USING (changeset)
                 LEFT OUTER JOIN chunks USING (changeset, file)
                           WHERE reviewchangesets.review=%s
                             AND reviewchangesets.changeset=%s
                        GROUP BY reviewchangesets.review, reviewchangesets.changeset, fileversions.file""",
                       reviewchangesets_values)

    new_reviewers, new_watchers = assignChanges(db, user, review, changesets=changesets)

    cursor.execute("SELECT include FROM reviewrecipientfilters WHERE review=%s AND uid IS NULL", (review.id,))

    # No review-wide recipient filter row => default opt-out semantics.
    # (The bare except covers fetchone() returning None.)
    try: opt_out = cursor.fetchone()[0] is True
    except: opt_out = True

    if not new_review:
        for user_id in new_reviewers:
            new_reviewuser = dbutils.User.fromId(db, user_id)
            print "Added reviewer: %s <%s>" % (new_reviewuser.fullname, new_reviewuser.email)

            if opt_out:
                # If the user has opted out from receiving e-mails about this
                # review while only watching it, clear the opt-out now that the
                # user becomes a reviewer.
                cursor.execute("DELETE FROM reviewrecipientfilters WHERE review=%s AND uid=%s AND include=FALSE", (review.id, user_id))

        for user_id in new_watchers:
            new_reviewuser = dbutils.User.fromId(db, user_id)
            print "Added watcher: %s <%s>" % (new_reviewuser.fullname, new_reviewuser.email)

        review.incrementSerial(db)

        reviewing.comment.propagateCommentChains(db, user, review, new_commits, replayed_rebases)

    if pending_mails is None: pending_mails = []

    # Skip notifications for commits/changesets flagged silent above.
    notify_commits = filter(lambda commit: commit not in silent_commits, commits)
    notify_changesets = filter(lambda changeset: changeset not in silent_changesets, changesets)

    if not new_review and notify_changesets:
        recipients = review.getRecipients(db)
        for to_user in recipients:
            pending_mails.extend(mail.sendReviewAddedCommits(
                db, user, to_user, recipients, review, notify_commits,
                notify_changesets, tracked_branch=tracked_branch))

    mail.sendPendingMails(pending_mails)

    review.reviewers.extend([User.fromId(db, user_id) for user_id in new_reviewers])

    for user_id in new_watchers:
        review.watchers[User.fromId(db, user_id)] = "automatic"

    return True

def createReview(db, user, repository, commits, branch_name, summary, description, from_branch_name=None, via_push=False, reviewfilters=None, applyfilters=True, applyparentfilters=False, recipientfilters=None):
    """Create a new review of |commits| on a new branch |branch_name|.

    Creates the review branch (unless via_push=True, in which case it already
    exists), the 'branches'/'reviews'/'reviewusers' rows and any review and
    recipient filters, then delegates to addCommitsToReview().  Also applies
    each recipient's 'review.defaultOptOut' preference unless the review is
    opt-in.  Sends "review created" mails and commits the transaction.

    Returns the new dbutils.Review.  On any failure the review branch is
    deleted again (when this function created it) and the exception is
    re-raised."""
    cursor = db.cursor()

    if via_push:
        applyparentfilters = bool(user.getPreference(db, 'review.applyUpstreamFilters'))

    branch = dbutils.Branch.fromName(db, repository, branch_name)

    if branch is not None:
        raise OperationFailure(
            code="branchexists",
            title="Invalid review branch name",
            message="""\
<p>There is already a branch named <code>%s</code> in the repository.  You
have to select a different name.</p>

<p>If you believe the existing branch was created during an earlier (failed)
attempt to create this review, you can try to delete it from the repository
using the command<p>

<pre>  git push <remote> :%s</pre>

<p>and then press the "Submit Review" button on this page again.""" % (htmlutils.htmlify(branch_name), htmlutils.htmlify(branch_name)),
            is_html=True)

    if not commits:
        raise OperationFailure(
            code="nocommits",
            title="No commits specified",
            message="You need at least one commit to create a review.")

    commitset = log_commitset.CommitSet(commits)
    heads = commitset.getHeads()

    if len(heads) != 1:
        # There is really no plausible way for this error to occur.
        # NOTE(review): the message contains a duplicated word ("do do not").
        raise OperationFailure(
            code="disconnectedtree",
            title="Disconnected tree",
            message=("The specified commits do do not form a single connected "
                     "tree.  Creating a review of them is not supported."))

    head = heads.pop()

    if len(commitset.getTails()) != 1:
        tail_id = None
    else:
        tail_id = gitutils.Commit.fromSHA1(db, repository, commitset.getTails().pop()).getId(db)

    if not via_push:
        try:
            repository.createBranch(branch_name, head.sha1)
        except gitutils.GitCommandError as error:
            raise OperationFailure(
                code="branchfailed",
                title="Failed to create review branch",
                message=("<p><b>Output from git:</b></p>"
                         "<code style='padding-left: 1em'>%s</code>" % htmlutils.htmlify(error.output)),
                is_html=True)

    createChangesetsForCommits(db, commits)

    try:
        cursor.execute("""INSERT INTO branches (repository, name, head, tail, type)
                          VALUES (%s, %s, %s, %s, 'review')
                          RETURNING id""",
                       (repository.id, branch_name, head.getId(db), tail_id))

        branch_id = cursor.fetchone()[0]

        reachable_values = [(branch_id, commit.getId(db)) for commit in commits]

        cursor.executemany("""INSERT INTO reachable (branch, commit)
                              VALUES (%s, %s)""",
                           reachable_values)

        from_branch_id = None
        if from_branch_name is not None:
            cursor.execute("""SELECT id
                                FROM branches
                               WHERE repository=%s
                                 AND name=%s""",
                           (repository.id, from_branch_name))
            row = cursor.fetchone()
            if row:
                from_branch_id = row[0]

        cursor.execute("""INSERT INTO reviews (type, branch, origin, state, summary, description, applyfilters, applyparentfilters)
                          VALUES ('official', %s, %s, 'open', %s, %s, %s, %s)
                          RETURNING id""",
                       (branch_id, from_branch_id, summary, description, applyfilters, applyparentfilters))

        review = dbutils.Review.fromId(db, cursor.fetchone()[0])

        cursor.execute("""INSERT INTO reviewusers (review, uid, owner)
                          VALUES (%s, %s, TRUE)""",
                       (review.id, user.id))

        if reviewfilters is not None:
            cursor.executemany("""INSERT INTO reviewfilters (review, uid, path, type, creator)
                                  VALUES (%s, %s, %s, %s, %s)""",
                               [(review.id, filter_user_id, filter_path, filter_type, user.id)
                                for filter_user_id, filter_path, filter_type, filter_delegate in reviewfilters])

        is_opt_in = False

        if recipientfilters is not None:
            cursor.executemany(
                """INSERT INTO reviewrecipientfilters (review, uid, include)
                   VALUES (%s, %s, %s)""",
                [(review.id, filter_user_id, filter_include)
                 for filter_user_id, filter_include in recipientfilters])

            # A (NULL, FALSE) row means nobody receives mail by default:
            # the review is opt-in.
            for filter_user_id, filter_include in recipientfilters:
                if filter_user_id is None and not filter_include:
                    is_opt_in = True

        addCommitsToReview(db, user, review, commits, new_review=True)

        # Reload to get list of changesets added by addCommitsToReview().
        review = dbutils.Review.fromId(db, review.id)

        pending_mails = []
        recipients = review.getRecipients(db)
        for to_user in recipients:
            pending_mails.extend(mail.sendReviewCreated(db, user, to_user, recipients, review))

        if not is_opt_in:
            recipient_by_id = dict((to_user.id, to_user) for to_user in recipients)

            # Fetch each recipient's 'review.defaultOptOut' settings at the
            # global, per-repository and per-filter level.
            cursor.execute("""SELECT userpreferences.uid, userpreferences.repository,
                                     userpreferences.filter, userpreferences.integer
                                FROM userpreferences
                     LEFT OUTER JOIN filters ON (filters.id=userpreferences.filter)
                               WHERE userpreferences.item='review.defaultOptOut'
                                 AND userpreferences.uid=ANY (%s)
                                 AND (userpreferences.filter IS NULL
                                   OR filters.repository=%s)
                                 AND (userpreferences.repository IS NULL
                                   OR userpreferences.repository=%s)""",
                           (recipient_by_id.keys(), repository.id, repository.id))

            user_settings = {}
            has_filter_settings = False

            for user_id, repository_id, filter_id, integer in cursor:
                # settings = [global, per-repository, {filter_id: value}]
                settings = user_settings.setdefault(user_id, [None, None, {}])
                value = bool(integer)

                if repository_id is None and filter_id is None:
                    settings[0] = value
                elif repository_id is not None:
                    settings[1] = value
                else:
                    settings[2][filter_id] = value
                    has_filter_settings = True

            if has_filter_settings:
                filters = Filters()
                filters.setFiles(db, review=review)

            for user_id, (global_default, repository_default, filter_settings) in user_settings.items():
                to_user = recipient_by_id[user_id]
                opt_out = None

                # Per-repository setting takes precedence over the global.
                if repository_default is not None:
                    opt_out = repository_default
                elif global_default is not None:
                    opt_out = global_default

                if filter_settings:
                    # Policy:
                    #
                    # If all of the user's filters that matched files in the
                    # review have review.defaultOptOut enabled, then opt out.
                    # When determining this, any review filters of the user's
                    # that match files in the review count as filters that don't
                    # have the review.defaultOptOut enabled.
                    #
                    # If any of the user's filters that matched files in the
                    # review have review.defaultOptOut disabled, then don't opt
                    # out.  When determining this, review filters are ignored.
                    #
                    # Otherwise, ignore the filter settings, and go with either
                    # the user's per-repository or global setting (as set
                    # above.)

                    filters.load(db, review=review, user=to_user)

                    # A set of filter ids.  If None is in the set, the user has
                    # one or more review filters in the review.  (These do not
                    # have ids.)
                    active_filters = filters.getActiveFilters(to_user)

                    for filter_id in active_filters:
                        if filter_id is None:
                            continue
                        elif filter_id in filter_settings:
                            if not filter_settings[filter_id]:
                                opt_out = False
                                break
                        else:
                            break
                    else:
                        if None not in active_filters:
                            opt_out = True

                if opt_out:
                    cursor.execute("""INSERT INTO reviewrecipientfilters (review, uid, include)
                                      VALUES (%s, %s, FALSE)""",
                                   (review.id, to_user.id))

        db.commit()

        mail.sendPendingMails(pending_mails)

        return review
    except:
        # Roll back the branch we created above so a retry is possible; the
        # original exception is propagated unchanged.
        if not via_push:
            repository.run("branch", "-D", branch_name)
        raise

def getDraftItems(db, user, review):
    """Return |user|'s draft status in |review| as a compact key=value string
    (used by the client-side scripts)."""
    return "approved=%(reviewedNormal)d,disapproved=%(unreviewedNormal)d,approvedBinary=%(reviewedBinary)d,disapprovedBinary=%(unreviewedBinary)d,comments=%(writtenComments)d,reopened=%(reopenedIssues)d,closed=%(resolvedIssues)d,morphed=%(morphedChains)d" % review.getDraftStatus(db, user)

def renderDraftItems(db, user, review, target):
    """Render a summary of |user|'s unsubmitted (draft) changes in |review|
    into |target|, with Preview/Submit/Abort buttons.

    Returns True if there were any draft items to render, False otherwise."""
    items = review.getDraftStatus(db, user)

    target.addExternalStylesheet("resource/review.css")
    target.addExternalScript("resource/review.js")

    div = target.div(id='draftStatus')

    if any(items.values()):
        div.span('draft').text("Draft: ")

        # Each item is pop()ed so the trailing "any(items.values())" checks
        # tell whether a separating comma is still needed.
        approved = items.pop("reviewedNormal", None)
        if approved:
            div.text(' ')
            div.span('approved').text("reviewed %d line%s" % (approved, approved > 1 and "s" or ""))
            if any(items.values()): div.text(',')

        disapproved = items.pop("unreviewedNormal", None)
        if disapproved:
            div.text(' ')
            div.span('disapproved').text("unreviewed %d line%s" % (disapproved, disapproved > 1 and "s" or ""))
            if any(items.values()): div.text(',')

        approved = items.pop("reviewedBinary", None)
        if approved:
            div.text(' ')
            div.span('approved-binary').text("reviewed %d binary file%s" % (approved, approved > 1 and "s" or ""))
            if any(items.values()): div.text(',')

        disapproved = items.pop("unreviewedBinary", None)
        if disapproved:
            div.text(' ')
            div.span('disapproved-binary').text("unreviewed %d binary file%s" % (disapproved, disapproved > 1 and "s" or ""))
            if any(items.values()): div.text(',')

        comments = items.pop("writtenComments", None)
        if comments:
            div.text(' ')
            div.span('comments').text("wrote %d comment%s" % (comments, comments > 1 and "s" or ""))
            if any(items.values()): div.text(',')

        reopened = items.pop("reopenedIssues", None)
        if reopened:
            div.text(' ')
            div.span('reopened').text("reopened %d issue%s" % (reopened, reopened > 1 and "s" or ""))
            if any(items.values()): div.text(',')

        closed = items.pop("resolvedIssues", None)
        if closed:
            div.text(' ')
            div.span('closed').text("resolved %d issue%s" % (closed, closed > 1 and "s" or ""))
            if any(items.values()): div.text(',')

        morphed = items.pop("morphedChains", None)
        if morphed:
            div.text(' ')
            # NOTE(review): reuses the 'closed' span class; possibly a
            # copy/paste leftover ('morphed' class might be intended).
            div.span('closed').text("morphed %d comment%s" % (morphed, morphed > 1 and "s" or ""))
            if any(items.values()): div.text(',')

        div.text(' ')

        buttons = div.span("buttons")
        buttons.button(onclick='previewChanges();').text("Preview")
        buttons.button(onclick='submitChanges();').text("Submit")
        buttons.button(onclick='cancelChanges();').text("Abort")

        return True
    else:
        return False

def addReviewFilters(db, creator, user, review, reviewer_paths, watcher_paths):
    """Add per-review 'reviewer'/'watcher' filters for |user|, created by
    |creator|, covering the given path lists.

    Existing filters of a different type on the same path are replaced, and
    all changes are logged against a new row in
    'reviewassignmentstransactions'.  File assignments are then brought in
    sync with the new filters (unassigning for watcher paths, assigning for
    reviewer paths -- except for changes the user authored).

    Returns the pending mails produced by
    generateMailsForAssignmentsTransaction()."""
    cursor = db.cursor()
    cursor.execute("INSERT INTO reviewassignmentstransactions (review, assigner) VALUES (%s, %s) RETURNING id", (review.id, creator.id))
    transaction_id = cursor.fetchone()[0]

    def add(filter_type, paths):
        # Insert one filter per path, replacing any existing filter of a
        # different type and recording both changes in reviewfilterchanges.
        for path in paths:
            cursor.execute("""SELECT id, type
                                FROM reviewfilters
                               WHERE review=%s
                                 AND uid=%s
                                 AND path=%s""",
                           (review.id, user.id, path))

            row = cursor.fetchone()

            if row:
                old_filter_id, old_filter_type = row

                if old_filter_type == filter_type:
                    # Identical filter already present; nothing to do.
                    continue
                else:
                    cursor.execute("""DELETE FROM reviewfilters
                                            WHERE id=%s""",
                                   (old_filter_id,))
                    cursor.execute("""INSERT INTO reviewfilterchanges (transaction, uid, path, type, created)
                                      VALUES (%s, %s, %s, %s, false)""",
                                   (transaction_id, user.id, path, old_filter_type))

            cursor.execute("""INSERT INTO reviewfilters (review, uid, path, type, creator)
                              VALUES (%s, %s, %s, %s, %s)""",
                           (review.id, user.id, path, filter_type, creator.id))
            cursor.execute("""INSERT INTO reviewfilterchanges (transaction, uid, path, type, created)
                              VALUES (%s, %s, %s, %s, true)""",
                           (transaction_id, user.id, path, filter_type))

    add("reviewer", reviewer_paths)
    add("watcher", watcher_paths)

    filters = Filters()
    filters.setFiles(db, review=review)
    filters.load(db, review=review, user=user)

    if user not in review.reviewers and user not in review.watchers and user not in review.owners:
        cursor.execute("""INSERT INTO reviewusers (review, uid, type)
                          VALUES (%s, %s, 'manual')""",
                       (review.id, user.id,))

    delete_files = set()
    insert_files = set()

    if watcher_paths:
        # Unassign changes currently assigned to the affected user.
        cursor.execute("""SELECT reviewfiles.id, reviewfiles.file
                            FROM reviewfiles
                            JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                           WHERE reviewfiles.review=%s
                             AND reviewuserfiles.uid=%s""",
                       (review.id, user.id))

        for review_file_id, file_id in cursor:
            if not filters.isReviewer(user.id, file_id):
                delete_files.add(review_file_id)

    if reviewer_paths:
        # Assign changes currently not assigned to the affected user.
        # The usergitemails join excludes changes the user authored.
        cursor.execute("""SELECT reviewfiles.id, reviewfiles.file
                            FROM reviewfiles
                            JOIN changesets ON (changesets.id=reviewfiles.changeset)
                            JOIN commits ON (commits.id=changesets.child)
                            JOIN gitusers ON (gitusers.id=commits.author_gituser)
                 LEFT OUTER JOIN usergitemails ON (usergitemails.email=gitusers.email
                                               AND usergitemails.uid=%s)
                 LEFT OUTER JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id
                                               AND reviewuserfiles.uid=%s)
                           WHERE reviewfiles.review=%s
                             AND usergitemails.uid IS NULL
                             AND reviewuserfiles.uid IS NULL""",
                       (user.id, user.id, review.id))

        for review_file_id, file_id in cursor:
            if filters.isReviewer(user.id, file_id):
                insert_files.add(review_file_id)

    if delete_files:
        cursor.executemany("DELETE FROM reviewuserfiles WHERE file=%s AND uid=%s",
                           izip(delete_files, repeat(user.id)))
        cursor.executemany("INSERT INTO reviewassignmentchanges (transaction, file, uid, assigned) VALUES (%s, %s, %s, false)",
                           izip(repeat(transaction_id), delete_files, repeat(user.id)))

    if insert_files:
        cursor.executemany("INSERT INTO reviewuserfiles (file, uid) VALUES (%s, %s)",
                           izip(insert_files, repeat(user.id)))
        cursor.executemany("INSERT INTO reviewassignmentchanges (transaction, file, uid, assigned) VALUES (%s, %s, %s, true)",
                           izip(repeat(transaction_id), insert_files, repeat(user.id)))

    return generateMailsForAssignmentsTransaction(db, transaction_id)

def parseReviewFilters(db, data):
    """Parse a list of {username, type, path} dicts into the internal
    (user_id, path, type, delegate) review filter tuples.

    Raises OperationFailure for syntactically invalid filter patterns."""
    reviewfilters = []

    for filter_data in data:
        filter_user = dbutils.User.fromName(db, filter_data["username"])
        filter_type = filter_data["type"]
        filter_path = reviewing.filters.sanitizePath(filter_data["path"])

        # Make sure the path doesn't contain any invalid wild-cards.
        try:
            reviewing.filters.validatePattern(filter_path)
        except reviewing.filters.PatternError as error:
            raise OperationFailure(code="invalidpattern",
                                   title="Invalid filter pattern",
                                   message="Problem: %s" % error.message)

        reviewfilters.append((filter_user.id, filter_path, filter_type, None))

    return reviewfilters

def parseRecipientFilters(db, data):
    """Parse a recipient filter specification into (user_id, include) tuples.

    data["mode"] is "opt-out" (default; everyone receives mail except users
    in data["excluded"]) or "opt-in" (nobody receives mail except users in
    data["included"]; encoded by a leading (None, False) tuple).

    Raises OperationError for unknown usernames."""
    mode = data.get("mode", "opt-out")
    included = data.get("included", [])
    excluded = data.get("excluded", [])

    recipientfilters = []

    if mode == "opt-in":
        recipientfilters.append((None, False))

        filter_usernames = included
        filter_include = True
    else:
        filter_usernames = excluded
        filter_include = False

    for filter_username in filter_usernames:
        filter_user = dbutils.User.fromName(db, filter_username)

        if not filter_user:
            raise OperationError("no such user: '%s'" % filter_username)

        recipientfilters.append((filter_user.id, filter_include))

    return recipientfilters

def queryFilters(db, user, review, globalfilters=False, parentfilters=False):
    """Enable global and/or upstream ("parent") filters on |review| and
    re-run change assignment over all of the review's changesets.

    Returns assignChanges()'s (new_reviewers, new_watchers) result.  Does
    not commit the transaction."""
    cursor = db.cursor()

    if globalfilters:
        cursor.execute("UPDATE reviews SET applyfilters=TRUE WHERE id=%s", (review.id,))
        review.applyfilters = True

    if parentfilters:
        cursor.execute("UPDATE reviews SET applyparentfilters=TRUE WHERE id=%s", (review.id,))
        review.applyparentfilters = True

    cursor.execute("""SELECT changeset
                        FROM reviewchangesets
                       WHERE review=%s""",
                   (review.id,))

    # TODO: This two-phase creation of Changeset objects is a bit silly.
    changesets = [diff.Changeset.fromId(db, review.repository, changeset_id)
                  for (changeset_id,) in cursor]

    changeset_load.loadChangesets(
        db, review.repository, changesets, load_chunks=False)

    return assignChanges(db, user, review, changesets=changesets, update=True)

def applyFilters(db, user, review, globalfilters=False, parentfilters=False):
    """Apply global/upstream filters to |review| via queryFilters(), then
    notify every newly added reviewer (with a per-file change summary) and
    watcher by mail.  Commits the transaction."""
    new_reviewers, new_watchers = queryFilters(db, user, review, globalfilters, parentfilters)

    pending_mails = []

    cursor = db.cursor()

    for user_id in new_reviewers:
        new_reviewer = dbutils.User.fromId(db, user_id)

        # Per-file line counts now assigned to this reviewer, included in
        # the notification mail.
        cursor.execute("""SELECT reviewfiles.file, SUM(reviewfiles.deleted), SUM(reviewfiles.inserted)
                            FROM reviewfiles
                            JOIN reviewuserfiles ON (reviewuserfiles.file=reviewfiles.id)
                           WHERE reviewfiles.review=%s
                             AND reviewuserfiles.uid=%s
                        GROUP BY reviewfiles.file""",
                       (review.id, user_id))

        pending_mails.extend(mail.sendFiltersApplied(
            db, user, new_reviewer, review, globalfilters, parentfilters,
            cursor.fetchall()))

    for user_id in new_watchers:
        new_watcher = dbutils.User.fromId(db, user_id)

        pending_mails.extend(mail.sendFiltersApplied(
            db, user, new_watcher, review, globalfilters, parentfilters, None))

    review.incrementSerial(db)

    db.commit()

    mail.sendPendingMails(pending_mails)

def generateMailsForBatch(db, batch_id, was_accepted, is_accepted, profiler=None):
    """Generate (but do not send) "review batch" mails to every recipient of
    the review the batch belongs to.  Returns the list of pending mails."""
    cursor = db.cursor()
    cursor.execute("SELECT review, uid FROM batches WHERE id=%s", (batch_id,))

    review_id, user_id = cursor.fetchone()

    review = dbutils.Review.fromId(db, review_id)
    from_user = dbutils.User.fromId(db, user_id)

    pending_mails = []

    recipients = review.getRecipients(db)
    for to_user in recipients:
        pending_mails.extend(mail.sendReviewBatch(db, from_user, to_user, recipients, review, batch_id, was_accepted, is_accepted, profiler=profiler))

    return pending_mails

def generateMailsForAssignmentsTransaction(db, transaction_id):
    """Generate (but do not send) "assignments changed" mails for an
    assignments transaction: one mail per affected reviewer (other than the
    assigner) summarizing added/removed filters and (un)assigned files.

    Returns the list of pending mails."""
    cursor = db.cursor()
    cursor.execute("SELECT review, assigner, note FROM reviewassignmentstransactions WHERE id=%s", (transaction_id,))

    # NOTE(review): 'note' is fetched but never used below.
    review_id, assigner_id, note = cursor.fetchone()

    review = dbutils.Review.fromId(db, review_id)
    assigner = dbutils.User.fromId(db, assigner_id)

    cursor.execute("""SELECT uid, path, type, created
                        FROM reviewfilterchanges
                       WHERE transaction=%s""",
                   (transaction_id,))

    # reviewer_id => (added_filters, removed_filters, unassigned, assigned)
    by_user = {}

    for reviewer_id, path, filter_type, created in cursor:
        added_filters, removed_filters, unassigned, assigned = by_user.setdefault(reviewer_id, ([], [], [], []))
        if created:
            added_filters.append((filter_type, path or "/"))
        else:
            removed_filters.append((filter_type, path or "/"))

    cursor.execute("""SELECT reviewassignmentchanges.uid, reviewassignmentchanges.assigned,
                             reviewfiles.file, SUM(reviewfiles.deleted), SUM(reviewfiles.inserted)
                        FROM reviewfiles
                        JOIN reviewassignmentchanges ON (reviewassignmentchanges.file=reviewfiles.id)
                       WHERE reviewassignmentchanges.transaction=%s
                    GROUP BY reviewassignmentchanges.uid, reviewassignmentchanges.assigned, reviewfiles.file""",
                   (transaction_id,))

    for reviewer_id, was_assigned, file_id, deleted, inserted in cursor:
        added_filters, removed_filters, unassigned, assigned = by_user.setdefault(reviewer_id, (None, None, [], []))

        if was_assigned:
            assigned.append((file_id, deleted, inserted))
        else:
            unassigned.append((file_id, deleted, inserted))

    pending_mails = []

    for reviewer_id, (added_filters, removed_filters, unassigned, assigned) in by_user.items():
        reviewer = dbutils.User.fromId(db, reviewer_id)
        if assigner != reviewer:
            pending_mails.extend(mail.sendAssignmentsChanged(db, assigner, reviewer, review, added_filters, removed_filters, unassigned, assigned))

    return pending_mails

def retireUser(db, user):
    """Mark |user| as retired and drop their pending review assignments.

    Does not commit the transaction; the caller is expected to do that."""
    cursor = db.cursor()

    # Set the user's status to 'retired'.
    cursor.execute("""UPDATE users
                         SET status='retired'
                       WHERE id=%s""",
                   (user.id,))

    # Delete any assignments of unreviewed (pending) changes to the user.  We're
    # leaving assignments of reviewed changes in-place; no particular need to
    # drop historical data.
    #
    # Deleting even this risks dropping some historical data, specifically
    # changes involving files being marked as reviewed, and then unmarked again.
    # But having "active" assignments to users that aren't going to review them
    # complicates a whole bunch of queries, so to keep things simple, we can
    # sacrifice a little history.
    cursor.execute("""DELETE
                        FROM reviewuserfiles
                       WHERE uid=%s
                         AND file IN (SELECT id
                                        FROM reviewfiles
                                       WHERE state='pending')""",
                   (user.id,))



================================================
FILE: src/run_unittest.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2014 the Critic contributors, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
import sys import argparse parser = argparse.ArgumentParser() parser.add_argument("--coverage", action="store_true") parser.add_argument("test_module") parser.add_argument("test_arguments", nargs=argparse.REMAINDER) arguments = parser.parse_args() if arguments.coverage: from coverage import call else: def call(_, fn, *args, **kwargs): fn(*args, **kwargs) def execute(path, argv): """Load |path| and call main() in it with |argv| as arguments If there is no main(), instead assume that each argument in |argv| is the name of a test, and run each test by calling a function named the same as the test, with no arguments.""" module = {} execfile(path, module) if "main" in module: module["main"](argv) return for test in argv: if test in module: module[test]() try: call("unittest", execute, arguments.test_module, arguments.test_arguments) sys.exit(0) except Exception: import traceback traceback.print_exc() sys.exit(1) ================================================ FILE: src/syntaxhighlight/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import os import os.path import bz2 import htmlutils import textutils import configuration import diff.parse LANGUAGES = set() class TokenTypes: Operator = 1 Identifier = 2 Keyword = 3 Character = 4 String = 5 Comment = 6 Integer = 7 Float = 8 Preprocessing = 9 TokenClassNames = { TokenTypes.Operator: "op", TokenTypes.Identifier: "id", TokenTypes.Keyword: "kw", TokenTypes.Character: "chr", TokenTypes.String: "str", TokenTypes.Comment: "com", TokenTypes.Integer: "int", TokenTypes.Float: "fp", TokenTypes.Preprocessing: "pp", } class HighlightRequested(Exception): pass def generateHighlightPath(sha1, language, mode="legacy"): if mode == "json": suffix = ".json" else: suffix = "" return os.path.join(configuration.services.HIGHLIGHT["cache_dir"], sha1[:2], sha1[2:] + "." + language + suffix) def isHighlighted(sha1, language, mode="legacy"): path = generateHighlightPath(sha1, language, mode) return os.path.isfile(path) or os.path.isfile(path + ".bz2") def wrap(raw_source, mode): if mode == "json": return "\n".join(textutils.json_encode([[None, line]]) for line in diff.parse.splitlines(raw_source)) return htmlutils.htmlify(raw_source) def readHighlight(repository, sha1, path, language, request=False, mode="legacy"): from request import requestHighlights async = mode == "json" source = None if language: path = generateHighlightPath(sha1, language, mode) if os.path.isfile(path): os.utime(path, None) source = open(path).read() elif os.path.isfile(path + ".bz2"): os.utime(path + ".bz2", None) source = bz2.BZ2File(path + ".bz2", "r").read() elif request: requestHighlights(repository, { sha1: (path, language) }, mode, async=async) if mode == "json": raise HighlightRequested() return readHighlight(repository, sha1, path, language, False, mode) if not source: source = wrap(textutils.decode(repository.fetch(sha1).data), mode) return source # Import for side-effects: these modules add strings to the LANGUAGES set to # indicate which languages they support highlighting. 
import cpp import generic ================================================ FILE: src/syntaxhighlight/clexer.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. # Simple C/C++ lexer # # Module Contents # =============== # # split(source[, include_ws=True, include_comments=True]) # # Returns an iterator that returns the tokens in the C/C++ source as # individual strings. If 'include_ws' is true, sequences of whitespace # (including linebreaks) separating tokens are returned as well. If # 'include_comments' is true, comments are returned as individual tokens as # well. # # Preprocessor lines are returned as single tokens, starting with the first # character of the line and ending before the linebreak character that ends # the preprocessor directive; including any backslashes and linebreak # characters following backslashes. # # Character and string literal tokens are returned exactly as they occurred in # the source string, with any escape sequences (including escaped linebreaks) # preserved. # # Escaped linebreaks outside of preprocessor directives and string literals # are not handled; in practice the backslash will vanish and the linebreak # will be returned as a whitespace token (possibly combined with any following # whitespace.) 
If whitespace preceded the backslash, there will be two # separate whitespace tokens, split where the backslash was. # # tokenize(tokens[, filename="<unknown>"]) # # Returns an iterator that returns each string returned by the iterable # 'tokens' converted into a Token object. The token's have line and column # numbers calculated assuming that the token sequence contains all tokens and # whitespace sequences from a single file. If not, the line and column # numbers will not be correct. The supplied 'filename' is also stored in each # token. # # group(tokens[, groups={ '(':')', '{':'}', '[':']' }]) # # Returns a list containing all tokens returned by the iterable 'tokens', # grouped into sublists according to 'groups', which should be a dictionary # mapping group start tokens to group end tokens. # # Each sublist created will start with a token from the set of keys in # 'groups' and end with the corresponding end token. Every token returned by # the iterable 'tokens' will occur exactly once in the tree of lists returned # by this function. # # Throws a CLexerException if an unexpected group end token is encountered, or # if the sequence of tokens ends inside a group. # # group1(tokens, end[, groups={ '(':')', '{':'}', '[':']' }]) # # Like group(), but returns a tuple containing a single group (ending with the # token 'end') and the actual token ending the group (since it's not included # in the group.) Typically 'tokens' will be an iterator that has just # returned the corresponding start token. Upon return, if 'tokens' is an # iterator, it will just have returned a token identical to 'end'. # # The returned list contains neither the group start or end token, and is # grouped as if group() had been used to group that particular sequence of # tokens. (It is necessary to do this grouping anyway to identify the token # that actually ends the group while ignoring sub-groupings using the same # pair of start/end tokens.) 
# # partition(tokens, separator) # # Splits the sequence of tokens returned by the iterable 'tokens' by the token # 'separator'. Normally, the token sequence 'tokens' should be grouped using # group() first, so that occurrences of 'separator' inside groups are ignored. # The return value is a list of lists of tokens (or lists as created by # group()). # # If 'tokens' returns zero tokens, an empty list is returned. # # flatten(tokens) # # Returns an iterator that returns each token returned by the iterable # 'tokens' while reversing the type of grouping done by group()/group1(). # # This means that flatten(group(tokens)) is a no-op. # # join(tokens[, insertSpaces=True]) # # Returns a string produced by concatenating all tokens returned by the # iterable 'tokens'. If 'insertSpaces' is true, a single space is inserted # between each token unless there was whitespace strings in the token sequence # there already. Automatically flattens the token sequence first. # # CLexerException # # Exception type thrown by the functions group() and group1(). Inherits # the built-in Exception type and adds absolutely nothing. # # Token Objects # ============= # # Token objects are created by the function tokenize(), but can also be # constructed manually using the constructor # # Token(value[, filename="<unknown>", line=0, column=0]) # # Token instances are comparable, hashable, are true (non-zero) unless they # represent whitespace or comments, and can be converted back to string form by # str(). In addition they support the following methods: # # filename() # # Returns the filename from which the token stems. In practice, either the # filename passed to tokenize() or to the Token constructor. # # line() # # Returns the line number (first line=1) at which the token occurred. # # column() # # Returns the column number (first column=0) at which the token occurred. # # isidentifier() # # Returns true if the token is an identifier. 
#
# isspace()
#
#   Returns true if the token is whitespace.
#
# iscomment()
#
#   Returns true if the token is a comment.
#
# isppdirective()
#
#   Returns true if the token is a preprocessor directive.
#
# reduced()
#
#   Returns a string where special tokens (whitespace, comments and
#   preprocessor directives) are converted to the shortest possible sequence
#   of whitespace that preserves the line and column number of following
#   tokens.

import re
import sys
import itertools
import traceback

def rejoin(items, escape):
    """Build a regexp alternation ("a|b|c") from |items|.

    With |escape| true, regexp metacharacters in each item are backslash-
    escaped and the items are ordered longest-first, so that e.g. "<<="
    is tried before "<<" and "<" when the alternation is matched."""
    if not escape:
        return "|".join(items)

    def quote(text):
        # Escape only the metacharacters that can occur in the operator
        # table below; this mirrors what the matcher needs.
        for special in "(){}[]*+?|.^$":
            text = text.replace(special, "\\" + special)
        return text

    # Longest-first keeps greedy alternation correct for overlapping
    # operators; sorted() is stable, so equal-length items keep their
    # original relative order.
    return "|".join(quote(item)
                    for item in sorted(items, key=len, reverse=True))

# All C/C++ operators and punctuators recognized by the lexer.  The order of
# equal-length entries is preserved by rejoin()'s stable sort.
OPERATORS_AND_PUNCTUATORS = [ "(", ")", "{", "}", "[", "]",
                              "<", ">", "<=", ">=", "<<", ">>", "<<=", ">>=",
                              "+", "-", "*", "/", "%",
                              "+=", "-=", "*=", "/=", "%=",
                              "&&", "||", "&", "|", "^", "!",
                              ",", ".", "::", ":", ";", "=", "==", "!=",
                              "&=", "|=", "^=", "++", "--", "~", "?",
                              "->", "->*", ".*", "##", "#" ];

# C++ (pre-C++11) keywords; identifiers in this set are classified as
# keywords rather than plain identifiers.
KEYWORDS = set([ "asm", "do", "if", "return", "typedef",
                 "auto", "double", "inline", "short", "typeid",
                 "bool", "dynamic_cast", "int", "signed", "typename",
                 "break", "else", "long", "sizeof", "union",
                 "case", "enum", "mutable", "static", "unsigned",
                 "catch", "explicit", "namespace", "static_cast", "using",
                 "char", "export", "new", "struct", "virtual",
                 "class", "extern", "operator", "switch", "void",
                 "const", "false", "private", "template", "volatile",
                 "const_cast", "float", "protected", "this", "wchar_t",
                 "continue", "for", "public", "throw", "while",
                 "default", "friend", "register", "true",
                 "delete", "goto", "reinterpret_cast", "try" ])

# Regexp source fragments for the individual token kinds.  These are combined
# into the compiled token expressions further down in this file.
CONFLICT_MARKER = "^(?:<<<<<<<|=======|>>>>>>>)[^\n]*$"
INT_LITERAL = "(?:0|[1-9][0-9]*|0x[0-9a-fA-F]+)[lLuU]*(?![0-9a-zA-Z_\\.])"
FLOAT_LITERAL = "(?:(?:[0-9]*\\.[0-9]+|[0-9]+\\.)(?:[eE][+-]?[0-9]+)?|[0-9]+[eE][+-]?[0-9]+)[fFlL]*(?![0-9a-zA-Z_])"
IDENTIFIER = "[a-zA-Z_][a-zA-Z0-9_]*"
MULTILINE_COMMENT = "/\\*.*?\\*/"
# Remaining token regexp fragments (continuing the set started above).
SINGLELINE_COMMENT = "//.*?(?=\n|$)"
STRING_LITERAL = '"(?:\\\\.|[^"\n])*"'
CHARACTER_LITERAL = "'(?:\\\\.|[^'\n])*'"
WIDE_STRING_LITERAL = 'L"(?:\\\\.|[^"\n])*"'
WIDE_CHARACTER_LITERAL = "L'(?:\\\\.|[^'\n])*'"
# A whole preprocessor line, including comments/literals embedded in it and
# backslash-escaped linebreak continuations.
PREPROCESSOR_DIRECTIVE = "^[ \t]*#(?:%s|%s|%s|%s|%s|%s|\\\\\n|[^\n])*" % (MULTILINE_COMMENT, SINGLELINE_COMMENT, STRING_LITERAL, CHARACTER_LITERAL, WIDE_STRING_LITERAL, WIDE_CHARACTER_LITERAL)
OPERATOR_OR_PUNCTUATOR = rejoin(OPERATORS_AND_PUNCTUATORS, escape=True)
WHITESPACE = "\\s+"
# UTF-8 encoding of U+FEFF; matched so a BOM doesn't break tokenization.
BYTE_ORDER_MARK = u"\ufeff".encode("utf-8")

# Used by the self-tests at the bottom of this file to verify that each test
# token matches exactly one sub-pattern.
SUBPATTERNS = [FLOAT_LITERAL, INT_LITERAL, WIDE_STRING_LITERAL,
               WIDE_CHARACTER_LITERAL, IDENTIFIER, MULTILINE_COMMENT,
               SINGLELINE_COMMENT, PREPROCESSOR_DIRECTIVE, STRING_LITERAL,
               CHARACTER_LITERAL]

# Combined token expressions.  Order matters: e.g. FLOAT_LITERAL must come
# before INT_LITERAL and OPERATOR_OR_PUNCTUATOR before INT_LITERAL so that
# "1.5" is not split; the final "." matches any otherwise unmatched character.
RE_CTOKENS = re.compile(rejoin([CONFLICT_MARKER, IDENTIFIER, FLOAT_LITERAL,
                                MULTILINE_COMMENT, SINGLELINE_COMMENT,
                                PREPROCESSOR_DIRECTIVE, OPERATOR_OR_PUNCTUATOR,
                                INT_LITERAL, STRING_LITERAL, CHARACTER_LITERAL,
                                BYTE_ORDER_MARK, "."], escape=False),
                        re.DOTALL | re.MULTILINE)
# Same as RE_CTOKENS but additionally returning whitespace runs as tokens.
RE_CTOKENS_INCLUDE_WS = re.compile(rejoin([CONFLICT_MARKER, IDENTIFIER,
                                           FLOAT_LITERAL, MULTILINE_COMMENT,
                                           SINGLELINE_COMMENT,
                                           PREPROCESSOR_DIRECTIVE,
                                           OPERATOR_OR_PUNCTUATOR, INT_LITERAL,
                                           STRING_LITERAL, CHARACTER_LITERAL,
                                           BYTE_ORDER_MARK, WHITESPACE, "."],
                                          escape=False),
                                   re.DOTALL | re.MULTILINE)
RE_IDENTIFIER = re.compile(IDENTIFIER)
RE_INT_LITERAL = re.compile("^" + INT_LITERAL + "$")
RE_FLOAT_LITERAL = re.compile("^" + FLOAT_LITERAL + "$")

class CLexerException(Exception):
    """Raised by group() on unbalanced group tokens."""
    def __init__(self, message):
        Exception.__init__(self, message)

class CLexerGroupingException(Exception):
    """Raised by group1() on unbalanced group tokens; carries the tokens
    consumed so far so the caller can still process them."""
    def __init__(self, message, tokens):
        Exception.__init__(self, message)
        self.__tokens = tokens

    def tokens(self):
        # The tokens consumed before the error (possibly a lazy iterator).
        return self.__tokens

def iskeyword(value):
    # The leading-character check cheaply rules out non-word tokens before
    # the set lookup.
    return (str(value)[0].isalpha() or str(value)[0] == "_") and str(value) in KEYWORDS

def isidentifier(value):
    # Word-shaped and not a keyword.
    return (str(value)[0].isalpha() or str(value)[0] == "_") and str(value) not in KEYWORDS

def isspace(value):
    return str(value).isspace()
def iscomment(value):
    # Both multi-line ("/*") and single-line ("//") comments.
    return str(value)[0:2] in ("/*", "//")

def isppdirective(value):
    # A '#' as the first non-blank character starts a preprocessor line.
    return str(value).lstrip(" \t").startswith("#")

def isconflictmarker(value):
    value = str(value)
    return value.startswith("<<<<<<<") or value.startswith("=======") or value.startswith(">>>>>>>")

def isint(value):
    return RE_INT_LITERAL.match(str(value)) is not None

def isfloat(value):
    return RE_FLOAT_LITERAL.match(str(value)) is not None

def isbyteordermark(value):
    return str(value) == BYTE_ORDER_MARK

def split(input, include_ws=True, include_comments=True):
    """Split C/C++ source |input| into token strings (see module comment).

    Returns a lazy iterator.  NOTE: 'input' shadows the builtin; kept for
    API compatibility."""
    if include_ws:
        expression = RE_CTOKENS_INCLUDE_WS
    else:
        expression = RE_CTOKENS
    # Python 2 itertools: imap/ifilter are the lazy map/filter.
    tokens = itertools.imap(lambda match: match.group(0), expression.finditer(input))
    if include_comments:
        return tokens
    else:
        return itertools.ifilter(lambda token: not iscomment(token), tokens)

class Token:
    """A single token with source position; compares equal to its string
    value and is falsy when it is whitespace or a comment."""

    def __init__(self, value, filename="<unknown>", line=0, column=0):
        self.__value = value
        self.__filename = filename
        self.__line = line
        self.__column = column

    def __cmp__(self, other):
        # Python 2 rich-comparison fallback; lets Token == "..." work.
        return cmp(self.__value, other)

    def __str__(self):
        return self.__value

    def __repr__(self):
        return repr(self.__value)

    def __hash__(self):
        # Hashes like its string value, consistent with __cmp__.
        return hash(self.__value)

    def __nonzero__(self):
        # Whitespace and comments are "insignificant" and test false.
        return not (self.isspace() or self.iscomment())

    def filename(self):
        return self.__filename

    def line(self):
        # First line is 1.
        return self.__line

    def column(self):
        # First column is 0.
        return self.__column

    def iskeyword(self):
        return iskeyword(self.__value)

    def isidentifier(self):
        return isidentifier(self.__value)

    def isspace(self):
        return isspace(self.__value)

    def iscomment(self):
        return iscomment(self.__value)

    def isppdirective(self):
        return isppdirective(self.__value)

    def isconflictmarker(self):
        return isconflictmarker(self.__value)

    def isstring(self):
        # Plain or wide (L"...") string literal.
        return self.__value[0] == '"' or self.__value.startswith('L"')

    def ischar(self):
        # Plain or wide (L'...') character literal.
        return self.__value[0] == "'" or self.__value.startswith("L'")

    def isint(self):
        return isint(self.__value)

    def isfloat(self):
        return isfloat(self.__value)

    def isbyteordermark(self):
        return isbyteordermark(self.__value)

    def reduced(self):
        """Return the shortest whitespace string that preserves the line and
        column of following tokens (see module comment)."""
        if self.isspace() or self.iscomment():
            if self.__value.startswith("//"):
                # Single-line comments contain no linebreak and end the line,
                # so they contribute nothing to following-token positions.
                return ""
            else:
                linebreaks = self.__value.count("\n")
                if linebreaks:
                    # Keep the linebreaks, then pad to the column after the
                    # last linebreak.
                    last = self.__value.rindex("\n")
                    return "\n" * linebreaks + " " * (len(self.__value) - last - 1)
                else:
                    return " " * len(self.__value)
        elif self.isppdirective():
            # A directive ends at a linebreak, so only its internal (escaped)
            # linebreaks matter.
            return "\n" * self.__value.count("\n")
        else:
            return self.__value

def tokenize(tokens, filename="<unknown>"):
    """Yield each string from |tokens| as a Token with line/column computed
    cumulatively; see module comment for the accuracy caveat."""
    line = 1
    column = 0
    for token in tokens:
        if isinstance(token, Token):
            # Already a Token: pass through, but still advance the position
            # counters using its string value.
            yield token
            token = str(token)
        else:
            yield Token(token, filename, line, column)
        linebreaks = token.count("\n")
        if linebreaks:
            line += linebreaks
            column = len(token) - 1 - token.rindex("\n")
        else:
            column += len(token)

def locate(tokens, index):
    """Compute (line, column) of the token at position |index| by replaying
    the (flattened) token sequence from the start."""
    line = 1
    column = 0
    for token_index, token in enumerate(flatten(tokens)):
        if index == token_index:
            break
        linebreaks = token.count("\n")
        if linebreaks:
            line += linebreaks
            column = len(token) - 1 - token.rindex("\n")
        else:
            column += len(token)
    return line, column

DEFAULT_GROUP = {'(': ')', '{': '}', '[': ']'}
DEFAULT_GROUP_REVERSE = {')': '(', '}': '{', ']': '['}

def group(tokens, groups=None):
    """Group |tokens| into nested lists per |groups| (see module comment).

    Raises CLexerException on an unexpected group end token or an unclosed
    group."""
    if groups is None:
        groups = DEFAULT_GROUP
        reverse = DEFAULT_GROUP_REVERSE
    else:
        reverse = dict([(end, start) for start, end in groups.items()])
    # Stack entries: (expected end token, accumulated list, start index).
    # The sentinel bottom entry collects top-level tokens.
    stack = [('<EOF>', [], -1)]
    currentEnd = stack[-1][0]
    currentList = stack[-1][1]
    for index, token in enumerate(tokens):
        if token in groups:
            # Group opener: push a new list seeded with the opener.
            stack.append((groups[token], [token], index))
            currentList = stack[-1][1]
            currentEnd = stack[-1][0]
        elif token == currentEnd:
            # Group closer: finish the sublist and append it to the parent.
            currentList.append(token)
            stack.pop()
            stack[-1][1].append(currentList)
            currentEnd = stack[-1][0]
            currentList = stack[-1][1]
        elif token in reverse:
            # A closer that doesn't match the current group.
            if isinstance(token, Token):
                line, column = token.line(), token.column()
            else:
                line, column = locate(tokens, index)
            raise CLexerException("%d:%d: expected '%s', got '%s'" % (line, column, currentEnd, token))
        else:
            currentList.append(token)
    if len(stack) > 1:
        # Input ended inside a group; report the position of its opener.
        token = stack[-1][1][0]
        if isinstance(token, Token):
            line, column = token.line(), token.column()
        else:
            line, column = locate(tokens, stack[-1][2])
        raise CLexerException("%d:%d: unmatched group opener '%s'" % (line, column, token))
    return currentList

def group1(iterable, end, groups=None):
    """Group tokens from |iterable| until the top-level token |end|; return
    (group, end-token).  See module comment.

    On imbalance, raises CLexerGroupingException carrying everything consumed
    so far so the caller can still process those tokens."""
    if groups is None:
        groups = DEFAULT_GROUP
        reverse = DEFAULT_GROUP_REVERSE
    else:
        reverse = dict([(end, start) for start, end in groups.items()])
    stack = [('<EOF>', [])]
    currentEnd = stack[-1][0]
    currentList = stack[-1][1]
    for token in iterable:
        if token in groups:
            stack.append((groups[token], [token]))
            currentList = stack[-1][1]
            currentEnd = stack[-1][0]
        elif token == end and len(stack) == 1:
            # The terminating token at top level: done.
            return currentList, token
        elif token == currentEnd:
            stack.pop()
            currentList.append(token)
            stack[-1][1].append(currentList)
            currentEnd = stack[-1][0]
            currentList = stack[-1][1]
        elif token in reverse:
            # Mismatched closer: unwind the stack so the exception carries a
            # well-formed (if incomplete) token tree.
            currentList.append(token)
            while len(stack) > 1:
                stack.pop()
                stack[-1][1].append(currentList)
                currentList = stack[-1][1]
            raise CLexerGroupingException("expected '%s', got '%s'" % (currentEnd, token), flatten(currentList))
        else:
            currentList.append(token)
    # Iterable exhausted without seeing |end|: unwind and report.
    while len(stack) > 1:
        stack.pop()
        stack[-1][1].append(currentList)
        currentList = stack[-1][1]
    token = stack[-1][1][0]
    raise CLexerGroupingException("unmatched group opener '%s'" % token, list(flatten(currentList)))

def partition(tokens, separator):
    """Split |tokens| on |separator|; returns a list of lists ([] for empty
    input).  See module comment."""
    current = []
    partitions = [current]
    try:
        tokens = iter(tokens)
        while True:
            token = tokens.next()
            if token == separator:
                current = []
                partitions.append(current)
            else:
                current.append(token)
    except StopIteration:
        if len(partitions) == 1 and not partitions[0]:
            return []
        else:
            return partitions

def flatten(tokens):
    """Yield tokens depth-first, undoing the nesting done by group()/group1()."""
    tokens = iter(tokens)
    try:
        while True:
            token = tokens.next()
            if isinstance(token, list):
                # Splice the sublist in front of the remaining tokens.
                tokens = itertools.chain(token, tokens)
            else:
                yield token
    except StopIteration:
        pass

def join(tokens, insertSpaces=True):
    """Concatenate |tokens| into a string, optionally inserting single spaces
    between tokens where the sequence contained no whitespace already."""
    if insertSpaces:
        result = ""
        lastWasSpace = True
        for token in flatten(tokens):
            if not lastWasSpace and not token.isspace():
                result += " "
            result += str(token)
            lastWasSpace = token.isspace()
        return result
    else:
        return "".join(itertools.imap(str, flatten(tokens)))

# Run regression tests if we're the main script and not being imported as a module.
if __name__ == "__main__":
    # The token expression does not match whitespace.
    assert not RE_CTOKENS.match(" ")
    assert not RE_CTOKENS.match("\t")
    assert not RE_CTOKENS.match("\r")
    assert not RE_CTOKENS.match("\n")

    def testToken(token, subpattern, rest="", isOperator=False):
        # |token| must match the combined expression and |subpattern| exactly
        # (leaving only |rest| unmatched), and must match no other subpattern.
        # Exception: "#" and "##" legitimately also match the preprocessor
        # directive pattern.
        wholeMatch = RE_CTOKENS.match(token)
        assert wholeMatch
        assert wholeMatch.group(0) + rest == token
        subMatch = re.match(subpattern, token, re.DOTALL | re.MULTILINE)
        assert subMatch
        assert subMatch.group(0) + rest == token
        for other in SUBPATTERNS:
            if other != subpattern and not (isOperator and other == PREPROCESSOR_DIRECTIVE and (token == "#" or token == "##")):
                assert not re.match(other, token, re.DOTALL | re.MULTILINE)

    for operatorOrPunctuator in OPERATORS_AND_PUNCTUATORS:
        testToken(operatorOrPunctuator, OPERATOR_OR_PUNCTUATOR, isOperator=True)

    # Integer literals, with and without suffixes.
    testToken("0", INT_LITERAL)
    testToken("0u", INT_LITERAL)
    testToken("0l", INT_LITERAL)
    testToken("0ul", INT_LITERAL)
    testToken("0lu", INT_LITERAL)
    testToken("1", INT_LITERAL)
    testToken("4711", INT_LITERAL)
    testToken("0x0", INT_LITERAL)
    testToken("0xffffu", INT_LITERAL)

    # Floating point literals in all their syntactic variants.
    testToken("0.", FLOAT_LITERAL)
    testToken("123.f", FLOAT_LITERAL)
    testToken("123.f", FLOAT_LITERAL)
    testToken("123.l", FLOAT_LITERAL)
    testToken("123.e1", FLOAT_LITERAL)
    testToken("123.e1f", FLOAT_LITERAL)
    testToken("123.e1l", FLOAT_LITERAL)
    testToken(".0", FLOAT_LITERAL)
    testToken(".123", FLOAT_LITERAL)
    testToken(".123f", FLOAT_LITERAL)
    testToken(".123l", FLOAT_LITERAL)
    testToken(".123e1", FLOAT_LITERAL)
    testToken(".123e1f", FLOAT_LITERAL)
    testToken(".123e1l", FLOAT_LITERAL)
    testToken("0.0", FLOAT_LITERAL)
    testToken("123.123", FLOAT_LITERAL)
    testToken("123.123f", FLOAT_LITERAL)
    testToken("123.123l", FLOAT_LITERAL)
    testToken("123.123e1", FLOAT_LITERAL)
testToken("123.123e1f", FLOAT_LITERAL) testToken("123.123e1l", FLOAT_LITERAL) testToken("0e1", FLOAT_LITERAL) testToken("123e1", FLOAT_LITERAL) testToken("123e1f", FLOAT_LITERAL) testToken("123e1l", FLOAT_LITERAL) testToken("123e100", FLOAT_LITERAL) testToken("123e+100", FLOAT_LITERAL) testToken("123e-100", FLOAT_LITERAL) testToken("i", IDENTIFIER) testToken("this_or_that", IDENTIFIER) testToken("_", IDENTIFIER) testToken("__i", IDENTIFIER) testToken("i1", IDENTIFIER) testToken("/**/", MULTILINE_COMMENT) testToken("/***/", MULTILINE_COMMENT) testToken("/****/", MULTILINE_COMMENT) testToken("/*****/", MULTILINE_COMMENT) testToken("/*foo*/", MULTILINE_COMMENT) testToken("/*foo\nfoo\nfoo*/", MULTILINE_COMMENT) testToken("//", SINGLELINE_COMMENT) testToken("///", SINGLELINE_COMMENT) testToken("////", SINGLELINE_COMMENT) testToken("//foo", SINGLELINE_COMMENT) testToken("//\n", SINGLELINE_COMMENT, "\n") testToken("///\n", SINGLELINE_COMMENT, "\n") testToken("////\n", SINGLELINE_COMMENT, "\n") testToken("//bar\n", SINGLELINE_COMMENT, "\n") testToken("#", PREPROCESSOR_DIRECTIVE) testToken("#foo", PREPROCESSOR_DIRECTIVE) testToken(" #", PREPROCESSOR_DIRECTIVE) testToken(" #foo", PREPROCESSOR_DIRECTIVE) testToken("#\n", PREPROCESSOR_DIRECTIVE, "\n") testToken("#foo\n", PREPROCESSOR_DIRECTIVE, "\n") testToken(" #\n", PREPROCESSOR_DIRECTIVE, "\n") testToken(" #foo\n", PREPROCESSOR_DIRECTIVE, "\n") testToken('""', STRING_LITERAL) testToken('"foo"', STRING_LITERAL) testToken('"\\"\\"\\""', STRING_LITERAL) testToken("''", CHARACTER_LITERAL) testToken("'foo'", CHARACTER_LITERAL) testToken("'\\'\\'\\''", CHARACTER_LITERAL) ================================================ FILE: src/syntaxhighlight/context.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import os import syntaxhighlight import configuration def importCodeContexts(db, sha1, language): codecontexts_path = syntaxhighlight.generateHighlightPath(sha1, language) + ".ctx" if os.path.isfile(codecontexts_path): contexts_values = [] for line in open(codecontexts_path): line = line.strip() first_line, last_line, context = line.split(" ", 2) if len(context) > configuration.services.HIGHLIGHT["max_context_length"]: context = context[:configuration.services.HIGHLIGHT["max_context_length"] - 3] + "..." contexts_values.append((sha1, context, int(first_line), int(last_line))) cursor = db.cursor() cursor.execute("DELETE FROM codecontexts WHERE sha1=%s", [sha1]) cursor.executemany("INSERT INTO codecontexts (sha1, context, first_line, last_line) VALUES (%s, %s, %s, %s)", contexts_values) db.commit() os.unlink(codecontexts_path) return len(contexts_values) else: return 0 ================================================ FILE: src/syntaxhighlight/cpp.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the
# License for the specific language governing permissions and limitations under
# the License.

import syntaxhighlight
import syntaxhighlight.clexer
import htmlutils
import configuration

from syntaxhighlight import TokenTypes

class HighlightCPP:
    """C/C++ syntax highlighter built on syntaxhighlight.clexer.

    Writes classified tokens to an outputter and, optionally, "code context"
    records (function/class headers enclosing line ranges) to a contexts
    file."""

    def highlightToken(self, token):
        # Classify one lexer token and write it.  Order matters: e.g. the
        # keyword test must precede the identifier test.
        if token.iskeyword():
            self.outputter.writeSingleline(TokenTypes.Keyword, str(token))
        elif token.isidentifier():
            self.outputter.writeSingleline(TokenTypes.Identifier, str(token))
        elif token.iscomment():
            if str(token)[0:2] == "/*":
                # Only /* */ comments can span lines.
                self.outputter.writeMultiline(TokenTypes.Comment, str(token))
            else:
                self.outputter.writeSingleline(TokenTypes.Comment, str(token))
        elif token.isppdirective():
            # Directives can span lines via escaped linebreaks.
            self.outputter.writeMultiline(TokenTypes.Preprocessing, str(token))
        elif token.isspace():
            self.outputter.writePlain(str(token))
        elif token.isconflictmarker():
            self.outputter.writePlain(str(token))
        elif str(token)[0] == '"':
            self.outputter.writeSingleline(TokenTypes.String, str(token))
        elif str(token)[0] == "'":
            self.outputter.writeSingleline(TokenTypes.Character, str(token))
        elif token.isfloat():
            self.outputter.writeSingleline(TokenTypes.Float, str(token))
        elif token.isint():
            self.outputter.writeSingleline(TokenTypes.Integer, str(token))
        elif token.isbyteordermark():
            self.outputter.writePlain(u"\ufeff")
        else:
            self.outputter.writeSingleline(TokenTypes.Operator, str(token))

    def outputContext(self, tokens, terminator):
        """Emit one code-context record for the declaration |tokens|, whose
        body is closed by |terminator| (the '}' token)."""
        if not self.contexts:
            return

        def spaceBetween(first, second):
            # Decide whether to insert a space between two adjacent tokens
            # when rendering the context as a single line of text.
            # Never insert spaces around the :: operator.
            if first == '::' or second == '::':
                return False
            # Always a space after a comma.
            if first == ',':
                return True
            # Always a space before a keyword or identifier, unless preceded
            # by *, & or (.
            if second.iskeyword() or second.isidentifier():
                return str(first) not in ('*', '&', '(')
            # Always a space before a * or &, unless preceded by (another) *.
            if (second == '*' or second == '&') and first != '*':
                return True
            # Always spaces around equal signs.
            if first == '=' or second == '=':
                return True
            # No spaces between by default.
            return False

        # The context covers the lines strictly inside the braces.
        first_line = tokens[-1].line() + 1
        last_line = terminator.line()

        # Skip trivially short bodies; not worth a context record.
        if last_line - first_line >= configuration.services.HIGHLIGHT["min_context_length"]:
            previous = tokens[0]
            context = str(previous)
            for token in tokens[1:]:
                if token.isspace() or token.iscomment():
                    continue
                if spaceBetween(previous, token):
                    context += " "
                context += str(token)
                previous = token
            self.contexts.write("%d %d %s\n" % (first_line, last_line, context))

    def processTokens(self, tokens):
        """Highlight every token and track brace nesting to discover the
        declarations (candidate "contexts") that enclose brace bodies.

        |nextContext| accumulates the tokens of a possible declaration;
        None/closed means the current statement cannot be a context (e.g. it
        started with a statement keyword)."""
        currentContexts = []
        nextContext = []
        nextContextClosed = False
        level = 0
        for token in tokens:
            self.highlightToken(token)
            if token.isspace() or token.iscomment() or token.isppdirective() or token.isconflictmarker():
                # Insignificant for context tracking.
                pass
            elif token.iskeyword():
                if str(token) in ("if", "else", "for", "while", "do", "switch", "return", "break", "continue"):
                    # Statement keywords: whatever follows is a control-flow
                    # body, not a declaration.
                    nextContext = None
                    nextContextClosed = True
                elif not nextContextClosed:
                    nextContext.append(token)
            elif token.isidentifier():
                if not nextContextClosed:
                    nextContext.append(token)
            elif token == '{':
                # Entering a brace body: remember the declaration (if any)
                # together with the nesting level at which it opened.
                if nextContext:
                    currentContexts.append([nextContext, level])
                nextContext = []
                nextContextClosed = False
                level += 1
            elif token == '}':
                level -= 1
                if currentContexts and currentContexts[-1][1] == level:
                    # This '}' closes the body of the innermost recorded
                    # declaration; emit its context record.
                    thisContext = currentContexts.pop()
                    self.outputContext(thisContext[0], token)
                nextContext = []
                nextContextClosed = False
            elif nextContext:
                if token == ',' and not nextContextClosed:
                    # A top-level comma means e.g. a declarator list; no
                    # single context applies.
                    nextContext = None
                    nextContextClosed = True
                elif token == ':':
                    # E.g. a constructor initializer list or base-class
                    # clause: keep what we have, ignore the rest.
                    nextContextClosed = True
                elif token == ';':
                    # Statement ended without a brace body: start over.
                    nextContext = []
                    nextContextClosed = False
                elif token == '(':
                    if not nextContextClosed:
                        nextContext.append(token)
                        try:
                            # Consume the whole parenthesized group directly
                            # from the shared token iterator; these tokens
                            # are part of the declaration (parameter list).
                            group, token = syntaxhighlight.clexer.group1(tokens, ')')
                            group = list(syntaxhighlight.clexer.flatten(group)) + [token]
                            nextContext.extend(group)
                            for token in group:
                                self.highlightToken(token)
                        except syntaxhighlight.clexer.CLexerGroupingException as error:
                            # Unbalanced parentheses: still highlight the
                            # tokens that were consumed, then give up on the
                            # current context.
                            for token in error.tokens():
                                self.highlightToken(token)
                            nextContext = []
                            nextContextClosed = False
                elif not nextContextClosed:
                    nextContext.append(token)

    def __call__(self, source, outputter, contexts_path):
        # NOTE(review): source is encoded to UTF-8 before lexing, so the
        # lexer operates on a byte string (Python 2 semantics).
        source = source.encode("utf-8")
        self.outputter = outputter
        if contexts_path:
            self.contexts = open(contexts_path, "w")
        else:
            self.contexts = None
        self.processTokens(syntaxhighlight.clexer.tokenize(syntaxhighlight.clexer.split(source)))
        if contexts_path:
            self.contexts.close()

    @staticmethod
    def create(language):
        # Factory used by syntaxhighlight.generate.createHighlighter().
        if language == "c++":
            return HighlightCPP()
        else:
            return None

# Register the language this module supports.
syntaxhighlight.LANGUAGES.add("c++")


================================================
FILE: src/syntaxhighlight/generate.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import os
import errno

import syntaxhighlight
import gitutils
import textutils
import htmlutils
import diff.parse

from syntaxhighlight import TokenClassNames

def createHighlighter(language):
    """Return a highlighter instance for |language|, or None if no
    highlighter module supports it.  The C++ highlighter is tried first,
    then the generic (pygments-based) one."""
    import cpp
    highlighter = cpp.HighlightCPP.create(language)
    if highlighter: return highlighter

    import generic
    highlighter = generic.HighlightGeneric.create(language)
    if highlighter: return highlighter

class Outputter(object):
    """Base class for highlight output: splits content into lines and
    delegates to the subclass hooks _writePart/_writePlain/_endLine/_flush."""

    def __init__(self, output_file):
        self.output_file = output_file

    def writeMultiline(self, token_type, content):
        # Content may contain linebreaks; emit each line fragment separately
        # so every output line is self-contained.
        parts = content.split("\n")
        for part in parts[:-1]:
            if part:
                self._writePart(token_type, part)
            self._endLine()
        if parts[-1]:
            self._writePart(token_type, parts[-1])

    def writeSingleline(self, token_type, content):
        assert "\n" not in content
        self._writePart(token_type, content)

    def writePlain(self, content):
        # Same line-splitting as writeMultiline, but without a token type.
        parts = content.split("\n")
        for part in parts[:-1]:
            if part:
                self._writePlain(part)
            self._endLine()
        if parts[-1]:
            self._writePlain(parts[-1])

    def flush(self):
        # Flush any buffered partial line, then close the underlying file.
        self._flush()
        self.output_file.close()

class HTMLOutputter(Outputter):
    """Legacy format: HTML with <b class='...'> wrappers per token."""

    def _writePart(self, token_type, content):
        self.output_file.write(
            "<b class='%s'>%s</b>" % (TokenClassNames[token_type],
                                      htmlutils.htmlify(content)))

    def _writePlain(self, content):
        self.output_file.write(htmlutils.htmlify(content))

    def _endLine(self):
        self.output_file.write("\n")

    def _flush(self):
        pass

class JSONOutputter(Outputter):
    """JSON format: one JSON array per source line, each element being a
    [token_type, text] pair (token_type null for plain text)."""

    def __init__(self, output_file):
        super(JSONOutputter, self).__init__(output_file)
        self.line = []

    def _writePart(self, token_type, content):
        # Merge adjacent parts of the same token type into one element to
        # keep the output compact.
        if self.line \
                and isinstance(self.line[-1], list) \
                and self.line[-1][0] == token_type:
            self.line[-1][1] += content
        else:
            self.line.append([token_type, content])

    def _writePlain(self, content):
        self.line.append([None, content])

    def _endLine(self):
        self.output_file.write(textutils.json_encode(self.line) + "\n")
        self.line = []

    def _flush(self):
        if self.line:
            self._endLine()

def generateHighlight(repository_path, sha1, language, mode, output_file=None):
    """Generate a highlighted version of blob |sha1| from the repository at
    |repository_path|.

    With |output_file| given, output is written there directly.
    NOTE(review): in that case the object is passed to the highlighter in
    the outputter position -- presumably callers pass an Outputter, not a
    raw file; confirm against callers.

    Otherwise the output is written atomically (via a ".tmp" file) into the
    highlight cache, in HTML or JSON format depending on |mode|, with code
    contexts in a ".ctx" side-car file.  Returns False if no highlighter
    supports |language|, True otherwise."""
    highlighter = createHighlighter(language)
    if not highlighter: return False

    source = gitutils.Repository.readObject(repository_path, "blob", sha1)
    source = textutils.decode(source)

    if output_file:
        highlighter(source, output_file, None)
    else:
        output_path = syntaxhighlight.generateHighlightPath(sha1, language, mode)

        # Create the shard directory if needed (0750 is a Python 2 octal
        # literal); an already-existing directory is fine.
        try: os.makedirs(os.path.dirname(output_path), 0750)
        except OSError as error:
            if error.errno == errno.EEXIST: pass
            else: raise

        # Write to a ".tmp" file and rename into place so readers never see
        # a partially written highlight.
        output_file = open(output_path + ".tmp", "w")
        contexts_path = output_path + ".ctx"

        if mode == "json":
            outputter = JSONOutputter(output_file)
        else:
            outputter = HTMLOutputter(output_file)

        highlighter(source, outputter, contexts_path)

        output_file.close()

        os.chmod(output_path + ".tmp", 0660)
        os.rename(output_path + ".tmp", output_path)

    return True


================================================
FILE: src/syntaxhighlight/generic.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import re try: import pygments.lexers import pygments.token except ImportError: LANGUAGES = {} else: LANGUAGES = { "python": pygments.lexers.PythonLexer, "perl": pygments.lexers.PerlLexer, "java": pygments.lexers.JavaLexer, "ruby": pygments.lexers.RubyLexer, "php": pygments.lexers.PhpLexer, "makefile": pygments.lexers.MakefileLexer, "javascript": pygments.lexers.JavascriptLexer, "sql": pygments.lexers.SqlLexer, "objective-c": pygments.lexers.ObjectiveCLexer, "xml": pygments.lexers.XmlLexer } import htmlutils from syntaxhighlight import TokenTypes class HighlightGeneric: def __init__(self, lexer): self.lexer = lexer def highlightToken(self, token, value): value = value.encode("utf-8") if token in pygments.token.Token.Punctuation or token in pygments.token.Token.Operator: self.outputter.writeSingleline(TokenTypes.Operator, value) elif token in pygments.token.Token.Name or token in pygments.token.Token.String.Symbol: self.outputter.writeSingleline(TokenTypes.Identifier, value) elif token in pygments.token.Token.Keyword: self.outputter.writeSingleline(TokenTypes.Keyword, value) elif token in pygments.token.Token.String: self.outputter.writeMultiline(TokenTypes.String, value) elif token in pygments.token.Token.Comment: self.outputter.writeMultiline(TokenTypes.Comment, value) elif token in pygments.token.Token.Number.Integer: self.outputter.writeSingleline(TokenTypes.Integer, value) elif token in pygments.token.Token.Number.Float: self.outputter.writeSingleline(TokenTypes.Float, value) else: self.outputter.writePlain(value) def __call__(self, source, outputter, contexts_path): self.outputter = outputter blocks = re.split("^((?:<<<<<<<|>>>>>>>)[^\n]*\n)", source, flags=re.MULTILINE) in_conflict = False for index, block in enumerate(blocks): if (index & 1) == 0: if in_conflict: blocks = re.split("^(=======[^\n]*\n)", block, flags=re.MULTILINE) else: blocks = [block] for index, block in enumerate(blocks): if (index & 1) == 0: if block: for token, value in 
self.lexer.get_tokens(block): self.highlightToken(token, value) else: assert block[0] == "=" self.outputter.writePlain(block) else: assert block[0] == "<" or block[0] == ">" self.outputter.writePlain(block) in_conflict = block[0] == "<" @staticmethod def create(language): lexer = LANGUAGES.get(language) if lexer: return HighlightGeneric(lexer(stripnl=False)) else: return None import syntaxhighlight syntaxhighlight.LANGUAGES.update(LANGUAGES.keys()) ================================================ FILE: src/syntaxhighlight/request.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import socket import base import configuration import syntaxhighlight from textutils import json_encode, json_decode class HighlightBackgroundServiceError(base.ImplementationError): def __init__(self, message): super(HighlightBackgroundServiceError, self).__init__( "Highlight background service failed: %s" % message) def requestHighlights(repository, sha1s, mode, async=False): requests = [ { "repository_path": repository.path, "sha1": sha1, "path": path, "language": language, "mode": mode } for sha1, (path, language) in sha1s.items() if not syntaxhighlight.isHighlighted(sha1, language, mode) ] if not requests: return False try: connection = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) connection.connect(configuration.services.HIGHLIGHT["address"]) connection.send(json_encode({ "requests": requests, "async": async })) connection.shutdown(socket.SHUT_WR) data = "" while True: received = connection.recv(4096) if not received: break data += received connection.close() except EnvironmentError as error: raise HighlightBackgroundServiceError(str(error)) if async: return True if not data: raise HighlightBackgroundServiceError( "returned an invalid response (no response)") try: results = json_decode(data) except ValueError: raise HighlightBackgroundServiceError( "returned an invalid response (%r)" % data) if type(results) != list: # If not a list, the result is probably an error message. raise HighlightBackgroundServiceError(str(results)) if len(results) != len(requests): raise HighlightBackgroundServiceError("didn't process all requests") return True ================================================ FILE: src/textformatting.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. 
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import re

import configuration
import dbutils
import gitutils
import htmlutils

def renderFormatted(db, user, table, lines, toc=False, title_right=None):
    """Render Critic's lightweight text markup (tutorials etc.) into |table|.

    |lines| is a sequence of markup lines.  |table| is an HTML builder
    object that receives one table row per rendered block.  If |toc| is
    true, a table of contents is generated below the first H1 heading.
    |title_right| is optional text (or a callable taking a span builder)
    placed to the right of the H1 title.

    NOTE(review): the builder API of |table| (tr/td/div/h1/...) is assumed
    from usage here; confirm against the page generation code."""
    re_h1 = re.compile("^=+$")
    re_h2 = re.compile("^-+$")

    # Values substituted into the markup via "%(...)s" references.
    data = { "configuration.URL": dbutils.getURLPrefix(db, user),
             "configuration.base.HOSTNAME": configuration.base.HOSTNAME,
             "configuration.base.SYSTEM_USER_NAME": configuration.base.SYSTEM_USER_NAME,
             "configuration.base.SYSTEM_GROUP_NAME": configuration.base.SYSTEM_GROUP_NAME,
             "configuration.paths.CONFIG_DIR": configuration.paths.CONFIG_DIR,
             "configuration.paths.INSTALL_DIR": configuration.paths.INSTALL_DIR,
             "configuration.paths.DATA_DIR": configuration.paths.DATA_DIR,
             "configuration.paths.GIT_DIR": configuration.paths.GIT_DIR }

    # Link reference definitions: name => (url, title or None).
    references = {}

    # Split the input into blocks separated by blank lines, collecting link
    # reference definitions ('[name]: url "title"') on the way.
    blocks = []
    block = []

    for line in lines:
        match = re.match(r'\[(.*?)\]: (.*?)(?: "(.*?)")?$', line)
        if match:
            name, url, title = match.groups()
            references[name] = (url, title)
            continue
        if line.strip():
            block.append(line % data)
        elif block:
            blocks.append(block)
            block = []
    else:
        # Flush the final block (the input need not end with a blank line).
        if block:
            blocks.append(block)

    # The table cell currently receiving plain text blocks; reset to None by
    # headings and special blocks so a fresh cell is created afterwards.
    text = None

    for block in blocks:
        def textToId(text):
            # Convert a heading into an HTML id/anchor name.
            return text.lower().replace(' ', '_')

        if len(block) == 2:
            if re_h1.match(block[1]):
                # Top-level heading ("=====" underline): becomes the page
                # title and, optionally, the table-of-contents placeholder.
                table.setTitle(block[0])
                h1 = table.tr("h1").td("h1").h1(id=textToId(block[0]))
                h1.text(block[0])
                if title_right:
                    span_right = h1.span("right")
                    if callable(title_right):
                        title_right(span_right)
                    else:
                        span_right.text(title_right)
                text = None
                if toc:
                    # |toc| changes type here: from boolean flag to the TOC
                    # table builder used by later headings.
                    toc = table.tr("toc").td("toc").div().table("toc callout")
                    toc.tr("heading").th().text("Table of contents")
                continue
            elif re_h2.match(block[1]):
                # Section heading ("-----" underline).
                if toc:
                    toc.tr("h2").td().a(href="#" + textToId(block[0])).text(block[0])
                table.tr("h2").td("h2").div().h2(id=textToId(block[0])).text(block[0])
                text = None
                continue

        if len(block) == 1 and block[0] == "[repositories]":
            # Special directive: emit a table of all repositories.
            text = None
            repositories = table.tr().td().table("repositories callout")
            headings = repositories.thead().tr()
            headings.th("name").text("Short name")
            headings.th("path").text("Repository path")
            repositories.tr().td(colspan=2)
            cursor = db.cursor()
            cursor.execute("SELECT name, path FROM repositories ORDER BY id ASC")
            for name, path in cursor:
                row = repositories.tr("repository")
                row.td("name").text(name)
                row.td("path").text(gitutils.Repository.constructURL(db, user, path))
            continue

        if not text:
            text = table.tr("text").td("text")

        def translateLinks(text):
            # Expand CONFIG(item) references and '[text][ref]' links into
            # <a> elements, using the collected |references|.
            def linkify(match):
                config_item, reference_text, reference_name = match.groups()
                if config_item:
                    url = "/config?highlight=%s" % config_item
                    text = config_item
                    title = None
                else:
                    reference_name = re.sub(r"\s+", " ", reference_name)
                    assert reference_name in references, reference_name
                    url, title = references[reference_name]
                    text = reference_text
                link = "<a href=%s" % htmlutils.htmlify(url, True)
                if title:
                    link += " title=%s" % htmlutils.htmlify(title, True)
                return link + ">%s</a>" % htmlutils.htmlify(text)
            return re.sub(r"CONFIG\(([^)]+)\)|\[(.*?)\]\n?\[(.*?)\]",
                          linkify, text, flags=re.DOTALL)

        def processText(lines):
            if isinstance(lines, basestring):
                lines = [lines]
            for index, line in enumerate(lines):
                # Bare indented URLs become links to themselves.
                if line.startswith("  http"):
                    lines[index] = "<a href='%s'>%s</a>" % (line.strip(), line.strip())
            text = translateLinks("\n".join(lines))
            # Replace double dashes with —, but only if they are
            # surrounded by either spaces or word characters on both sides.
            #
            # We don't want to translate the double dashes in a
            # --command-line-argument used in the text.
            text = re.sub(r"(^| )--( |$)", r"\1—\2", text, flags=re.MULTILINE)
            text = re.sub(r"(\w)--(\w)", r"\1—\2", text)
            return text

        if len(block) > 2 and re_h2.match(block[1]):
            # Sub-section heading followed by body text in the same block.
            if toc:
                toc.tr("h3").td().a(href="#" + textToId(block[0])).text(block[0])
            div = text.div()
            div.h3(id=textToId(block[0])).text(block[0])
            block = block[2:]

        if block[0].startswith("|"):
            # Pre-formatted block; "||" means raw (already escaped) HTML.
            pre = text.div().table("pre callout").tr().td().preformatted()
            contents = "\n".join([line[2:] for line in block])
            if block[0].startswith("||"):
                pre.innerHTML(contents)
            else:
                pre.text(contents)
        elif block[0].startswith("* ") or block[0].startswith("1 "):
            # Bullet ("* ") or numbered ("1 ") list; continuation lines are
            # indented by two spaces.
            if block[0].startswith("* "):
                items = text.div().ul()
            else:
                items = text.div().ol()
            item = []
            for line in block:
                if line[:2] != '  ':
                    # A new item starts: flush the previous one.
                    if item:
                        items.li().text(processText(item), cdata=True)
                    item = []
                else:
                    assert line[:2] == "  "
                item.append(line[2:])
            if item:
                items.li().text(processText(item), cdata=True)
        elif block[0].startswith("? "):
            # Definition list: "? term" lines followed by "= definition"
            # lines, with two-space indented continuations.
            items = text.div().dl()
            term = []
            definition = None
            for line in block:
                if line[:2] == '? ':
                    if definition:
                        # Flush the previous term/definition pair.
                        items.dt().text(processText(" ".join(term)), cdata=True)
                        items.dd().text(processText(definition), cdata=True)
                        definition = None
                    term = [line[2:]]
                elif line[:2] == '= ':
                    assert term
                    assert definition is None
                    definition = [line[2:]]
                elif definition is None:
                    term.append(line[2:])
                else:
                    definition.append(line[2:])
            items.dt().text(processText(" ".join(term)), cdata=True)
            items.dd().text(processText(definition), cdata=True)
        elif block[0].startswith("  "):
            # Indented block: an example ("<code>") or a hint/note callout.
            text_data = translateLinks("\n".join(block))
            if block[0].startswith("  <code>"):
                className = "example"
            else:
                className = "hint"
            text_data = text_data.replace("--", "—")
            text.div().div(className).text(text_data, cdata=True)
        else:
            # Plain paragraph.
            text.div().text(processText(block), cdata=True)

================================================
FILE: src/textutils.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.

import re
import json
import unicodedata

try:
    import configuration
    DEFAULT_ENCODINGS = configuration.base.DEFAULT_ENCODINGS[:]
except ImportError:
    # This is the default set of default encodings.  We could fail to
    # import 'configuration' for two principal reasons:
    #
    # 1) There's some catastrophic problem with the system.  Ignoring
    #    the problem here won't make the least bit of difference.
    #
    # 2) We're trying to run unit tests without an installed system.
# This fallback is simply nice in that case. DEFAULT_ENCODINGS = ["utf-8"] def reflow(text, line_length=80, indent=0): if line_length == 0: return text paragraphs = re.split("\n\n+", text.replace("\r", "")) spaces = " " * indent for paragraph_index, paragraph in enumerate(paragraphs): lines = paragraph.split("\n") for line_index, line in enumerate(lines): if (line and line[0] in " \t-*") or line_index < len(lines) - 1 and len(line) < 0.5 * line_length: if indent: paragraphs[paragraph_index] = "\n".join([spaces + line for line in lines]) break else: lines = [] line = spaces words = re.split("(\s+)", paragraph) ws = "" for word in words: if not word.strip(): if "\n" in word: ws = " " else: ws = word else: if len(line) > indent and len(line) + len(ws) + len(word) > line_length: lines.append(line) line = spaces if len(line) > indent: line += ws line += word if line: lines.append(line) paragraphs[paragraph_index] = "\n".join(lines) text = "\n\n".join(paragraphs) if isinstance(text, unicode): return text.encode("utf-8") else: return text def indent(string, width=4): prefix = " " * width return prefix + ("\n" + prefix).join(string.splitlines()) def summarize(string, max_length=80, as_html=False): if len(string) <= max_length: return string string = string[:max_length - 5] if as_html: import htmlutils return htmlutils.htmlify(string) + "[…]" else: return string + "[...]" def escape(text): special = { "\a": "a", "\b": "b", "\t": "t", "\n": "n", "\v": "v", "\f": "f", "\r": "r" } def escape1(match): substring = match.group(0) if ord(substring) < 128: if substring in special: return "\\%s" % special[substring] elif ord(substring) < 32: return "\\x%02x" % ord(substring) else: return substring category = unicodedata.category(substring) if category[0] in "CZ" or category == "So": if ord(substring) < 256: return "\\x%02x" % ord(substring) elif ord(substring) < 65536: return "\\u%04x" % ord(substring) else: return "\\U%08x" % ord(substring) else: return substring text = 
decode(str(text)) escaped = re.sub("\W", escape1, text, flags=re.UNICODE) return escaped.encode("utf-8") json_encode = json.dumps def deunicode(v): if type(v) == unicode: return v.encode("utf-8") elif type(v) == list: return map(deunicode, v) elif type(v) == dict: return dict([(deunicode(a), deunicode(b)) for a, b in v.items()]) else: return v def json_decode(s): return deunicode(json.loads(s)) def decode(text): if isinstance(text, unicode): return text text = str(text) for index, encoding in enumerate(DEFAULT_ENCODINGS): try: decoded = text.decode(encoding) except UnicodeDecodeError: continue except LookupError: del DEFAULT_ENCODINGS[index] else: # Replace characters in the surrogate pair range with U+FFFD since # PostgreSQL's UTF-8 decoder won't accept them. return re.sub(u"[\ud800-\udfff]", "\ufffd", decoded) return text.decode("ascii", errors="replace") def encode(text): if isinstance(text, unicode): return text.encode("utf-8") return str(text) ================================================ FILE: src/textutils_unittest.py ================================================ def independence(): # Simply check that textutils can be imported. import textutils print "independence: ok" ================================================ FILE: src/tutorials/administration.txt ================================================ System Administration ===================== Upgrading Critic ---------------- To install a newer version of Critic, simply check out a newer commit in the <code>critic.git</code> repository clone from which Critic was installed, and then run the command | python upgrade.py as root. If the repository clone from which Critic was installed has been deleted, upgrading Critic from a newly cloned repository will usually work as well. The only case in which this does not work is if the commit that was installed (or last upgraded to) is not available in the newly cloned repository. 
Upgrading Actions
-----------------

The actions taken by the <code>upgrade.py</code> script are roughly these, in
this order:

1 Offer to create a backup copy (dump) of Critic's database,
2 install additional prerequisite software that the new version depends on,
3 ask questions for new configuration settings in the new version,
4 copy updated source files (to
  <code>%(configuration.paths.INSTALL_DIR)s/</code>),
5 write updated configuration files (to
  <code>%(configuration.paths.CONFIG_DIR)s/configuration/</code>) and other
  system files (e.g. Apache site definition file, SysV init script,
  <code>/usr/bin/criticctl</code>),
6 restart Apache and Critic background services, and
7 run migration scripts to update the database schema or perform other
  one-time system updating tasks.

Each of these steps could be empty, and thus skipped, depending on the changes
between the currently installed version of Critic and the new version.

Locally Modified Files
----------------------

If the upgrade process wants to write an updated version of an installed file,
it will first check if the currently installed file has been modified since it
was installed.  No local modifications are reverted without asking first.

Note though that modified configuration files can be updated without asking if
this can be done in a way that does not conflict with, and that preserves, the
local modifications.

Migration Scripts
-----------------

Some changes to the system require custom modifications during an upgrade to
adjust the installed system to match how the new version of Critic would have
set the system up if it had been installed from scratch.  The most common such
change is changes to the database schema, such as adding new tables or new
columns to existing tables.

This type of custom modification is performed by running small one-time
scripts called migration scripts.
When upgrading to a new version of Critic, all migration scripts that exist in the new version that didn't exist in the previously installed version are executed as the final step of the upgrade. If the upgrade process would run any migration script that modifies the database schema, the offer to create a backup copy of Critic's database at the beginning of the upgrade process has "yes" as its default choice instead of "no" as it otherwise has. It is strongly recommended that the offer is accepted, since the changes made by such a migration script are generally not reversible in any other than to restore an earlier backup copy of the database. Change Host Web Server ---------------------- Critic's installation/upgrading scripts support a few different host web server configurations: * Apache 2.x (with mod_wsgi) * nginx (HTTP front-end) + uWSGI (WSGI back-end) * uWSGI (as both HTTP front-end and WSGI back-end) The Apache configuration used to be the only supported, so an old Critic system probably runs that one. The nginx+uWSGI configuration is the currently preferred (and default) one, but the installation script will ask which one to use when a new system is installed. The <code>upgrade.py</code> script can be used switch to a different configuration, i.e. replicate how a new install would have configured the desired new web server configuration. Doing this requires just a little bit of manual administration work: 1 Stop the current host web server. This is required so that it stops listening at the HTTP/HTTPS ports, so that the new web server can do so instead. You can typically do that by running the command <code>service <name> stop</code> as root, where <i>name</i> is one of <code>apache2</code>, <code>nginx</code> and <code>uwsgi</code>. 2 Edit <code>%(configuration.paths.CONFIG_DIR)s/configuration/base.py</code>, changing the <code>WEB_SERVER_INTEGRATION</code> setting to the desired value. See the inline documentation in the file for details. 
3 Run <code>python upgrade.py</code> as root. Note that unless you're also upgrading Critic to a new version, you may need to add the <code>--force</code> command-line argument to force the upgrading script to run despite there being no changes to install. The upgrading script will, if necessary, install the new web server, and then install configuration files for it. Note that the script may warn that it failed to stop the <i>new</i> web server before the actual upgrade. This is expected, since the new web server may not have been running, or even been installed, at that time. After this, Critic should be set up and running in the new web server. You may need to make sure the previous web server doesn't start up again the next time the system boots. The easiest way to do that is probably to uninstall it completely. Configuring Critic ------------------ The installation and upgrading procedure will produce a fully configured Critic system. Not all details can be controlled, however, and sometimes it may be necessary to change the installed configuration later on. To do this, one needs to edit the configuration files under <code>%(configuration.paths.CONFIG_DIR)s/</code> and restart the host web server (to reload the web front-end) and Critic's background services. The best way to restart Critic is using the <code>criticctl</code> utility: | criticctl restart Before restarting, it will run a basic configuration test to make sure the configuration isn't obviously invalid. This will not catch all possible errors, but helps to prevent putting the Critic system in an unusable state. It is also possible to just run the configuration test: | criticctl configtest Do note that while it is necessary to manually restart to immediately make changes effective, some background services spawns child processes to do the actual work, and they will pick up changes right away, without an explicit restart. 
Likewise, the host web server may spawn additional web front-end child processes, or stop and restart existing ones, thus picking up changes before being explicitly restarted. Due to this fact, it's best to explicitly restart immediately after making a change, so that any problems are detected and can be addressed. Git Repository Access --------------------- Critic's Git repositories should normally be accessible to the users of the system, both to clone and examine locally and to push new commits to. It is not strictly required that they are -- Critic can be set up such that users only access Git repositories on some other system from which Critic in turn fetches -- but it is the typical set-up. Git supports a variety of schemes (or protocols) for accessing repositories. In general, any access scheme will work with Critic as long as doesn't involve directly manipulating the repository on the Critic server in a way that bypasses Critic's <code>pre-receive</code> hook. Some access schemes require the system administrator to perform administration tasks beyond installing and managing Critic, however. git://host/path.git ------------------- The Git scheme, provided by the <code>git daemon</code> command, is typically used only for read-only access to Git repositories. It can support push as well but it will be unauthenticated, which means there will be no way of knowing which user is responsible for the push. Critic has no support for starting and running <code>git daemon</code>. This simple command can be used to start a <code>git daemon</code> process that provides read-only access to all of Critic's repositories on %(configuration.base.HOSTNAME)s: | git daemon --export-all --base-path=%(configuration.paths.GIT_DIR)s %(configuration.paths.GIT_DIR)s The command needs to be executed as the Critic system user (<code>%(configuration.base.SYSTEM_USER_NAME)s</code>), for instance using <code>sudo</code>: | sudo -H -u critic git daemon ... 
For more details about how to run <code>git daemon</code>, see its built-in documentation: | git daemon --help http://host/path.git -------------------- The HTTP (and/or HTTPS) protocol, provided by the <code>git http-backend</code> command, is supported by Critic and enabled by default. If the Critic web front-end allows anonymous users, then it also allows unauthenticated read-only access. Push is only supported from authenticated users. ssh://host%(configuration.paths.GIT_DIR)s/path.git / host:%(configuration.paths.GIT_DIR)s/path.git --------------------------------------------------------------------- Access to repositories over SSH simply requires users to be able to log into the Critic system over SSH and be members of the <code>%(configuration.base.SYSTEM_GROUP_NAME)s</code> system group; Git takes care of the rest. See also the note on <a href="#system_users">System Users</a> below. Hint: To allow users to access Git repositories over SSH without giving them "shell access," set the system users' shell to <code>git-shell</code>. Configuration ------------- There are two settings in Critic relating to repository access. First, there's the system configuration setting <code>REPOSITORY_URL_TYPES</code>, set in <code>%(configuration.paths.CONFIG_DIR)s/configuration/base.py</code>, which is a list of access schemes that are supported on the system. If this list does not contain the string <code>"http"</code>, the built-in support for this access scheme is disabled. The other schemes -- <code>"git"</code>, <code>"ssh"</code> and <code>"host"</code> -- are not in themselves affected by the setting. Second, there's the user preference setting CONFIG(repository.urlType) that determines what type of repository URLs the web front-end displays. Each user can select their preferred URL type among the set of supported schemes as defined by the system configuration setting above. Users ----- Critic has a user database containing records of the users of the system. 
The record for each user contains the (typically short) user name, the user's full name (which is what is normally displayed) and the user's email address, which is used when the system sends notifications to the user. The user database is populated in different ways depending on how Critic was configured to identify and authenticate users. Authentication mode: Critic --------------------------- If Critic was configured to authenticate users itself, then users can be added to the database manually by a system administrator. This can be done using the <code>criticctl</code> utility (installed as <code>/usr/bin/criticctl</code> by Critic's installation script): | criticctl adduser --name=USERNAME --email=EMAIL --fullname=FULLNAME Note: The program will prompt for the user's password. If any of the options are omitted, it will also prompt for that information. It is also possible to let new users create user records in Critic themselves, by changing the system setting <code>ALLOW_USER_REGISTRATION</code> in <code>%(configuration.paths.CONFIG_DIR)s/configuration/base.py</code>. When enabled, a link is added on the sign-in page to a separate page where users can input the necessary information to create a user record. Is is also possible to let users sign in, and optionally create user records, by authenticating via an external system, such as GitHub or Google, using the OAuth 2.0 protocol. For more information about this mechanism, see the [tutorial on external authentication][extauth]. [extauth]: /tutorial?item=external-authentication Authentication mode: Host ------------------------- If Critic was configured to let the host web server authenticate users, it instead assumes that the <code>REMOTE_USER</code> WSGI environment variable is set to the name of the accessing user, and automatically creates a user record in its database if one doesn't already exist. 
In this scenario, there's typically no reason for the system administrator to
manually add users to Critic's database (but it's still possible to do so.)

See <a href="/tutorial?item=customization#automatic_user_creation">the
customization tutorial</a> for details on this automatic user creation, and
how it can be tweaked.

Anonymous Access
----------------

It is possible to let users access a Critic system without signing in, by
setting the system setting <code>ALLOW_ANONYMOUS_USER</code> in
<code>%(configuration.paths.CONFIG_DIR)s/configuration/base.py</code> to True.

When Critic was configured to authenticate users itself (authentication mode
"critic") and use HTTP cookies to handle sessions (instead of HTTP
authentication), allowing anonymous access simply means the system won't
redirect to the "Sign in" page for most page accesses; instead there will be a
"Sign in" link in the page header.

When Critic was configured to let the host web server authenticate users, the
user is simply considered anonymous when the <code>REMOTE_USER</code> WSGI
environment variable is unset.  The host web server must be configured
appropriately for this to be a meaningful configuration, and there are
typically limited use-cases.  One possible use-case is to make parts of the
web UI available anonymously by configuring the host web server to require a
signed in user for some paths and not for others.

System Users
------------

Critic's user database doesn't necessarily have to match the system's user
database.  However, if access to the Git repositories is provided over SSH,
then, for the purpose of SSH authentication, there needs to be system users.
And in this case, for each system user who can push changes to the Git
repositories over SSH there must be a matching user in Critic's user database
with the same username.

Also note that system users that should be allowed to access the Git
repositories must be members of the
<code>%(configuration.base.SYSTEM_GROUP_NAME)s</code> system group.
Git Authors and Committers -------------------------- Critic's user database doesn't necessarily contain records for every user occurring as the author or committer of commits in the Git repositories, but commits can be mapped to Critic users by matching the email addresses in the Git commit meta-data. Each Critic user can register any number of email addresses that he or she uses or has used when creating Git commits. By default, the user's primary email address is used to map commits to the user, but the user is in no way forced to use this particular email address in Git commits. Currently, the only effect of mapping a commit's user references to a Critic user is that a Critic user is never automatically assigned to review commits that he/she is the author of. Deleting Users -------------- It is also possible to "delete" a user using <code>criticctl</code>: | criticctl deluser --name=USERNAME This doesn't actually delete the user record from the database, since that is likely to be referenced from many places, depending on what the user has done in the system. Instead, what deleting a user does is 1 Marks the user as "retired", which prevents the system from acting as if the user is expected to be reviewing any changes in the future, and 2 deletes the user's password so that he or she can no longer sign in. <strong>Important note:</strong> Deleting a user's password only prevents access to the system if Critic handles user authentication. If the web server handles it, the user must primarily be disabled in whatever mechanism the web server uses to authenticate users. Critic will <b>not</b> disallow access in this case. What's more, in this case, if a deleted/retired user signs in, the user's "retired" status is automatically reverted, thus completely undoing the effects of "deleting" the user in the first place. 
Roles ----- User roles is a very basic access rights scheme, limiting which Critic users can do a small number of things, including adding new repositories, modifying other users' information, and adding news items. Other than that, all users can access all information in the system, modify all reviews and so on. The available roles are: ? administrator = A user with the <code>administrator</code> role can restart system background services via the <code><a href="/services">%(configuration.URL)s/services</a></code> page, edit other users' information via <code style='white-space: nowrap'>%(configuration.URL)s/home?user=<name></code> and also enable/disable any tracked branch via <code><a href="/repositories">%(configuration.URL)s/repositories</a></code>. More exceptions are likely to be added in the future. ? repositories = A user with the <code>repositories</code> role can add new repositories to the system via the <code><a href="/newrepository">%(configuration.URL)s/newrepository</a></code> page. ? newswriter = A user with the <code>newswriter</code> role can write news items items that appear on the <code><a href="/news">%(configuration.URL)s/news</a></code> page. The user can also edit existing items. It is likely that news items will be also be added automatically when upgrading the system, to inform about significant changes. ? developer = The <code>developer</code> role doesn't really give access to restricted functionality. Instead it affects how unexpected errors -- such as uncaught Python exceptions -- are handled. Normally, unexpected errors are presented to the user as just that, an "unexpected error," with a note that a message has been sent to the system administrator, and an email is sent to the system administrator with as much details as possible about the error. 
But if the user triggering the error has the <code>developer</code> role, no email is sent to the system administrator (who is likely to be the same person in practice) and instead the error details -- typically a Python stack-trace -- are displayed directly to the user. Assigning Roles --------------- To assign a role to a user, use the <code>criticctl</code> utility: | criticctl addrole --name=USERNAME --role=ROLE and to unassign a role: | criticctl delrole --name=USERNAME --role=ROLE Note: If the options are not provided, <code>criticctl</code> will instead prompt for the missing information. Background Services ------------------- The running Critic system is divided into two parts: the web front-end code, running as a WSGI application (in Apache, using mod_wsgi) and a number of background services running as daemon processes (started via a SysV init script.) The background services are listed on the page <code><a href="/services">%(configuration.URL)s/services</a></code> where the system administrator (any user with the <code>administrator</code> role) can restart them and view their logs. The WSGI daemon processes can also be restarted from that page. By default there are two WSGI daemon processes; this number can be adjusted by modifying the <code>WSGIDaemonProcess</code> directive in the Apache site definition file. Normally, the system administrator does not need to do anything to any of the background services; they are restarted automatically when Critic is upgraded or if any of them crash. If any errors or warnings are logged by a background service, an email is sent to the system administrator email address. Background services are configured in the configuration file <code>%(configuration.paths.CONFIG_DIR)s/configuration/services.py</code> where the set of services to start is defined, as well as various per-service configuration settings. If these settings are changed, the services need to be restarted for the new settings to take effect. 
The following background services are started automatically by Critic: branchtracker ------------- The <code>branchtracker</code> service is responsible for fetching tracked branches from remote Git repositories and pushing them to Critic's repositories. If a Critic repository is created to mirror another Git repository, one such tracked branch is automatically set up, typically named <code>master</code>. Additional tracked branches can be added via the <code><a href="/repositories">%(configuration.URL)s/repositories</a></code> page by any user with the <code>administrator</code> role. Reviews can also be created to track branches in other Git repositories using the <code><a href="/createreview">%(configuration.URL)s/createreview</a></code> page. changeset --------- The <code>changeset</code> service is responsible for processing diffs and storing cached information about them in Critic's database. The service also periodically scans the database for cached diffs that haven't been accessed for some time, and deletes them. The service has the following additional configuration settings in <code>services.py</code> (with the default settings): | CHANGESET["max_workers"] = 4 | CHANGESET["rss_limit"] = 1024 ** 3 | CHANGESET["purge_at"] = (2, 15) The "max_workers" setting decides how many parallel jobs the service can run. The "rss_limit" setting is a safe-guard against run-away memory usage. The "purge_at" setting is a tuple (HOUR, MINUTE) that defines when during the day (in the server's local time) that the service should perform its maintenance tasks. githook ------- The <code>githook</code> service performs all the checks and updates needed when Critic's repositories are updated by users using <code>git push</code>. The actual <code>pre-receive</code> hook that is installed in Critic's repositories is a simple script that connects to the service over a UNIX socket and forwards all information about the update to the service. 
highlight --------- The <code>highlight</code> service is responsible for generating syntax highlighted copies of source file versions and storing them in a cache, as well as periodically compressing and eventually deleting old highlighted copies. As part of syntax highlighting, "code contexts" are in some cases (depending on the language of the source file) calculated and stored in Critic's database. A "code context" is a short string, for instance a function signature, mapped to a range of lines, and used by the diff viewer to give context to the code fragments it displays. The service has the following additional configuration settings in <code>services.py</code> (with the default settings): | HIGHLIGHT["cache_dir"] = os.path.join(configuration.paths.CACHE_DIR, "highlight") | HIGHLIGHT["min_context_length"] = 5 | HIGHLIGHT["max_context_length"] = 256 | HIGHLIGHT["max_workers"] = 4 | HIGHLIGHT["compact_at"] = (3, 15) The "cache_dir" setting defines where cached highlighted copies are stored on disk. The "min_context_length" and "max_context_length" settings limit the length of code context strings. The "max_workers" setting defines how many parallel jobs the service can run. The "compact_at" setting is a tuple (HOUR, MINUTE) that defines when during the day (in the server's local time) that the service should perform its maintenance tasks. maildelivery ------------ The <code>maildelivery</code> service is responsible for picking up queued mails produced by other parts of Critic and delivering them to the configured SMTP server. The service has the following additional configuration settings in <code>services.py</code> (with the default settings): | MAILDELIVERY["timeout"] = 10 The "timeout" setting defines the (socket) timeout used when contacting the SMTP server. If a connection or mail delivery attempt times out, the service will wait a while and then try again. 
maintenance ----------- The <code>maintenance</code> service performs miscellaneous maintenance tasks periodically. Currently it performs these tasks: * Keeping the UTC offsets in the <code>timezones</code> table in Critic's database up-to-date. The offsets are fetched from PostgreSQL's <code>pg_timezone_names</code> table, but accessing this table directly is quite slow, so the offsets in it are read periodically and cached. * Performing scheduled [review branch archivals][archival]. * Running <code>git gc</code> in Critic's Git repositories. The service has the following additional configuration settings in <code>services.py</code> (with the default settings): | MAINTENANCE["maintenance_at"] = (4, 0) The "maintenance_at" setting is a tuple (HOUR, MINUTE) that defines when during the day (in the server's local time) that the service should perform its maintenance tasks. [archival]: #review_branch_archival servicemanager -------------- The <code>servicemanager</code> service is a meta-service that starts all other services, and restarts them if they crash. If this service is restarted it restarts all other services. The service has the following additional configuration settings in <code>services.py</code> (with the default settings): | SERVICEMANAGER["services"] = [...] The "services" setting is a list of services to start. watchdog -------- The <code>watchdog</code> service is a simple monitoring service. By default it only monitors the memory usage of the WSGI daemon processes and restarts them if their memory usage exceeds the configured limits. There are two limits; a soft limit at which it tells the WSGI process to restart itself by sending it a <code>SIGINT</code> signal, and a hard limit at which it kills the WSGI process by sending it a <code>SIGKILL</code> signal. If configured to do so, the service can also monitor overall system load and send emails to the system administrator if the load exceeds configured limits. 
The service has the following additional configuration settings in <code>services.py</code> (with the default settings): | WATCHDOG["rss_soft_limit"] = 1024 ** 3 | WATCHDOG["rss_hard_limit"] = 2 * WATCHDOG["rss_soft_limit"] | | # Not set by default: | WATCHDOG["load1_limit"] = <1-minute load average limit> | WATCHDOG["load5_limit"] = <5-minute load average limit> | WATCHDOG["load15_limit"] = <15-minute load average limit> The "rss_soft_limit" and "rss_hard_limit" settings define the memory limits, in bytes. The load average limit settings define the limit at which to start warning about high system load. Any one, or all three, can be set to different values. Note: the configured value is multiplied by the number of CPUs in the system before being compared to the actual system load. Extensions ---------- Critic supports an extension mechanism that allows users of the system to extend Critic's functionality in various ways. Extensions are written in ECMAScript (executed in a custom interpreter based on the V8 ECMAScript engine) and have access to a high-level API exposing most information in Critic's database. Extensions can for instance be used to integrate Critic and other systems, such as build servers or issue tracking systems. The extension mechanism is not enabled in a newly installed Critic system. The system administrator needs to use the <code>extend.py</code> script to prepare and enable the extension mechanism. This script installs additional required software, downloads (from GitHub) the source code used to build the ECMAScript interpreter, builds and installs it, and finally modifies Critic's configuration to enable the extension mechanism. To enable the extension mechanism, simply run this command as root: | python extend.py For general information about the extension mechanism and how to create extensions, see the <a href="/tutorial?item=extensions">extensions tutorial</a> and the <a href="/tutorial?item=extensions-api">extensions API tutorial</a>. 
Security considerations ----------------------- Extensions are scripts running as the Critic system user, and effectively have access to all information in Critic's database and Git repositories, and to all files in the system's file systems that the Critic system user has access to. In addition, they can start child processes, and thus do essentially anything the Critic system user can do. Untrusted users should obviously not be allowed to add extensions to a Critic system. To add extensions to a Critic system, a user needs shell access to the system, or at least some way to create directories and files in their $HOME directory. Typically, an untrusted user should not be given shell access to the system anyway, so not allowing them to add extensions is somewhat of a secondary concern. Hint: As mentioned above, users can be given access to Git repositories over SSH without giving them shell access, by setting the system users' shell to <code>git-shell</code>. If users must be given shell access to the system, but not be allowed to add extensions to it, it is possible to disable "user extensions" -- extensions added by regular users -- by setting the configuration option <code>USER_EXTENSIONS_DIR</code> in <code>%(configuration.paths.CONFIG_DIR)s/configuration/extensions.py</code> to the value <code>None</code>. With that configuration, only users with write access to the directory <code>%(configuration.paths.DATA_DIR)s/extensions/</code> can add extensions to the system. Review branch archival ---------------------- For a description of this mechanism, see the [tutorial on the subject][archival-tutorial]. A system administrator can tweak the mechanism by changing the system-wide or per-repository defaults for the settings CONFIG(review.branchArchiveDelay.closed) and CONFIG(review.branchArchiveDelay.dropped). Individual users can override the defaults, but only to achieve a longer delay, or disable archival entirely, for reviews they own. 
If review branch archival is not desired at all, it can also be disabled system-wide by the system administrator by setting <code>ARCHIVE_REVIEW_BRANCHES</code> in <code>%(configuration.paths.CONFIG_DIR)s/configuration/base.py</code> to <code>False</code>. [archival-tutorial]: /tutorial?item=archival ================================================ FILE: src/tutorials/archival.txt ================================================ Review branch archival ====================== Background ---------- Over time, Critic's repositories may accumulate many effectively obsolete review branch refs belonging to long-since finished and closed, or dropped, reviews. This slows down various Git operations performed on the repository, and also pollutes your local repositories if you ever fetch from Critic's repository. To remedy this problem and keep Critic's repositories clean and tidy, old review branch refs are archived. This means that the ref is simply deleted from the repository, while making sure that all commits are kept alive in the repository. Details ------- Only review branches -- those with a <code>r/</code> prefix -- are archived, no other branches. Review branches are furthermore only archived when they are <em>obsolete</em>. A review branch is considered obsolete when the review is finished and has been closed, or when the review has been dropped. When a review branch becomes obsolete, it's scheduled for archival some time into the future. By default, if the review is finished, its branch is archived no sooner than 7 whole days after the review was closed. If the review is dropped, its branch is archived no sooner than one whole day after the review was dropped. The actual archival is done by a nightly maintenance task. Configuration ------------- The delays -- <em>7 days</em> and <em>one day</em> -- can be changed via the configuration settings CONFIG(review.branchArchiveDelay.closed) and CONFIG(review.branchArchiveDelay.dropped). 
The settings can be set per repository, meaning a user can configure different delays for different repositories, and also that the system default can be different for different repositories. When the appropriate delay is calculated for a review, the (possibly per-repository) delay settings for each owner of the review, as well as the (possibly per-repository) system default, are considered. If any of those settings is zero, the review branch is not scheduled for archival at all. Otherwise, the <em>highest</em> setting is used, thus resulting in the longest delay requested by any owner, or the system administrator. Resurrection ------------ When a review branch has been archived it is in no way lost forever. It can always be <em>resurrected</em>, simply by clicking the <code>Resurrect Branch</code> button on the review's front-page. When a review branch is manually resurrected this way, another archival is always rescheduled with the usual delay. The review branch is also automatically resurrected if the review is reopened. In that case, no new archival is rescheduled until the review is closed or dropped again. Local repositories ------------------ If you have a local clone of one of Critic's repositories, or have added Critic's repository as a remote and fetched from it, you will have local copies -- called <em>remote tracking branches</em> and named <code>refs/remotes/origin/*</code> or similar -- of all branches that existed in Critic's repository when you cloned or fetched from it. Git will by default not remove these local copies when the corresponding branch ref is deleted in Critic's repository. To have Git clean up your local repository, you can run the command | git remote prune $name where <code>$name</code> is the name of your Critic remote, typically <code>origin</code> if you cloned Critic's repository directly. 
Alternatively, you can make <code>git fetch</code> do this pruning each time you fetch from the remote by running the command | git config remote.$name.prune true or each time you fetch from any remote using the command | git config fetch.prune true Warning ------- In Git versions prior to 2.0.1, this pruning can take a long time, if there are many refs to prune, and especially if there are many other refs in the repository as well. The <code>git remote prune</code> command can be stopped and restarted any number of times, and can thus be used to incrementally prune refs in chunks, if desired. ================================================ FILE: src/tutorials/checkbranch.txt ================================================ Branch Review Status Analyzer ============================= Commit Status ------------- Critic's automatic branch review status analyzer is designed to answer the question "If branch A was merged into branch B, would any unreviewed changes be merged?" In the typical use-case, "A" would be a task branch, and "B" would be master. To analyze a branch, visit the <a href=checkbranch>/checkbranch</a> page, enter the name of the branch to check, select the repository in which it lives, optionally request that the branch be fetched from the repository's upstream repository and then press the "Check" button. The list of commits that would be merged is calculated using <code>git rev-list</code>; the exact command used is included on the result page. The review status of the branch is calculated per commit, and is signalled using the colors green, yellow and red. Green ----- A green commit status means that the commit is verified by Critic to have been reviewed in an accepted review in Critic. 
To be assigned this status, one of these two statements must be true about the commit: 1 The commit is the same as the head commit on the review branch (same SHA-1) 2 The commit is included on the review branch, and is the parent of another green commit from the same review. The meaning of the second point is simply that if the head commit of an accepted review is among the checked commits, then all other commits from that review are also green. It also means that if the head commit of an accepted review is not among the checked commits, then no other commits from the review will be green, even if they are exactly the same commits (same SHA-1); this is because the changes that were accepted in the review were exactly all the changes, not just some of them. Trying to merge only some of the commits from the review can't be verified by Critic as a safe operation, since it might mean that you are accidentally trying to merge an incomplete set of changes, for instance missing fixes for all review issues. Yellow ------ A yellow commit status means that the commit has been manually connected to a review, or had its review status otherwise explained, by a user. This might mean that the commit is a squash of the changes in a review, but that a strong SHA-1-based connection can't be made by Critic because the review hasn't been rebased, or can't be rebased. It could also mean that the changes in the commit were reviewed outside of Critic. Red --- A red commit status either means that Critic hasn't been able to connect the commit to a review at all, or that the review to which the commit has been connected isn't accepted or finished. That the commit hasn't been connected to a review does not conclusively mean the commit hasn't been reviewed, it just means Critic can't say anything for sure about it. 
Since Critic requires the same SHA-1 to automatically match a review to a commit, simply cherry-picking a commit from a review branch to the checked branch will always make it show up as red, even if the cherry-pick was conflict-free and the commit message is identical. Usage ----- The only way in which one needs to interact with the branch review status page is by manually connecting commits to reviews when Critic can't make the connection automatically, or otherwise explain how a commit was reviewed. To do this, every non-green commit has an <code>[edit]</code> link in the right-most column. Red commits committed by the user looking at the <code>/checkbranch</code> page are highlighted for visibility. The <code>[edit]</code> link opens up a dialog. This dialog has an input field for entering the ID number of a review, and a text area for writing a comment. If Critic finds any reviews that seem like likely candidates to have contained the changes in the commit, the dialog also contains a drop-down containing those candidate reviews. Selecting an item in the drop-down simply sets the review ID input field's value. Entering a comment is optional if a review ID is specified. If no review ID is specified, for instance because the changes were reviewed outside of Critic, a comment is required. The comment should just be a very short explanation for how the changes were reviewed. Review Rebase ------------- If the ID number of a review is entered, Critic will automatically check if it would be possible to rebase the review to contain this single commit. Doing so would make the commit appear green and is always preferable to having it appear yellow, since green means Critic has verified that the commit contains exactly the same changes as the ones that were approved in the review, and thus eliminates the possibility of the wrong changes having been pushed to the branch by mistake. 
<b>Note:</b> It's important to only accept the offer to rebase the review if the commit is a squash of all changes in the review. If the commit is only part of the review, for instance a cherry-pick of a single bug fix made on a larger task branch, rebasing the review would be a very bad idea. But since the green commit status is a much stronger guarantee that the right changes made it onto the branch, it's strongly recommended that the review is rebased whenever possible, so that the commit appears green. ================================================ FILE: src/tutorials/customization.txt ================================================ System Customization ==================== Critic supports some low-level, and typically very simple, customization of various aspects of its behavior. This customization would be done by the Critic system administrator and is entirely optional; if a Critic system is not customized at all, the default behavior is typically entirely reasonable. The customization mechanism consists of conditional imports of functions from modules in a package named "customization", which by default does not exist. If the system administrator creates such a package in a directory in Critic's <code>PYTHONPATH</code>, Critic will automatically start using whatever customization behaviours it implements. In this Critic install, two Critic-specific directories are included in its <code>PYTHONPATH</code>: <code>%(configuration.paths.CONFIG_DIR)s/</code> and <code>%(configuration.paths.INSTALL_DIR)s/</code>. To initially set up customization, create a sub-directory named "customization" in one of these directories, containing an empty file named <code>__init__.py</code>. Individual customizations (described below) are then done by creating additional <code>.py</code> files in this directory. In all cases, the actual implementation of functions and classes below are only examples meant to indicate how the interfaces could be implemented. 
In other words, only the function names and argument lists in the examples are in any way "normative." Automatic User Creation ----------------------- This customization affects how users are automatically created when Critic is configured to let the web server do authentication, and the authenticated user does not exist in Critic's user database (in other words, the first time a user logs in.) customization/email.py ---------------------- This module should define this function: | def getUserEmailAddress(username): | return username + "@example.com" The function <code>customization.email.getUserEmailAddress()</code>, if defined, is called to calculate a user's email address from the username. If the function returns None, or is not defined, the user is created with no email address. Either way, the user can change the email address via the /home page when logged in. Hyperlinks in Text ------------------ Critic's web front-end does automatic pattern-based "linkification" of text (such as review descriptions, commit messages and user comments.) By default it handles plain http:// and https:// URLs, <URL:...>, SHA-1 commit references (including BEGIN..END ranges), and r/NNNN review references. This mechanism can be extended with other patterns, for instance linking issue references to an issue tracking system. customization/linktypes.py -------------------------- This module should import the module <code>linkify</code> (which is part of Critic) and then define sub-classes of the class <code>linkify.LinkType</code>, and create one or more instances of each such sub-class (<code>linkify.LinkType</code>'s <code>__init__()</code> registers the link type in a global structure.) Each sub-class should call <code>linkify.LinkType</code>'s <code>__init__()</code> with a single argument that is a string containing a regular expression (with no capture groups) that matches the sub-string that should be made into a link. 
The sub-class should also implement the method <code>linkify</code> which will be called with two arguments, a string containing the sub-string that should be made into a link and a context object. This method should either return a string containing a URL, or None. If it returns a string, the word is made into a link to that URL. | import linkify | | class IssueLink(linkify.LinkType): | def __init__(self): | super(IssueLink, self).__init__("#[0-9]+") | def linkify(self, word, context): | return "https://issuetracker.example.com/showIssue?id=" + word[1:] | | IssueLink() The context argument can safely be ignored. Notable simple and potentially useful information in it is <code>context.review.id</code>, which is the integer id of the review, if the page loaded is connected to a review, and <code>context.repository.name</code>, which is the string name of the repository containing the review or commit. Both of these sub-objects (<code>context.review</code> and <code>context.repository</code>) can be <code>None</code> if link conversion happens in a context where there is no associated review or repository. Branch Name Patterns -------------------- Critic can handle reviews that are configured to track branches in other repositories (not on Critic.) In such a scenario, a review can be rebased by switching it over to track a new (rebased) branch in the other repository. If branch names in the other repository follow a certain naming pattern, like <code><basename>/<serial-number></code>, Critic can be customized to find and suggest appropriate branch names based on that naming pattern when rebasing such reviews. 
customization/branches.py ------------------------- This module should define one or both of these functions: | import os | | def getRebasedBranchPattern(branch_name): | return os.path.dirname(branch_name) + "/*" | | def isRebasedBranchCandidate(current_name, other_name): | return os.path.dirname(current_name) == os.path.dirname(other_name) When searching for plausible new branches for a review to track in the other repository, the other repository is scanned using <code>git ls-remote</code>. The <code>getRebasedBranchPattern()</code> function, if defined, is used to calculate a pattern argument to <code>git ls-remote</code>. If it is not defined, or if it returns None, <code>git ls-remote</code> is called to list all branches in the other repository. The <code>isRebasedBranchCandidate()</code> function, if defined, is used to filter the set of branches returned by <code>git ls-remote</code> down further. If neither function is defined, no scanning of the other repository is done, and no branches are suggested to the user. This doesn't leave the user entirely without guidance, however; the input field on the page where the user is asked to input the branch name will auto-complete input based on branches that exist in the other repository (also using <code>git ls-remote</code>.) Custom pre-receive Checks ------------------------- It is entirely possible to install custom Git hooks in Critic's repositories, but doing so may conflict with the hooks that Critic installs and depends on for its interactions with the repositories. Currently, Critic only installs a pre-receive hook, but other hooks may be installed in the future. If custom checks should be applied to ref updates, before Critic handles them, the preferred way is using Critic's customization mechanism. 
customization/githook.py ------------------------ This module should define an exception class, Reject, and a function, update(): | class Reject(Exception): | pass | | def update(repository_path, ref_name, old_value, new_value): | if ref_name.startswith("refs/heads/reserved/"): | raise Reject("Don't push reserved refs!") The function is called with the arguments 'repository_path', which is the (absolute) file-system path of the Git repository (any one of Critic's repositories on the system); 'ref_name', which is the full name of the reference being updated; 'old_value', which is the current SHA-1 of the reference, or None if the reference is being created; and 'new_value', which is the new SHA-1 of the reference, or None if the reference is being deleted. If a Reject exception (or sub-class thereof) is raised by update(), the update is rejected with an error message constructed by applying str() to the exception object. For a sub-class of Exception created with a single argument, as in the example above, this means that argument is converted to a string and returned. If an exception of any other type is raised, it is ignored, and the update is processed by Critic (and either rejected or allowed, as usual.) If the update() function writes to stdout and the update is not rejected by either the update() function itself or Critic's normal handling of the update, the text written is sent to the Git client updating the reference, and included in its output, with each line prefixed by the string "remote: ". ================================================ FILE: src/tutorials/extensions-api.txt ================================================ Critic Extensions API ===================== Basic Usage ----------- This API is available to extension scripts as the module "critic" on which constructors and interface objects are exposed as properties. The constructor for the User interface is accessible to extension scripts as "critic.User", for instance. 
Basic Data Types ---------------- CriticError ----------- | exception CriticError : Error { | }; Thrown by other functions when called with incorrect arguments or similar. Native ECMAScript exceptions such as TypeError or ReferenceError may also be thrown. User ---- | interface Filter { | long id; | User user; | Repository repository; | string path; | string type; | User[] delegates; | }; | | interface User { | dictionary UserData { | long id; | string name; | string email; | }; | | constructor User(UserData data); | constructor User(string name); | constructor User(long id); | | readonly attribute long id; | readonly attribute string name; | readonly attribute string email; | readonly attribute string fullname; | | static readonly attribute User current; | | boolean|long|string getPreference(string name); | | Filter[] getFilters(Repository repository); | Filter addFilter(Repository repository, string path, string type, string? delegates); | }; Simple object representing a Critic user. The getPreference method can be used to access the user's preferences; it returns a value of type boolean, long or string depending on the preference. The getFilters method can be used to list the user's global filters for a given repository and the addFilter method can be used to add a new global filter for a user. Note: adding a new filter does not cause changes in existing reviews to be assigned to the user. An extension can fetch user records from the database simply by constructing User objects, supplying either the user name or a user ID number; "new critic.User('jl')" or "new critic.User(18)". The current user (the user who installed the extension and is currently loading a page, pushing changes or whatever triggered the extension) is always accessible as "critic.User.current". 
File ------------------ | interface File { | readonly attribute long id; | readonly attribute string path; | | string toString(); | long valueOf(); | | static File find(string path); | static File find(long id); | }; Object representing a file. Exist in Critic primarily as a means to assign system-wide ID numbers to paths, and are used throughout the Critic extensions API for representing paths. The toString() methods return the value of the 'path' attribute, and the valueOf() methods return the value of the 'id' attribute. The 'path' attribute of a File object has no leading forward slash (and of course no trailing one either.) The find() methods will return the same object if called multiple times to find the same object (whether via path or ID.) Git Repository Access --------------------- GitObject --------- | interface GitObject { | readonly attribute string sha1; | readonly attribute string type; | readonly attribute Uint8Array data; | }; Raw representation of an object fetched from the Git repository. GitUserTime ----------- | interface GitUserTime { | readonly attribute string fullname; | readonly attribute string email; | readonly attribute Date time; | readonly attribute User? user; | }; Representation of the information in the "author" and "committer" fields in a Git commit object. 
Repository ---------- | interface Repository { | constructor Repository(string name); | constructor Repository(long id); | | readonly attribute long id; | readonly attribute string name; | readonly attribute string path; | | interface Filter { | readonly attribute User user; | readonly attribute string path; | readonly attribute string type; | readonly attribute User[] delegates; | }; | | readonly attribute Filter[] filters; | | GitObject fetch(string sha1); | string revparse(string ref); | | Commit getCommit(string ref); | Commit getCommit(long id); | | Branch getBranch(string name); | | Changeset getChangeset(string ref); | Changeset getChangeset(Commit commit); | Changeset getChangeset(string a, string b); | Changeset getChangeset(Commit a, Commit b); | Changeset getChangeset(long id); | | RepositoryWorkCopy | getWorkCopy(); | }; Representation of one of Critic's repositories. Can be constructed supplying either the repository's "short name" or its ID. The 'filters' attribute returns an array of all user filters registered for the repository. The array object also has the properties 'users' and 'paths', that returns dictionaries mapping user names and paths to filtered arrays of filters, respectively. The full array and all the filtered arrays combined contain the same set of filters. The 'path' attribute on filter objects define what set of the tree is being filtered. The value of the 'type' attribute on filter objects is one of the strings "reviewer" and "watcher". The fetch() method can be used to read low-level objects from the Git repository. Using this function is usually not necessary. Its argument must be a full 40 character SHA-1 sum. The revparse() method can be used to interpret a Git ref name and convert it to a full 40 character SHA-1 sum. This function runs the command "git rev-parse --verify --quiet <ref>" and will thus fail for any argument that isn't "usable as a single, valid object name" according to Git. 
The getCommit() method can be used to fetch Commit objects, either specifying a string (which is interpreted using revparse()) or a Critic commit ID. The getBranch() method can be used to fetch Branch objects, either specifying a string (which must be the exact ref name, minus the "refs/heads/" prefix) or a Critic branch ID. The getChangeset() method can be used to fetch an object representing the diff between two commits. The arguments can be one commit, two commits or a Critic changeset ID. Commit arguments can be specified either as strings (which are interpreted using revparse()) or as Commit objects. The getWorkCopy() method can be used to create or access a work-copy clone of this repository. RepositoryWorkCopy ------------------ | interface RepositoryWorkCopy { | readonly attribute Repository repository; | readonly attribute string path; | | string run(string command, ..., Object? environment); | }; Representation of a work-copy clone of a repository. Work-copys are semi-persistent and can be reused by different invokations of an extension, but are never shared between users or between extensions. An extension should not depend on changes made in a work-copy remaining for more than 24 hours, and if possible, shouldn't depend on them to remain between invokations of the extension at all. The "path" attribute is the absolute file system path of the repository, without a trailing slash. The run() method can be used to execute arbitrary git commands in the repository. The first argument should be the name of the git command, such as "status" or "commit". Additional arguments are passed as options to the git command. If the last argument to the function is an object, it is not passed as an regular argument to the git command, instead its enumerable properties are set as environment variables for the git command. 
Input can be passed to the git command's stdin by setting the special environment variable "stdin" (which is not actually set as an environment variable.) When a work-copy clone is reused, it's always cleaned up by running "git clean -x -d -f -f" and "git reset --hard HEAD" in it. Commit ------ | interface Commit { | readonly attribute Repository repository; | readonly attribute long id; | readonly attribute string sha1; | readonly attribute string tree; | readonly attribute GitUserTime author; | readonly attribute GitUserTime committer; | readonly attribute string message; | readonly attribute string summary; | readonly attribute Commit[] parents; | | CommitFileVersion getFile(string path); | }; Representation of a Git commit. The "tree" attribute contains the SHA-1 sum of the tree object that the commit references. The "summary" attribute contains either the first line of the commit message, or, if that line starts with either "fixup!" or "squash!" and is followed by at least one non-empty line, the first non-empty line following the first line. The getFile() method can be used to access an arbitrary file as it appears in the commit. The file need not have been modified in the commit. FileVersion ----------- | interface FileVersion : File { | readonly Repository repository; | readonly string mode; | readonly long size; | readonly string sha1; | | readonly Uint8Array bytes; | readonly string[]? lines; | }; | | interface CommitFileVersion : FileVersion { | readonly Commit commit; | }; Representation of a version of a certain file in a repository. The "mode" attribute is a string containing 6 digits, usually "100644". The three last digits represent the access bits (R, W and X) for user, group and others, respectively. The "size" attribute is the size of the file in bytes. The "sha1" attribute is the SHA-1 sum of the blob object in the repository. The "bytes" attribute contains the file version's contents as an Uint8Array object. 
The "lines" attribute contains the file version's contents as an array of lines, or null if the file is heuristically determined to contain binary data (the heuristics are the same as git uses.) The raw contents of the file are interpreted as UTF-8 and if that does not work, as ISO-8859-1. CommitSet --------- | interface CommitSet : Array { | constructor CommitSet(Commit[] commits); | | readonly attribute long length; | readonly attribute Object parents; | readonly attribute Object children; | readonly attribute Commit[] heads; | readonly attribute Commit[] tails; | readonly attribute Commit[] upstreams; | }; Representation of a set of commits. The object is first of all an array of Commit objects, in depth-first first-parent-first order, starting from the head commits of the set in unspecified order. (In sets with multiple heads, parents can occur before their children, so in such sets, the order should not be relied upon to be strictly topological.) For each commit in the set, the object also has a non-enumerable property whose name is the SHA-1 sum of the commit and whose value is the Commit object. The parents and children attributes are objects with one property per commit in the set whose name is the SHA-1 sum of the commit and whose value is an array of Commit objects, containing the parents and children of the commit in the set, respectively. (IOW, if a commit has multiple parents, some included in the set and some not included in the set, this array contains only those parents that are in the set. The commit object's parents attribute returns an array containing the commit's full set of parents.) The heads attribute is an array of Commit objects that contains all commits in the set that has no descendants in the set. The array also has a non-enumerable property per such commit whose name is the SHA-1 sum of the commit and whose value is the Commit object. 
The tails attribute is an array of Commit objects that contains all commits that are parents of commits included in the commit-set but are not included in the commit-set themselves. The array also has a non-enumerable property per such commit whose name is the SHA-1 sum of the commit and whose value is the Commit object. The upstreams attribute is an array of Commit objects that contains all commits in the tails array that aren't ancestors of another commit in the tails array. If the commit-set contains the commits on a branch, and the branch has been merged with its upstream branch (e.g. 'master') a number of times, the tails array would contain each commit from the upstream branch that was merged in, whereas the upstreams array would only contain one -- the commit that was merged in most recently. Note: A commit-set representing a simple branch of commits without any merges will have a heads array containing a single commit and a tails array containing a single commit. Multiple tails indicate that the commit-set includes merges that merge in commits that are not included in the commit-set. Multiple heads typically indicate that the commit-set consists of multiple disconnected sub-trees of commits, such as the commits that make up a review that has been rebased one or more times. Branch ------ | dictionary BranchData { | long id; | | Repository repository; | string name; | }; | | interface Branch { | constructor Branch(BranchData data); | | readonly attribute Repository repository; | readonly attribute long id; | readonly attribute string name; | readonly attribute Commit head; | readonly attribute Branch? base; | readonly attribute Review? review; | readonly attribute CommitSet commits; | | RepositoryWorkCopy getWorkCopy(); | }; Representation of Critic's view of a branch. 
This is somewhat different from Git's normal view of a branch in that a branch is considered to be based on (branched from) another branch, and doesn't contain commits it has in common with its base branch. For each repository, however, there is a root branch (typically 'master') that contains the same set of commits in Critic's view of it as in Git's normal view. This root branch has no base branch. The review attribute returns a Review object representing the review associated with this branch. If this branch is a review branch, the associated review is the review whose review branch this is. Otherwise, the associated review is the latest review created from this branch, if any. If there is no associated review, the attribute is null. The commits attribute returns a CommitSet object containing all commits that Critic considers part of the branch. This set's heads array contains a single commit which is the same as the commit returned from the Branch object's head attribute. The set's tails array may contain multiple commits, if the branch contains merges. The getWorkCopy() method can be used to create a work-copy of the repository containing this branch. The work-copy's "origin" remote will be the repository containing this branch. The work-copy's current branch will have the same name as this branch, and will be set up to track this branch in "origin". TrackedBranch ------------- | dictionary FindData { | Branch branch; | | string remote; | string name; | }; | | interface TrackedBranch { | readonly attribute Branch branch; | readonly attribute Review? review; | readonly attribute string remote; | readonly attribute string name; | | readonly attribute boolean disabled; | readonly attribute boolean pending; | readonly attribute boolean updating; | | static TrackedBranch find(FindData data); | }; Representation of a tracked branch. The branch attribute represents the local branch. 
The review attribute is the review connected with the local branch, if there is one, and null otherwise. The remote and name attributes represent the remote repository and branch name. The disabled attribute indicates whether the tracking is disabled. The pending attribute indicates whether the branch will be updated ASAP. The updating attribute indicates whether the branch is being updated right now. Changesets ---------- Changeset --------- | interface Changeset { | readonly attribute Repository repository; | readonly attribute Review? review; | readonly attribute long id; | readonly attribute Commit parent; | readonly attribute Commit child; | readonly attribute ChangesetFile[] files; | readonly attribute Commit[] commits; | }; Represents the collection of changes to files between two commits; parent and child. The files attribute is an array containing all changed files. MergeChangeset -------------- | interface MergeChangeset { | readonly attribute Repository repository; | readonly attribute Review? review; | readonly attribute Commit commit; | readonly attribute Changeset[] changesets; | }; Represents the relevance-filtered changes in a merge commit. The changesets attribute is an array of Changeset objects, one for each parent of the commit returned by the commit attribute. Note that these changesets do not contain the full set of changes between the merge commit and its parents; they only contain changes that are deemed likely to have caused conflicts. ChangesetFile ------------- | interface ChangesetFile : File { | readonly attribute Changeset changeset; | readonly attribute ChangesetFileVersion? oldVersion; | readonly attribute ChangesetFileVersion? newVersion; | readonly attribute ChangesetChunk[]? chunks; | readonly attribute long deleteCount; | readonly attribute long insertCount; | | readonly attribute boolean? isReviewed; | readonly attribute User? reviewedBy; | }; Represents a file changed in a changeset. 
The chunks attribute is an array of chunks, each representing a sequence of changed lines. If a file was added or removed in the changeset, oldVersion or newVersion is null, respectively, and the chunks attribute is null. The deleteCount and insertCount attributes are the total number of lines deleted and inserted in the file in the changeset, respectively. If the parent Changeset object (returned by the changeset attribute) was fetched from a Review object (using the Review.getChangeset method), the attribute isReviewed is set to true if these changes in this file have been marked as reviewed, and the reviewedBy attribute is set to the user who (most recently) marked the file as reviewed. If the parent Changeset object was not fetched from a Review object, both properties are null. ChangesetFileVersion -------------------- | interface ChangesetFileVersion : FileVersion { | readonly attribute Changeset changeset; | readonly attribute ChangesetFile file; | }; Represents a single version of a file changed in the changeset. ChangesetChunk -------------- | interface ChangesetChunk { | readonly attribute Changeset changeset; | readonly attribute ChangesetFile file; | readonly attribute long deleteOffset; | readonly attribute long deleteCount; | readonly attribute long insertOffset; | readonly attribute long insertCount; | readonly attribute ChangesetLine[] lines; | }; Represents a continuous set of changed lines. The offset attributes deleteOffset and insertOffset represent the start line (zero-based) of the modification; either the first deleted or first inserted line. If deleteCount or insertCount is zero, the line specified by the corresponding offset is the first line after the changes. A zero deleteCount means lines were only inserted and a zero insertCount means lines were only deleted; both counts will never be zero. The lines attribute is an array of lines from a hypothetical side-by-side diff. 
It is possible for this array to contain more elements than deleteCount or insertCount. The maximum possible number of elements is deleteCount+insertCount, and the minimum possible number of elements is MAX(deleteCount,insertCount). ChangesetLine ------------- | interface ChangesetLine { | const long TYPE_CONTEXT = 0; | const long TYPE_WHITESPACE = 1; | const long TYPE_REPLACED = 2; | const long TYPE_MODIFIED = 3; | const long TYPE_DELETED = 4; | const long TYPE_INSERTED = 5; | | const long OPERATION_REPLACE = 0; | const long OPERATION_DELETE = 1; | const long OPERATION_INSERT = 2; | | readonly attribute long type; | readonly attribute long oldIndex; | readonly attribute string? oldText; | readonly attribute long newIndex; | readonly attribute string? newText; | readonly attribute Array[]? operations; | }; Representation of a line in a hypothetical side-by-side diff. The type of the line represents the line's role in the diff. Context lines aren't changed at all, whitespace lines have only whitespace changes, replaced lines are lines where there are few similarities between the old and new versions, modified lines are lines where there are enough similarities between the old and new version to assume the line was edited (in which case the operations attribute contains an analysis of how the line was edited), and deleted and inserted lines are lines that are only present in the old or new version, respectively. (Technically, a replaced line is really just a deleted line and an inserted line collapsed together.) The operations attribute returns an array of arrays. Each array represents a modification of the line. The first element of each array is the modification type; replace, delete or insert. 
In the case of replace, the array contains an additional four elements; deleteStart, deleteEnd, insertStart and insertEnd; and the meaning is that the characters [deleteStart,deleteEnd) in the old version of the line (oldText) were replaced by the characters [insertStart,insertEnd) in the new version of the line (newText). In the case of delete or insert, the array contains an additional two elements; start and end; and the meaning is that the characters [start,end) were deleted from the old version (delete) or inserted into the new version (insert). Reviews ------- Review ------ | interface Review { | constructor Review(long id); | | readonly attribute long id; | readonly attribute string state; | readonly attribute User[] owners; | readonly attribute User? closedBy; | readonly attribute User? droppedBy; | readonly attribute string summary; | readonly attribute string description; | readonly attribute Repository repository; | readonly attribute Branch branch; | readonly attribute CommitSet commits; | readonly attribute User[] users; | readonly attribute Object reviewers; | readonly attribute Object watchers; | readonly attribute CommentChain[] commentChains; | readonly attribute OldBatch[] batches; | readonly attribute ReviewFilter[] filters; | readonly attribute ReviewProgress progress; | readonly attribute TrackedBranch trackedBranch; | | OldBatch getBatch(long id); | | CommentChain getCommentChain(long id); | Comment getComment(long id); | | Changeset|MergeChangeset getChangeset(Commit commit); | | NewBatch startBatch(User acting_user); | | dictionary PrepareRebaseData { | boolean historyRewrite; | boolean singleCommit; | Commit newUpstream; | | string branch; | }; | | void prepareRebase(PrepareRebaseData data); | void cancelRebase(); | }; The prepareRebase() method prepares the review to rebased. This is the same operation as is performed using the "Prepare Rebase" button on the review front-page. 
Exactly one of data.historyRewrite, data.singleCommit and data.newUpstream needs to be set (true / non-null) to indicate the type of rebase. A history rewrite rebase is only allowed to change the commit history, not the tree. A move rebase, indicated by data.singleCommit or data.newUpstream, is allowed to change the tree as well. If data.singleCommit is true, the new upstream is implicitly the parent commit of the new head of the branch, otherwise data.newUpstream must be set accordingly. (See the tutorial on review rebasing for more details.) The cancelRebase() method cancels a previous prepared (but not yet performed) rebase. While a rebase is prepared, no-one but the user who prepared the rebase is allowed to push to the review branch, and no other user can prepare a rebase of the review. Thus, if a rebase is prepared but won't be performed, it should be cancelled instead. ReviewFilter ------------ | interface ReviewFilter { | readonly attribute Review review; | readonly attribute string type; | readonly attribute User user; | readonly attribute string path; | readonly attribute User creator; | }; Representation of a review filter. The value of the type attribute will be either "reviewer" or "watcher". ReviewProgress -------------- | interface ReviewProgress { | readonly attribute boolean accepted; | readonly attribute boolean finished; | readonly attribute boolean dropped; | readonly attribute long pendingLines; | readonly attribute long reviewedLines; | readonly attribute long openIssues; | | string toString(); | }; CommentChain ------------ | interface CommentChain { | const long TYPE_ISSUE = 0; | const long TYPE_NOTE = 1; | | const long STATE_OPEN = 0; | const long STATE_RESOLVED = 0; | const long STATE_ADDRESSED = 0; | | readonly attribute Review review; | readonly attribute Batch batch; | readonly attribute long id; | readonly attribute User user; | readonly attribute User? 
closedBy; | readonly attribute long type; | readonly attribute long state; | readonly attribute Comment[] comments; | | Comment getComment(long id); | }; FileCommentChain ---------------- | interface FileCommentChain : CommentChain { | const long ORIGIN_OLD = 0; | const long ORIGIN_NEW = 1; | | readonly attribute Changeset changeset; | readonly attribute Commit? addressedBy; | readonly attribute ChangesetFile file; | readonly attribute long origin; | readonly attribute Object lines; | readonly attribute string context; | readonly attribute string minimizedContext; | }; CommitCommentChain ------------------ | interface CommitCommentChain : CommentChain { | readonly attribute Commit commit; | readonly attribute long firstLine; | readonly attribute long lastLine; | }; Comment ------- | interface Comment { | readonly attribute CommentChain chain; | readonly attribute Batch batch; | readonly attribute long id; | readonly attribute User user; | readonly attribute Date time; | readonly attribute string text; | }; Batch ----- | interface Batch { | readonly attribute Review review; | readonly attribute User user; | }; OldBatch -------- | interface OldBatch : Batch { | readonly attribute long id; | readonly attribute Date time; | readonly attribute CommentChain? commentChain; | readonly attribute CommentChain[] issues; | readonly attribute CommentChain[] notes; | readonly attribute Comment[] replies; | }; Object representing a batch of changes submitted earlier. NewBatch -------- | interface NewBatch : Batch { | readonly attribute long? 
id; | | dictionary Location { | long lineIndex; | long lineCount; | }; | dictionary FileLocation : Location { | ChangesetFileVersion fileVersion; | }; | dictionary CommitLocation : Location { | Commit commit; | }; | | void raiseIssue(string text, FileLocation data); | void raiseIssue(string text, CommitLocation data); | void raiseIssue(string text); | | void writeNote(string text, FileLocation data); | void writeNote(string text, CommitLocation data); | void writeNote(string text); | | void addReply(CommentChain chain, string text); | void resolveIssue(CommentChain chain); | void markIssueAddressedBy(CommentChain chain, Commit commit); | | void assignChanges(User assignee, Changeset changeset); | void assignChanges(User assignee, ChangesetFile changeset); | | void unassignChanges(User assignee, Changeset changeset); | void unassignChanges(User assignee, ChangesetFile changeset); | | void addReviewFilter(User user, string type, string path); | void removeReviewFilter(User user, string type, string path); | | dictionary FinishData { | string text; | boolean silent; | }; | | void finish(FinishData? data); | }; Object used to collect changes to submit to a review, and then submit them. Each batch can only manipulate one review on behalf of one user, and is started using the method Review.startBatch(). Until finish() is called, nothing is added to the database at all. When finish() is called, everything is added to the database, and mails are sent to relevant users. ReviewSet --------- | interface ReviewSet { | }; The ReviewSet interface is a small utility interface used by the Dashboard interface to represent a set of reviews. For each review in the set, the ReviewSet object has one enumerable property whose name is the review ID and whose value is the Review object. 
Dashboard --------- | interface Dashboard { | readonly attribute User user; | | interface Owned { | readonly attribute Review[] finished; | readonly attribute Review[] accepted; | readonly attribute Review[] pending; | readonly attribute Review[] dropped; | }; | | interface Active : Review[] { | readonly attribute ReviewSet hasPendingChanges; | readonly attribute ReviewSet hasUnreadComments; | readonly attribute ReviewSet isReviewer; | readonly attribute ReviewSet isWatcher; | }; | | interface Inactive : Review[] { | readonly attribute ReviewSet isReviewer; | readonly attribute ReviewSet isWatcher; | }; | | readonly attribute Owned owned; | readonly attribute Active active; | readonly attribute Inactive inactive; | }; The Dashboard interface exposes various sets of reviews with which the user is associated, roughly corresponding to those displayed on the built-in dashboard page. The owned attribute returns a sub-object containing four arrays of reviews owned by the user, one array per review state: finished, accepted, pending and dropped. These arrays are non-overlapping. (The reviews in the finished and dropped arrays are not displayed on the built-in dashboard page.) The active attribute returns an array of open (accepted or pending) reviews that are "active," which actually rather means the user ought to be active; that is, in which there is work for the user to do. This array can contain reviews owned by the user. The array also has four additional attributes that return ReviewSet objects containing sub-sets of the array. The hasPendingChanges set contains reviews in which there are unreviewed changes assigned to the user. The hasUnreadComments set contains reviews in which the user has unread comments. These sets can overlap. The isReviewer and isWatcher sets contain reviews in which the user is a reviewer (has changes assigned to him, already reviewed or not) and those in which the user is not a reviewer. 
These sets don't overlap, and together they contain all reviews in the array. The inactive attribute returns an array of all open reviews with which the user is associated that were not included in the array returned by the active attribute. The isReviewer and isWatcher attributes work the same way as the corresponding attributes on the array return by the active attribute. Filters ------- | interface Filters { | dictionary FiltersInit { | Repository repository; | Review review; | User user; | }; | | constructor Filters(FiltersInit data); | | readonly attribute Repository repository; | readonly attribute Review? review; | | boolean isReviewer(User user, File file); | boolean isWatcher(User user, File file); | boolean isRelevant(User user, File file); | | Object listUsers(File file); | }; The Filters interface provides easy access to Critic's filtering system, applying global filters, inherited global filters and review filters. If FiltersInit.review is set, FiltersInit.repository is ignored (the repository is inferred from the review instead.) If FiltersInit.review is not set, FiltersInit.repository must be set. If FiltersInit.user is set, only filters relating to that user are loaded. This is an optimization for the case that only a single user's filters is of interest. The isReviewer() and isWatcher() functions return true if the provided user is a reviewer or watcher, respectively, of the provided file. The isRelevant() function returns true if the provided user is either a reviewer or a watcher of the provided file. The listUsers() function returns the set of users that either review or watch the provided file. The returned object has one enumerable property per user in the set whose name is the user ID and whose value is a User object. 
Per-user Data Storage --------------------- Storage ------- | interface Storage { | constructor Storage(User user); | | string get(string key); | void set(string key, string value); | }; The Storage interface provides a simple key/value storage. The length of the key is limited to 64 characters, the value can be an arbitrarily long string. To store more complex data, the builtin JSON encoder (JSON.stringify()) and JSON decoder (JSON.parse()) can be used. A storage object accessing data for the current extension and current user is available as "critic.storage". Storage objects accessing data for the current extension and an arbitrary user can be constructed using "new critic.Storage(user)". Per-user Log ------------ Log --- | interface Log { | constructor Log(User user); | | dictionary WriteData { | string category; | }; | | void write(...[, WriteData data]); | | dictionary SearchData { | Date|string timeStart; | Date|string timeEnd; | string category; | }; | | interface Entry { | User user; | Date time; | string category; | string text; | }; | | Entry[] fetch(SearchData data); | void remove(SearchData data); | }; The Log interface provides a simple per-extension and per-user logging facility. Each log entry consists of a timestamp, a category (an arbitrary string of no more than 64 characters) and the text (an arbitrarily long string.) Entries are added to the log using the write() method. If the last parameter to it is an object, it is used as a WriteData dictionary instance, and does not contribute to the text logged. The dictionary can be used to set the category of the log entry. If not set, the category defaults to "default". The rest of the arguments are passed to the built-in format() function. Entries from the log can be fetched using the fetch() method and removed using the remove() function. Both functions take an optional SearchData dictionary argument, which can be used to select specific entries. 
If the timeStart member is specified, only entries written since the specified point in time are selected. If the timeEnd member is specified, only entries written before the specified point in time are selected. Both members can be either a Date object for an absolute time or a string on the form "N unit(s)", such as "1 month" or "5 hours", for a relative time. If the category member is specified, only entries with that category are selected. A log object logging for the current extension and current user is available as "critic.log". Log objects logging for the current extension and an arbitrary user can be constructed using "new critic.Log(user)". Statistics ---------- Statistics ---------- | interface Statistics { | constructor Statistics(); | | void setReview(Review review); | | void setInterval(Date start[, Date end]); | void setInterval(string start[, string end]); | | void setUser(User user); | | void addDirectory(string path); | void addFile(string path); | | Object getReviewedLines(); | Object getWrittenComments(); | }; The Statistics interface allows extraction of simply statistics data. The database queries issued by this interface can take a long time to complete, especially when not filtering per review or user, and if the time interval is large. It may take 30 seconds or more for getReviewedLines() to return. The setReview() method restricts the query to a single review. If no review is set, the query applies to all reviews (in all repositories.) The setInterval() method restricts the query to a certain period of time. Start and end points are specified as Date objects, or as strings on the form "N unit(s)", such as "1 month", "2 weeks" or "12 hours". Supported units are seconds, minutes, hours, days, weeks, months and years. The end point is optional; if not set, it defaults to the current time. The setUser() method restricts the query to a certain user. The addDirectory() method restricts the query to files under one or more directories. 
The getReviewedLines() method executes a query counting the number of lines marked as reviewed by each user. The returned object has one property per user in the query result whose name is the user ID and whose value is an object with the properties deleteCount and insertCount. A simple enumeration of the query results could look something like | var data = statistics.getReviewedLines(); | for (var user_id in data) | { | var user = new critic.User({ id: user_id }); | var lines = data[user_id]; | writeln("%%s has reviewed %%d lines", user.fullname, lines.deleteCount + lines.insertCount); | } Note: the user_id variable in this code contains the user ID, which is really an integer, but since it comes from a property enumeration, the variable's type is string. Calling critic.User(user_id) directly would not work, since it will interpret its argument as a user name when it's a string, hence the variant critic.User({ id: user_id }). Using parseInt(user_id) to convert the user ID to a number would have worked too, of course. The getWrittenComments() method executes a query counting the number of comments written by each user. It's similar to getReviewedLines(), except the per-user data objects have the properties raisedIssues, writtenNotes, totalComments and totalCharacters. The totalComments property is the sum of raisedIssues and writtenNotes, plus the number of replies written. The totalCharacters property is the sum of the lengths of each comment counted by totalComments. HTML Utilities -------------- Utilities for generating pages that look like other Critic pages are available in a sub-module accessible as "critic.html", for instancce "critic.html.writeStandardHeader()" and "new critic.html.PaleYellowTable()". writeStandardHeader() --------------------- | dictionary HeaderData { | User user; | Array stylesheets; | Array scripts; | Object links; | Review review; | }; | | void writeStandardHeader(string title, HeaderData? data); Writes a document header. 
This will write a DOCTYPE, start the HTML element, write a HEAD element with TITLE, default external stylesheets and scripts, additional external stylesheets and scripts specified in the data argument, and then start the BODY element and write a TABLE element containing the visible page header (the Critic logo and navigation links.) Additional content written after the call to this function will be parsed into the BODY element. The data.user attribute controls which user's preferences and other state (such as unread news items) is used to render the page header. If not specified, it defaults to the user loading the page being generated. The data.stylesheets and data.scripts arrays should contain URLs as strings. The data.links object is enumerated and one link is added for every enumerable property. If the name of the property starts with "rel=", a LINK element is added to the HEAD element, otherwise a regular link whose title is the name of the property is added to the page header (next to the "Home", "Dashboard", "Branches" et c. links). The value of the property is used as the link's HREF in both cases. The data.review object, if set, adds a standard "Back To Review" link, and adds draft changes summary and Submit/Preview/Abort buttons to the right side of the header, if the user has any unsubmitted changes in the review. A call to writeStandardHeader() might look something like | critic.html.writeStandardHeader("Page Title", { | links: { "rel=up": "/dashboard", | "BTS": "https://bugs.opera.com/" } | stylesheets: ["/extension-resource/HelloWorld/custom.css"], | scripts: ["/extension-resource/HelloWorld/custom.js"] | }); writeStandardFooter() --------------------- | dictionary FooterData { | User user; | }; | | void writeStandardFooter(FooterData? data); Writes a document footer that adds a little bit of content and closes the BODY and HTML elements. 
escape() -------- | string escape(string text); Replaces all characters in the argument string that have special meaning in HTML with HTML entity references, so that it can safely be written as text or in an attribute value. (This is a trivial conversion, of course, and is exposed merely as convenience.) PaleYellowTable --------------- | interface PaleYellowTable { | constructor PaleYellowTable(string title); | | void addHeading(string heading); | | dictionary StandardItem { | string name; | string value; | string description; | Object buttons; | }; | dictionary ButtonsItem { | Object buttons; | }; | dictionary CustomItem { | string html; | }; | | void addItem(StandardItem item); | void addItem(ButtonsItem item); | void addItem(CustomItem item); | | void write(); | }; The PaleYellowTable interface can be used to generate typical Critic "pale yellow tables." The title argument to the constructor is the main title of the table. Additional headings can be added using the addHeading() methods. Regular content in the table is added using the addItem() method. When the addItem() method is called with an argument matching the StandardItem dictionary, a fixed-form item is added, which produces a result similar to the first table on a review front-page, containing basic information about the review. The buttons attribute in the dictionary is optional; if present, the object's properties are enumerated and a button is added for each, whose title is the name of the property, and whose onclick attribute value is the value of the property. All strings except the value (name, description, and button title and onclick handler) will have characters with special meaning in HTML replaced by entity references, and thus can't contain HTML styling. The value string is output as-is, and should be escaped using critic.html.escape() (or similar) if its content is unknown and shouldn't be interpreted as HTML. 
When the addItem() method is called with an argument matching the ButtonsItem dictionary, a row of buttons is added. The object is handled the same way as StandardItem.buttons described above. When the addItem() method is called with an argument matching the CustomItem dictionary, the value of the html attribute is output as-is (without escaping) inside a full-width table cell. The write() method writes all HTML for the whole table to stdout using the built-in (global) write() function. This means several PaleYellowTable objects can be constructed and populated in parallel, and then all written (in some order) at the end. ================================================ FILE: src/tutorials/extensions.txt ================================================ Critic Extensions ================= Creating an Extension --------------------- To create a Critic extension, you log into the server running Critic using SSH, and create a directory named <code>CriticExtensions</code> in your home directory. Under this directory, you create a sub-directory with the same name as your extension. Under this directory, you create a file named <code>MANIFEST</code>. You will also need to make sure the directory and all files under it are accessible to the system group <code>%(configuration.base.SYSTEM_GROUP_NAME)s</code>, for instance by running these commands: | chmod a+x $HOME | chmod --recursive a+rX $HOME/CriticExtensions Once the <code>MANIFEST</code> file exists, Critic will consider this extension to exist, and it will show up on the extension management page. Naturally, the contents of the <code>MANIFEST</code> file must follow certain rules, and if it doesn't, the extension management page will simply state that the extension has a broken manifest. Extensions with broken manifests are only displayed to the author. Hint: The red text that says the extension has a broken manifest is a link to a page that shows what's wrong with the manifest. 
Critic extensions are written in ECMAScript, and executed by a standalone ECMAScript interpreter. An ECMAScript API for accessing much of the data in Critic's database, as well as for performing operations such as raising issues and assigning changes to reviewers, is available to the extension script. See the [extensions API tutorial][api-tutorial] for details on the API. [api-tutorial]: /tutorial?item=extensions-api Installing an Extension ----------------------- Extensions won't run just because they are created; they need to be installed to become active. To install an extension, a user simply goes to the [extension management page][manageextensions], selects a version of the extension to install, and clicks the "Install" button. Alternatively, a system administrator can install an extension "universally", which activates the extension for all users. For an extension to have one or more "official" versions, the extension's directory should be a Git repository, and this repository should have one or more branches named "version/*". For instance, to have a version named "stable", the repository should contain a branch named "version/stable". When a user installs an official version of an extension, the commit currently referenced by the version's branch is checked out into a directory outside the author's home directory, and the extension is run from there instead. If the branch changes, the user can update his installation of the extension via the [extension management page][manageextensions]. (This may be automated in the future.) In addition to official versions, every extension can be installed in "live" mode, which means that the extension runs directly from the extension's directory in the author's home directory. This is typically the most convenient mode for the author of the extension to use while developing. 
Regular users would typically be better off installing an official version, to avoid temporary breakage while the author is developing the extension. If the extension's directory isn't a Git repository, or if the repository has no branches named "version/*", only the "live" mode is available. [manageextensions]: /manageextensions The MANIFEST ------------ The format of the MANIFEST file is similar to that of a traditional INI file. Sections are started by "[heading]" lines and key-value pairs are specified by "key=value" lines. In the MANIFEST file, the values are interpreted either as plain strings (no escapes, no line-breaks) or, if the value starts and ends with double quotes, as a JSON string literal (note: not a string containing an arbitrary JSON-formatted value.) The beginning of the file is an implicit top-level section (without header.) This section must specify two keys: "Author" and "Description". "Author" can be specified multiple times to specify multiple authors. Additionally, the key "Hidden", with the value "true" or "yes", can be used to hide the extension from other users; useful for extensions that are too unfinished to be of much interest to anyone but the author. Other sections in the MANIFEST file defines "roles" in which the extension executes. At least one role must be defined for the MANIFEST file to be considered valid. (If no roles are defined, the extension won't do anything, so that'd be rather pointless.) The manifest for a simple extension could look something like | Author = "Jens Lindstr\u00f6m" | Description = A simple extension. | | [Page simplified/r/*] | Description = Simplified review front-page: http://critic.example.com/simplified/r/1234 | Script = main.js | Function = pageReviewFrontPage | | [ProcessCommits] | Description = Automatically process commits. | Script = main.js | Function = processCommits Roles ----- Currently there are four supported roles: Page, Inject, ProcessCommits and FilterHook. 
Each role is defined with a separate section. Every such section must specify three keys, "Description", "Script" and "Function". The "Description" should simply describe what the extension does in this role; the "Script" is a path specifying the .js file to load (the path is relative the extension's directory) and "Function" is the name of the script function to call. Page ---- A "Page" role simply lets the extension handle URLs loaded from Critic. It can either define new URLs, or override existing ones. (Hint: If an extension overrides a built-in URL, and you want to access the built-in one, add "!/" to the beginning of the URL path to disable all extension URLs.) The section header for a page role is "[Page <glob>]" where <glob> is matched against the path of the URL. The path has no leading slash, and does not contain the query path. For instance, if the full URL is "http://host/a/b?d=e", then the string that the glob should match is "a/b". The script function specified for the role will be called with three arguments: 1 A string containing the HTTP method ("GET" or "POST",) 2 a string containing the path (what the glob was matched against) and 3 an object representing the query part of the URL. The query object is null if no query part was present in the URL. Otherwise, it has two properties: "raw", whose value is the query part as a string, completely uninterpreted (without a leading question mark,) and "params", whose value is an object with one property for every query parameter, whose values are the decoded values the parameters, or null if the parameter did not include a "=value" part. Example: the URL "http://host/path?foo=10&bar=hi%%20ho&damer" would produce the query object | { | raw: "foo=10&bar=hi%%20ho&damer", | params: { | foo: "10", | bar: "hi ho", | damer: null | } | } The script function generates the URL response by writing to stdout, using the built-in functions "write" and/or "writeln". 
The first line of output must contain only an HTTP response code as a decimal number, typically "200". The following lines define HTTP response headers, for instance "Content-Type: text/html". (The content type defaults to "text/plain", and "; charset=utf-8" is appended to it automatically if no charset is specified.) The response header list is terminated by writing an empty line. Everything after the first empty line is forwarded as-is to the client. If the script function returns without writing a single byte of output, the behavior is as if the page role hadn't existed in the first place. For a custom URL, this means the request fails, since the URL isn't handled. If the page role was invoked to override a built-in page, the built-in page is rendered instead, thus allowing a page role to conditionally override a built-in page. Additional static resources, such as images and external stylesheets or scripts, that are put in a sub-directory named "resources" are automatically available via the URL "/extension-resource/<extension>/<path>" (for users that have installed the extension.) For instance, the URL %(configuration.URL)s/extension-resource/HelloWorld/hello.txt would return the file /home/<author>/CriticExtensions/HelloWorld/resources/hello.txt for users that have installed the extension named HelloWorld. Inject ------ An "Inject" role is similar to a "Page" role in form, but instead of handling the URL completely, it can issue simple commands to inject some content onto built-in pages. The section header for an inject role is "[Inject <glob>]" where <glob> is matched against the path of the URL, exactly like for a page role. If the URL is not handled at all by Critic, or if it is one that doesn't produce an HTML document, the role will not be invoked. The script function specified for the role will be called with two arguments: 1 A string containing the path (what the glob was matched against) and 2 an object representing the query part of the URL. 
These arguments are the same as the arguments to the script function specified for a page role, except that the method argument is missing. It's missing because the role can only be invoked on URLs loaded using the GET method, and thus wouldn't be very interesting. The script function injects content by writing lines to its standard output using the write() or writeln() functions. Each line represents one injection. Each line must be on the format "<command> <JSON>". The command is a simple keyword specifying the type of injection. The parameters to the command is encoded as JSON and is different to different commands. Three commands are currently supported: ? script = Injects <script src="url"></script> into the page's HEAD element. It will be injected after all other scripts on the page. The JSON encoded value should be a string containing the URL. ? stylesheet = Injects <link type="text/css" href="url"> into the page's HEAD element. It will be injected after all other stylesheets on the page. The JSON encoded value should be a string containing the URL. ? link = Controls the set of links in the page header ("Home", "Dashboard", "Branches" et c.) The JSON encoded value should be an array containing two elements, the title of the link, and the URL it points to or null. If the title matches one of the built-in links (or, technically, one added by another extension whose inject role was invoked first) the existing link's URL is changed, or the existing link is removed, if the URL was specified as null. Otherwise, if the URL was not specified as null, a new link is added to the set of links. 
For example, to inject a custom script and a custom stylesheet, remove the built-in "Branches" link and add a link to http://www.opera.com/, the script function could write something like this: | script "/extension-resource/HelloWorld/custom.js" | stylesheet "/extension-resource/HelloWorld/custom.css" | link ["Branches", null] | link ["Opera.com", "http://www.opera.com/"] Hint: If an inject role fails to execute when loading a page, an error message is inserted into the loaded page as an HTML comment. So if your injection doesn't seem to happen, check if there are any HTML comments mentioning your extension! ProcessCommits -------------- A "ProcessCommits" role lets the extension process commits immediately when they are added to a review. The extension can for instance do pattern-based detection of problems, and raise issues about them automatically. A "ProcessCommits" role is invoked when a user that has installed the extension creates a review (via push or the web interface) or pushes additional commits to the review branch. If the extension is universally installed, the role is invoked whenever any user creates a review or pushes additional commits to a review branch. A "ProcessCommits" role has no addition parameters in the MANIFEST; only the "Script" and "Function" keys are necessary (and, of course, the "Description".) The script function specified for the role will be called with three arguments: 1 A [Review object][review] representing the review being modified, 2 a [Changeset object][changeset] representing the collected changes being added to the review and 3 a [CommitSet object][commitset] containing the added commits. The changeset argument is null if it isn't possible to describe the added changes as a single diff. This happens when the added commits include a merge with a different branch, in which case a simple diff would include all the merged in changes, even though those changes aren't added to the review. 
If the script function writes to its standard output, the written text will be shown to the user, either as output from the "git push" command, or in a dialog in the web interface. FilterHook ---------- A "FilterHook" role lets the extension define an additional filter type, that a user can select in filter type drop-down in the "Add Filter" dialog on their ["Home" page][home] instead of the built-in types (<code>Reviewer</code>, <code>Watcher</code> and <code>Ignored</code>) when they have installed the extension. When a user has such a filter, and a set of commits is added to a review including at least one commit that touches a file matched by the filter, the role is invoked. Note: When a review is first created, the initial set of commits to be reviewed are considered to be added to the review as part of creation, which also triggers the filter, if it matches any touched files. The section header for a filter hook role is <code>[FilterHook <name>]</code>, where <code><name></code> should be a simple identifier of the filter, unique to the extension. The name should contain only ASCII letter and digits, and underscores. The section can contain two additional keys: "Title" and "DataDescription". The "Title" is what is displayed in the filter type drop-down in the "Add Filter" dialog. If not present, it defaults to the role's name. The "DataDescription" key, if present, enables an extra input field in the "Add Filter" dialog when the filter is selected in the filter type drop-down, where the user can input an arbitrary string. The "DataDescription" should, as the name suggests, describe to the user what to input, and is displayed above the extra input field in the "Add Filter" dialog. 
The script function specified for the role will be called with five arguments: 1 A string containing the data that the user input in the extra data input field in the "Add Filter" dialog, or <code>null</code> if there was no "DataDescription" key in the role's section in the MANIFEST file. 2 A [Review object][review] representing the review being modified, 3 A [User object][user] representing the user who added the commits to the review (or created the review.) 4 A [CommitSet object][commitset] representing all the commits added. 5 An array of [File objects][file] representing the files touched by the added commits that actually matched the filter. Note: The current user (as returned by <code>critic.User.current</code>) when the script function is called is the user whose filter was triggered, not the user that added commits, or the user that authered/hosts the extension. The script function is called asynchronously, and can, unlike a "ProcessCommits" role, not generate output that end up output by the "git push" command that added commits to the review. If it wishes to produce output, it can either create issues or notes in the review, or send custom emails using the <code>critic.MailTransaction</code> API. [home]: /home [review]: /tutorial?item=extensions-api#review [user]: /tutorial?item=extensions-api#user [changeset]: /tutorial?item=extensions-api#changeset [commitset]: /tutorial?item=extensions-api#commitset [file]: /tutorial?item=extensions-api#file ================================================ FILE: src/tutorials/external-authentication.txt ================================================ External Authentication ======================= Critic supports letting an external system authenticate users using the [OAuth 2.0 protocol][oauth2]. This is convenient if the target audience of the Critic system can be assumed to already have accounts in the external system, as they would not need to set and remember a separate password in the Critic system. 
Basic operation --------------- An external authentication provider can be used in two different ways: * It can be supported as an alternative alongside regular password-based authentication, in which case Critic's "Sign in" page will contain a link to the external authentication provider below the username/password fields. * It can also be used as the only way of authentication users, in which case the "Sign in" link will redirect directly to the external authentication provider. The first mode is achieved by simply enabling one or more external authentication providers. The second mode is achieved by setting the <code>AUTHENTICATION_MODE</code> variable in <code>%(configuration.paths.CONFIG_DIR)s/configuration/base.py</code> to the name of the external provider (<code>"github"</code> or <code>"google"</code>). Note that in the second mode, access to Git repositories over HTTP/HTTPS will not be supported, since Git only supports authentication using HTTP authentication (using the "Authorization" request header.) Also, if anonymous access is disabled, all unauthenticated accesses to the Critic system redirects immediately to the external authentication provider, which may be confusing unless the user is prepared for it. [oauth2]: http://oauth.net/2/ Supported authentication providers ---------------------------------- Two providers of OAuth 2.0 authentication are supported by Critic out of the box: GitHub and Google. GitHub ------ To use GitHub for authentication, an OAuth application for the Critic instance needs to be created in the [GitHub account settings UI][github]. The application creation form will ask for some information about the application. The "Authorization callback URL" value requires particular attention: it must be set to <code>%(configuration.URL)s/oauth/github</code>. If you're reading this tutorial on a different Critic system than the one being configured, this URL need to be adjusted accordingly, of course. 
After creating the application, copy the generated "Client ID" and "Client Secret" into the <code>PROVIDERS["github"]</code> dictionary in <code>%(configuration.paths.CONFIG_DIR)s/configuration/auth.py</code>, update the rest of the settings there appropriately, and then restart Critic by running (as root) | criticctl restart [github]: https://github.com/settings/applications/new Google ------ To use Google for authentication, a project needs to be created at the [Google developers console][google]. Once created, an OAuth client ID is created in the project's management UI: "APIs & auth" → "Credentials" → "Create new client ID". The "Authorized Javascript origins" and "Authorized redirect URI" values require particular attention: The former must be set to <code>%(configuration.URL)s</code> and the latter must be set to <code>%(configuration.URL)s/oauth/github</code>. If you're reading this tutorial on a different Critic system than the one being configured, there URLs need to be adjusted accordingly, of course. After creating the project and OAuth client ID, copy the generated "Client ID" and "Client Secret" into the <code>PROVIDERS["google"]</code> dictionary in <code>%(configuration.paths.CONFIG_DIR)s/configuration/auth.py</code>, update the rest of the settings there appropriately, and then restart Critic by running (as root) | criticctl restart [google]: https://cloud.google.com/console/project ================================================ FILE: src/tutorials/filters.txt ================================================ Filters ======= Introduction ------------ "Filters" is Critic's mechanism for automatically assigning reviewers for changes when reviews are created or updated, and for allowing users to be notified about code changes without being assigned to review them. Filter Scope ------------ There are two filter scopes: repository and review. Repository ---------- Repository filters are added by each user on the user's <a href="/home">Home</a> page. 
Such filters apply to all reviews created in their repository, except when the user who creates the review explicitly requests that no filters should be applied. Review ------ Review filters are specific to a single review, and can be added either by the user whom the filter applies to, the review owner, or any other user. They can be added when the review is created, or can be added, or removed, at any later time. A review filter is always given priority over a repository filter, if the two conflict in any way. Filter Type ----------- There are three types of filters: * A <b>Reviewer</b> filter automatically assigns the user as a reviewer of all changes in files selected by the filter, unless the user is the author of the commit that makes the changes. * A <b>Watcher</b> filter will automatically "CC" the user on any review that changes any of the files selected by the filter, but without assigning the user as a reviewer. * An <b>Ignored</b> filter does nothing in itself, but can be used to override a Reviewer or Watcher filter for a sub-set of the selected files or in a single review, and thus ignore some changes that the user is not interested in. Delegates --------- A <b>Reviewer</b> filter can optionally define a list of "delegates." The delegates are users who should be assigned to review changes authored by the user that has the Reviewer filter, in his stead. File Selection -------------- Each filter applies to a set of files in the repository. This set of files is defined by a single path. If the path does not end with a path separator it names a file, and only that file is selected by the filter. If the path ends with a path separator it names a directory, and selects all files in that directory and any sub-directory of it. If the path is "/", all files in the whole repository are selected. Wildcards --------- The path can optionally contain wildcards to name multiple files or directories. 
The basic functionality is the same; if a wildcard path matches the path of a file, that file is selected, and if it matches the path of a directory, all files in that directory and in any sub-directory of it are selected. Three wildcards are supported: <code>?</code>, <code>*</code> and <code>**</code>. The <code>?</code> wildcard matches any character except the path separator, <code>/</code>. The <code>*</code> wildcard matches zero or more of any character except the path separator. The <code>**</code> wildcard matches zero or more complete path components, and can only occur at the beginning of the path or (alone) between two path separators. Filter Ordering --------------- It's entirely possible for a user to have multiple filters that select overlapping sets of files. When this happens, only one filter per user will be applied for each file, and that filter alone will define the type (Reviewer, Watcher or Ignored,) and the set of delegates. To define which filter among a set of matching filters to apply for a file, the filters are ordered according to the following rules, in order of priority: * A review filter wins over a repository filter. * A filter whose path does not end with a path separator (single file) wins over a filter whose path does (sub-tree). * A filter whose path contains more path separators wins over a filter whose path contains fewer path separators (even if the latter filter contains a wildcard that causes it to match a deeper path in practice.) * A filter whose path contains fewer wildcards wins over a filter whose path contains more wildcards. * Finally, if all other rules fail to differentiate between two filters, they are ordered lexicographically by path. 
================================================ FILE: src/tutorials/rebasing.txt ================================================ Rebasing a Review ================= Rebase Types ------------ Critic's review rebase functionality handles two limited types of rebase: 1 In-place rebase 2 Move rebase The first type is typically used to clean-up or rewrite the history of the branch, without changing the branch's upstream commit (the parent commit of the first commit in the review.) Typically, such a rebase doesn't change the state of the source tree as it is at the head of the branch; it only changes how it came to be that way. Critic in fact requires that this type of rebase doesn't change the tree. The second type is typically used to update to a newer version of the upstream branch. Because the source tree onto which the review's changes are applied changes with this type of review, it can typically not produce an identical tree. Critic doesn't require that for this type of rebase, instead it only requires the new upstream commit to be a descendant of the old upstream commit, and that you tell it in advance which new upstream commit you're rebasing onto. Preparing to Rebase ------------------- To perform either kind of rebase, you must first visit the review's front-page and press the button labelled "Prepare Rebase" that is placed at the bottom of the table titled "Commits." Without being told in advance that you intend to rebase a review, Critic will reject all non-fast-forward updates. Pressing the button opens a dialog that asks you to select which type of rebase you're planning. Once the process of preparing the review for a rebase is completed, only a valid rebase push is accepted, and only by you. If other users try to push to the review branch, their pushes will be rejected, even if they are plain fast-forward updates, or correct rebases. 
If you change your mind, there's a button labelled "Cancel Rebase" on the review front-page where the "Prepare Rebase" button used to be. In-place Rebase --------------- By selecting the "History Rewrite / In-place" alternative in the dialog and pressing the "Continue" button, the process of preparing for an in-place rebase is complete, and a dialog telling you that will be displayed. You can now go ahead and push the rebased branch using <code>git push -f critic HEAD:r/<name></code> (the "-f" argument is required since Git otherwise rejects all non-fast-forward updates.) With this type of rebase you can add new changes to the review as well when pushing the rebased branch, as long as those new changes are in separate commits. The push is accepted as long as there is some commit on the rebased branch that references the exact same tree as the current head of the review branch. Move Rebase ----------- By selecting the "New Upstream / Move" alternative in the dialog and pressing the "Continue" button, a new dialog is displayed asking you to specify which upstream commit you intend to rebase the branch onto. If you rebase using a command like <code>git rebase --onto <newbase> <upstream></code> then the upstream commit you should specify is whatever "<newbase>" resolves to. If you rebase using a command like <code>git rebase <upstream></code> then the upstream commit you should specify is whatever "<upstream>" resolves to. Critic lets you specify the upstream commit either as a full 40-character SHA-1 sum or by selecting a tag that references a suitable commit. Abbreviated SHA-1 sums or branch names are not accepted since it is possible that the commit you intend to rebase onto doesn't exist in Critic's repository yet, or that your local branch (even 'master') doesn't reference the same commit as Critic's branch with the same name. (It could be said that the same problem might exist with tags, but it's much less likely.) 
It's very important that you specify the correct upstream commit, or, if you prepared the rebase before you performed it locally, that you actually rebase onto the upstream commit you specified. If the specified upstream commit is not reachable from the commit you later push, the push is rejected. If the specified upstream commit is reachable from the commit you later push, the push is accepted, even if you actually rebased on a different upstream commit; the effect of this is that commits are added to the review that you didn't mean to add, and that reviewing any conflict resolutions you did while rebasing becomes more difficult. It's also very important that you don't add any new changes to the rebased branch before pushing. Such changes, even if added as separate commits, will not be directly visible in the review. They will, if at all, show up as some sort of conflict resolutions. Critic simply assumes that the rebased branch you push corresponds exactly to the current review branch, only rebased onto another upstream commit. Like with an in-place rebase, to finish the rebase you simply push the rebased branch using <code>git push -f critic HEAD:r/<name></code> The push will automatically add a constructed "equivalent" merge commit to the review. This commit has as its parents the old head of the review branch and the new upstream commit that you previously specified, and references the same tree as the head of the rebased review branch. The constructed merge is exactly the same as you would get if you had merged the new upstream commit into the review branch instead of rebasing onto it, and is used to let reviewers review your resolutions to any conflicts that happened while rebasing the branch. Review Front-page Additions --------------------------- The branch log on the review front-page is rendered slightly differently once the review branch has been rebased. In fact, it's no longer strictly speaking a branch log at all. 
All commits that had been added to the review before the review branch was rebased are still displayed, with whatever review status they had before. All issues and notes are still attached to these commits. The point at which the rebase took place is signalled by a line that says either History rewritten by <name> or Branch rebased by <name> depending on the type of rebase. The first (most recent) such line also contains a link to the actual rebased/cleaned-up log of the branch. Commits pushed to the review branch after the rebase are displayed above the rebase signalling line. Issues and notes should be transferred from the old branch to the rebased branch automatically unless addressed by rebase, in which case they would be marked as addressed by the constructed merge commit instead. (An in-place rebase can't address open issues since the source tree can't be modified.) ================================================ FILE: src/tutorials/reconfiguring.txt ================================================ Reconfiguring Critic ==================== Email Configuration ------------------- When Critic sends emails about new or updated reviews, it generates a separate email for every recipient. (It then lies about whom it sent the email to by adding all recipients to the "To" header in each email sent.) You can configure what changes you wish to receive emails about, to some degree what information those emails should contain, and how the emails should be formatted. Any such configuration affects only emails sent to you. They do not affect emails sent to others because of things you do. Email Activation ---------------- First of all you can configure whether emails are sent to you at all, using the configuration item CONFIG(email.activated). It's enabled by default, and you should typically not disable it. Subject Line Formats -------------------- There are a number of configuration items that control the subject line of emails sent. 
Their names all start with "email.subjectLine.", for instance CONFIG(email.subjectLine.newReview). They all work the same way. The value should be a valid <a href="http://docs.python.org/library/stdtypes.html#string-formatting">Python format string</a> containing "%%(key)s" conversion specifiers to insert details about the review into the subject line. If the format string is invalid, instead of a useful subject line your emails will contain the error message Python produced. The following keys can be used in conversion specifiers: ? id = The review ID in "r/NNNN" format. ? summary = The review title or summary. ? progress = The current progress of the review in "NN %% and N issues" format. ? branch = The name of the review branch. Review Association Recipients ----------------------------- Critic always adds a custom header <code>OperaCritic-Association</code> to all emails sent about a review, containing a comma-separated list of tokens identifying how the user to whom the email is sent is associated with the review. The set of possible tokens are <code>owner</code>, <code>author</code>, <code>reviewer</code>, <code>watcher</code> and <code>none</code>. The purpose of this email header is to allow for client-side filtering of emails from Critic according to relevance or importance. Some email systems, for instance Gmail, don't support filtering based on custom headers. To support the same type of filtering in such limited email systems, Critic can optionally be configured to add phony recipients to the email's <code>To</code> header instead. These recipients are constructed by taking the email address used as <code>Sender</code> (not <code>From</code>) -- usually something like <code>critic@example.com</code> -- and appending <code>+token</code> to the user name part. In other words, if the user owns the review, all emails about it would include <code>critic+owner@example.com</code> in the <code>To</code> header. 
This workaround is enabled by the CONFIG(email.enableAssociationRecipients) setting. ================================================ FILE: src/tutorials/repository.txt ================================================ Repository Viewer ================= Displaying diffs ---------------- The core functionality of Critic is of course the display of diffs, since that's what is reviewed, and reviewing is what it's all about. But this functionality is not limited to reviews. A diff of any commit in Critic's repository can be displayed, and also a diff between any two commits in Critic's repository. The diff display in Critic is based on the output of the 'git diff' command, but is post-processed and analyzed to produce an optimal visualization of the changes made. It may not always succeed in producing an optimal visualization of the changes, of course, and when it doesn't, please <a href="https://github.com/jensl/critic/issues/new">report bugs</a> about its failures. Single commit ------------- To display a diff of a commit, all you need to specify in the URL is the SHA-1 sum of the commit. A simple URL such as %(configuration.URL)s/<SHA-1> is enough. Critic supports having multiple repositories, and here we didn't specify the repository in the URL. In this case, Critic searches all of its repositories for a commit. If it finds one, it doesn't really matter which repository it found it in, of course, since it would be the same commit regardless of repository. The first searched repository will be the user's <a href="%(configuration.URL)s/config?highlight=defaultRepository">default repository</a>. The SHA-1 sum specified can also be abbreviated--a prefix instead of the full 40 characters--or be specified in any form supported by the <code>git rev-parse</code> command, such as the name of a branch, or a SHA-1 sum followed by a caret (^). 
In this case, no search for an appropriate repository is done--only the user's default repository is considered unless a repository is specified explicitly, in which case only that repository is considered. A longer URL for displaying a single commit is %(configuration.URL)s/showcommit?sha1=<SHA-1> but this is rarely meaningful to use. This form of URL needs a full 40 character SHA-1 sum--it supports neither abbreviated SHA-1 sums nor other ways of specifying commits. Multiple commits ---------------- To display a diff of multiple commits, or between two commits, specify the two commits separated by two full stops/periods (..) similar to the syntax used by the <code>git diff</code> command. Like when displaying a single commit, the commits can be specified using any format supported by the <code>git rev-parse</code> command, and again the repository used is always the user's default repository unless one is explicitly specified. Examples: %(configuration.URL)s/<SHA-1>..<SHA-1> %(configuration.URL)s/master..<branch name> The longer form of URL can be used to display multiple commits as well, and supports two different, but largely redundant, ways to specify the range of commits: %(configuration.URL)s/showcommit?from=<SHA-1>&to=<SHA-1> %(configuration.URL)s/showcommit?first=<SHA-1>&last=<SHA-1> The from/to form is exactly equivalent to the A..B form supported by the shorter URL, except it requires full 40 character SHA-1 sums. The first/last form is different in that it includes the changes made in the first commit in the diff, and is thus equivalent to the A^..B short form. These longer forms have few advantages over the shorter forms in practice, and might as well be avoided entirely. Specifying repository --------------------- In some cases, it may be necessary to explicitly specify the repository to use. 
This is typically the case when the URL used relies on using <code>git rev-parse</code> to interpret the arguments, and the result depends on the repository in which the command is run, and the user's default repository is not the right one. The repository to use is specified in two ways depending on whether a short URL or the longer variant is used: %(configuration.URL)s/<repository>/<SHA-1> %(configuration.URL)s/showcommit?sha1=<SHA-1>&repository=<repository> In both cases, the repository parameter is the short name of the repository. This is the name used in the drop-down list used to set the user's <a href="%(configuration.URL)s/config?highlight=defaultRepository">default repository</a>. In the case of the longer URL, the repository can also be specified by ID, typically 1-N, in the order the repositories occur in drop-down lists, but this is of course less convenient to use. These are the available repositories, and their corresponding short names: [repositories] ================================================ FILE: src/tutorials/requesting.txt ================================================ Requesting a Review =================== What is a Review? ----------------- Critic is based around regular git repositories. A review consists of one or more commits at the tip of a branch in this git repository, annotated by comments written by the reviewers and other participants along the way. Any code changes involved in a review are always communicated to Critic via the git repository, through <code>git push</code>, and are always available in the git repository for anyone to fetch, checkout and test. Not every branch in Critic's git repositories is associated with a review. Regular branches, such as 'master', also exist in the repositories, and can be pushed to the repositories without any particular side-effects. 
Branches that are associated with reviews always have names that start with the prefix "<code class=bold>r/</code>", and no branches in the repositories not associated with reviews can have names with that prefix. This unambiguously identifies branches that are being reviewed. Step 1: Pushing changes to Critic --------------------------------- In order to request a review of the changes made by one or more commits, those commits must first be pushed to one of Critic's git repositories. Branches are pushed to Critic's git repositories as they are to any other git repository; just add one of Critic's repositories as a remote in your local repository and use 'git push' to push the branch to Critic's repository. These are the repositories that Critic has right now: [repositories] Of course, you need to select a name for your branch. There's really only one concern that is different from when selecting a branch name when pushing a branch to any other shared repository: if it starts with the "<code class=bold>r/</code>" prefix the branch is associated with a review. So you have two choices: 1 Push to a branch whose name doesn't have the "<code class=bold>r/</code>" prefix, in which case no significant further actions are triggered, or 2 Push to a branch whose name does have the "<code class=bold>r/</code>" prefix, in which case a review of only the first (head) commit of the branch is created automatically and immediately, and emails are sent to relevant users. If the prospect of creating a review immediately merely by pushing a branch to a remote seems frightening, fear not. The ability to create a review by pushing a branch is controlled by the configuration option <a href="config?highlight=review.createViaPush#go"><code>review.createViaPush</code></a>, which is disabled by default. 
Trying to push a branch whose name has the "<code class=bold>r/</code>" prefix without enabling the configuration option first causes the push to be rejected with a message saying you need to enable the option. <b>Note:</b> If you pushed to a branch whose name doesn't have the "<code class=bold>r/</code>" prefix, this branch is only used to create the review. When the review is created, a second branch will be created to go with it, and any updates to the first branch will have no effect on the review! Step 2: Locating Commit(s) -------------------------- If in step 1 you pushed a branch whose name had the "<code class=bold>r/</code>" prefix, then there is no step 2. You're done. If you used any other branch name, read on. Locate the branch you pushed, or the commit you want reviewed, in Critic's repository viewer. A branch can be displayed by loading the URL %(configuration.URL)s/log?repository=<short name>&branch=<branch> and a single commit can be displayed by loading the URL %(configuration.URL)s/<SHA-1> Also, when you pushed the branch, the reply from the remote, and output from your git client, should have contained a link to the branch and a direct link for creating a review of the whole branch. On every page displaying either the branch or a single commit you should see a button in the top-right corner of the page, with the label "Create Review". Press that button to proceed to the next step. Note that pressing the button does not create a review right away and is not final; you'll still have a chance to change your mind. Also note that there may be a significant delay between pressing the button and the next page loading. Do not be worried by this; it is simply Critic preparing all the commits for quicker viewing and takes some time if there are many commits and/or large files were modified. 
Step 3: Creating the Review --------------------------- The "Create Review" button takes you to a page where you will get to select a branch name for the review, write a one-line summary, a longer description (which is optional) and inspect what users would be assigned to review the changes, and what additional users would be watching the review. The page also lists all the commits that would be part of the review (initially) with links allowing you to inspect the diffs and make sure everything is to your satisfaction. Branch Name ----------- A branch name needs to be selected for the review. It must have the "<code class=bold>r/</code>" prefix, but can otherwise be chosen freely. Two simple patterns are recommended, however: 1 For a review of a bug fix, use the branch name "<code class=bold>r/<bug ID></code>". Critic will automatically pre-fill the branch name field with such a branch name if the review contains a single commit and the first line of its commit message indicates that it's a fix for a bug. (The detection of such an indication is a bit hit-and-miss; it's just a regexp.) 2 For other reviews, use a branch name with the prefix "<code class=bold>r/<username>/</code>". These patterns are only recommendations. Overly generic branch names should be avoided, of course, but this holds equally true for the same reasons in any other shared repository. Description ----------- The longer review description is optional since commits, and thus commit messages, are an integral part of the review in Critic. For a review of a single-commit bug fix, any sensible longer description of the review would likely be more or less exactly the same as the commit message of the single commit, and thus redundant. For a review of a larger body of work consisting of many commits, a description that summarizes the work would of course make sense. 
Step 4: Submit the Review ------------------------- The final step in the process is to submit the review by pressing the button labelled "Submit Review" found in the top-right corner of the "Create Review" page. Before that, no trace of the review exists in Critic's database. There is therefore no need, and no possibility, to "abort" the review at this point: it doesn't exist yet. Submitting the review does a number of things: 1 Creates the review branch in Critic's git repository. 2 Assigns all the changes in all the commits to all the appropriate reviewers. 3 Sends emails to all users associated with the review about the review having been created. After submitting the review, you're redirected to the newly created review's front-page, which contains or links to all information relevant to the review. At this point, the most prominent feature of the page will be the text "No progress", in big letters. As reviewers (and others) review the changes, this text will change to a percentage counter describing how much of the changes have been reviewed so far, and ultimately to the text "Accepted!" when all the changes have been reviewed without issues, at which point you'll be able to close the review. ================================================ FILE: src/tutorials/reviewing.txt ================================================ Reviewing Changes ================= Progress and State ------------------ Critic keeps track of the progress and state of a review in order to be able to declare when a set of changes has been accepted, and in order to be able to tell individual users what new changes they need to review. A review is considered accepted when there is nothing that blocks it from being accepted; there is no need for any (or all) reviewers to explicitly signal final acceptance. Two things block a review from being accepted: changes that haven't been reviewed yet, and issues raised by reviewers (or others) while reviewing the changes. 
A review is thus considered accepted once all changes have been reviewed, and either no issues were raised or all raised issues have been addressed. The current progress and/or state of a review is indicated on the review front-page in big letters. While a review is in progress, the progress is displayed as a percentage of changed lines that have been reviewed, and a count of open issues that need to be addressed. Once the review is accepted, the progress is displayed as "Accepted!". When the review is in this state, the review owner (or anyone else) can close the review as finished. At any point during the review process the review can also be dropped. To drop a review, press the "Drop Review" button in the top-right corner of the review front-page. Normally, only the review owner is given the option to drop the review. This limitation is convenience only: the assumption is that normally only the review owner is ever interested in dropping the review. If the configuration option <a href="config?highlight=review.dropAnyReview#go"><code>review.dropAnyReview</code></a> is enabled the "Drop Review" button is displayed on the front-page of all reviews. Displaying Changes ------------------ As a reviewer, the main task in a review is of course to review the actual code changes. The review front-page provides a range of options for displaying the changes: * In the list of commits, clicking the summary text of any commit will load a full diff of that commit. If there are changes in a commit that need your reviewing, the numbers in the columns "Pending" and "Total" in the table of commits are surrounded by a thick red dotted border. * If there are several commits in the review, a range of commits can be "squashed" to display the sum of changes in those commits. To do this, press the left mouse button over the summary text of the first commit in the range, move the mouse pointer to the summary text of the last commit in the range and release the left mouse button. 
* In addition, you can display either all changes that remain for you to review or all changes you have reviewed or should review by following the links labelled "[pending]" and "[reviewable]" in the top-right corner of the "Commits" table. In this view, changes in modules or files you are not reviewing are skipped, producing a smaller diff, but of course not a complete set of changes. Reviewing Changes ----------------- When a diff is displayed in the context of a review—whether it's a full commit, a range of commits or a filtered set of changes—the table of changed files displayed will have an extra column titled "Reviewed". For any file in which there are changes for you to review, this column will contain a checkbox. This checkbox is used to mark the changes as reviewed. In addition, the first row in the table will have a checkbox that can be used to check (or uncheck) all the checkboxes in the table. As an alternative to manually checking checkboxes after reading the diff, the whole set of changes can be "paged through" using the SPACE key. Repeatedly pressing the SPACE key will display the changes in the first file, then scroll down one page at a time until the bottom of the page is reached, then hide the file, mark the changes in it as reviewed (checking the checkbox) and display the next file. Thus, by simply pressing the SPACE key, you can read all the changes and check all the checkboxes as you go along. All changes made to a review are immediately communicated to the server, but recorded as "draft changes" that are not visible to other users. As soon as any draft changes are stored, the top-right corner of any page related to the review will contain a summary of the changes made, and the buttons "Submit" and "Abort". Pressing the "Submit" button makes the changes visible to other users (that is, removes the draft status) and sends emails to all participants of the review about the changes made. 
Note: "Reviewed" does not mean "Approved" ----------------------------------------- Marking changes as reviewed does not in itself represent approval! It merely means you reviewed the changes; that you don't expect to be reviewing these same changes much more and that no other user needs to review them. This is how Critic keeps track of what you and other users need to do, and what remains to do before a review is finished. Approval is signalled implicitly by marking changes as reviewed without complaining about anything while doing so; there is no explicit approval action. You are of course free to express your approval of the changes in comments, but it is not required for Critic to consider a review as accepted. Leaving changes "unreviewed" after reading them because you found flaws in the code and don't wish to approve the code before those flaws are fixed is in itself flawed. The commit you are looking at is fixed and cannot be altered, only an additional commit can fix the flaws in the code. So the commit you are looking at must either be "approved" in its current form, with raised issues that block the review, or the entire review would have to be dropped. Writing Comments ---------------- A vital component in the reviewing of changes is of course the ability to annotate the code with comments. In Critic, such code comments are attached to specific lines of code, not to lines of a particular diff. In practice, this difference is not very significant; often you will barely notice the difference. Comments are added by selecting a range of lines in the diff; just press the left mouse button over the first line, move the mouse pointer over to the last line and release the left mouse button, after which a dialog is displayed in which you write the comment. There's typically no need to select additional context lines surrounding the code you wish to comment on when writing a comment; Critic will add such context lines itself when the comment is displayed. 
It is thus better to only select the specific lines that the comment relates to. In Critic, there are two types of comments: issues and notes. Issues are significant to the progress of the review; any issue raised by a reviewer (or other user) blocks the review from being accepted until the issue has been addressed or resolved. Notes, on the other hand, do not, and exist to allow users to add informational comments without affecting the progress of the review. It may seem drastic sometimes to call a comment an "issue", but think of it like this: an issue comment is something, anything, that needs to be handled somehow before the review is closed. An answer, from the review owner, to a question might really be all you're after, but by calling it an issue, Critic will help both you and the review owner to not forget about it before going ahead. If you call it a note instead, feeling that "issue" is too harsh, Critic will not care whether the comment receives any further attention from anyone. If a comment is added with the wrong type—an issue that ought to be just a note or a note that ought to be an issue—the type of the comment can be altered after it's been added, using the buttons labelled "Convert to Note" and "Convert to Issue". Converting an issue into a note may cause the review to become accepted, since it is quite similar to explicitly resolving the issue. Handling Issues --------------- Since open issues block the review from being accepted and closed, they need to be handled. There are two basic ways to handle an open issue: 1 Explicitly mark the issue as resolved using the "Resolve Issue" button displayed along with the comment. 2 Push additional commits to the review that change the commented lines, which causes Critic to automatically mark the comment as "Addressed". 
This is the preferred choice when the comment asked for the code to be changed, since it makes it easy for both the reviewer and review owner to verify that all requested changes have been made, and also spares someone the trouble of manually marking issues as resolved. Anyone is allowed to explicitly resolve an open issue, including the review owner. This may seem like an opportunity to "cheat" and approve your own changes, and in practice, that is what it is. But the reason is simple: Critic is here to facilitate reviews, not prevent cheating or enforce rules. When a comment is marked as addressed automatically, there's of course the possibility that the change didn't actually address the issue, either because it was a completely unrelated change that just happened to intersect the comment, or because it wasn't what the reviewer had in mind. It may seem easy for issues to get lost because of this, but in practice this ought not be a problem, since the change that caused the comment to be marked as addressed still needs to be reviewed as any other change, which provides the reviewer with ample opportunity to verify that addressed issues were truly addressed. If an issue is incorrectly marked as addressed, it can be reopened. To do this, press the "Reopen Issue" button displayed along with the comment. A dialog will be displayed asking you to select the range of lines in the new version of the code where the issue still exists. When done, the issue will be open again, and the new range of lines will be the lines that need to be changed for the comment to be marked as addressed again. Draft Comments -------------- All actions involving comments, writing, editing, resolving and reopening, are immediately communicated to the server and stored in the database as draft changes. Once stored on the server, you can navigate to a different page in your browser, or reload the page, or crash the browser, without risk of losing any data. 
You will be able to submit the changes, making them visible to all users, on any page related to the review. If no "Submit" button is displayed, you probably just need to reload the page for it to appear. As a rule of thumb: if there's a text input on the screen, any editing you've done in it would be lost if you, for instance, closed the window. As soon as the text input is removed from the screen, whatever was in the text input is stored on the server (unless you used a "Cancel" button, of course.) If the operation to store information on the server fails, an error dialog is displayed and the dialog containing the text input stays open. ================================================ FILE: src/tutorials/search.txt ================================================ Review Quick Search =================== Availability ------------ The review quick search feature is available on every Critic page, by pressing the <code>F</code> key. This opens up a dialog where one enters a query string, and then searches by pressing the <code>ENTER</code> key (or clicking the "Search" button.) Query Syntax ------------ The query string is split into search terms, at white-space characters. A term containing white-space characters can be achieved using quotes (either single or double.) A search term can also be qualified by a <code>keyword:</code> prefix, where the supported keywords are: ? <code>repository</code> (or <code>repo</code> or <code>r</code>) = Filter by repository. ? <code>summary</code> = Filter by searching for sub-string in the review's summary. ? <code>description</code> = Filter by searching for sub-string in the review's description. ? <code>text</code> = Filter by searching for sub-string in the review's summary and/or description. ? <code>branch</code> (or <code>b</code>) = Filter by matching the review branch name. ? <code>path</code> (or <code>p</code>) = Filter by matching the path of files touched by the review. ? 
<code>user</code> (or <code>u</code>) = Filter by user associated with the review. ? <code>owner</code> (or <code>o</code>) = Filter by user that owns the review. ? <code>reviewer</code> = Filter by user that is assigned to review changes in the review. ? <code>state</code> (or <code>s</code>) = Filter by review state: <code>open</code> (any open review), <code>pending</code> (open and not accepted reviews), <code>accepted</code> (open and accepted reviews), <code>closed</code> or <code>dropped</code>. Filter Value Syntax ------------------- When matching against the review's summary or description, the search term's value is interpreted as a simple glob if it contains either a <code>*</code> or a <code>?</code> character, in which case it's matched against the whole summary or description. Otherwise, it's interpreted as a plain sub-string and is searched for in the summary or description. In other words, the search term "summary:foo" is the same as the term "summary:*foo*", but the term "summary:foo*" only matches reviews whose summaries start with the sub-string "foo". When matching against path or branch names, the search term's value is interpreted as a pattern similar to how paths are interpreted by Critic's filter mechanism: <code>**</code> matches zero or more path segments (<code>foo/</code>, <code>foo/bar/</code>, et c.), <code>*</code> matches zero or more characters except the path separator (<code>/</code>) and <code>?</code> matches exactly one character. A path is only interpreted as absolute if it has a leading <code>/</code>. The other search term types require values that are valid repository names, user names or review states, respectively. Unqualified Search Terms ------------------------ A search term that is not qualified by <code>keyword:</code> is interpreted as follows: Review summaries and descriptions will always be searched. If the search term doesn't contain any white-space characters, review branch names are also searched. 
If the search term looks like a file path, the search term is also matched against files touched by the review. The term is considered to look like a path if it does not contain any white-space characters, and either contains a path separator or ends with a file name extension. Examples -------- Example query strings: ? <code>example</code> = Finds reviews whose summary, description or branch name contains the sub-string "example". ? <code>search example</code> = Finds reviews whose summary or description contains the sub-string "search" and whose summary or description contains the sub-string "example". ? <code>"search example"</code> = Finds reviews whose summary or description contains the sub-string "search example". ? <code>"search*example*"</code> = Finds reviews whose summary or description start with the sub-string "search" and contains the sub-string "example". ? <code>summary:"search example" state:open</code> = Finds open (accepted or not) reviews whose summary contains the sub-string "search example". ? <code>search_example.py</code> = Finds reviews that touch any file named <code>search_example.py</code>. ? <code>owner:alice reviewer:bob</code> = Finds reviews owned by the user <code>alice</code> where the user <code>bob</code> is assigned to review some or all changes. ================================================ FILE: src/urlutils.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 the Critic contributors, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.

import json


class Response():
    """Compatibility wrapper around a `requests` response object.

    Different versions of the requests library have exposed the decoded
    JSON body as either a method (`response.json()`) or a property
    (`response.json`), or not at all.  This wrapper provides a uniform
    `json()` method and delegates every other attribute access to the
    wrapped response object.
    """

    def __init__(self, response):
        self.response = response

    def __getattr__(self, name):
        # Fall through to the wrapped response for every attribute we
        # don't define ourselves.
        return getattr(self.response, name)

    def json(self):
        """Return the response body decoded as JSON, or None on failure."""
        if hasattr(self.response, "json"):
            try:
                # Newer requests: 'json' is a method; older: a property.
                if callable(self.response.json):
                    return self.response.json()
                else:
                    return self.response.json
            except Exception:
                # Best-effort: any decoding problem means "no JSON body".
                return None
        else:
            try:
                # Very old requests: no 'json' attribute; decode manually.
                return json.loads(self.response.content)
            except ValueError:
                return None


def get(*args, **kwargs):
    """Perform an HTTP GET via requests, wrapping the result in Response."""
    # Imported here rather than at module level so that Response remains
    # importable and usable when the requests package is not installed.
    import requests
    return Response(requests.get(*args, **kwargs))


def post(*args, **kwargs):
    """Perform an HTTP POST via requests, wrapping the result in Response."""
    import requests
    return Response(requests.post(*args, **kwargs))
try:
    # Verify the system configuration is importable and sane before
    # exposing the real application.
    import maintenance.configtest
except ImportError:
    import traceback
    import sys

    # Remember the exception so the fallback WSGI application below can
    # report it on every request.
    exc_info = sys.exc_info()

    def application(environ, start_response):
        # Fallback WSGI entry point: report the import failure as a 500.
        start_response("500 Internal Server Error",
                       [("Content-Type", "text/plain")])
        header = "Failed to import 'maintenance.configtest' module"
        return (["%s\n%s\n\n" % (header, "=" * len(header))] +
                traceback.format_exception(*exc_info))
else:
    errors, warnings = maintenance.configtest.testConfiguration()

    if errors:
        def application(environ, start_response):
            # Fallback WSGI entry point: report configuration problems
            # (and any warnings) as a 500 on every request.
            start_response("500 Internal Server Error",
                           [("Content-Type", "text/plain")])
            header = "Invalid system configuration"
            result = "%s\n%s\n\n" % (header, "=" * len(header))
            for error in errors:
                result += str(error) + "\n\n"
            for warning in warnings:
                result += str(warning) + "\n\n"
            return [result]
    else:
        try:
            import configuration

            if configuration.debug.COVERAGE_DIR:
                # Import the main module under coverage measurement.
                import coverage

                def import_critic():
                    import critic

                coverage.call("wsgi", import_critic)

            import critic
        except ImportError:
            import traceback
            import sys

            exc_info = sys.exc_info()

            def application(environ, start_response):
                # Fallback WSGI entry point: report the import failure.
                start_response("500 Internal Server Error",
                               [("Content-Type", "text/plain")])
                header = "Failed to import 'critic' module"
                return (["%s\n%s\n\n" % (header, "=" * len(header))] +
                        traceback.format_exception(*exc_info))
        else:
            def application(environ, start_response):
                # Normal case: delegate each request to the main module.
                return critic.main(environ, start_response)

================================================ FILE: src/wsgistartup.py ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. try: import os import os.path import atexit import time import errno # Preload critic.py to reduce initial page load delay. import configuration if configuration.debug.COVERAGE_DIR: import coverage def import_critic(): import critic coverage.call("wsgi", import_critic) else: import critic pidfile_path = os.path.join(configuration.paths.WSGI_PIDFILE_DIR, str(os.getpid())) def deletePidFile(): try: os.unlink(pidfile_path) except: pass try: os.makedirs(os.path.dirname(pidfile_path)) except OSError as error: if error.errno == errno.EEXIST: pass else: raise open(pidfile_path, "w").write(str(time.time())) atexit.register(deletePidFile) except: pass ================================================ FILE: testing/USAGE.md ================================================ Critic Testing Framework ======================== The Critic testing framework installs Critic in a VirtualBox instance, and then runs tests against it. Many assumptions are made about the setup of the VirtualBox instance, the OS running in it, and the system running VirtualBox and the testing framework. In this manual, the system that runs VirtualBox and the testing framework is called the "host" and the system running in VirtualBox is called the "guest". 
Host Setup ---------- Required software: * Python 2.7 * Git 1.8.5 or later (needs Git bugfix 37cb1dd671e5e22cee363f98637a5a58f16be054) * VirtualBox * Requests (Debian/Ubuntu package: python-requests) * BeautifulSoup 3.x (Debian/Ubuntu package: python-beautifulsoup) The host system is assumed to have a clone of the Critic repository, in which the testing framework is executed. A temporary bare clone of this repository will be created and exported using "git daemon" as part of testing. By default, this "git daemon" process listens on TCP port 9418, which will fail if another "git daemon" process already runs on the host system. If this is a problem, a custom port can be specified using the --git-daemon-port command-line argument. The user that runs the testing framework must have passwordless SSH access to the guest system, and if a different user (name) should be used on the guest system, this needs to be configured in .ssh/config. VirtualBox Setup ---------------- The VirtualBox instance's SSH and HTTP services must be accessible (via network) from the host system. Its hostname must be given as command-line argument to the testing framework, and custom ports for SSH and HTTP can also be given as command-line arguments, if necessary. If the VirtualBox instance is configured to use NAT, it is typically not directly reachable from the host system. In this case, ports on the host system can be forwarded to the VirtualBox instance by VirtualBox, and "localhost" can be used as the hostname. The host system ports that are forwarded to the VirtualBox instance can be given to the testing framework as command-line arguments. Finally, the VirtualBox instance must have a snapshot named "clean" (or named something else if overidden using the --vm-snapshot argument.) This snapshot is restored when testing starts. If the snapshot is taken with the machine powered up and ready, testing will be slightly faster. 
Critic should not have been installed on the guest system at this point. The software packages that Critic's installation script installs if missing (Apache, PostgreSQL et c.) may be installed before the snapshot is taken; this reduces the time it takes to run tests, but of course means the software installation part of the installation script is not fully tested. For complete testing, two snapshots can be taken, one with the additional software installed and one without, and tests can be run once with each snapshot specified using the --vm-snapshot argument. Important note: When taking a "live" snapshot of an instance, supplying the "--pause" argument to the "VBoxManage snapshot" operation may be required to avoid triggering bugs in VirtualBox that corrupts the instance. Also note that VirtualBox supports having multiple snapshots of the same instance with the same name; make sure there's only one snapshot named "clean". Guest Setup ----------- Required software: * SSH server * Python 2.7 * Git * Sudo The user on the guest system that the host system user that runs the testing framework logs in as over SSH must be allowed to run "sudo" without entering a password. (This is typically not the default in any system, unless the user is "root", so /etc/sudoers typically needs to be edited to achieve this.) The hostname "host" on the guest system must resolve to the host system. This can for instance be accomplished by editing the guest system's /etc/hosts file. Critic will be installed on the guest system from the directory $HOME/critic, which must not exist. (IOW, the VirtualBox instance's "clean" snapshot must be taken at a time when it doesn't exist.) Running Tests ------------- Tests are run in the clone of Critic's repository on the host system. From the root of that repository, run $ python -m testing.main ARGS One argument is required, --vm-identifier=NAME|UUID which specifies which VirtualBox instance to run the tests in. 
Unless its hostname is the same as the VM instance name specified by the --vm-identifier argument, the argument --vm-hostname=HOSTNAME must be used to specify how to address the VirtualBox instance from the host system. Note: this hostname only needs to work on the host system, and need not be the hostname that the guest OS has been configured with. Also note: if both the host and guest OS:s use Avahi, the VirtualBox instance might be accessible using the name "<hostname>.local" where <hostname> is the hostname that the guest OS has been configured with. The argument --vm-snapshot=NAME|UUID can be used to select a snapshot to restore when starting the VirtualBox instance. A snapshot is always restored; if this argument is not provided, a snapshot named "clean" is restored. The arguments --vm-ssh-port=PORT --vm-http-port=PORT can be used to tweak how the VirtualBox instance's SSH and HTTP services are reached from the host system. The argument --git-daemon-port=PORT can be used to have the "git daemon" process that is automatically started to export the Critic repository listen on a different port (by default it listens on port 9418.) The arguments --commit=SHA1|REF --upgrade-from=SHA1|REF can be used to control which commit to test. The --commit argument defaults to the commit that is checked out in the non-bare repository on the host system. Since this commit's version of the testing framework and the tests is what will be running, it rarely makes any sense to specify any other commit. Doing so might not work as intended because of incompatibilities between the testing framework in the checked out commit and the installed version of Critic. The --upgrade-from argument is more useful. When given, instead of installing the tested commit directly, the commit specified by the --upgrade-from argument is installed, and then the system is upgraded to the tested commit. 
A typical use-case for this is to test the changes on a topic branch by installing the commit on 'master' from which the topic branch was branched off and then upgrading to the tip of the topic branch, and then run the tests. Note: The tested commit, as well as the commit to upgrade from, if given, must not be earlier than the integration of the testing framework, since the install.py and upgrade.py scripts were extended as part of the testing framework implementation. The arguments --debug --quiet can be used to control the amount of output produced while running tests. With --debug, various rather noisy and not terribly useful debugging output is added. With --quiet, only warnings and errors are output, not basic progress messages. (With --quiet, a successful test run would produce no output at all.) Finally, to run only selected tests, or groups of tests, the paths of these can be provided as additional command-line arguments. These paths should be relative the testing/tests/ directory. Code Coverage Measurement ------------------------- The testing framework also has support for measuring code coverage during testing. To enable this mode, the testing framework needs to be started with the argument --coverage in addition to any other arguments needed, as described above. This argument causes the testing framework's normal output to go the stderr stream instead of the stdout stream, and code coverage data to be written to the stdout stream when testing has finished. The code coverage data is output in the form of a JSON object structure: { "contexts": [ <context1>, <context2>, ... ], <module path>: { <context1>: [ <line1>, <line2>, ... ], <context2>: [ <line1>, <line2>, ... ], ... }, ... } The <contextN> values are strings identifying the context in which the covered code was called, such as "wsgi" for code called via the web-frontend and "changeset", "highlight", et c. for code called via background services. 
For each (covered) source code file, coverage is then reported as an array of covered lines per context. The line numbers in the array are zero-based. Test Structure -------------- The actual tests are Python scripts in sub-directories of the testing/tests/ directory. All file and directory names under testing/tests/ should, by convention, begin with three digits, followed by a '-', followed by a short identifier of the test or test group. Files and directories are sorted according to the three-digit number in their name, and processed in that order. A directory is processed by processing all files and directories under it, recursively. A file whose name ends with ".py" is processed by executing it (using execfile()). All other files are ignored. There should be no files directly in the testing/tests/ directory. The immediate sub-directories of testing/tests/ are "top-level test groups" and are significant in that each one starts with a clean, restarted VirtualBox instance. There should be a test (typically the first one) in each top-level test group that calls "instance.install()" to install Critic in the VirtualBox instance, and one test (possibly also the first one) that calls "instance.upgrade()" to upgrade to the tested commit. (The "instance.upgrade()" call is a no-op unless the testing framework was started with the --upgrade-from argument.) The organization of tests into test groups is mostly free, but there is one detail worth noting: the directory tree layout implicitly defines dependencies between tests, as such: a test B depends on a test A (IOW, test A must run successfully in order for test B to be runnable) if test A runs before B (due to the basic sorting described above) and is either in the top-level group or is in an ancestor group of test B. 
For instance, given the tests 001-main/001-testA.py 001-main/002-groupB/001-testB1.py 001-main/002-groupB/002-testB2.py 001-main/003-testC.py 001-main/004-groupD/001-testD.py the test 001-testA.py is a dependency of all other tests, and 003-testC.py is a dependency of the test 004-groupD/001-testD.py, but the tests under 002-groupB/ are not dependencies of the tests 003-testC.py or 001-testD.py, despite normally executing before them, and the test 001-testB1.py is not a dependency of the test 001-testB2.py. ================================================ FILE: testing/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import os
import re
import subprocess


class Error(Exception):
    """Base class for all errors raised by the testing framework."""
    pass

class InstanceError(Error):
    """Error raised when VM instance is in unexpected/unknown state."""
    pass

class TestFailure(Error):
    """Error raised for "expected" test failures."""
    pass

class CommandError(InstanceError):
    """Error raised when an executed command fails."""

    def __init__(self, argv, stdout, stderr=None):
        self.argv = argv
        self.command = " ".join(argv)
        self.stdout = stdout
        self.stderr = stderr

class CriticctlError(TestFailure):
    """Error raised for failed criticctl usage."""

    def __init__(self, command, stdout, stderr=None):
        super(CriticctlError, self).__init__(
            "CriticctlError: %s\nOutput:\n%s" % (command, stderr or stdout))
        self.command = command
        self.stdout = stdout
        self.stderr = stderr

class NotSupported(Error):
    """Error raised when a test is unsupported."""
    pass

class User(object):
    """A user known to the tested Critic instance."""

    # Matches the 'var user = new User(<id>, <name>, ...' statement emitted
    # into page scripts by the web front-end.
    RE_DEFINITION = re.compile('var user = new User\\(([^,]+), ([^,]+),')

    def __init__(self, user_id, name):
        self.id = user_id
        self.name = name

    def __eq__(self, other):
        if isinstance(other, User):
            return self.id == other.id and self.name == other.name
        return False

    def __repr__(self):
        if self.id is None:
            return "<anonymous user>"
        # BUG FIX: '%' formatting with named fields requires a mapping on the
        # right-hand side, but 'self' is not one (no __getitem__), so this
        # used to raise TypeError.  Supply the fields explicitly instead.
        return "<user '%(name)s' (%(id)d)>" % {"name": self.name,
                                               "id": self.id}

    @staticmethod
    def from_script(script):
        """Parse a User out of page script text, or return None.

        Returns the anonymous user if the script declares a null user.
        """
        match = User.RE_DEFINITION.match(script)
        if match:
            if match.groups() == ("null", "null"):
                return User.anonymous()
            return User(int(match.group(1)), eval(match.group(2)))

    @staticmethod
    def anonymous():
        return User(None, None)

class Instance(object):
    flags_on = []
    flags_off = []

    # The VirtualBox instance sets this depending on arguments. Other modes
    # don't support it, so default to False.
    test_extensions = False

    # This is used to keep track of which commit is currently running. This is
    # really only relevant for VM instances when upgrading from an older commit,
    # so only testing.virtualbox.Instance actually sets this.
    # Commit currently installed in the instance; set only by
    # testing.virtualbox.Instance when upgrade testing is in use.
    current_commit = None

    def __init__(self):
        self.resetusers()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Never suppress exceptions raised inside the with-block.
        return False

    def resetusers(self):
        """Forget all registered users."""
        self.__users = []
        self.__user_map = {}

    def registeruser(self, name):
        """Register a user; ids are assigned sequentially starting at 1."""
        user_id = len(self.__users) + 1
        user = User(user_id, name)
        self.__users.append(user)
        # Map both by id and by name so user() accepts either key.
        self.__user_map[user_id] = user
        self.__user_map[name] = user

    def user(self, key):
        """Look up a registered user by id or by name."""
        return self.__user_map[key]

    def userid(self, name):
        return self.user(name).id

    def filter_service_log(self, service_name, level="warning"):
        """Return log entries of at least 'level' for a single service."""
        data = self.filter_service_logs(level, [service_name])
        if data is None:
            return []
        return data.get(service_name)

    def check_service_logs(self, level="warning"):
        """Log an error for unexpected entries in any background service log."""
        data = self.filter_service_logs(level, ["branchtracker", "changeset",
                                                "githook", "highlight",
                                                "maildelivery", "maintenance",
                                                "servicemanager", "watchdog"])
        if data is None:
            return
        for service_name, entries in data.items():
            lines = "\n".join(entries)
            logger.error(
                "%s: service log contains unexpected entries:\n %s"
                % (service_name, "\n ".join(lines.splitlines())))

    def executeProcess(self, args, log_stdout=True, log_stderr=True, **kwargs):
        """Run a command, log its output, and return its stdout.

        Raises CommandError if the command can't be started or exits with a
        non-zero status.
        """
        try:
            process = subprocess.Popen(
                args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                **kwargs)
        except OSError as error:
            raise CommandError(args, None, str(error))
        stdout, stderr = process.communicate()
        if stdout.strip() and log_stdout:
            logger.log(STDOUT, stdout.rstrip("\n"))
        if stderr.strip() and log_stderr:
            logger.log(STDERR, stderr.rstrip("\n"))
        if process.returncode != 0:
            raise CommandError(args, stdout, stderr)
        return stdout

    def translateUnittestPath(self, module):
        """Map a dotted unittest module name to a path under src/."""
        path = module.split(".")
        if path[0] == "api":
            # API unittests are under api/impl/.
path.insert(1, "impl") path = os.path.join(*path) if os.path.isdir(os.path.join("src", path)): path = os.path.join(path, "unittest.py") else: path += "_unittest.py" return path def unittest(self, module, tests, args=None): path = self.translateUnittestPath(module) if not args: args = [] for test in tests: logger.info("Running unit test: %s (%s)" % (module, test)) try: output = self.run_unittest([path] + args + [test]) lines = output.strip().splitlines() expected = test + ": ok" matching = filter(lambda line: line == expected, lines) if len(lines) == 0: logger.warning("No unit test output: %s (%s)") elif len(matching) == 0: logger.warning("No unit test confirmation (but some output): %s (%s)" % (module, test)) elif len(matching) > 1: logger.warning("Multiple unit test confirmations: %s (%s)" % (module, test)) if lines and (lines[-1] != expected): logger.warning("Unit test's last line of output isn't unit test confirmation: %s (%s)" % (module, test)) if len(lines) > 0: [logger.info(line) for line in lines[:-1]] except CommandError as error: output = "\n ".join(error.stderr.splitlines()) logger.error( "Unit tests failed: %s: %s\nCommand: %s\nOutput:\n %s" % (module, test, error.command, output)) import local import virtualbox import frontend import expect import repository import mailbox import findtests import utils import quickstart logger = None STREAM = None STDOUT = None STDERR = None def configureLogging(arguments=None, wrap=None): import logging import sys global logger, STREAM, STDOUT, STDERR if not logger: # Essentially same as DEBUG, used when logging the output from commands # run in the guest system. 
        STDOUT = logging.DEBUG + 1
        STDERR = logging.DEBUG + 2

        logging.addLevelName(STDOUT, "STDOUT")
        logging.addLevelName(STDERR, "STDERR")

        # In coverage mode, stdout carries the coverage data, so logging
        # must go to stderr instead.
        if arguments and getattr(arguments, "coverage", False):
            STREAM = sys.stderr
        else:
            STREAM = sys.stdout

        logging.basicConfig(
            format="%(asctime)-15s | %(levelname)-7s | %(message)s",
            stream=STREAM)

        logger = logging.getLogger("critic")

        level = logging.INFO
        if arguments:
            if getattr(arguments, "debug", False):
                level = logging.DEBUG
            elif getattr(arguments, "quiet", False):
                level = logging.WARNING
        logger.setLevel(level)

        if wrap:
            logger = wrap(logger)

    return logger

def pause(prompt="Press ENTER to continue: "):
    # Interactive pause; uses Python 2 print-to-stream syntax.
    print >>STREAM
    try:
        print >>STREAM, prompt,
        raw_input()
    except KeyboardInterrupt:
        print >>STREAM
        print >>STREAM
        raise
    print >>STREAM

class Context(object):
    # Generic context manager calling 'start' on entry, 'finish' on exit.
    def __init__(self, start, finish):
        self.start = start
        self.finish = finish

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, *args):
        self.finish()
        return False

def exists_at(commit, path):
    # True if 'path' exists in the tree of 'commit'.
    lstree = subprocess.check_output(["git", "ls-tree", commit, path])
    return bool(lstree.strip())

def has_flag(commit, flag):
    # Whether the given commit of Critic supports a testing feature flag.
    if flag == "minimum-password-hash-time":
        # Older flag predating testing/flags/; detect it by grepping for the
        # install-script option it introduced.
        try:
            subprocess.check_call(
                ["git", "grep", "--quiet", "-e", "--minimum-password-hash-time",
                 commit, "--", "installation/config.py"])
        except subprocess.CalledProcessError:
            return False
        else:
            return True
    else:
        return exists_at(commit, "testing/flags/%s.flag" % flag)

================================================ FILE: testing/__main__.py ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
# You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

# Entry point for running the framework as 'python -m testing'.
import main

main.main()

================================================ FILE: testing/expect.py ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import re import traceback import testing def extract_text(source): result = u"" if source: if isinstance(source, list): for element in source: result += extract_text(element) elif isinstance(source, basestring): result += source elif getattr(source, "string"): result += source.string elif getattr(source, "contents"): result += extract_text(source.contents) else: result += "[%r]" % source return result def deunicode(v): if type(v) == unicode: return v.encode("utf-8") elif type(v) == list: return map(deunicode, v) elif type(v) == dict: return dict([(deunicode(a), deunicode(b)) for a, b in v.items()]) else: return v class FailedCheck(testing.TestFailure): def __init__(self, expected, actual, location=None, message=None): if message is None: message = "check failed" if location is not None: message += ":\n At %s:%d" % location[0] for filename, linenr in location[1:]: message += ",\n called from %s:%d" % (filename, linenr) super(FailedCheck, self).__init__( "%s:\n Expected: %r,\n Actual: %r" % (message, expected, deunicode(actual))) self.expected = expected self.actual = actual @staticmethod def current_location(): location = [] for filename, linenr, _, _ in reversed(traceback.extract_stack()): if filename.startswith("testing/tests/"): location.append((filename[len("testing/tests/"):], linenr)) elif location: break else: location = None return location def simple_equal(expected, actual): return expected == actual def equal(expected, actual, equal=simple_equal, message=None): if not equal(expected, actual): location = FailedCheck.current_location() raise FailedCheck(expected, actual, location=location, message=message) def true(actual, message): if not (actual is True): location = FailedCheck.current_location() raise FailedCheck(True, actual, location=location, message=message) def false(actual, message): if not (actual is False): location = FailedCheck.current_location() raise FailedCheck(False, actual, location=location, message=message) def none(actual, message): if 
not (actual is None): location = FailedCheck.current_location() raise FailedCheck(None, actual, location=location, message=message) # For backwards compatibility... check = equal def with_class(*names): def check(value): if value is None: return False tokens = set(value.split()) for name in names: if name not in tokens: return False return True return { "class": check } def find_paleyellow(document, index): """Find index:th ".paleyellow" in the document.""" tables = document.findAll(attrs=with_class("paleyellow")) if index >= len(tables): raise FailedCheck("<paleyellow: index=%d>" % index, "<no paleyellow: count=%d>" % len(tables)) return tables[index] def document_title(expected): """Return <title> checker.""" return lambda document: check(expected, document.title.string) def paleyellow_title(index, expected): """Return index:th ".paleyellow" title checker.""" def checker(document): table = find_paleyellow(document, index) actual = "<no title found>" h1 = table.find("h1") if h1 and h1.contents: actual = h1.contents[0] return check(expected, actual) return checker def message(expected_title, expected_body, title_equal=simple_equal, body_equal=simple_equal): """Return <div class="message"> title checker.""" def checker(document): message = document.find( "div", attrs={ "class": lambda value: "message" in value.split() }) actual_title = None actual_body = None if message: title = message.find("h1") actual_title = extract_text(title) if expected_body is not None: body = message.find("p") actual_body = extract_text(body) if not actual_title: actual_title = "<no message title found>" check(expected_title, actual_title, equal=title_equal, message="title check failed") if expected_body is not None: if not actual_body: actual_body = "<no message body found>" check(expected_body, actual_body, equal=body_equal, message="body check failed") return checker def message_title(expected_title): return message(expected_title, None) def no_message(): """Return negative <div 
class="message"> checker.""" def checker(document): message = document.find( "div", attrs={ "class": lambda value: "message" in value.split() }) if message: actual = "<message: %s>" % message.find("h1").contents[0] else: actual = "<no message found>" return check("<no message found>", actual) return checker def pageheader_links(*scopes): scopes = set(scopes) expected = [] for label, scope in [("Home", "authenticated"), ("Dashboard", None), ("Branches", None), ("Search", None), ("Services", "administrator"), ("Repositories", "administrator"), ("Extensions(?: \\(\\d+\\))?", "extensions"), ("Config", None), ("Tutorial", None), ("News(?: \\(\\d+\\))?", None), ("Sign in", "anonymous"), ("Sign out", "authenticated"), ("Back to Review", "review")]: if scope is None or scope in scopes: expected.append(label) def checker(document): pageheader = document.find("table", attrs={ "class": "pageheader" }) actual = [] for link in pageheader.find("ul").findAll("a"): actual.append(link.string) return check(",".join(expected), ",".join(actual), equal=re.match) return checker def script_user(expected): def checker(document): for script in document.findAll("script"): if script.string: actual = testing.User.from_script(script.string) if actual: testing.expect.equal(expected, actual) return raise FailedCheck(expected, "<no user found>") return checker def script_anonymous_user(): return script_user(testing.User.anonymous()) def script_no_user(): def checker(document): for script in document.findAll("script"): if script.string: actual = testing.User.from_script(script.string) if actual: raise FailedCheck("<no user found>", actual) return checker ================================================ FILE: testing/findtests.py ================================================ import os import re import fnmatch import testing TESTS = None TESTS_BY_FILENAME = {} def automaticDependencies(filename): this_dirname = os.path.dirname(filename) for test in TESTS: other_dirname = 
os.path.dirname(test.filename) if this_dirname.startswith(other_dirname): if os.path.sep not in other_dirname \ or len(other_dirname) < len(this_dirname): yield test RE_DEPENDENCY = re.compile(r"#\s+@dependency\s+([^\s]+)") RE_FLAG = re.compile(r"#\s+@flag\s+([-\w]+)") RE_IGNORE = re.compile(r"(?:\s*#.*)?\s*$") class Test(object): def __init__(self, filename): self.filename = filename self.groups = [] dirname = filename while True: dirname, basename = os.path.split(dirname) if not dirname: break self.groups.insert(0, dirname) self.dependencies = set() self.flags = set() has_dependency_declarations = [] def process_file(path): path = os.path.join("testing", "tests", path) if not os.path.isfile(path): return with open(path) as source_file: for index, line in enumerate(source_file): match = RE_DEPENDENCY.match(line) if match: has_dependency_declarations.append(True) dependency = match.group(1) if dependency == "none": pass elif dependency not in TESTS_BY_FILENAME: testing.logger.error( "%s:%d: invalid depdency: %s" % (filename, index + 1, dependency)) else: self.dependencies.add(TESTS_BY_FILENAME[dependency]) continue match = RE_FLAG.match(line) if match: self.flags.add(match.group(1)) continue match = RE_IGNORE.match(line) if not match: break process_file(filename) dirname = filename while True: dirname = os.path.dirname(dirname) if not dirname: break process_file(os.path.join(dirname, "__init__.py")) if not has_dependency_declarations: self.dependencies.update(automaticDependencies(filename)) TESTS.append(self) TESTS_BY_FILENAME[self.filename] = self def __str__(self): return self.filename def __hash__(self): return hash(self.filename) def __eq__(self, other): return self.filename == str(other) def __repr__(self): return "Test(%r): %r" % (self.filename, sorted([test.filename for test in self.dependencies])) def findTests(): global TESTS RE_TEST_FILENAME = re.compile(r"/\d\d\d-[^/]*\.py$") RE_IGNORE_FILENAME = re.compile(r"(?:/__init__.py|~)$") TESTS = [] def 
traverse(dirname): for filename in sorted(os.listdir(dirname)): filename = os.path.join(dirname, filename) if os.path.isdir(filename): traverse(filename) elif RE_TEST_FILENAME.search(filename): Test(os.path.relpath(filename, "testing/tests")) elif not RE_IGNORE_FILENAME.search(filename): testing.logger.warning( "%s: unexpected non-test file under testing/tests/" % filename) traverse("testing/tests") def filterPatterns(patterns): RE_LEADING_TESTS = re.compile("^(?:testing/)?tests(?:/|$)") patterns = [RE_LEADING_TESTS.sub("", pattern) for pattern in patterns] patterns = [pattern.rstrip("/") for pattern in patterns] patterns = filter(None, patterns) return patterns def selectTests(patterns, strict, flags_on=set(), flags_off=set()): if TESTS is None: findTests() patterns = filterPatterns(patterns) if not patterns and not flags_on and not flags_off: return TESTS, set() selected = set() dependencies = set() def select(test, is_dependency=False): if test in selected: # Test already selected. return selected.add(test.filename) if strict: # Don't select dependencies when strict=True. return if is_dependency: dependencies.add(test.filename) for dependency in test.dependencies: select(dependency, True) for test in TESTS: if flags_on - test.flags: continue if flags_off & test.flags: continue if patterns: for pattern in patterns: filename = test.filename while filename: if fnmatch.fnmatch(filename, pattern): select(test) break if strict: break filename = os.path.dirname(filename) if test in selected: break else: select(test) return [test for test in TESTS if test in selected], dependencies ================================================ FILE: testing/flags/addrepository-has-mirror-parameter.flag ================================================ The /addrepository has a parameter named 'mirror' (instead of 'remote'). 
================================================
FILE: testing/flags/fixed-batch-preview.flag
================================================
The /showbatch page does not crash in preview mode.
================================================
FILE: testing/flags/is-testing.flag
================================================
The installation (and upgrade) scripts support --is-testing.
================================================
FILE: testing/flags/pwd-independence.flag
================================================
The installation (and upgrade) scripts are independent of $PWD.
================================================
FILE: testing/flags/reliable-admin-newswriter.flag
================================================
The administrator user is given the 'newswriter' role on installation.
================================================
FILE: testing/flags/reliable-git-emails.flag
================================================
Newly created users have their primary email address set as their (only) Git
email address too.
================================================
FILE: testing/flags/system-recipients.flag
================================================
The installation script supports --system-recipient.
================================================
FILE: testing/flags/web-server-integration.flag
================================================
The installation script supports --web-server-integration.
================================================
FILE: testing/frontend.py
================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import json
import contextlib
import urllib

try:
    import requests
    import BeautifulSoup
except ImportError:
    # testing/main.py detects and abort if either of these are missing, so just
    # ignore errors here.
    pass

import testing

class Error(testing.TestFailure):
    # Base class for all frontend (HTTP) test failures; records the URL.
    def __init__(self, url, message):
        super(Error, self).__init__("page '%s' failed: %s" % (url, message))
        self.url = url

class HTTPError(Error):
    # Unexpected HTTP status code; `body` (when given) is appended for context.
    def __init__(self, url, expected, actual, body=None):
        message = "HTTP status differs: expected=%r, actual=%r" % (expected, actual)
        if body:
            message += "\n" + body
        super(HTTPError, self).__init__(url, message)
        self.expected = expected
        self.actual = actual

class PageError(Error):
    # A page check (keyed by `key`) produced an unexpected value.
    def __init__(self, url, key, expected, actual):
        super(PageError, self).__init__(
            url, "%s differs: expected=%r, actual=%r" % (key, expected, actual))
        self.key = key
        self.expected = expected
        self.actual = actual

class OperationError(Error):
    # An operation (JSON POST endpoint) failed or returned unexpected data.
    def __init__(self, url, message=None, key=None, expected=None, actual=None):
        if message is None:
            message = ""
        if key:
            message += "%s differs: expected=%r, actual=%r" % (key, expected, actual)
        super(OperationError, self).__init__(url, message)
        self.key = key
        self.expected = expected
        self.actual = actual

class SessionBase(object):
    # Strategy object deciding how requests are authenticated.  The default
    # expects no session: receiving a "sid" cookie is an error.
    def apply(self, kwargs):
        # Hook: mutate the requests.request() kwargs before sending.
        pass
    def process_response(self, response):
        if "sid" in response.cookies:
            raise Error(response.url, "unexpected session cookie set")

class NoSession(SessionBase):
    pass

class CookieSession(SessionBase):
    # Cookie-based session: captures the "sid" cookie from responses and
    # replays it on subsequent requests.
    def __init__(self, sid=None):
        self.sid = sid
    def apply(self, kwargs):
        if self.sid is not None:
            headers = kwargs.setdefault("headers", {})
            headers["Cookie"] = "sid=%s; has_sid=1" % self.sid
    def process_response(self, response):
        for name, value in response.cookies.items():
            if name == "sid":
                self.sid = value
            elif name == "has_sid" and value == "0":
                # This means we've signed out. The response would also have
                # deleted the "sid" cookie, but unfortunately we can't really
                # get that information from the response.
                self.sid = None

class HTTPAuthSession(SessionBase):
    # HTTP basic authentication on every request (also used for access
    # tokens, whose two parts act as username/password).
    def __init__(self, username, password):
        self.username = username
        self.password = password
    def apply(self, kwargs):
        kwargs["auth"] = (self.username, self.password)

class Frontend(object):
    """HTTP client wrapper used by tests to access a Critic instance: fetches
       pages, executes operations and calls the JSON API, applying the current
       session's authentication and running declarative result checks."""

    def __init__(self, hostname, http_port=8080):
        self.hostname = hostname
        self.http_port = http_port
        # Stack of session strategies; the innermost (last) one is active.
        self.sessions = [NoSession()]
        self.instance = None

    @property
    def current_session(self):
        return self.sessions[-1]

    def prefix(self, username=None):
        # Base URL, optionally with a "user@" part embedded.
        if username:
            username += "@"
        else:
            username = ""
        return "http://%s%s:%d" % (username, self.hostname, self.http_port)

    def page(self, url, params={}, expect={}, expected_content_type="text/html",
             expected_http_status=200, disable_redirects=False,
             post=None, put=None, delete=False):
        # Fetch a page and (for HTML) parse it with BeautifulSoup and run the
        # `expect` checkers against the parsed document.  Returns the parsed
        # document (or raw text), None on an expected error status, or the
        # raw response when a redirect occurred and disable_redirects=True.
        full_url = "%s/%s" % (self.prefix(), url)
        log_url = full_url
        if params:
            query = urllib.urlencode(sorted(params.items()))
            log_url = "%s?%s" % (log_url, query)
        testing.logger.debug("Fetching page: %s ..." % log_url)
        kwargs = {}
        self.current_session.apply(kwargs)
        # Method is inferred from which of post/put/delete is provided.
        if post is not None:
            kwargs["data"] = post
            method = "POST"
        elif put is not None:
            kwargs["data"] = put
            method = "PUT"
        elif delete:
            method = "DELETE"
        else:
            method = "GET"
        response = requests.request(
            method, full_url, params=params,
            allow_redirects=not disable_redirects,
            **kwargs)
        self.current_session.process_response(response)
        def text(response):
            # Compatibility shim across requests versions: .text may be a
            # property or a method, or missing entirely.
            if hasattr(response, "text"):
                if callable(response.text):
                    return response.text()
                else:
                    return response.text
            else:
                return response.content
        if isinstance(expected_http_status, int):
            expected_http_status = [expected_http_status]
        try:
            if response.status_code not in expected_http_status:
                if response.headers["content-type"].startswith("text/plain"):
                    body = text(response)
                else:
                    body = None
                raise HTTPError(url, expected_http_status,
                                response.status_code, body)
        except testing.TestFailure as error:
            testing.logger.error("Page '%s': %s" % (url, error.message))
            raise testing.TestFailure
        if response.status_code >= 400 and 200 in expected_http_status:
            # The caller expected a successful load or an error. Signal errors
            # by returning None.
            return None
        if response.status_code >= 300 and response.status_code < 400 \
                and disable_redirects:
            # Redirection, and the caller disabled following it. The caller is
            # interested in the redirect itself, so return the whole response.
            return response
        testing.logger.debug("Fetched page: %s" % log_url)
        document = text(response)
        content_type, _, _ = response.headers["content-type"].partition(";")
        if isinstance(expected_content_type, str):
            expected_content_type = (expected_content_type,)
        if response.status_code == 200:
            if content_type not in expected_content_type:
                testing.logger.error(
                    "Page '%s': wrong content type: %s" % (url, content_type))
                raise testing.TestFailure
            if content_type == "text/html":
                document = BeautifulSoup.BeautifulSoup(document)
                # A <div class="fatal"> indicates a crash during incremental
                # page generation even though the status code was 200.
                div_fatal = document.find("div", attrs={ "class": "fatal" })
                if div_fatal:
                    message = div_fatal.find("pre")
                    testing.logger.error(
                        "Page '%s': crash during incremental page generation:\n%s"
                        % (url, message.string if message else "<no message found>"))
                    raise testing.TestFailure
                if expect:
                    testing.logger.debug("Checking page: %s ..." % log_url)
                    failed_checks = False
                    for key, check in expect.items():
                        try:
                            check(document)
                        except testing.expect.FailedCheck as failed_check:
                            # Dump the raw page to aid debugging.
                            print text(response)
                            testing.logger.error(
                                "Page '%s', test '%s': %s"
                                % (url, key, failed_check.message))
                            failed_checks = True
                        except Exception as error:
                            raise Error(url, "'%s' checker failed: %s"
                                             % (key, str(error)))
                    if failed_checks:
                        raise testing.TestFailure
                    testing.logger.debug("Checked page: %s ..." % log_url)
        return document

    def operation(self, url, data, expect={}):
        # Execute a Critic "operation": POST JSON data and check the JSON
        # reply.  With expect=None the raw response body is returned without
        # any parsing or checking.
        full_url = "%s/%s" % (self.prefix(), url)
        testing.logger.debug("Executing operation: %s ..." % full_url)
        kwargs = {}
        self.current_session.apply(kwargs)
        if not isinstance(data, basestring):
            data = json.dumps(data)
        kwargs.setdefault("headers", {})["Content-Type"] = "text/json"
        response = requests.post(full_url, data=data, **kwargs)
        try:
            if response.status_code != 200:
                raise HTTPError(url, 200, response.status_code)
            if expect is None:
                result = response.content
            elif hasattr(response, "json"):
                # requests version compatibility: .json may be a method or a
                # property (older versions return None on parse failure).
                if callable(response.json):
                    try:
                        result = response.json()
                    except:
                        raise OperationError(
                            url, message="malformed response (not JSON)")
                else:
                    result = response.json
                    if result is None:
                        raise OperationError(
                            url, message="malformed response (not JSON)")
            else:
                try:
                    result = json.loads(response.content)
                except ValueError:
                    raise OperationError(
                        url, message="malformed response (not JSON)")
        except testing.TestFailure as error:
            testing.logger.error("Operation '%s': %s" % (url, error.message))
            raise testing.TestFailure
        self.current_session.process_response(response)
        testing.logger.debug("Executed operation: %s" % full_url)
        if expect is not None:
            testing.logger.debug("Checking operation: %s" % full_url)
            # Check result["status"] first; if it doesn't have the expected value,
            # it's likely all other expected keys are simply missing from the
            # result, and thus produce rather meaningless errors.
            expected = expect.get("status", "ok")
            actual = result.get("status")
            if actual != expected:
                if actual == "error":
                    extra = "\nError:\n %s" % "\n ".join(
                        result.get("error").splitlines())
                elif actual == "failure":
                    extra = " (code=%r)" % result.get("code")
                else:
                    extra = ""
                testing.logger.error(
                    "Operation '%s', key 'status': check failed: "
                    "expected=%r, actual=%r%s" % (url, expected, actual, extra))
                raise testing.TestFailure
            failed_checks = False
            # Then check any other expected keys.
for key, expected in expect.items(): if key != "status": actual = result.get(key) if callable(expected): checked = expected(actual) if checked: expected, actual = checked else: continue if expected != actual: testing.logger.error( "Operation '%s', key '%s': check failed: " "expected=%r, actual=%r" % (url, key, expected, actual)) failed_checks = True if failed_checks: raise testing.TestFailure testing.logger.debug("Checked operation: %s" % full_url) return result def json(self, path, expect=None, params={}, expected_http_status=200, post=None, put=None, delete=False): url = "api/v1/" + path full_url = "http://%s:%d/%s" % (self.hostname, self.http_port, url) log_url = full_url if params: query = urllib.urlencode(sorted(params.items())) log_url = "%s?%s" % (log_url, query) kwargs = { "params": params, "headers": { "Accept": "application/vnd.api+json" } } method = "GET" self.current_session.apply(kwargs) if post is not None: method = "POST" kwargs["data"] = json.dumps(post) elif put is not None: method = "PUT" kwargs["data"] = json.dumps(put) elif delete: method = "DELETE" testing.logger.debug("Accessing JSON API: %s %s ..." % (method, log_url)) response = requests.request(method, full_url, **kwargs) testing.logger.debug("Accessed JSON API: %s %s ..." 
% (method, log_url)) self.current_session.process_response(response) def response_json(): if hasattr(response, "json"): if callable(response.json): try: result = response.json() except: raise OperationError(url, message="malformed response (not JSON)") else: result = response.json if result is None: raise OperationError(url, message="malformed response (not JSON)") else: try: result = json.loads(response.content) except ValueError: raise OperationError(url, message="malformed response (not JSON)") return result if isinstance(expected_http_status, int): expected_http_status = [expected_http_status] try: if response.status_code not in expected_http_status: if response.status_code in (400, 404): try: error = response_json()["error"] except OperationError: testing.logger.exception("Unexpected response") except KeyError: testing.logger.error("Malformed JSON error response") else: testing.logger.error( "JSON error:\n Title: %s\n Message: %s" % (error["title"], error["message"])) raise HTTPError(url, expected_http_status, response.status_code) if response.status_code == 204: # No content. 
return None if hasattr(response, "json"): if callable(response.json): try: result = response.json() except: raise OperationError(url, message="malformed response (not JSON)") else: result = response.json if result is None: raise OperationError(url, message="malformed response (not JSON)") else: try: result = json.loads(response.content) except ValueError: raise OperationError(url, message="malformed response (not JSON)") except testing.TestFailure as error: testing.logger.error("JSON '%s': %s" % (path, error.message)) raise testing.TestFailure def deunicode(value): if isinstance(value, list): return [deunicode(v) for v in value] elif isinstance(value, dict): return { deunicode(k): deunicode(v) for k, v in value.items() } elif isinstance(value, unicode): return value.encode("utf-8") return value result = deunicode(result) if expect is None: return result testing.logger.debug("Checking JSON: %s" % log_url) errors = [] def describe(value): if isinstance(value, dict) or value is dict: return "object" if isinstance(value, list) or value is list: return "array" if isinstance(value, set): return "one of: " % ",".join(sorted(value)) if isinstance(value, type): return { int: "integer", float: "float", str: "string" }[value] if isinstance(value, (str, int, float)): return repr(value) if value is None: return "null" return "unexpected" def check_object(path, expected, actual): if not isinstance(actual, dict): errors.append("%s: value is %s, expected object" % (path, describe(actual))) return if expected is dict: return expected_keys = set(expected.keys()) actual_keys = set(actual.keys()) if "*" in expected_keys: expected_keys.remove("*") elif actual_keys - expected_keys: errors.append("%s: unexpected keys: %r" % (path, tuple(actual_keys - expected_keys))) if expected_keys - actual_keys: errors.append("%s: missing keys: %r" % (path, tuple(expected_keys - actual_keys))) for key in sorted(expected_keys & actual_keys): check("%s/%s" % (path, key), expected[key], actual[key]) def 
check_array(path, expected, actual): if not isinstance(actual, list): errors.append("%s: value is %s, expected array" % (path, describe(actual))) if expected is list: return if len(actual) != len(expected): errors.append("%s: wrong array length: got %s, expected %s" % (path, len(actual), len(expected))) return for index, (expected, actual) in enumerate(zip(expected, actual)): check("%s[%d]" % (path, index), expected, actual) def check_set(path, expected, actual): if not isinstance(actual, str): errors.append("%s: value is %s, expected string" % (path, describe(actual))) if actual not in expected: errors.append("%s: value is %s, expected %s" % (path, describe(actual), describe(expected))) def check_null(path, actual): if actual is not None: errors.append("%s: value is %s, expected null" % (path, describe(actual))) def check_value(path, expected, actual): if isinstance(actual, (dict, list)): errors.append("%s: value is %s, expected %s" % (path, describe(actual), describe(expected))) if isinstance(expected, type): if not isinstance(actual, expected): errors.append("%s: wrong value: got %r, expected %r" % (path, actual, describe(expected))) elif actual != expected: errors.append("%s: wrong value: got %r, expected %r" % (path, actual, expected)) def check(path, expected, actual): errors_before = len(errors) if callable(expected) and not isinstance(expected, type): errors.extend(expected(path, actual, check) or ()) elif isinstance(expected, dict) or expected is dict: check_object(path, expected, actual) elif isinstance(expected, list) or expected is list: check_array(path, expected, actual) elif isinstance(expected, set): check_set(path, expected, actual) elif expected is None: check_null(path, actual) else: check_value(path, expected, actual) return errors_before == len(errors) check(path, expect, result) if errors: testing.logger.error("Wrong JSON received for %s:\n %s" % (path, "\n ".join(errors))) testing.logger.error("Received JSON: %r" % result) 
testing.logger.debug("Checked JSON: %s" % log_url) return result @contextlib.contextmanager def cookie_session(self, signout): if self.current_session.sid is None: testing.expect.check("<signed in>", "<no session cookie received>") testing.logger.debug("Starting cookie session") try: yield finally: # Sign out unless we seem to have signed out already. Some tests may # want to do the signout explicitly, which is fine. if self.current_session.sid is not None: try: signout() except testing.TestFailure as failure: if failure.message: testing.logger.error(failure.message) except Exception: testing.logger.exception("Failed to sign out!") if self.current_session.sid is not None: testing.expect.check("<signed out>", "<session cookie not removed>") # Dropping the cookie effectively signs out even if the "endsession" # operation failed. self.sessions.pop() testing.logger.debug("Ended cookie session") @contextlib.contextmanager def no_session(self): self.sessions.append(NoSession()) try: yield finally: self.sessions.pop() def collect_session_cookie(self): self.sessions.append(CookieSession()) def validatelogin(self, username, password, expect_failure=False): data = { "fields": { "username": username, "password": password }} # Check if the current commit predates the user authentication # restructuring that added the "fields" wrapper. 
if self.instance.current_commit: if not testing.exists_at( self.instance.current_commit, "src/auth/database.py"): data = data["fields"] if expect_failure: expect = { "message": expect_failure } else: expect = { "message": None } self.operation( "validatelogin", data=data, expect=expect) @contextlib.contextmanager def signin(self, username="admin", password="testing", use_httpauth=False, use_json_api=False, access_token=None): if access_token: username = access_token["part1"] password = access_token["part2"] use_httpauth = True if use_httpauth: self.sessions.append(HTTPAuthSession(username, password)) try: yield finally: self.sessions.pop() else: with self.no_session(): self.collect_session_cookie() if use_json_api: self.json( "sessions", post={ "username": username, "password": password }, expect={ "user": self.instance.userid(username), "type": "normal", "*": "*" }) def signout(): self.json( "sessions/current", delete=True, expected_http_status=204) else: self.validatelogin(username, password) def signout(): self.operation("endsession", data={}) with self.cookie_session(signout): yield def run_basic_tests(self): # The /tutorials page is essentially static content and doesn't require # a signed in user, so a good test-case for checking if the site is up # and accessible at all. self.page("tutorial", expect={ "document_title": testing.expect.document_title(u"Tutorials"), "content_title": testing.expect.paleyellow_title(0, u"Tutorials") }) # The /validatelogin operation is a) necessary for most meaningful # additional testing, and b) a simple enough operation to test. with self.signin(): # Load /home to determine whether /validatelogin successfully signed in # (and that we stored the session id cookie correctly.) 
self.page("home", expect={ "document_title": testing.expect.document_title(u"Testing Administrator's Home"), "content_title": testing.expect.paleyellow_title(0, u"Testing Administrator's Home") }) ================================================ FILE: testing/input/SystemExtension/MANIFEST ================================================ Author = "Jens Lindstr\u00f6m" Description = "Extension used to test system extension support." [Page check] Description = "Simple page to check that the extension is installed and working." Script = check.js Function = check ================================================ FILE: testing/input/SystemExtension/check.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- */ "use strict"; function check() { writeln(200); writeln("Content-Type: text/json"); writeln(); writeln(JSON.stringify({ "status": "ok" })); } ================================================ FILE: testing/input/SystemExtension/resources/HelloWorld.txt ================================================ Hello world! ================================================ FILE: testing/input/TestExtension/MANIFEST ================================================ Author = "Jens Lindstr\u00f6m" Description = "Extension used to test extension support." [Page echo] Description = "Echoes function argument list in JSON form." Script = echo.js Function = echo [Page evaluate] Description = "Evaluates expression and returns result." Script = evaluate.js Function = evaluate [Page nothandled] Description = "Page that is not handled." Script = nothandled.js Function = nothandled [Page empty] Description = "Empty page." Script = empty.js Function = empty [Page version] Description = "Outputs extension version." Script = version.js Function = version [Page restrictions] Description = "Checks various restrictions." Script = restrictions.js Function = restrictions [Page error.compilation] Description = "A script that doesn't compile." 
Script = error.compilation.js
Function = irrelevant

[Page error.runtime]
Description = "A function that throws an exception."
Script = error.runtime.js
Function = test

[Page Review.list]
Description = "Testing critic.Review.list()."
Script = Review.list.js
Function = test

[Page MailTransaction]
Description = "Testing MailTransaction."
Script = MailTransaction.js
Function = test

[Inject home]
Description = "Basic injection testing."
Script = inject.js
Function = inject

[Inject home]
Description = "Custom injection testing."
Script = inject.js
Function = injectCustom

[Inject critic/*]
Description = "Inject path handling test."
Script = inject.js
Function = showcommitShort

[Inject showcommit]
Description = "Inject path handling test."
Script = inject.js
Function = showcommitLong

[ProcessCommits]
Description = "Basic commits processing testing."
Script = processcommits.js
Function = processcommits

[FilterHook echo]
Description = "Filter hook that echoes its arguments via a mail."
DataDescription = "Some random data."
Script = filterhook.js
Function = filterhook
================================================
FILE: testing/input/TestExtension/MailTransaction.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*- */

"use strict";

/* Exercises critic.MailTransaction: reads a JSON payload from the request,
   queues each described mail, and reports any error message in the JSON
   response (message is null on success). */
function test() {
  var data = JSON.parse(read());
  var transaction = new critic.MailTransaction;
  var message = null;

  try {
    data.mails.forEach(
      function (data) {
        transaction.add(data);
      });

    transaction.finish();
  } catch (error) {
    message = error.message;
  }

  writeln(200);
  writeln("Content-Type: text/json");
  writeln();
  writeln(JSON.stringify({ status: "ok", message: message }));
}
================================================
FILE: testing/input/TestExtension/Review.list.js
================================================
/* -*- mode: js; indent-tabs-mode: nil -*- */

"use strict";

function test() {
  var passed = [];
  var failed = [];

  /* Record a call expected to succeed; its result must have a length. */
  function correct(test, fn) {
    try {
      var reviews = fn();
      passed.push({ test: test,
                    result: format("%d reviews", reviews.length) });
    } catch (error) {
      failed.push({ test: test, message: error.message });
    }
  }

  /* Record a call expected to throw with exactly the `expected` message. */
  function incorrect(test, fn, expected) {
    try {
      fn();
      failed.push({ test: test, message: "no exception thrown" });
    } catch (error) {
      if (error.message != expected)
        failed.push({ test: test,
                      message: format(
                        "wrong error: expected=%r, actual=%r",
                        expected, error.message) });
      else
        passed.push({ test: test, result: "call failed as expected" });
    }
  }

  /* For now we're only testing that the various calls produce correct
     database queries in so far that the database doesn't outright reject
     them. */

  correct("no filtering",
          function () { return critic.Review.list(); });

  correct("filter by repository (instance)",
          function () { return critic.Review.list(
            { repository: new critic.Repository("critic") }); });

  correct("filter by repository (id)",
          function () { return critic.Review.list({ repository: 1 }); });

  correct("filter by repository (name)",
          function () { return critic.Review.list({ repository: "critic" }); });

  correct("filter by state (open)",
          function () { return critic.Review.list({ state: "open" }); });

  correct("filter by state (closed)",
          function () { return critic.Review.list({ state: "closed" }); });

  correct("filter by state (dropped)",
          function () { return critic.Review.list({ state: "dropped" }); });

  correct("filter by owner (instance)",
          function () { return critic.Review.list(
            { owner: new critic.User("alice") }); });

  correct("filter by owner (id)",
          function () { return critic.Review.list({ owner: 1 }); });

  correct("filter by owner (name)",
          function () { return critic.Review.list({ owner: "alice" }); });

  /* Only check 'id' and 'name' variants from now on; the 'instance' variant
     is really just an alternative way to specify the id.
*/ correct("filter by repository (id) and state", function () { return critic.Review.list({ repository: 1, state: "open" }); }); correct("filter by repository (name) and state", function () { return critic.Review.list({ repository: "critic", state: "open" }); }); correct("filter by repository (id) and owner (id)", function () { return critic.Review.list({ repository: 1, owner: 1 }); }); correct("filter by repository (name) and owner (id)", function () { return critic.Review.list({ repository: "critic", owner: 1 }); }); correct("filter by repository (id) and owner (name)", function () { return critic.Review.list({ repository: 1, owner: "alice" }); }); correct("filter by repository (name) and owner (name)", function () { return critic.Review.list({ repository: "critic", owner: "alice" }); }); correct("filter by state and owner (id)", function () { return critic.Review.list({ state: "open", owner: 1 }); }); correct("filter by state and owner (name)", function () { return critic.Review.list({ state: "open", owner: "alice" }); }); incorrect("filter by bogus state", function () { return critic.Review.list({ state: "bogus" }); }, "invalid argument: data.state=\"bogus\" not valid"); writeln(200); writeln("Content-Type: text/json"); writeln(); writeln(JSON.stringify({ status: "ok", passed: passed, failed: failed })); } ================================================ FILE: testing/input/TestExtension/echo.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- */ "use strict"; function echo() { writeln(200); writeln("Content-Type: text/json"); writeln(); writeln(JSON.stringify({ "status": "ok", "arguments": [].slice.call(arguments, 0, 3), "headers": arguments[3], "stdin": read() })); } ================================================ FILE: testing/input/TestExtension/empty.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- */ "use strict"; function empty() { writeln(200); 
writeln("Content-Type: text/plain"); writeln(); } ================================================ FILE: testing/input/TestExtension/error.compilation.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- */ "use strict"; /* Strict mode disallows duplicated parameter names. */ function wrong(x, x) { } function irrelevant() { writeln(200); writeln("Content-Type: text/plain"); writeln(); } ================================================ FILE: testing/input/TestExtension/error.runtime.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- */ "use strict"; function test() { writeln(200); writeln("Content-Type: text/plain"); writeln(); writeln(new critic.User({ name: "nosuchuser" }).fullname); } ================================================ FILE: testing/input/TestExtension/evaluate.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- */ "use strict"; function evaluate() { var data = JSON.parse(read()); writeln(200); writeln("Content-Type: text/json"); writeln(); try { var source = "(function () { " + data["source"] + " })"; var fn = eval(source); writeln(JSON.stringify({ "status": "ok", "result": fn() })); } catch (error) { writeln(JSON.stringify({ "status": "error", "source": source, "error": String(error) })); } } ================================================ FILE: testing/input/TestExtension/filterhook.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- */ "use strict"; function filterhook(data, review, user, commits, files) { files.forEach( function (file) { if (file.path == "015-filterhook/include/explode") throw Error("Boom!"); }); var transaction = new critic.MailTransaction; transaction.add({ to: critic.User.current, subject: "filterhook.js::filterhook()", review: review, body: format("data: %r\n" + "review.id: %d\n" + "user.name: %s\n" + "commits: %r\n" + "files: %r\n", data, review.id, 
user.name, commits.map(function (commit) { return commit.message; }), files.map(function (file) { return file.path; }).sort()) }); transaction.finish(); } ================================================ FILE: testing/input/TestExtension/inject.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- */ "use strict"; function inject() { writeln("script %r", format("data:text/javascript,var injected=%r;", [].slice.call(arguments))); } function showcommitShort() { writeln("script %r", format("data:text/javascript,var showcommitShort=%r;", [].slice.call(arguments))); } function showcommitLong() { writeln("script %r", format("data:text/javascript,var showcommitLong=%r;", [].slice.call(arguments))); } function injectCustom(path, query) { if (query && query.params.expr) { writeln("script %r", format("data:text/javascript,var injectedCustom=%r;", eval(query.params.expr))); } } ================================================ FILE: testing/input/TestExtension/nothandled.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- */ "use strict"; function nothandled() { } ================================================ FILE: testing/input/TestExtension/processcommits.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- */ "use strict"; function processcommits(review, changeset, commitset) { writeln("r/%d", review.id); writeln("%s..%s", changeset.parent.sha1.substring(0, 8), changeset.child.sha1.substring(0, 8)); writeln("%s", commitset.map(function (commit) { return commit.sha1.substring(0, 8); })); } ================================================ FILE: testing/input/TestExtension/resources/hello.world.js ================================================ window.onload = function () { alert("Hello world!"); }; ================================================ FILE: testing/input/TestExtension/resources/helloworld.css 
================================================ h1 { color: lime } ================================================ FILE: testing/input/TestExtension/resources/helloworld.html ================================================ <!DOCTYPE html> <link rel=stylesheet href=helloworld.css> <script src=helloworld.js></script> <h1>Hello world!</h1> ================================================ FILE: testing/input/TestExtension/restrictions.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- */ "use strict"; function checkDatabaseConnection() { try { new PostgreSQL.Connection({ dbname: "critic", user: "critic" }); return "no error"; } catch (error) { return error.message; } } function restrictions() { writeln(200); writeln("Content-Type: text/json"); writeln(); writeln("%r", { status: "ok", database_connection: checkDatabaseConnection() }); } ================================================ FILE: testing/input/TestExtension/version.js ================================================ /* -*- mode: js; indent-tabs-mode: nil -*- */ "use strict"; function version() { writeln(200); writeln("Content-Type: text/plain"); writeln(); write(IO.File.read("version.txt")); } ================================================ FILE: testing/input/customization/githook.py ================================================ import sys import json class Reject(Exception): pass def update(repository_path, ref_name, old_value, new_value): data = json.dumps({ "repository_path": repository_path, "ref_name": ref_name, "old_value": old_value, "new_value": new_value }) if ref_name == "refs/heads/reject-create" and old_value is None: raise Reject("REJECT:" + data) elif ref_name == "refs/heads/reject-delete" and new_value is None: raise Reject("REJECT:" + data) elif ref_name == "refs/heads/reject-update" \ and not (old_value is None or new_value is None): raise Reject("REJECT:" + data) else: sys.stdout.write("ACCEPT:" + data + "\n") 
================================================ FILE: testing/input/customization/linktypes.py ================================================ import linkify class IssueLink(linkify.LinkType): def __init__(self): super(IssueLink, self).__init__("#[0-9]+") def linkify(self, word, context): return "https://issuetracker.example.com/showIssue?id=" + word[1:] IssueLink() ================================================ FILE: testing/input/empty.txt ================================================ ================================================ FILE: testing/input/service_log_filter.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import sys import os import logging import re import json def level_value(level): return getattr(logging, level.upper()) filter_level = level_value(sys.argv[1]) logfile_paths = sys.argv[2:] def include_entry(entry_level): return filter_level <= level_value(entry_level) HEADER = r"\d{4}-\d\d-\d\d \d\d:\d\d:\d\d,\d\d\d - *" RE_ENTRY = re.compile( "{header}(([A-Z]+) - .*?)(?=$|\n{header}[A-Z]+ - )".format(header=HEADER), re.DOTALL) data = {} for logfile_path in logfile_paths: with open(logfile_path) as logfile: log = logfile.read() if os.path.isfile(logfile_path + ".skip"): with open(logfile_path + ".skip") as logfile_skip: skip = int(logfile_skip.read()) else: skip = 0 entries = [] for index, match in enumerate(RE_ENTRY.finditer(log)): if index < skip: continue entry, entry_level = match.groups() if include_entry(entry_level): entries.append(entry) if entries: data[logfile_path] = entries skip = index + 1 with open(logfile_path + ".skip", "w") as logfile_skip: logfile_skip.write(str(skip)) if data: json.dump(data, sys.stdout) sys.exit(0) sys.exit(1) ================================================ FILE: testing/input/service_synchronization_helper.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import sys import os import time pidfile_path, signal, timeout = sys.argv[1:] with open(pidfile_path) as pidfile: pid = int(pidfile.read().strip()) with open(pidfile_path + ".busy", "w"): pass os.kill(pid, int(signal)) deadline = time.time() + int(timeout) while os.path.isfile(pidfile_path + ".busy"): if time.time() > deadline: sys.exit(1) time.sleep(0.01) ================================================ FILE: testing/input/syntaxhighlight/example.cpp ================================================ /* This is a comment that spans multiple lines. */ /* This one does not. */ // Neither does this. /* Or this. */ #if !defined(FOO) # define FOO BAR // Comment # define FOO \ BAR \ FIE #endif int main(int argc, char** argv) { double x = float(5.5) + int(3); char* s = "this is a string"; char c = 'c'; // <= that's a character return x != 10; } ================================================ FILE: testing/local.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2014 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import os import sys import testing class Instance(testing.Instance): flags_on = ["local"] def has_flag(self, flag): return testing.has_flag("HEAD", flag) def run_unittest(self, args): PYTHONPATH = os.path.join(os.getcwd(), "src") argv = [sys.executable, "-u", "-m", "run_unittest"] + args return self.executeProcess(argv, cwd="src", log_stderr=False, env={ "PYTHONPATH": PYTHONPATH }) def filter_service_logs(self, level, service_names): # We have no services. pass ================================================ FILE: testing/mailbox.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import socket import threading import time import re import email import base64 import testing class MissingMail(testing.TestFailure): def __init__(self, criteria): super(MissingMail, self).__init__( "No mail matching %r received" % criteria) self.criteria = criteria class User(object): def __init__(self, name, address): self.name = name self.address = address class Mail(object): def __init__(self, return_path): self.return_path = return_path self.recipient = None self.headers = {} self.lines = [] def header(self, name, default=None): if name.lower() in self.headers: return self.headers[name.lower()][0]["value"] else: return default def all_headers(self): for header_name in sorted(self.headers.keys()): for header in self.headers[header_name]: yield (header["name"], header["value"]) def __str__(self): return "%s\n\n%s" % ("\n".join(("%s: %s" % header) for header in self.all_headers()), "\n".join(self.lines)) class EOF(Exception): pass class Quit(Exception): pass class Error(Exception): pass class ParseError(Error): def __init__(self, line): super(ParseError, self).__init__("line=%r" % line) self.line = line class Client(threading.Thread): def __init__(self, mailbox, client, debug_mails): super(Client, self).__init__() self.mailbox = mailbox self.credentials = mailbox.credentials self.client = client self.client.settimeout(None) self.debug_mails = debug_mails self.buffered = "" self.start() def sendline(self, string): self.client.sendall("%s\r\n" % string) def recvline(self): while "\r\n" not in self.buffered: data = self.client.recv(4096) if not data: raise EOF self.buffered += data line, self.buffered = self.buffered.split("\r\n", 1) return line def expectline(self, pattern): line = self.recvline() match = re.match(pattern, line, re.IGNORECASE) if not match: raise ParseError(line) return match.groups() def handshake(self): self.sendline("220 critic.example.org I'm the Critic Testing Framework") line = self.recvline() if re.match(r"helo\s+(\S+)$", line, 
re.IGNORECASE): if self.credentials: raise Error self.sendline("250 critic.example.org") elif re.match(r"ehlo\s+(\S+)$", line, re.IGNORECASE): if self.credentials: self.sendline("250-critic.example.org") self.sendline("250 AUTH LOGIN") line = self.recvline() match = re.match(r"auth\s+login(?:\s+(.+))?$", line, re.IGNORECASE) if not match: raise ParseError(line) (username_b64,) = match.groups() if not username_b64: self.sendline("334 %s" % base64.b64encode("Username:")) username_b64 = self.recvline() self.sendline("334 %s" % base64.b64encode("Password:")) password_b64 = self.recvline() try: username = base64.b64decode(username_b64) except TypeError: raise Error("Invalid base64: %r" % username_b64) try: password = base64.b64decode(password_b64) except TypeError: raise Error("Invalid base64: %r" % password_b64) if username != self.credentials["username"] \ or password != self.credentials["password"]: raise Error("Wrong credentials: %r / %r" % (username, password)) self.sendline("235 Welcome, %s!" % username) testing.logger.debug("Mailbox: Client authenticated.") else: self.sendline("250 critic.example.org") else: raise Error def receive(self): try: (return_path,) = self.expectline(r"mail\s+from:<([^>]+)>(?:\s+size=\d+)?$") except ParseError as error: if error.line.lower() == "quit": self.sendline("221 critic.example.org Bye, bye") raise Quit raise self.sendline("250 OK") mail = Mail(return_path) # For simplicity we only support a single recipient. Critic (currently) # never sends mails with multiple recipients. (It often sends identical # mails to multiple recipients, but on the SMTP level, they are multiple # single-recipient mails.) (mail.recipient,) = self.expectline(r"rcpt\s+to:<([^>]+)>$") testing.logger.debug("Mailbox: Mail to <%s>." 
% mail.recipient) self.sendline("250 OK") self.expectline("data") self.sendline("354 Right") message_source = "" while True: line = self.recvline() if line == ".": break message_source += line + "\r\n" message = email.message_from_string(message_source) for name in message.keys(): headers = mail.headers.setdefault(name.lower(), []) for value in message.get_all(name): value = re.sub("\r\n[ \t]+", " ", value) headers.append({ "name": name, "value": value }) mail.lines = message.get_payload(decode=True).splitlines() testing.logger.debug("Received mail to: <%s> \"%s\"" % (mail.recipient, mail.header("Subject"))) if self.debug_mails: source = "--------------------------------------------------\n" for name, value in message.items(): source += "%s: %s\n" % (name, value) source += "\n" for line in mail.lines: source += line + "\n" source += "--------------------------------------------------" testing.logger.debug(source) self.mailbox.add(mail) self.sendline("250 OK") def run(self): try: testing.logger.debug("Mailbox: Client connected.") self.handshake() testing.logger.debug("Mailbox: Client ready.") while True: self.receive() except Error as error: testing.logger.error("Mailbox: Client error: %s" % error.message) except Quit: testing.logger.debug("Mailbox: Client quit.") except EOF: testing.logger.debug("Mailbox: Client disconnected prematurely.") except Exception: testing.logger.exception("Mailbox: Client error!") self.close() def close(self): try: self.client.close() except socket.error: pass class Listener(threading.Thread): def __init__(self, mailbox, debug_mails): super(Listener, self).__init__() self.daemon = True self.mailbox = mailbox self.debug_mails = debug_mails self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.socket.settimeout(0.1) self.socket.bind(("", 0)) self.socket.listen(1) self.stopped = False self.start() def run(self): while not self.stopped: try: client, _ = self.socket.accept() except socket.timeout: pass else: Client(self.mailbox, 
client, self.debug_mails) def stop(self): self.stopped = True class Mailbox(object): def __init__(self, instance, credentials=None, debug_mails=False): self.instance = instance self.credentials = credentials self.queued = [] self.errors = [] self.condition = threading.Condition() self.listener = Listener(self, debug_mails) def add(self, mail): with self.condition: self.queued.append(mail) self.condition.notify() def pop(self, accept=None): def is_accepted(mail): if accept is None: return True if callable(accept): return accept(mail) for fn in accept: if not fn(mail): return False return True def find_mail(): with self.condition: for mail in self.queued: if is_accepted(mail): self.queued.remove(mail) return mail mail = find_mail() if mail: return mail if accept is not None and self.instance: # Wait until the instance's mail delivery service is idle, which # means it has delivered all pending mail. After that, the mail # should be here, or it never will be. self.instance.synchronize_service("maildelivery") mail = find_mail() if mail: return mail raise MissingMail(accept) def reset(self): with self.condition: self.queued = [] def pop_error(self): with self.condition: return self.errors.pop(0) def stop(self): self.listener.stop() def check_empty(self): try: while True: unexpected = self.pop() testing.logger.error("Unexpected mail to <%s>:\n%s" % (unexpected.recipient, unexpected)) except MissingMail: pass @property def port(self): return self.listener.socket.getsockname()[1] def __enter__(self): return self def __exit__(self, *args): self.stop() return False class WithSubject(object): def __init__(self, value): self.regexp = re.compile(value) def __call__(self, mail): return self.regexp.match(mail.header("Subject")) is not None def __repr__(self): return "subject=%r" % self.regexp.pattern class ToRecipient(object): def __init__(self, address): self.address = address def __call__(self, mail): return mail.recipient == self.address def __repr__(self): return 
"recipient=<%s>" % self.address ================================================ FILE: testing/main.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import sys import os import argparse import logging import re import subprocess import traceback import time import datetime import testing class Counters: def __init__(self): self.tests_run = 0 self.tests_failed = 0 self.errors_logged = 0 self.warnings_logged = 0 counters = Counters() logger = None class TestingAborted(Exception): pass def run(): global logger parser = argparse.ArgumentParser(description="Critic testing framework") parser.add_argument("--debug", action="store_true", help="Enable DEBUG level logging") parser.add_argument("--debug-mails", action="store_true", help="Log every mail sent by the tested system") parser.add_argument("--quiet", action="store_true", help="Disable INFO level logging") parser.add_argument("--quickstart", action="store_true", help="Test against a quick-start instance") parser.add_argument("--coverage", action="store_true", help="Enable coverage measurement mode") parser.add_argument("--commit", help="Commit (symbolic ref or SHA-1) to test [default=HEAD]") parser.add_argument("--upgrade-from", help="Commit (symbolic ref or SHA-1) to install first and upgrade from") parser.add_argument("--strict-fs-permissions", action="store_true", help="Set strict 
file-system permissions in guest OS") parser.add_argument("--test-extensions", action="store_true", help="Test extensions") parser.add_argument("--local", action="store_true", help="Run local standalone tests only") parser.add_argument("--vbox-host", default="host", help="Host that's running VirtualBox [default=host]") parser.add_argument("--vm-identifier", help="VirtualBox instance name or UUID") parser.add_argument("--vm-hostname", help="VirtualBox instance hostname [default=VM_IDENTIFIER") parser.add_argument("--vm-snapshot", default="clean", help="VirtualBox snapshot (name or UUID) to restore [default=clean]") parser.add_argument("--vm-ssh-port", type=int, default=22, help="VirtualBox instance SSH port [default=22]") parser.add_argument("--vm-http-port", type=int, default=80, help="VirtualBox instance HTTP port [default=80]") parser.add_argument("--vm-web-server", choices=("apache", "nginx+uwsgi", "uwsgi"), help="Web server to tell Critic to install and configure") parser.add_argument("--git-daemon-port", type=int, help="Port to tell 'git daemon' to bind to") parser.add_argument("--cache-dir", default="testing/cache", help="Directory where cache files are stored") parser.add_argument("--upgrade-after", help="Upgrade after specified test") parser.add_argument("--pause-before", action="append", help="Pause testing before specified test(s)") parser.add_argument("--pause-after", action="append", help="Pause testing before specified test(s)") parser.add_argument("--pause-on-failure", action="store_true", help="Pause testing after each failed test") parser.add_argument("--pause-upgrade-loop", action="store_true", help="Support upgrading the tested system while paused") parser.add_argument("--pause-upgrade-retry", action="store_true", help=("Support upgrading the tested system while paused " "after a failed test, and retrying the failed test")) parser.add_argument("--pause-upgrade-hook", action="append", help="Command to run (locally) before upgrading") 
parser.add_argument("test", nargs="*", help="Specific tests to run [default=all]") arguments = parser.parse_args() class CountingLogger(object): def __init__(self, real, counters): self.real = real self.counters = counters def log(self, level, message): if level == logging.ERROR: self.counters.errors_logged += 1 elif level == logging.WARNING: self.counters.warnings_logged += 1 for line in message.splitlines() or [""]: self.real.log(level, line) def debug(self, message): self.log(logging.DEBUG, message) def info(self, message): self.log(logging.INFO, message) def warning(self, message): self.log(logging.WARNING, message) def error(self, message): self.log(logging.ERROR, message) def exception(self, message): self.log(logging.ERROR, message + "\n" + traceback.format_exc()) logger = testing.configureLogging( arguments, wrap=lambda logger: CountingLogger(logger, counters)) logger.info("""\ Critic Testing Framework ======================== """) key_arguments = [arguments.local, arguments.quickstart, arguments.vm_identifier] if len(filter(None, key_arguments)) != 1: logger.error("Must specify exactly one of --local, --quickstart and " "--vm-identifier!") return if arguments.local or arguments.quickstart: incompatible_arguments = [] # This is not a complete list; just those that are most significantly # incompatible or irrelevant with --local/--quickstart. 
if arguments.commit: incompatible_arguments.append("--commit") if arguments.upgrade_from: incompatible_arguments.append("--upgrade-from") if arguments.coverage: incompatible_arguments.append("--coverage") if arguments.test_extensions: incompatible_arguments.append("--strict-fs-permissions") if arguments.test_extensions: incompatible_arguments.append("--test-extensions") if arguments.vm_identifier: incompatible_arguments.append("--vm-identifier") if incompatible_arguments: logger.error("These arguments can't be combined with " "--local/--quickstart:\n " + "\n ".join(incompatible_arguments)) return import_errors = False try: import requests except ImportError: logger.error("Failed to import 'requests'!") import_errors = True try: import BeautifulSoup except ImportError: logger.error("Failed to import 'BeautifulSoup'!") import_errors = True git_version = subprocess.check_output(["git", "--version"]).strip() m = re.search("(\d+)\.(\d+)\.(\d+)(?:[^\d]+|$)", git_version) if not m: logger.warning("Failed to parse host-side git version number: '%s'" % git_version) else: version_tuple = tuple(map(int, m.groups())) if version_tuple >= (1, 8, 5): logger.debug("Using Git version %s on host." % git_version) else: logger.error("Git version on host machine must be version 1.8.5 or above (detected version %s)." % git_version) logger.error("Earlier Git versions crashed with SIGBUS causing test suite flakiness.") import_errors = True if import_errors: logger.error("Required software missing; see testing/USAGE.md for details.") return if arguments.test_extensions: # Check that the v8-jsshell submodule is checked out if extension # testing was requested. output = subprocess.check_output(["git", "submodule", "status", "installation/externals/v8-jsshell"]) if output.startswith("-"): logger.error("""\ The v8-jsshell submodule must be checked for extension testing. 
Please run git submodule update --init installation/externals/v8-jsshell first or run this script without --test-extensions.""") return if arguments.vm_identifier: # Note: we are not ignoring typical temporary editor files such as the # ".#<name>" files created by Emacs when a buffer has unsaved changes. # This is because unsaved changes in an editor is probably also # something you don't want to test with. locally_modified_paths = [] status_output = subprocess.check_output( ["git", "status", "--porcelain"]) for line in status_output.splitlines(): locally_modified_paths.extend(line[3:].split(" -> ")) tests_modified = [] input_modified = [] other_modified = [] for path in locally_modified_paths: if path.startswith("testing/input/"): input_modified.append(path) elif path.startswith("testing/"): tests_modified.append(path) else: other_modified.append(path) if input_modified: logger.error("Test input files locally modified:\n " + "\n ".join(input_modified)) if other_modified: logger.error("Critic files locally modified:\n " + "\n ".join(other_modified)) if input_modified or other_modified: logger.error("Please commit or stash local modifications before " "running tests.") return if tests_modified: logger.warning("Running tests using locally modified files:\n " + "\n ".join(tests_modified)) tested_commit = subprocess.check_output( ["git", "rev-parse", "--verify", arguments.commit or "HEAD"]).strip() if arguments.upgrade_from: install_commit = subprocess.check_output( ["git", "rev-parse", "--verify", arguments.upgrade_from]).strip() upgrade_commit = tested_commit else: install_commit = tested_commit upgrade_commit = None install_commit_description = subprocess.check_output( ["git", "log", "--oneline", "-1", install_commit]).strip() if upgrade_commit: upgrade_commit_description = subprocess.check_output( ["git", "log", "--oneline", "-1", upgrade_commit]).strip() else: upgrade_commit_description = None flags_on = set() flags_off = set() try: if arguments.local: frontend = 
None instance = testing.local.Instance() else: frontend = testing.frontend.Frontend( hostname=arguments.vm_hostname or arguments.vm_identifier, http_port=arguments.vm_http_port) if arguments.quickstart: instance = testing.quickstart.Instance( frontend=frontend) else: instance = testing.virtualbox.Instance( arguments, install_commit=(install_commit, install_commit_description), upgrade_commit=(upgrade_commit, upgrade_commit_description), frontend=frontend) frontend.instance = instance except testing.Error as error: logger.error(error.message) return if not arguments.test_extensions: flags_off.add("extensions") flags_on.update(instance.flags_on) flags_off.update(instance.flags_off) tests, dependencies = testing.findtests.selectTests( arguments.test, strict=False, flags_on=flags_on, flags_off=flags_off) if not tests: logger.error("No tests selected!") return if arguments.upgrade_after: upgrade_after = testing.findtests.filterPatterns([arguments.upgrade_after]) upgrade_after_tests, _ = testing.findtests.selectTests(upgrade_after, strict=True) upgrade_after_tests = set(upgrade_after_tests) upgrade_after_groups = set(upgrade_after) def maybe_upgrade_after(test): def do_upgrade(what): logger.info("Upgrading after: %s" % what) instance.upgrade(is_after_test=True) if test in upgrade_after_tests: do_upgrade(test) else: for group in test.groups: if group in upgrade_after_groups \ and test == all_groups[group][-1]: do_upgrade(group) break else: def maybe_upgrade_after(test): pass def pause(failed_test=None): if arguments.pause_upgrade_loop \ or (failed_test and arguments.pause_upgrade_retry): print "Testing paused." 
while True: if failed_test and arguments.pause_upgrade_retry: testing.pause("Press ENTER to upgrade (to HEAD) and " "retry %s, CTRL-c to stop: " % os.path.basename(failed_test)) else: testing.pause("Press ENTER to upgrade (to HEAD), " "CTRL-c to stop: ") if arguments.pause_upgrade_hook: for command in arguments.pause_upgrade_hook: subprocess.check_call(command, shell=True) if arguments.quickstart: instance.restart() elif not arguments.local: repository.push("HEAD") instance.execute(["git", "fetch", "origin", "master"], cwd="critic") instance.upgrade_commit = "FETCH_HEAD" instance.upgrade() if failed_test and arguments.pause_upgrade_retry: return "retry" else: testing.pause("Testing paused. Press ENTER to continue: ") if arguments.pause_before: pause_before = testing.findtests.filterPatterns(arguments.pause_before) pause_before_tests, _ = testing.findtests.selectTests(pause_before, strict=True) pause_before_tests = set(pause_before_tests) pause_before_groups = set(pause_before) def maybe_pause_before(test): def do_pause(what): logger.info("Pausing before: %s" % what) pause() if test in pause_before_tests: do_pause(test) else: for group in test.groups: if group in pause_before_groups \ and test == all_groups[group][0]: do_pause(group) break else: def maybe_pause_before(test): pass if arguments.pause_after: pause_after = testing.findtests.filterPatterns(arguments.pause_after) pause_after_tests, _ = testing.findtests.selectTests(pause_after, strict=True) pause_after_tests = set(pause_after_tests) pause_after_groups = set(pause_after) def maybe_pause_after(test): def do_pause(what): logger.info("Pausing after: %s" % what) pause() if test in pause_after_tests: do_pause(test) else: for group in test.groups: if group in pause_after_groups \ and test == all_groups[group][-1]: do_pause(group) break else: def maybe_pause_after(test): pass root_groups = {} all_groups = {} for test in tests: for group in test.groups: all_groups.setdefault(group, []).append(test) 
root_groups.setdefault(test.groups[0], []).append(test) failed_tests = set() def run_test(test, scope): prefix = "testing/tests" def run_file(filename): try: execfile(os.path.join(prefix, filename), scope) except testing.Error: raise except Exception as error: logger.exception("Unexpected exception!") raise testing.TestFailure path = "" for component in test.filename.split("/")[:-1]: path = os.path.join(path, component) init_filename = os.path.join(path, "__init__.py") if os.path.isfile(os.path.join(prefix, init_filename)): logger.debug("Including: %s" % init_filename) run_file(init_filename) run_file(test.filename) def run_group(group_name, tests): scope = { "testing": testing, "logger": logger, "instance": instance } if not arguments.local: scope.update({ "frontend": frontend, "repository": repository, "mailbox": mailbox }) try: for test in tests: if test.dependencies & failed_tests: logger.info("Skipping %s (failed dependency)" % test) continue maybe_pause_before(test) if test in dependencies: logger.info("Running: %s (dependency)" % test) else: logger.info("Running: %s" % test) counters.tests_run += 1 while True: try: errors_before = counters.errors_logged run_test(test, scope.copy()) if mailbox: mailbox.check_empty() instance.check_service_logs() if errors_before < counters.errors_logged: raise testing.TestFailure except testing.Error as error: counters.tests_failed += 1 failed_tests.add(test) if not isinstance(error, testing.TestFailure): raise if error.message: logger.error(error.message) if mailbox: try: while True: mail = mailbox.pop( accept=testing.mailbox.ToRecipient( "system@example.org")) logger.error("System message: %s\n %s" % (mail.header("Subject"), "\n ".join(mail.lines))) except testing.mailbox.MissingMail: pass instance.check_service_logs() if arguments.pause_on_failure \ or arguments.pause_upgrade_retry: if pause(test.filename) == "retry": # Re-run test due to --pause-upgrade-retry. 
continue except testing.NotSupported as not_supported: failed_tests.add(test) logger.info("Test not supported: %s" % not_supported.message) else: maybe_upgrade_after(test) maybe_pause_after(test) break except KeyboardInterrupt: raise TestingAborted except testing.Error as error: if error.message: logger.exception(error.message) if arguments.pause_on_failure: pause() return False except Exception: logger.exception("Unexpected exception!") if arguments.pause_on_failure: pause() return False else: return True for group_name in sorted(root_groups.keys()): if arguments.local: repository = None mailbox = None if not run_group(group_name, all_groups[group_name]): return False else: repository = testing.repository.Repository( "localhost" if arguments.quickstart else arguments.vbox_host, arguments.git_daemon_port, tested_commit, instance) mailbox = testing.mailbox.Mailbox(instance, { "username": "smtp_username", "password": "SmTp_PaSsWoRd" }, arguments.debug_mails) with repository: with mailbox: if not repository.export(): return False with instance: instance.mailbox = mailbox testing.utils.instance = instance testing.utils.frontend = frontend if not run_group(group_name, all_groups[group_name]): return False instance.finish() mailbox.instance = None mailbox.check_empty() return True def main(): start_time = time.time() try: run_failed = not run() if run_failed: logger.error("Tests did not run as expected.") time_taken = str(datetime.timedelta(seconds=round(time.time() - start_time))) logger.info(""" Test summary ============ Tests run: %9d Tests failed: %9d Errors logged: %9d Warnings logged: %9d Time taken: %9s """ % (counters.tests_run, counters.tests_failed, counters.errors_logged, counters.warnings_logged, time_taken)) if run_failed or counters.tests_failed or counters.errors_logged: sys.exit(1) except TestingAborted: logger.error("Testing aborted.") sys.exit(1) if __name__ == "__main__": main() ================================================ FILE: 
testing/password-invalid ================================================
#!/bin/sh
# Test helper: prints the literal password "invalid" with no trailing newline.
# Presumably used as a password-prompt answerer in authentication-failure
# tests -- confirm against callers.
echo -n invalid

================================================ FILE: testing/password-testing ================================================
#!/bin/sh
# Test helper: prints the standard test-account password "testing" with no
# trailing newline (all test users are created with this password, see
# Instance.adduser in testing/quickstart.py).
echo -n testing

================================================ FILE: testing/quickstart.py ================================================
# -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2013 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import sys import os import subprocess import signal import threading import time import json import testing class RepositoryURL(object): def __init__(self, path, name): self.path = path self.name = name class Instance(testing.Instance): flags_off = ["full", "postgresql", "extensions", "upgrade", "uninstall"] install_commit = "HEAD" tested_commit = "HEAD" def __init__(self, frontend): super(Instance, self).__init__() self.frontend = frontend self.mailbox = None self.process = None self.hostname = "localhost" self.registeruser("admin") def __enter__(self): return self def __exit__(self, *args): if self.process: self.stop() return False @property def etc_dir(self): return os.path.join(self.state_dir, "etc") def start(self): pass def stop(self): testing.logger.debug("Stopping ...") self.process.send_signal(signal.SIGINT) self.process.wait() self.process = None testing.logger.debug("Stopped") def execute(self, *args, **kwargs): raise testing.NotSupported("quick-started instance doesn't support execute()") def criticctl(self, argv): for index, arg in enumerate(argv): if arg[0] == "'" == arg[-1]: argv[index] = arg[1:-1] argv = [os.path.join(self.state_dir, "bin", "criticctl")] + argv testing.logger.debug("Running: %s" % " ".join(argv)) process = subprocess.Popen( argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = process.communicate() for line in stdout.splitlines(): testing.logger.log(testing.STDOUT, line) for line in stderr.splitlines(): testing.logger.log(testing.STDERR, line) if process.returncode == 0: return stdout else: raise testing.CriticctlError(" ".join(argv), stdout, stderr) def adduser(self, name, email=None, fullname=None, password=None): if email is None: email = "%s@example.org" % name if fullname is None: fullname = "%s von Testing" % name.capitalize() if password is None: password = "testing" self.criticctl(["adduser", "--name", name, "--email", email, "--fullname", fullname, "--password", password]) self.registeruser(name) def 
has_flag(self, flag): return testing.has_flag("HEAD", flag) def repository_path(self, repository="critic"): return os.path.join(self.state_dir, "git/%s.git" % repository) def repository_url(self, name=None, repository="critic"): path = self.repository_path(repository) if name is None: return path return RepositoryURL(path, name) def install(self, repository, override_arguments={}, other_cwd=False, quick=False, interactive=False): argv = [sys.executable, "-u", "quickstart.py", "--testing", "--admin-username", "admin", "--admin-fullname", "Testing Administrator", "--admin-email", "admin@example.org", "--admin-password", "testing", "--system-recipient", "system@example.org", "--http-port", "0", # Use a random port. "--smtp-port", str(self.mailbox.port), "--smtp-username", self.mailbox.credentials["username"], "--smtp-password", self.mailbox.credentials["password"]] testing.logger.debug("Running: %s" % " ".join(argv)) self.process = subprocess.Popen( argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE) def consume(stream, loglevel): try: while True: line = stream.readline() if not line: break testing.logger.log(loglevel, line.rstrip()) except IOError: pass stderr_thread = threading.Thread( target=consume, args=(self.process.stderr, testing.STDERR)) stderr_thread.daemon = True stderr_thread.start() line = self.process.stdout.readline().strip() key, _, value = line.partition("=") if key != "STATE": raise testing.InstanceError("Unexpected output: %r" % line) self.state_dir = value testing.logger.debug("State directory: %s" % value) line = self.process.stdout.readline().strip() key, _, value = line.partition("=") if key != "HTTP": raise testing.InstanceError("Unexpected output: %r" % line) hostname, _, port = value.partition(":") self.frontend.hostname = hostname self.frontend.http_port = int(port) testing.logger.debug("HTTP address: %s:%s" % (hostname, port)) line = self.process.stdout.readline().strip() if line != "STARTED": raise testing.InstanceError("Unexpected 
output: %r" % line) # Add some regular users. for name in ("alice", "bob", "dave", "erin"): self.adduser(name) self.adduser("howard") self.criticctl(["addrole", "--name", "howard", "--role", "newswriter"]) try: self.frontend.run_basic_tests() self.mailbox.check_empty() except testing.TestFailure as error: if error.message: testing.logger.error("Basic test: %s" % error.message) # If basic tests fail, there's no reason to further test this # instance; it seems to be properly broken. raise testing.InstanceError testing.logger.info("Quick-started Critic in %s (%d)" % (self.state_dir, self.frontend.http_port)) def check_upgrade(self): raise testing.NotSupported("quick-started instance can't be upgraded") def upgrade(self, override_arguments={}, other_cwd=False, quick=False, interactive=False): pass def check_extend(self, repository, pre_upgrade=False): raise testing.NotSupported("quick-started instance doesn't support extensions") def extend(self, repository): self.check_extend(repository) def uninstall(self): raise testing.NotSupported("quick-started instance can't be uninstalled") def finish(self): pass def run_unittest(self, args): PYTHONPATH = ":".join([os.path.join(self.state_dir, "etc/main"), os.path.join(os.getcwd(), "src"), os.getcwd()]) argv = [sys.executable, "-u", "-m", "run_unittest"] + args return self.executeProcess(argv, cwd="src", log_stderr=False, env={ "PYTHONPATH": PYTHONPATH }) def gc(self, repository): self.executeProcess( ["git", "gc", "--prune=now"], cwd=os.path.join(self.state_dir, "git", repository)) def synchronize_service(self, service_name, force_maintenance=False, timeout=30): helper = "testing/input/service_synchronization_helper.py" testing.logger.debug("Synchronizing service: %s" % service_name) pidfile_path = os.path.join( self.state_dir, "run/main", service_name + ".pid") if force_maintenance: signum = signal.SIGUSR2 else: signum = signal.SIGUSR1 before = time.time() self.executeProcess( ["python", helper, pidfile_path, str(signum), 
str(timeout)]) after = time.time() testing.logger.debug("Synchronized service: %s in %.2f seconds" % (service_name, after - before)) def filter_service_logs(self, level, service_names): helper = "testing/input/service_log_filter.py" logfile_paths = { os.path.join( self.state_dir, "log/main", service_name + ".log"): service_name for service_name in service_names } try: data = json.loads(self.executeProcess( ["python", helper, level] + logfile_paths.keys(), log_stdout=False)) return { logfile_paths[logfile_path]: entries for logfile_path, entries in sorted(data.items()) } except testing.CommandError: return None def restart(self): self.process.send_signal(signal.SIGUSR1) line = self.process.stdout.readline().strip() if line != "RESTARTED": raise testing.InstanceError("Unexpected output: %r" % line) ================================================ FILE: testing/repository.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import os
import time
import tempfile
import shutil
import subprocess
import re

import testing

class GitCommandError(testing.TestFailure):
    # Raised when a git command run via _git() exits with non-zero status.
    # The formatted message embeds the full command line and its output.
    def __init__(self, command, output):
        super(GitCommandError, self).__init__(
            "GitCommandError: %s\nOutput:\n %s"
            % (command, "\n ".join(output.strip().splitlines())))
        self.command = command
        self.output = output

def _git(args, **kwargs):
    """Run "git <args>" and return its combined stdout+stderr output.

    Extra keyword arguments are passed through to subprocess.check_output;
    the special "env" kwarg is merged into (not substituted for) the current
    environment, with None values meaning "remove this variable".  Author and
    committer identity default to "Critic Tester <tester@example.org>".
    Raises GitCommandError on non-zero exit status."""
    argv = ["git"] + args
    if "cwd" in kwargs:
        cwd = " (in %s)" % kwargs["cwd"]
    else:
        cwd = ""
    testing.logger.debug("Running: %s%s" % (" ".join(argv), cwd))
    env = os.environ.copy()
    for name, value in kwargs.get("env", {}).items():
        if value is None:
            # None means: make sure the variable is unset.
            if name in env:
                del env[name]
        else:
            env[name] = value
    env.setdefault("GIT_AUTHOR_NAME", "Critic Tester")
    env.setdefault("GIT_COMMITTER_NAME", "Critic Tester")
    env.setdefault("GIT_AUTHOR_EMAIL", "tester@example.org")
    env.setdefault("GIT_COMMITTER_EMAIL", "tester@example.org")
    kwargs["env"] = env
    try:
        return subprocess.check_output(
            argv, stdin=open("/dev/null"), stderr=subprocess.STDOUT, **kwargs)
    except subprocess.CalledProcessError as error:
        raise GitCommandError(" ".join(argv), error.output)

def submodule_sha1(repository_path, parent_sha1, submodule_path):
    """Return the commit sha1 recorded for a submodule, or None.

    Looks up |submodule_path| in the tree of |parent_sha1| via git ls-tree;
    returns None if the lookup fails or the entry isn't a gitlink."""
    try:
        lstree = _git(["ls-tree", parent_sha1, submodule_path],
                      cwd=repository_path)
    except GitCommandError:
        # Sub-module doesn't exist? Will probably fail later, but doesn't need
        # to fail here.
        return None
    mode, object_type, sha1, path = lstree.strip().split(None, 3)
    if object_type != "commit":
        # Odd. The repository doesn't look at all like we expect.
        return None
    return sha1

class Repository(object):
    """Temporary bare clone of the Critic repository, served via git daemon.

    Created in a tempfile.mkdtemp() directory as critic.git, with the tested
    commit pushed to refs/heads/master.  When the instance tests extensions,
    bare clones of the v8-jsshell and v8 submodules are set up as well.  Use
    as a context manager: __exit__ stops the daemon and removes all files."""

    def __init__(self, host, port, tested_commit, instance):
        self.host = host
        self.port = port
        self.instance = instance
        self.base_path = tempfile.mkdtemp()
        self.path = os.path.join(self.base_path, "critic.git")
        self.work = os.path.join(self.base_path, "work")

        # Port 0/None means git daemon's default port; omit it from the URL.
        if port:
            self.url = "git://%s:%d/critic.git" % (host, port)
        else:
            self.url = "git://%s/critic.git" % host

        testing.logger.debug("Creating temporary repositories in: %s"
                             % self.base_path)

        _git(["clone", "--bare", os.getcwd(), "critic.git"],
             cwd=self.base_path)
        # Tests rewrite history freely, so allow deletes and forced pushes.
        _git(["config", "receive.denyDeletes", "false"],
             cwd=self.path)
        _git(["config", "receive.denyNonFastforwards", "false"],
             cwd=self.path)

        self.push(tested_commit)

        self.v8_jsshell_path = None
        self.v8_path = None
        self.v8_url = None

        if instance.test_extensions:
            # Mirror the v8-jsshell submodule (and its nested v8 submodule)
            # so the tested instance can fetch them from us, pinned at the
            # sha1s recorded in the tested commit.
            if os.path.exists("installation/externals/v8-jsshell/.git"):
                v8_jsshell_path = os.path.join(
                    os.getcwd(), "installation/externals/v8-jsshell")
                _git(["clone", "--bare", v8_jsshell_path, "v8-jsshell.git"],
                     cwd=self.base_path)
                self.v8_jsshell_path = os.path.join(
                    self.base_path, "v8-jsshell.git")
                v8_jsshell_sha1 = submodule_sha1(
                    os.getcwd(), tested_commit,
                    "installation/externals/v8-jsshell")
                if v8_jsshell_sha1:
                    _git(["push", "--quiet", "--force", self.v8_jsshell_path,
                          v8_jsshell_sha1 + ":refs/heads/master"],
                         cwd=v8_jsshell_path)
            else:
                v8_jsshell_sha1 = None

            if os.path.exists("installation/externals/v8-jsshell/v8/.git"):
                v8_path = os.path.join(
                    os.getcwd(), "installation/externals/v8-jsshell/v8")
                _git(["clone", "--bare", v8_path, "v8/v8.git"],
                     cwd=self.base_path)
                self.v8_path = os.path.join(self.base_path, "v8/v8.git")
                if port:
                    self.v8_url = "git://%s:%d/v8/v8.git" % (host, port)
                else:
                    self.v8_url = "git://%s/v8/v8.git" % host
                if v8_jsshell_sha1:
                    v8_sha1 = submodule_sha1(
                        "installation/externals/v8-jsshell", v8_jsshell_sha1,
                        "v8")
                    if v8_sha1:
                        _git(["push", "--quiet", "--force", self.v8_path,
                              v8_sha1 + ":refs/heads/master"],
                             cwd=v8_path)

    def push(self, commit):
        # Force-update the temporary repository's master to |commit|.
        _git(["push", "--quiet", "--force", self.path,
              "%s:refs/heads/master" % commit])

    def export(self):
        """Start a git daemon serving the temporary repositories.

        Returns True on success, False if the daemon died immediately (e.g.
        the port was taken)."""
        argv = ["git", "daemon", "--reuseaddr", "--export-all",
                "--base-path=%s" % self.base_path]
        if self.port:
            argv.append("--port=%d" % self.port)
        argv.append(self.path)
        if self.v8_jsshell_path:
            argv.append(self.v8_jsshell_path)
        if self.v8_path:
            argv.append(self.v8_path)

        self.daemon = subprocess.Popen(argv)

        # Give the daemon a moment to start, then check (without blocking)
        # whether it already exited, which signals failure.
        time.sleep(1)

        pid, status = os.waitpid(self.daemon.pid, os.WNOHANG)
        if pid != 0:
            self.daemon = None
            testing.logger.error("Failed to export repository!")
            return False

        testing.logger.debug("Exported repository: %s" % self.path)
        if self.v8_jsshell_path:
            testing.logger.debug("Exported repository: %s"
                                 % self.v8_jsshell_path)
        if self.v8_path:
            testing.logger.debug("Exported repository: %s" % self.v8_path)

        return True

    def run(self, args, cwd=None, env=None):
        """Run a git command against the temporary repository.

        For quick-started instances, RepositoryURL arguments are replaced by
        their local path and REMOTE_USER is set to their user name, emulating
        authenticated access.  args[0] is assumed to be the subcommand and is
        never substituted."""
        if cwd is None:
            cwd = self.path
        if env is None:
            env = {}
        if isinstance(self.instance, testing.quickstart.Instance):
            for index, arg in enumerate(args[1:]):
                if isinstance(arg, testing.quickstart.RepositoryURL):
                    args[index + 1] = arg.path
                    env["REMOTE_USER"] = arg.name
        return _git(args, cwd=cwd, env=env)

    def workcopy(self, name="critic", empty=False):
        """Return a context manager for a temporary work copy.

        On enter, clones the temporary repository into work/<name> (or, with
        empty=True, creates a fresh empty repository there); on exit, removes
        it.  The returned object's run() forwards to Repository.run(), and
        treats any ALL-UPPERCASE keyword arguments as environment variables."""
        master = self

        class Workcopy(testing.Context):
            def __init__(self, path, start, finish):
                super(Workcopy, self).__init__(start, finish)
                self.path = path

            def run(self, args, **kwargs):
                if kwargs:
                    env = {}
                    # ALL-UPPERCASE kwargs become environment variables; the
                    # rest pass through to _git().  (Python 2: keys() returns
                    # a list, so deleting while iterating is safe.)
                    for name in kwargs.keys():
                        if name.lower() != name == name.upper():
                            env[name] = kwargs[name]
                            del kwargs[name]
                else:
                    env = None
                return master.run(args, cwd=self.path, env=env, **kwargs)

        path = os.path.join(self.work, name)
        if os.path.exists(path):
            raise testing.InstanceError(
                "Can't create work copy; path already exists!")

        def start():
            if not os.path.isdir(self.work):
                os.mkdir(self.work)
            if not empty:
                _git(["clone", self.path, name], cwd=self.work)
            else:
                os.mkdir(path)
                _git(["init"], cwd=path)

        def finish():
            shutil.rmtree(path)

        return Workcopy(path, start, finish)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Best-effort clean-up: stop the daemon (if export() started one)
        # and remove the temporary directory.  The bare excepts also cover
        # the AttributeError raised when export() was never called and
        # self.daemon is unset.
        try:
            if self.daemon:
                self.daemon.terminate()
                self.daemon.wait()
        except:
            testing.logger.exception("Repository clean-up failed!")

        try:
            shutil.rmtree(self.base_path)
        except:
            testing.logger.exception("Repository clean-up failed!")

        return False

================================================ FILE: testing/tests/001-main/000-install.py ================================================
# Start instance and install (and upgrade, optionally) Critic with the default
# arguments.

instance.start()
instance.install(repository)
instance.upgrade()

================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/001-dashboard.py ================================================
# Anonymous access: /dashboard renders with no reviews present.
frontend.page("dashboard", expect={
    "document_title": testing.expect.document_title(u"Dashboard"),
    "message_title": testing.expect.message_title(u"No reviews!"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_anonymous_user()
})

================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/002-branches.py ================================================
# Anonymous access: /branches renders its standard page elements.
frontend.page(
    "branches",
    expect={
        "document_title": testing.expect.document_title(u"Branches"),
        "content_title": testing.expect.paleyellow_title(0, u"Branches"),
        "pageheader_links": testing.expect.pageheader_links("anonymous"),
        "script_user": testing.expect.script_no_user()
    })

================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/003-search.py ================================================
# Anonymous access: /search renders its standard page elements.
frontend.page(
    "search",
    expect={
        "document_title": testing.expect.document_title(u"Review Search"),
        "content_title": testing.expect.paleyellow_title(0, u"Review Search"),
        "pageheader_links": testing.expect.pageheader_links("anonymous"),
        "script_user": testing.expect.script_anonymous_user()
    })

================================================ FILE:
testing/tests/001-main/001-empty/001-anonymous/004-config.py ================================================
# Anonymous access: /config renders the preferences page.
frontend.page(
    "config",
    expect={
        "document_title": testing.expect.document_title(u"User preferences"),
        "content_title": testing.expect.paleyellow_title(0, u"User preferences"),
        "pageheader_links": testing.expect.pageheader_links("anonymous"),
        "script_user": testing.expect.script_anonymous_user()
    })

================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/005-tutorial.py ================================================
# Anonymous access: /tutorial, with and without an "item" parameter, renders
# each tutorial page's standard elements.

frontend.page("tutorial", expect={
    "document_title": testing.expect.document_title(u"Tutorials"),
    "content_title": testing.expect.paleyellow_title(0, u"Tutorials"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

frontend.page("tutorial", params={ "item": "request" }, expect={
    "document_title": testing.expect.document_title(u"Requesting a Review"),
    "content_title": testing.expect.paleyellow_title(0, u"Requesting a Review"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

frontend.page("tutorial", params={ "item": "review" }, expect={
    "document_title": testing.expect.document_title(u"Reviewing Changes"),
    "content_title": testing.expect.paleyellow_title(0, u"Reviewing Changes"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

frontend.page("tutorial", params={ "item": "filters" }, expect={
    "document_title": testing.expect.document_title(u"Filters"),
    "content_title": testing.expect.paleyellow_title(0, u"Filters"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

frontend.page("tutorial", params={ "item": "archival" }, expect={
    "document_title": testing.expect.document_title(u"Review branch archival"),
    "content_title": testing.expect.paleyellow_title(0, u"Review branch archival"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

frontend.page("tutorial", params={ "item": "viewer" }, expect={
    "document_title": testing.expect.document_title(u"Repository Viewer"),
    "content_title": testing.expect.paleyellow_title(0, u"Repository Viewer"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

frontend.page("tutorial", params={ "item": "reconfigure" }, expect={
    "document_title": testing.expect.document_title(u"Reconfiguring Critic"),
    "content_title": testing.expect.paleyellow_title(0, u"Reconfiguring Critic"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

frontend.page("tutorial", params={ "item": "rebase" }, expect={
    "document_title": testing.expect.document_title(u"Rebasing a Review"),
    "content_title": testing.expect.paleyellow_title(0, u"Rebasing a Review"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

frontend.page("tutorial", params={ "item": "administration" }, expect={
    "document_title": testing.expect.document_title(u"System Administration"),
    "content_title": testing.expect.paleyellow_title(0, u"System Administration"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

frontend.page("tutorial", params={ "item": "customization" }, expect={
    "document_title": testing.expect.document_title(u"System Customization"),
    "content_title": testing.expect.paleyellow_title(0, u"System Customization"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

frontend.page("tutorial", params={ "item": "search" }, expect={
    "document_title": testing.expect.document_title(u"Review Quick Search"),
    "content_title": testing.expect.paleyellow_title(0, u"Review Quick Search"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

# Unknown items are ignored and the main Tutorials page is returned instead.
frontend.page("tutorial", params={ "item": "nonexisting" }, expect={
    "document_title": testing.expect.document_title(u"Tutorials"),
    "content_title": testing.expect.paleyellow_title(0, u"Tutorials"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/006-news.py ================================================
with_class = testing.expect.with_class
extract_text = testing.expect.extract_text

frontend.page(
    "news",
    expect={
        "document_title": testing.expect.document_title(u"News"),
        "content_title": testing.expect.paleyellow_title(0, u"News"),
        "pageheader_links": testing.expect.pageheader_links("anonymous"),
        "script_user": testing.expect.script_no_user()
    })

# Load all news items to make sure they are syntactically correct.
#
# There may not be any, and we can't easily test that the right set of news
# items are listed, since this depends on whether we upgraded and from what.
# But this testing is still somewhat meaningful.

document = frontend.page("news", params={ "display": "all" })

items = document.findAll(attrs=with_class("item"))

for item in items:
    item_id = item["critic-item-id"]
    item_title = extract_text(item.find(attrs=with_class("title")))

    frontend.page(
        "news",
        params={ "item": item_id },
        expect={
            "document_title": testing.expect.document_title(item_title),
            "content_title": testing.expect.paleyellow_title(0, item_title),
            "pageheader_links": testing.expect.pageheader_links("anonymous"),
            "script_user": testing.expect.script_no_user()
        })

================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/007-home.py ================================================
# Note: /home redirects to /login for anonymous users.
frontend.page("home", expect={
    "document_title": testing.expect.document_title(u"Sign in"),
    "content_title": testing.expect.paleyellow_title(0, u"Sign in"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/008-repositories.py ================================================
# Anonymous access: /repositories renders its standard page elements.
frontend.page("repositories", expect={
    "document_title": testing.expect.document_title(u"Repositories"),
    "content_title": testing.expect.paleyellow_title(0, u"Repositories"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_anonymous_user()
})

================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/009-services.py ================================================
# Anonymous access: /services renders its standard page elements.
frontend.page("services", expect={
    "document_title": testing.expect.document_title(u"Services"),
    "content_title": testing.expect.paleyellow_title(0, u"Services"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_anonymous_user()
})

================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/010-createreview.py ================================================
# Note: /createreview redirects to /login for anonymous users.
frontend.page("createreview", expect={
    "document_title": testing.expect.document_title(u"Sign in"),
    "content_title": testing.expect.paleyellow_title(0, u"Sign in"),
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/011-manageextensions.py ================================================
# Only available if extension support has been enabled. (The body of the
# message is quite long, and particularly interesting to check here.)
expected_message = testing.expect.message("Extension support not enabled", None)

frontend.page(
    "manageextensions",
    expect={ "message": expected_message })

================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/012-statistics.py ================================================
# The /statistics page has no <title>, and has a pale yellow table that doesn't
# have the 'paleyellow' class, and which has five (!) different main headings.
# Its generation should be fixed, but for now, just skip testing the common page
# elements that it's missing.

frontend.page("statistics", expect={
    "pageheader_links": testing.expect.pageheader_links("anonymous"),
    "script_user": testing.expect.script_no_user()
})

================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/013-static-resource.py ================================================
# Checker: the served document contains the JavaScript User constructor.
def check_user_constructor(document):
    expected = "<User constructor found>"
    if "\nfunction User(" in document:
        actual = expected
    else:
        actual = "<no User constructor found>"
    testing.expect.check(expected, actual)

# Checker: the served document contains the jQuery copyright header.
def check_jquery_foundation(document):
    expected = "<jQuery header found>"
    if "jQuery Foundation, Inc" in document:
        actual = expected
    else:
        actual = "<no jQuery header found>"
    testing.expect.check(expected, actual)

# Test a basic regular file.
frontend.page(
    "static-resource/basic.js",
    expected_content_type=("application/javascript", "text/javascript"),
    expect={ "user_constructor": check_user_constructor })

# Test jquery.js, which is a symlink to the current version.
frontend.page(
    "static-resource/third-party/jquery.js",
    expected_content_type=("application/javascript", "text/javascript"),
    expect={ "jquery_foundation": check_jquery_foundation })

# Test a non-existing file.
frontend.page(
    "static-resource/does-not-exist.js",
    expected_http_status=404)

# Test that directory listing is not enabled.
frontend.page( "static-resource/", expected_http_status=403) ================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/100-preferences/001-commit.diff.rulerColumn.py ================================================ # Check existence of preference commit.diff.rulerColumn, added by # # http://critic-review.org/r/57 def check_heading(document): headings = document.findAll("td", attrs={ "class": "heading" }) for heading in headings: if heading.find(text="commit.diff.rulerColumn:"): return testing.expect.check("<preference heading>", "<expected content not found>") def check_input(document): input = document.find("input", attrs={ "name": "commit.diff.rulerColumn" }) if not input: testing.expect.check("<preference input>", "<expected content not found>") testing.expect.check("number", input["type"]) testing.expect.check("0", input["value"]) testing.expect.check("0", input["critic-default"]) frontend.page("config", expect={ "preference_heading": check_heading, "preference_input": check_input }) ================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/100-preferences/002-review.defaultOptOut.py ================================================ # Check existence of preference review.defaultOptOut, added by # # http://critic-review.org/r/40 def check_heading(document): headings = document.findAll("td", attrs={ "class": "heading" }) for heading in headings: if heading.find(text="review.defaultOptOut:"): return testing.expect.check("<preference heading>", "<expected content not found>") def check_input(document): input = document.find("input", attrs={ "name": "review.defaultOptOut" }) if not input: testing.expect.check("<preference input>", "<expected content not found>") testing.expect.check("checkbox", input["type"]) testing.expect.check(False, input.has_key("checked")) testing.expect.check("false", input["critic-default"]) frontend.page("config", expect={ "preference_heading": 
check_heading, "preference_input": check_input }) ================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/100-preferences/003-timezone.py ================================================ # Check existence of preference review.defaultOptOut, added by # # http://critic-review.org/r/40 def check_heading(document): headings = document.findAll("td", attrs={ "class": "heading" }) for heading in headings: if heading.find(text="timezone:"): return testing.expect.check("<preference heading>", "<expected content not found>") def check_select(document): select = document.find("select", attrs={ "name": "timezone" }) if not select: testing.expect.check("<preference select>", "<expected content not found>") testing.expect.check('"Universal/UTC"', select["critic-default"]) option = select.find("option", attrs={ "selected": "selected" }) if not option: testing.expect.check("<pre-selected option>", "<expected content not found>") testing.expect.check("Universal/UTC", option["value"]) testing.expect.check("UTC (UTC / UTC+00:00)", option.string) frontend.page("config", expect={ "preference_heading": check_heading, "preference_input": check_select }) ================================================ FILE: testing/tests/001-main/001-empty/001-anonymous/100-preferences/__init__.py ================================================ # Tests in this directory don't depend on anything but having installed Critic. 
# @dependency 001-main/000-install.py ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/001-dashboard.py ================================================ with frontend.signin(): frontend.page( "dashboard", expect={ "document_title": testing.expect.document_title(u"Dashboard"), "message_title": testing.expect.message_title(u"No reviews!"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_user(instance.user("admin")) }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/002-branches.py ================================================ with frontend.signin(): frontend.page( "branches", expect={ "document_title": testing.expect.document_title(u"Branches"), "content_title": testing.expect.paleyellow_title(0, u"Branches"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/003-search.py ================================================ with frontend.signin(): frontend.page( "search", expect={ "document_title": testing.expect.document_title(u"Review Search"), "content_title": testing.expect.paleyellow_title(0, u"Review Search"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_user(instance.user("admin")) }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/004-config.py ================================================ with frontend.signin(): frontend.page( "config", expect={ "document_title": testing.expect.document_title(u"User preferences"), "content_title": testing.expect.paleyellow_title(0, u"User preferences"), "pageheader_links": 
testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_user(instance.user("admin")) }) frontend.page( "config", params={ "defaults": "yes" }, expect={ "document_title": testing.expect.document_title(u"User preferences"), "content_title": testing.expect.paleyellow_title(0, u"User preferences"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_user(instance.user("admin")) }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/005-tutorial.py ================================================ with frontend.signin(): frontend.page("tutorial", expect={ "document_title": testing.expect.document_title(u"Tutorials"), "content_title": testing.expect.paleyellow_title(0, u"Tutorials"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) frontend.page("tutorial", params={ "item": "request" }, expect={ "document_title": testing.expect.document_title(u"Requesting a Review"), "content_title": testing.expect.paleyellow_title(0, u"Requesting a Review"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) frontend.page("tutorial", params={ "item": "review" }, expect={ "document_title": testing.expect.document_title(u"Reviewing Changes"), "content_title": testing.expect.paleyellow_title(0, u"Reviewing Changes"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) frontend.page("tutorial", params={ "item": "filters" }, expect={ "document_title": testing.expect.document_title(u"Filters"), "content_title": testing.expect.paleyellow_title(0, u"Filters"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": 
testing.expect.script_no_user() }) frontend.page("tutorial", params={ "item": "archival" }, expect={ "document_title": testing.expect.document_title(u"Review branch archival"), "content_title": testing.expect.paleyellow_title(0, u"Review branch archival"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) frontend.page("tutorial", params={ "item": "viewer" }, expect={ "document_title": testing.expect.document_title(u"Repository Viewer"), "content_title": testing.expect.paleyellow_title(0, u"Repository Viewer"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) frontend.page("tutorial", params={ "item": "reconfigure" }, expect={ "document_title": testing.expect.document_title(u"Reconfiguring Critic"), "content_title": testing.expect.paleyellow_title(0, u"Reconfiguring Critic"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) frontend.page("tutorial", params={ "item": "rebase" }, expect={ "document_title": testing.expect.document_title(u"Rebasing a Review"), "content_title": testing.expect.paleyellow_title(0, u"Rebasing a Review"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) frontend.page("tutorial", params={ "item": "administration" }, expect={ "document_title": testing.expect.document_title(u"System Administration"), "content_title": testing.expect.paleyellow_title(0, u"System Administration"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) frontend.page("tutorial", params={ "item": "customization" }, expect={ "document_title": testing.expect.document_title(u"System Customization"), "content_title": 
testing.expect.paleyellow_title(0, u"System Customization"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) frontend.page("tutorial", params={ "item": "search" }, expect={ "document_title": testing.expect.document_title(u"Review Quick Search"), "content_title": testing.expect.paleyellow_title(0, u"Review Quick Search"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) # Unknown items are ignored and the main Tutorials page is returned instead. frontend.page("tutorial", params={ "item": "nonexisting" }, expect={ "document_title": testing.expect.document_title(u"Tutorials"), "content_title": testing.expect.paleyellow_title(0, u"Tutorials"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/006-news.py ================================================ with_class = testing.expect.with_class extract_text = testing.expect.extract_text with frontend.signin(): frontend.page( "news", expect={ "document_title": testing.expect.document_title(u"News"), "content_title": testing.expect.paleyellow_title(0, u"News"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) # Load all news items to make sure they are syntactically correct. # # There may not be any, and we can't easily test that the right # set of news items are listed, since this depends on whether we # upgraded and from what. But this testing is still somewhat # meaningful. 
document = frontend.page("news", params={ "display": "all" }) items = document.findAll(attrs=with_class("item")) for item in items: item_id = item["critic-item-id"] item_title = extract_text(item.find(attrs=with_class("title"))) frontend.page( "news", params={ "item": item_id }, expect={ "document_title": testing.expect.document_title(item_title), "content_title": testing.expect.paleyellow_title(0, item_title), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/007-home.py ================================================ with frontend.signin(): frontend.page( "home", expect={ "document_title": testing.expect.document_title(u"Testing Administrator's Home"), "content_title": testing.expect.paleyellow_title(0, u"Testing Administrator's Home"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_user(instance.user("admin")) }) with frontend.signin("bob"): frontend.operation( "changepassword", data={ "current_pw": "testing", "new_pw": "gnitset" }) frontend.operation( "validatelogin", data={ "fields": { "username": "bob", "password": "testing" }}, expect={ "message": "Wrong password" }) with frontend.signin("bob", "gnitset"): pass with frontend.signin(): frontend.operation( "changepassword", data={ "subject": "bob", "new_pw": "testing" }) with frontend.signin("bob"): pass ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/008-repositories.py ================================================ with frontend.signin(): frontend.page( "repositories", expect={ "document_title": testing.expect.document_title(u"Repositories"), "content_title": testing.expect.paleyellow_title(0, u"Repositories"), "pageheader_links": testing.expect.pageheader_links("authenticated", 
"administrator"), "script_user": testing.expect.script_user(instance.user("admin")) }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/009-services.py ================================================ import time services = {} with_class = testing.expect.with_class extract_text = testing.expect.extract_text def check_services(services, restarted=frozenset()): if isinstance(restarted, basestring): restarted = frozenset([restarted]) def checker(document): expected = set(services.keys()) for service_tr in document.findAll("tr", attrs=with_class("service")): td_name = service_tr.find("td", attrs=with_class("name")) td_pid = service_tr.find("td", attrs=with_class("pid")) td_rss = service_tr.find("td", attrs=with_class("rss")) name = str(extract_text(td_name)) pid = extract_text(td_pid) rss = extract_text(td_rss) # These only start up fully when extensions are enabled. if name.startswith("extension"): continue try: pid = int(pid) except ValueError: if pid == "(not running)": testing.logger.error("Service %r is not running!" % name) else: testing.logger.error( "Service %r has unexpected PID value: %r" % (name, pid)) else: if rss == "N/A": testing.logger.error("Service %r is not running " "(and the PID value is stale...)!" % name) if name in restarted: if pid == services[name]: testing.logger.error( "Service %r not restarted as expected!" 
% name) elif name in services: testing.expect.check(services[name], pid, message="service unexpectedly restarted") if name in expected: expected.remove(name) services[name] = pid if expected: testing.logger.error("Service(s) have gone missing: %r" % ", ".join(expected)) return checker with frontend.signin(): services = {} frontend.page( "services", expect={ "document_title": testing.expect.document_title(u"Services"), "content_title": testing.expect.paleyellow_title(0, u"Services"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_user(instance.user("admin")), "services": check_services(services) }) all_services = set(["manager"]) for service_name in services.keys(): if service_name not in ("manager", "extensiontasks") \ and not service_name.startswith("wsgi:"): all_services.add(service_name) frontend.operation( "restartservice", data={ "service_name": service_name }) frontend.page( "services", expect={ "services": check_services(services, service_name) }) # Need to give the last service(s) restarted some time to actually start up; # otherwise they might receive their TERM signal before they register a # signal handler. time.sleep(0.5) frontend.operation( "restartservice", data={ "service_name": "manager" }) # Need to give the service manager some time to actually restart. Or rather # time to stop; once it has stopped, the /services page has code that waits # (up to 10 seconds) for it to start up again, should it not be up and # running already. 
time.sleep(0.5) frontend.page( "services", expect={ "services": check_services(services, all_services) }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/010-createreview.py ================================================ with frontend.signin(): frontend.page( "createreview", expect={ "document_title": testing.expect.document_title(u"Create Review"), "content_title": testing.expect.paleyellow_title(0, u"Create Review"), "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_user(instance.user("admin")) }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/011-manageextensions.py ================================================ # Only available if extension support has been enabled. (The body of the # message is quite long, and particularly interesting to check here.) expected_message = testing.expect.message("Extension support not enabled", None) with frontend.signin(): frontend.page( "manageextensions", expect={ "message": expected_message }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/012-statistics.py ================================================ # The /statistics page has no <title>, and has a pale yellow table that doesn't # have the 'paleyellow' class, and which has five (!) different main headings. # Its generation should be fixed, but for now, just skip testing the common page # elements that it's missing. 
with frontend.signin(): frontend.page("statistics", expect={ "pageheader_links": testing.expect.pageheader_links("authenticated", "administrator"), "script_user": testing.expect.script_no_user() }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/100-preferences/001-commit.diff.rulerColumn.py ================================================ # Check existence of preference commit.diff.rulerColumn, added by # # http://critic-review.org/r/57 def check_heading(document): headings = document.findAll("td", attrs={ "class": "heading" }) for heading in headings: if heading.find(text="commit.diff.rulerColumn:"): return testing.expect.check("<preference heading>", "<expected content not found>") def check_input(document): input = document.find("input", attrs={ "name": "commit.diff.rulerColumn" }) if not input: testing.expect.check("<preference input>", "<expected content not found>") testing.expect.check("number", input["type"]) testing.expect.check("0", input["value"]) testing.expect.check("0", input["critic-default"]) with frontend.signin(): frontend.page("config", expect={ "preference_heading": check_heading, "preference_input": check_input }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/100-preferences/002-review.defaultOptOut.py ================================================ # Check existence of preference review.defaultOptOut, added by # # http://critic-review.org/r/40 def check_heading(document): headings = document.findAll("td", attrs={ "class": "heading" }) for heading in headings: if heading.find(text="review.defaultOptOut:"): return testing.expect.check("<preference heading>", "<expected content not found>") def check_input(document): input = document.find("input", attrs={ "name": "review.defaultOptOut" }) if not input: testing.expect.check("<preference input>", "<expected content not found>") testing.expect.check("checkbox", input["type"]) 
testing.expect.check(False, input.has_key("checked")) testing.expect.check("false", input["critic-default"]) with frontend.signin(): frontend.page("config", expect={ "preference_heading": check_heading, "preference_input": check_input }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/100-preferences/003-timezone.py ================================================ # Check existence of preference review.defaultOptOut, added by # # http://critic-review.org/r/40 def check_heading(document): headings = document.findAll("td", attrs={ "class": "heading" }) for heading in headings: if heading.find(text="timezone:"): return testing.expect.check("<preference heading>", "<expected content not found>") def check_select(document): select = document.find("select", attrs={ "name": "timezone" }) if not select: testing.expect.check("<preference select>", "<expected content not found>") testing.expect.check('"Universal/UTC"', select["critic-default"]) option = select.find("option", attrs={ "selected": "selected" }) if not option: testing.expect.check("<pre-selected option>", "<expected content not found>") testing.expect.check("Universal/UTC", option["value"]) testing.expect.check("UTC (UTC / UTC+00:00)", option.string) with frontend.signin(): frontend.page("config", expect={ "preference_heading": check_heading, "preference_input": check_select }) ================================================ FILE: testing/tests/001-main/001-empty/002-authenticated/100-preferences/__init__.py ================================================ # Tests in this directory don't depend on anything but having installed Critic. 
# @dependency 001-main/000-install.py ================================================ FILE: testing/tests/001-main/001-empty/003-criticctl/001-basic.py ================================================ def expect_success(argv, expected_output_lines=[]): try: output = instance.criticctl(argv) except testing.CriticctlError as error: logger.error("'criticctl %s': correct criticctl usage failed:\n%s" % (" ".join(argv), error.stdout)) return [] else: output_lines = set(map(str.strip, output.splitlines())) for line in expected_output_lines: if line.strip() not in output_lines: logger.error("'%s': Expected output line not found:\n %r" % (" ".join(argv), line)) return output_lines def expect_failure(argv, expected_output_lines=[]): try: instance.criticctl(argv) except testing.CriticctlError as error: output_lines = set(map(str.strip, error.stderr.splitlines())) for line in expected_output_lines: if line.strip() not in output_lines: logger.error("'%s': Expected output line not found:\n %r" % (" ".join(argv), line)) return output_lines else: logger.error("'criticctl %s': incorrect criticctl usage did not fail" % " ".join(argv)) return [] try: instance.execute(["criticctl"]) except testing.NotSupported: # When testing with --quickstart, we can run criticctl, but we're # not running it as root, so this particular check is irrelevant. pass except testing.virtualbox.GuestCommandError as error: output_lines = set(map(str.strip, error.stderr.splitlines())) if "ERROR: Failed to set UID = critic. Run as root?" not in output_lines: logger.error("Running 'criticctl' as non-root failed with unexpected " "error message") else: logger.error("Running 'criticctl' as non-root did not fail") # Test -h/--help argument. usage_lines = expect_success([], ["Critic administration interface", "Available commands are:"]) expect_success(["-h"], usage_lines) expect_success(["--help"], usage_lines) # Test --etc-dir/-e argument. 
expect_success(["--etc-dir", instance.etc_dir], usage_lines) expect_success(["--etc-dir=" + instance.etc_dir], usage_lines) expect_success(["-e", instance.etc_dir], usage_lines) expect_success(["-e" + instance.etc_dir], usage_lines) lines = expect_failure(["--etc-dir", "/etc/wrong"], ["ERROR: Directory is inaccessible: /etc/wrong"]) expect_failure(["-e", "/etc/wrong"], lines) lines = expect_failure(["--etc-dir"], ["criticctl: error: argument --etc-dir/-e: " "expected one argument"]) expect_failure(["-e"], lines) # Test --identity/-i argument. expect_success(["--identity", "main"], usage_lines) expect_success(["--identity=main"], usage_lines) expect_success(["-i", "main"], usage_lines) expect_success(["-imain"], usage_lines) lines = expect_failure(["--identity", "wrong"], ["ERROR: Invalid identity: wrong"]) expect_failure(["-i", "wrong"], lines) lines = expect_failure(["--identity"], ["criticctl: error: argument --identity/-i: " "expected one argument"]) expect_failure(["-i"], lines) # Test unknown arguments. expect_failure(["-x"], ["criticctl: error: unrecognized arguments: -x"]) expect_failure(["--xxx"], ["criticctl: error: unrecognized arguments: --xxx"]) # Test unknown command. lines = expect_failure(["foo"], ["ERROR: Invalid command: foo"]) expect_failure(["-e", instance.etc_dir, "foo"], lines) expect_failure(["-e" + instance.etc_dir, "foo"], lines) expect_failure(["-i", "main", "foo"], lines) expect_failure(["-imain", "foo"], lines) expect_failure(["-e", instance.etc_dir, "-i", "main", "foo"], lines) expect_failure(["-e" + instance.etc_dir, "-imain", "foo"], lines) expect_failure(["-i", "main", "-e", instance.etc_dir, "foo"], lines) expect_failure(["-imain", "-e" + instance.etc_dir, "foo"], lines) ================================================ FILE: testing/tests/001-main/001-empty/003-criticctl/002-adduser-deluser.py ================================================ # Scenario: Try to add a user 'alice' (already exists). 
try: instance.criticctl( ["adduser", "--name", "alice", "--email", "alice@example.org", "--fullname", "'Alice von Testing'", "--password", "testing"]) except testing.CriticctlError as error: if "alice: user exists" not in error.stderr.splitlines(): logger.error("criticctl failed with unexpected error message:\n%s" % error.stdout) else: logger.error("incorrect criticctl usage did not fail") # Scenario: Try to delete the user 'nosuchuser' (no such user). try: instance.criticctl( ["deluser", "--name", "nosuchuser"]) except testing.CriticctlError as error: if "nosuchuser: no such user" not in error.stderr.splitlines(): logger.error("criticctl failed with unexpected error message:\n%s" % error.stdout) else: logger.error("incorrect criticctl usage did not fail") # Scenario: Add a user 'extra' and then delete the user again. try: instance.criticctl( ["adduser", "--name", "extra", "--email", "extra@example.org", "--fullname", "'Extra von Testing'", "--password", "testing"]) except testing.CriticctlError as error: logger.error("correct criticctl usage failed:\n%s" % error.stdout) else: instance.registeruser("extra") try: instance.criticctl( ["deluser", "--name", "extra"]) except testing.CriticctlError as error: logger.error("correct criticctl usage failed:\n%s" % error.stdout) ================================================ FILE: testing/tests/001-main/001-empty/003-criticctl/003-addrole-delrole.py ================================================ ROLES = ["administrator", "developer", "newswriter", "repositories"] # Scenario: Try to add a role that 'admin' already has. 
try: output = instance.criticctl( ["addrole", "--name", "admin", "--role", "administrator"]) expected_output = "admin: user already has role 'administrator'" if expected_output not in output.splitlines(): logger.error("Expected output not found: %r\n%s" % (expected_output, output)) except testing.CriticctlError as error: logger.error("correct criticctl usage failed:\n%s" % error.stdout) # Scenario: Try to delete a role 'alice' doesn't have. try: output = instance.criticctl( ["delrole", "--name", "alice", "--role", "administrator"]) expected_output = "alice: user doesn't have role 'administrator'" if expected_output not in output.splitlines(): logger.error("Expected output not found: %r\n%s" % (expected_output, output)) except testing.CriticctlError as error: logger.error("correct criticctl usage failed:\n%s" % error.stdout) # Scenario: Try to add a role to a non-existing user. try: instance.criticctl( ["addrole", "--name", "nosuchuser", "--role", "administrator"]) except testing.CriticctlError as error: if "nosuchuser: no such user" not in error.stderr.splitlines(): logger.error("criticctl failed with unexpected error message:\n%s" % error.stdout) else: logger.error("incorrect criticctl usage did not fail: " "addrole, non-existing user") # Scenario: Try to delete a role from a non-existing user. try: instance.criticctl( ["delrole", "--name", "nosuchuser", "--role", "administrator"]) except testing.CriticctlError as error: if "nosuchuser: no such user" not in error.stderr.splitlines(): logger.error("criticctl failed with unexpected error message:\n%s" % error.stdout) else: logger.error("incorrect criticctl usage did not fail: " "delrole, non-existing user") # Scenario: Try to add an invalid role. 
try: instance.criticctl( ["addrole", "--name", "alice", "--role", "joker"]) except testing.CriticctlError as error: if "invalid choice: 'joker'" not in error.stderr: logger.error("criticctl failed with unexpected error message:\n%s" % error.stderr) else: logger.error("incorrect criticctl usage did not fail: " "addrole, invalid role") # Scenario: Try to delete an invalid role. try: instance.criticctl( ["delrole", "--name", "alice", "--role", "joker"]) except testing.CriticctlError as error: if "invalid choice: 'joker'" not in error.stderr: logger.error("criticctl failed with unexpected error message:\n%s" % error.stderr) else: logger.error("incorrect criticctl usage did not fail: " "delrole, invalid role") # Scenario: Add and then delete each role. def test_role(role): try: instance.criticctl( ["addrole", "--name", "alice", "--role", role]) except testing.CriticctlError as error: logger.error("correct criticctl usage failed:\n%s" % error.stdout) else: try: instance.criticctl( ["delrole", "--name", "alice", "--role", role]) except testing.CriticctlError as error: logger.error("correct criticctl usage failed:\n%s" % error.stdout) for role in ROLES: test_role(role) ================================================ FILE: testing/tests/001-main/001-empty/003-criticctl/004-listusers.py ================================================ # Scenario: Invalid format. 
# criticctl must reject an unknown --format value with an argparse
# "invalid choice" usage error on stderr.
try:
    instance.criticctl(
        ["listusers", "--format", "oranges"])
except testing.CriticctlError as error:
    if "invalid choice: 'oranges'" not in error.stderr:
        logger.error("criticctl failed with unexpected error message:\n%s"
                     % error.stderr)
else:
    logger.error("incorrect criticctl usage did not fail")

# Expected output in the human-readable "table" format.
# NOTE(review): the exact column padding below was reconstructed from the
# column separator widths; confirm against actual 'criticctl listusers'
# output.
expected = """\
  id | name       | email                          | fullname                       | status
-----+------------+--------------------------------+--------------------------------+--------
   1 | admin      | admin@example.org              | Testing Administrator          | current
   2 | alice      | alice@example.org              | Alice von Testing              | current
   3 | bob        | bob@example.org                | Bob von Testing                | current
   4 | dave       | dave@example.org               | Dave von Testing               | current
   5 | erin       | erin@example.org               | Erin von Testing               | current
   6 | howard     | howard@example.org             | Howard von Testing             | current
   7 | extra      | extra@example.org              | Extra von Testing              | retired
"""

# Scenario: Default / human readable format.
try:
    output = instance.criticctl(["listusers"])
except testing.CriticctlError as error:
    logger.error("correct criticctl usage failed:\n%s"
                 % error.stdout)
else:
    testing.expect.check(expected, output)

try:
    output = instance.criticctl(["listusers", "-f", "table"])
except testing.CriticctlError as error:
    logger.error("correct criticctl usage failed:\n%s"
                 % error.stdout)
else:
    testing.expect.check(expected, output)

try:
    output = instance.criticctl(["listusers", "--format", "table"])
except testing.CriticctlError as error:
    logger.error("correct criticctl usage failed:\n%s"
                 % error.stdout)
else:
    testing.expect.check(expected, output)

# Expected output in the "tuples" format.
expected = """\
# id, name, email, fullname, status
[
  (1, 'admin', 'admin@example.org', 'Testing Administrator', 'current'),
  (2, 'alice', 'alice@example.org', 'Alice von Testing', 'current'),
  (3, 'bob', 'bob@example.org', 'Bob von Testing', 'current'),
  (4, 'dave', 'dave@example.org', 'Dave von Testing', 'current'),
  (5, 'erin', 'erin@example.org', 'Erin von Testing', 'current'),
  (6, 'howard', 'howard@example.org', 'Howard von Testing', 'current'),
  (7, 'extra', 'extra@example.org', 'Extra von Testing', 'retired'),
]
"""

# Scenario: Tuples format.
try:
    output = instance.criticctl(["listusers", "-f", "tuples"])
except testing.CriticctlError as error:
    logger.error("correct criticctl usage failed:\n%s"
                 % error.stdout)
else:
    testing.expect.check(expected, output)

try:
    output = instance.criticctl(["listusers", "--format", "tuples"])
except testing.CriticctlError as error:
    logger.error("correct criticctl usage failed:\n%s"
                 % error.stdout)
else:
    testing.expect.check(expected, output)

# Expected output in the "dicts" format.
expected = """\
[
  {'id': 1, 'name': 'admin', 'email': 'admin@example.org', 'fullname': 'Testing Administrator', 'status': 'current'},
  {'id': 2, 'name': 'alice', 'email': 'alice@example.org', 'fullname': 'Alice von Testing', 'status': 'current'},
  {'id': 3, 'name': 'bob', 'email': 'bob@example.org', 'fullname': 'Bob von Testing', 'status': 'current'},
  {'id': 4, 'name': 'dave', 'email': 'dave@example.org', 'fullname': 'Dave von Testing', 'status': 'current'},
  {'id': 5, 'name': 'erin', 'email': 'erin@example.org', 'fullname': 'Erin von Testing', 'status': 'current'},
  {'id': 6, 'name': 'howard', 'email': 'howard@example.org', 'fullname': 'Howard von Testing', 'status': 'current'},
  {'id': 7, 'name': 'extra', 'email': 'extra@example.org', 'fullname': 'Extra von Testing', 'status': 'retired'},
]
"""

# Scenario: Dicts format.
try:
    output = instance.criticctl(["listusers", "-f", "dicts"])
except testing.CriticctlError as error:
    logger.error("correct criticctl usage failed:\n%s"
                 % error.stdout)
else:
    testing.expect.check(expected, output)

try:
    output = instance.criticctl(["listusers", "--format", "dicts"])
except testing.CriticctlError as error:
    logger.error("correct criticctl usage failed:\n%s"
                 % error.stdout)
else:
    testing.expect.check(expected, output)


================================================
FILE: testing/tests/001-main/001-empty/003-criticctl/005-configtest.py
================================================


# A healthy installation should report its configuration as valid.
try:
    output = instance.criticctl(["configtest"])
except testing.CriticctlError as error:
    logger.error("correct criticctl usage failed:\n%s"
                 % error.stdout)
else:
    testing.expect.check("System configuration valid.\n", output)


================================================
FILE: testing/tests/001-main/001-empty/003-criticctl/006-restart.py
================================================


# Need a VM (full installation) to restart Critic.
# @flag full

try:
    output = instance.execute(["sudo", "criticctl", "restart"])
except testing.virtualbox.GuestCommandError as error:
    logger.error("correct criticctl usage failed:\n%s"
                 % error.stdout)
else:
    # Expected output is system dependent, so don't check.
    pass

# Check that all services are responding. As a bonus, this also tests that the
# synchronization mechanism is working for all of them.
# Each synchronize_service() call blocks until the named background service
# has responded, proving it survived the restart.
instance.synchronize_service("highlight")
instance.synchronize_service("changeset")
instance.synchronize_service("githook")
instance.synchronize_service("branchtracker")
instance.synchronize_service("maildelivery")
instance.synchronize_service("watchdog")
instance.synchronize_service("maintenance")


================================================
FILE: testing/tests/001-main/001-empty/004-mixed/001-newswriter.py
================================================


import re

def unread_news_count(document):
    # Extract the unread count from the "News (N)" link in the rendered page
    # header; returns 0 when the link carries no "(N)" suffix.
    pageheader = document.find("table", attrs={ "class": "pageheader" })
    for link in pageheader.find("ul").findAll("a"):
        m = re.match("News \((\d+)\)", link.string)
        if m:
            return int(m.group(1))
    return 0

NEWSTEXT = "I'm as mad as hell, and I'm not going to take this anymore."

with frontend.signin("alice"):
    # Record Alice's unread count before any news item is added.
    dashboard = frontend.page(
        "dashboard",
        expect={ "document_title": testing.expect.document_title(u"Dashboard") })

    initial_unread = unread_news_count(dashboard)

with frontend.signin("howard"):
    # Howard has the 'newswriter' role; he adds an item and then edits it.
    response = frontend.operation(
        "addnewsitem",
        data={ "text": "I'm as mad as hell" })

    newsitem_id = response["item_id"]

    frontend.operation(
        "editnewsitem",
        data={ "item_id": newsitem_id,
               "text": NEWSTEXT })

with frontend.signin("alice"):
    # Alice should now have one more unread news item.
    dashboard = frontend.page(
        "dashboard",
        expect={ "document_title": testing.expect.document_title(u"Dashboard") })

    testing.expect.check(initial_unread + 1, unread_news_count(dashboard))

    # Reading the item shows Howard's (edited) text...
    newsitem = frontend.page("news", params={ "item": newsitem_id })
    newstext = newsitem.find("td", attrs={ "class": "text" })

    testing.expect.check(NEWSTEXT, testing.expect.extract_text(newstext).strip())

    # ...and marks it as read, restoring the initial unread count.
    dashboard = frontend.page(
        "dashboard",
        expect={ "document_title": testing.expect.document_title(u"Dashboard") })

    testing.expect.check(initial_unread, unread_news_count(dashboard))

    # Alice lacks the newswriter role, so adding/editing must be rejected.
    frontend.operation(
        "addnewsitem",
        data={ "text": "Quid quid latine dictum sit, altum viditur." },
        expect={ "status": "failure",
                 "code": "notallowed" })

    frontend.operation(
        "editnewsitem",
        data={ "item_id": newsitem_id,
               "text": "It's all hat, no cattle." },
        expect={ "status": "failure",
                 "code": "notallowed" })

with frontend.signin("bob"):
    # Howard's news item should still be unread by bob.
    dashboard = frontend.page(
        "dashboard",
        expect={ "document_title": testing.expect.document_title(u"Dashboard") })

    testing.expect.check(initial_unread + 1, unread_news_count(dashboard))

# Anonymous users should not be able to add or edit news items.
frontend.operation(
    "addnewsitem",
    data={ "text": "If you have a lifetime warranty on something, it is also a hammer." },
    expect={ "status": "failure",
             "code": "mustlogin" })

frontend.operation(
    "editnewsitem",
    data={ "item_id": newsitem_id,
           "text": "The only completely consistent people are dead." },
    expect={ "status": "failure",
             "code": "mustlogin" })


================================================
FILE: testing/tests/001-main/001-empty/004-mixed/002-email.py
================================================


import re

with_class = testing.expect.with_class
extract_text = testing.expect.extract_text

def extract_addresses(document):
    # Return one (email_id, selected, value, verified) tuple per address
    # element on the rendered /home page; |verified| is "verified",
    # "unverified" or None (no verification marker at all).
    addresses = []
    for address in document.findAll(attrs=with_class("address")):
        email_id = int(address["data-email-id"])
        selected = "selected" in address["class"].split()
        value = address.find(attrs=with_class("value")).string
        if address.find(attrs=with_class("verified")):
            verified = "verified"
        elif address.find(attrs=with_class("unverified")):
            verified = "unverified"
        else:
            verified = None
        addresses.append((email_id, selected, value, verified))
    return addresses

def emails(expected):
    # Build a page checker comparing the page's (selected, value, verified)
    # triples against |expected|, ignoring the (unpredictable) email ids.
    def check(document):
        actual = [(selected, value, verified)
                  for _, selected, value, verified
                  in extract_addresses(document)]
        testing.expect.check(expected, actual)
    return check

def no_emails(document):
    # Page checker: the email row displays the "No email address" placeholder.
    row = document.find("tr", attrs=with_class("email"))
    testing.expect.check(
        "No email address",
        extract_text(row.find("td", attrs=with_class("value")).find("i")))

ALICE_ID = 2
ALICE_AT_EXAMPLE = "alice@example.org"
ALICE_AT_WONDERLAND = "alice@wonderland.net"
RE_ALICE_AT_WONDERLAND = r"alice@wonderland\.net"

with frontend.signin("alice"):
    # Check initial state.
    frontend.page(
        "home",
        expect={ "addresses": emails([(True, ALICE_AT_EXAMPLE, None)]) })

    # Add another, initially unverified, email address.
    frontend.operation(
        "addemailaddress",
        data={ "subject_id": ALICE_ID,
               "email": "alice@wonderland.net" })

    document = frontend.page(
        "home",
        expect={ "addresses": emails(
                [(True, ALICE_AT_EXAMPLE, None),
                 (False, ALICE_AT_WONDERLAND, "unverified")]) })

    addresses = extract_addresses(document)
    alice_at_example_id = addresses[0][0]
    alice_at_wonderland_id = addresses[1][0]

    # Check that we got a verification mail.
    subject = r"\[Critic\] Please verify your email: " + RE_ALICE_AT_WONDERLAND
    verification_mail = mailbox.pop(
        accept=[testing.mailbox.ToRecipient(ALICE_AT_WONDERLAND),
                testing.mailbox.WithSubject(subject)])

    # Extract the verification link from the verification mail.
    for line in verification_mail.lines:
        match = re.match(
            r"\s+http://[^/]+/verifyemail\?email=([^&]+)&token=([^&]+)",
            line)
        if match:
            email, token = match.groups()
            testing.expect.check(ALICE_AT_WONDERLAND, email)
            break
    else:
        # Deliberate mismatch: report that no link was found in the mail.
        testing.expect.check(
            "<verification link in verification mail>",
            "<expected content not found>")

    # Request another verification mail.
    frontend.operation(
        "requestverificationemail",
        data={ "email_id": alice_at_wonderland_id })

    mailbox.pop(accept=[testing.mailbox.ToRecipient(ALICE_AT_WONDERLAND),
                        testing.mailbox.WithSubject(subject)])

    # Verify the new email address.
    response = frontend.page(
        "verifyemail",
        params={ "email": ALICE_AT_WONDERLAND,
                 "token": token },
        disable_redirects=True,
        expected_http_status=307)

    testing.expect.check(
        "/home?email_verified=%d" % alice_at_wonderland_id,
        response.headers["Location"])

    # Check that it's now displayed as verified.
    frontend.page(
        "home",
        params={ "email_verified": str(alice_at_wonderland_id) },
        expect={ "addresses": emails(
                [(True, ALICE_AT_EXAMPLE, None),
                 (False, ALICE_AT_WONDERLAND, "verified")]) })

    # Make the new address the selected one.
    frontend.operation(
        "selectemailaddress",
        data={ "email_id": alice_at_wonderland_id })

    frontend.page(
        "home",
        expect={ "addresses": emails(
                [(False, ALICE_AT_EXAMPLE, None),
                 (True, ALICE_AT_WONDERLAND, "verified")]) })

    # Try to delete the now selected address.
    frontend.operation(
        "deleteemailaddress",
        data={ "email_id": alice_at_wonderland_id },
        expect={ "status": "failure",
                 "code": "notallowed" })

    # The page must be unchanged after the rejected deletion.
    frontend.page(
        "home",
        expect={ "addresses": emails(
                [(False, ALICE_AT_EXAMPLE, None),
                 (True, ALICE_AT_WONDERLAND, "verified")]) })

    # Delete the other address instead.
    frontend.operation(
        "deleteemailaddress",
        data={ "email_id": alice_at_example_id })

    frontend.page(
        "home",
        expect={ "addresses": emails(
                [(True, ALICE_AT_WONDERLAND, "verified")]) })

    # Now delete the single, selected address.
    frontend.operation(
        "deleteemailaddress",
        data={ "email_id": alice_at_wonderland_id })

    frontend.page(
        "home",
        expect={ "addresses": no_emails })

with frontend.signin():
    # Re-add Alice's original address as the system administrator.
    frontend.operation(
        "addemailaddress",
        data={ "subject_id": ALICE_ID,
               "email": ALICE_AT_EXAMPLE })

    frontend.page(
        "home",
        params={ "user": "alice" },
        expect={ "addresses": emails([(True, ALICE_AT_EXAMPLE, None)]) })


================================================
FILE: testing/tests/001-main/001-empty/004-mixed/003-oauth.py
================================================


import re
import urllib
import urlparse

def externalauthURL(name):
    # URL that starts external authentication via provider |name|, returning
    # to "/" afterwards.
    return "externalauth/%s?%s" % (name, urllib.urlencode({ "target": "/" }))

def signout():
    frontend.operation("endsession", data={})

with_class = testing.expect.with_class

def isprefix(expected, actual):
    # Comparator for testing.expect.check(): prefix match.
    return actual.startswith(expected)

def issuffix(expected, actual):
    # Comparator for testing.expect.check(): suffix match.
    return actual.endswith(expected)

def expect_system_mail(subject):
    # Pop the next mail sent to the system address and check its subject.
    system_mail = mailbox.pop(testing.mailbox.ToRecipient("system@example.org"))
    testing.expect.check(subject, system_mail.headers["subject"][0]["value"])

def start_externalauth(name):
    # Begin the OAuth dance for provider |name|: request our external auth
    # URL and extract the "state" parameter from the provider redirect.
    response = frontend.page(
        externalauthURL(name),
        disable_redirects=True,
        expected_http_status=302)
    redirect_url = response.headers["Location"]
    testing.expect.check("https://example.com/authorize?", redirect_url,
                         equal=isprefix)
    parsed_url = urlparse.urlparse(redirect_url)
    parsed_query = urlparse.parse_qs(parsed_url.query)
    state = parsed_query.get("state", ["no state received"])[0]
    if state == "no state received":
        # Deliberate mismatch: report the missing parameter.
        testing.expect.check("<state parameter in authorize URI query>",
                             "<no state parameter: %r>" % parsed_url.query)
    return state

def finish_externalauth(name, state):
    # Complete the OAuth dance with |state| and the (always accepted) code
    # "correct"; return the location Critic redirects to.
    response = frontend.page(
        "oauth/" + name,
        params={ "state": state,
                 "code": "correct" },
        disable_redirects=True,
        expected_http_status=302)
    return response.headers["Location"]

# Check that all the expected links to external providers are present
# on the "Sign in" page.
NAMES = ["alice", "carol", "felix", "gina"]

def oauth_links(document):
    # Verify the sign-in page lists exactly the providers in NAMES, each
    # linking to its external authentication URL.
    providers = document.findAll("div", attrs=with_class("provider"))
    names = set(NAMES)
    expected_text = "Sign in using your "
    for provider in providers:
        testing.expect.check(expected_text, provider.contents[0])
        # Subsequent entries are prefixed "or " instead.
        expected_text = "or "
        link = provider.find("a")
        words = link.string.split()
        testing.expect.check(2, len(words))
        testing.expect.check("account", words[-1], equal=issuffix)
        name = words[0].lower()
        if name in names:
            testing.expect.check("/" + externalauthURL(name), link["href"])
            names.remove(name)
        else:
            testing.logger.error("Unexpected provider: %r" % name)
    if names:
        testing.expect.check("<link to providers: %r>" % names,
                             "<no links found>")

frontend.page(
    "login",
    expect={ "oauth links": oauth_links })

#
# Try to sign in using the 'alice' provider, then connect alice's
# account manually, and try again. Make some mistakes along the way.
#

state = start_externalauth("alice")

# Try with the wrong state.
frontend.page(
    "oauth/alice",
    params={ "state": "not the right state",
             "code": "irrelevant" },
    expect={ "message": testing.expect.message("Authentication failed",
                                               "Invalid OAuth state",
                                               body_equal=re.search) })

expect_system_mail(
    "wsgi: InvalidRequest: Invalid OAuth state: not the right state")

# Try with the wrong code (the right code is always "correct".)
frontend.page(
    "oauth/alice",
    params={ "state": state,
             "code": "incorrect" },
    expect={ "message": testing.expect.message("Authentication failed",
                                               "Incorrect code",
                                               body_equal=re.search) })

expect_system_mail("wsgi: Failure: Incorrect code")

# The external account is not yet connected to any Critic user, and user
# registration is disabled, so finishing the dance yields an error page.
redirect_url = finish_externalauth("alice", state)

message_check = testing.expect.message("User registration not enabled", None)

frontend.page(
    redirect_url,
    expect={ "message": message_check })

# Connect the account manually.
instance.criticctl(["connect", "--name", "alice",
                    "--provider", "alice", "--account", "account-alice"])

# Sign in for real now.
state = start_externalauth("alice")

frontend.collect_session_cookie()

redirect_url = finish_externalauth("alice", state)

testing.expect.check("/", redirect_url)

with frontend.cookie_session(signout):
    document_title_check = testing.expect.document_title(
        "Alice von Testing's Home")

    frontend.page(
        "home",
        expect={ "document title": document_title_check })

#
# Create user 'carol' by signing in using the 'carol' provider.
#

state = start_externalauth("carol")
redirect_url = finish_externalauth("carol", state)

# Unknown external account with registration enabled: redirected to the
# /createuser form, pre-populated from the provider's data.
testing.expect.check("/createuser?", redirect_url, equal=isprefix)

parsed_url = urlparse.urlparse(redirect_url)
parsed_query = urlparse.parse_qs(parsed_url.query)

testing.expect.check(["carol"], parsed_query.get("provider"))
testing.expect.check(["account-carol"], parsed_query.get("account"))
testing.expect.check(1, len(parsed_query.get("token")))
testing.expect.check(["/"], parsed_query.get("target"))
testing.expect.check(["carol"], parsed_query.get("username"))
testing.expect.check(["carol@example.org"], parsed_query.get("email"))
testing.expect.check(["Carol von Testing"], parsed_query.get("fullname"))

token = parsed_query.get("token")[0]

# Try with wrong account name.
frontend.operation(
    "registeruser",
    data={ "username": "carol",
           "fullname": "Carol von Testing",
           "email": "carol@example.org",
           "external": { "provider": "carol",
                         "account": "wrong-carol",
                         "token": token }},
    expect={ "message": "Invalid external authentication state." })

# Try with wrong token.
frontend.operation(
    "registeruser",
    data={ "username": "carol",
           "fullname": "Carol von Testing",
           "email": "carol@example.org",
           "external": { "provider": "carol",
                         "account": "account-carol",
                         "token": "wrong token" }},
    expect={ "message": "Invalid external authentication state." })

frontend.collect_session_cookie()

# Use right account and token. This should leave us signed in as carol.
frontend.operation(
    "registeruser",
    data={ "username": "carol",
           "fullname": "Carol von Testing",
           "email": "carol@example.org",
           "external": { "provider": "carol",
                         "account": "account-carol",
                         "token": token }})

instance.registeruser("carol")

with frontend.cookie_session(signout):
    # Check that the email address isn't unverified.
    def email_not_unverified(document):
        address = document.find(attrs=with_class("address"))
        if address.find(attrs=with_class("unverified")):
            # Deliberate mismatch: report the unexpected "unverified" marker.
            testing.expect.check("<carol's email is not unverified>",
                                 "<carol's email is unverified>")

    document_title_check = testing.expect.document_title(
        "Carol von Testing's Home")

    frontend.page(
        "home",
        expect={ "document title": document_title_check,
                 "email not unverified": email_not_unverified })

expect_system_mail("wsgi[registeruser]: User 'carol' registered")

#
# Create user 'felix' by signing in using the 'felix' provider, which
# has 'bypass_createuser' set, so this will be quick.
#

state = start_externalauth("felix")

frontend.collect_session_cookie()

redirect_url = finish_externalauth("felix", state)

instance.registeruser("felix")

with frontend.cookie_session(signout):
    document_title_check = testing.expect.document_title(
        "Felix von Testing's Home")

    frontend.page(
        "home",
        expect={ "document title": document_title_check,
                 "email not unverified": email_not_unverified })

expect_system_mail("wsgi[oauth/felix]: User 'felix' registered")

#
# Create user 'gina' by signing in using the 'gina' provider, which
# has 'verify_email_addresses' set.
#

state = start_externalauth("gina")
redirect_url = finish_externalauth("gina", state)

testing.expect.check("/createuser?", redirect_url, equal=isprefix)

parsed_url = urlparse.urlparse(redirect_url)
parsed_query = urlparse.parse_qs(parsed_url.query)

token = parsed_query.get("token")[0]

frontend.collect_session_cookie()

# Use right account and token. This should leave us signed in as gina.
frontend.operation( "registeruser", data={ "username": "gina", "fullname": "Gina von Testing", "email": "gina@example.org", "external": { "provider": "gina", "account": "account-gina", "token": token }}) instance.registeruser("gina") with frontend.cookie_session(signout): # Check that the email address is unverified. def email_unverified(document): address = document.find(attrs=with_class("address")) if not address.find(attrs=with_class("unverified")): testing.expect.check("<carol's email unverified>", "<carol's email is not unverified>") document_title_check = testing.expect.document_title( "Gina von Testing's Home") frontend.page( "home", expect={ "document title": document_title_check, "email unverified": email_unverified }) expect_system_mail("wsgi[registeruser]: User 'gina' registered") subject = r"\[Critic\] Please verify your email: gina@example\.org" mailbox.pop(accept=[testing.mailbox.ToRecipient("gina@example.org"), testing.mailbox.WithSubject(subject)]) ================================================ FILE: testing/tests/001-main/001-empty/004-mixed/004-password.py ================================================ # Create user 'iris' with no password. 
# NOTE(review): the inner quotes in the --fullname value look intentional
# (testing shell-style quoting); confirm before changing.
instance.criticctl(["adduser", "--name", "iris",
                    "--email", "iris@example.org",
                    "--fullname", "'Iris von Testing'",
                    "--no-password"])

instance.registeruser("iris")

with_class = testing.expect.with_class

def check_password_ui(expected_value, expected_action):
    # Build a page checker for the password row on /home: the displayed
    # value ("****" or "not set") and the action button's label, or
    # "(no action)" when no button is rendered.
    def check(document):
        row = document.find("tr", attrs=with_class("password"))
        cell = row.find("td", attrs=with_class("value"))
        button = cell.find("button")
        testing.expect.check(expected_value, cell.contents[0])
        testing.expect.check(
            expected_action, button.string if button else "(no action)")
    return check

with frontend.signin("alice"):
    # Another regular user sees no password and no action button.
    frontend.page(
        "home",
        params={ "user": "iris" },
        expect={ "password UI": check_password_ui("not set", "(no action)") })

with frontend.signin():
    # The administrator can set iris's password without knowing the current
    # one.
    frontend.page(
        "home",
        params={ "user": "iris",
                 "readonly": "no" },
        expect={ "password UI": check_password_ui("not set", "Set password") })

    frontend.operation(
        "changepassword",
        data={ "subject": "iris",
               "new_pw": "testing" })

    frontend.page(
        "home",
        params={ "user": "iris",
                 "readonly": "no" },
        expect={ "password UI": check_password_ui("****", "Set password") })

with frontend.signin("alice"):
    frontend.page(
        "home",
        params={ "user": "iris" },
        expect={ "password UI": check_password_ui("****", "(no action)") })

with frontend.signin("iris"):
    frontend.page(
        "home",
        expect={ "password UI": check_password_ui("****", "Change password") })

    # Changing someone else's password is not allowed.
    frontend.operation(
        "changepassword",
        data={ "subject": "alice",
               "new_pw": "custom" },
        expect={ "status": "failure",
                 "code": "notallowed" })

    # Changing her own password requires the correct current password.
    frontend.operation(
        "changepassword",
        data={ "subject": "iris",
               "new_pw": "custom" },
        expect={ "status": "failure",
                 "message": "The provided current password is not correct." })

    frontend.operation(
        "changepassword",
        data={ "subject": "iris",
               "current_pw": "wrong",
               "new_pw": "custom" },
        expect={ "status": "failure",
                 "message": "The provided current password is not correct." })

    frontend.operation(
        "changepassword",
        data={ "subject": "iris",
               "current_pw": "testing",
               "new_pw": "custom" })

    frontend.page(
        "home",
        expect={ "password UI": check_password_ui("****", "Change password") })

with frontend.signin("iris", "custom"):
    # Clear the password via criticctl; with no password set, a current
    # password is no longer required to set a new one.
    instance.criticctl(["passwd", "--name", "iris", "--no-password"])

    frontend.page(
        "home",
        expect={ "password UI": check_password_ui("not set", "Set password") })

    frontend.operation(
        "changepassword",
        data={ "subject": "iris",
               "current_pw": "wrong",
               "new_pw": "testing" },
        expect={ "status": "failure",
                 "message": "The provided current password is not correct." })

    frontend.operation(
        "changepassword",
        data={ "subject": "iris",
               "new_pw": "testing" })

    frontend.page(
        "home",
        expect={ "password UI": check_password_ui("****", "Change password") })

    instance.criticctl(["passwd", "--name", "iris", "--password", "other"])

with frontend.signin("iris", "other"):
    frontend.page(
        "home",
        expect={ "password UI": check_password_ui("****", "Change password") })

# Try changing admin's password too.
with frontend.signin():
    frontend.page(
        "home",
        expect={ "password UI": check_password_ui("****", "Change password") })

    frontend.operation(
        "changepassword",
        data={ "new_pw": "custom" },
        expect={ "status": "failure",
                 "message": "The provided current password is not correct." })

    frontend.operation(
        "changepassword",
        data={ "current_pw": "wrong",
               "new_pw": "custom" },
        expect={ "status": "failure",
                 "message": "The provided current password is not correct." })

    frontend.operation(
        "changepassword",
        data={ "current_pw": "testing",
               "new_pw": "custom" })

    frontend.page(
        "home",
        expect={ "password UI": check_password_ui("****", "Change password") })

    # Better change it back again, or we'd break lots of following tests...
    frontend.operation(
        "changepassword",
        data={ "current_pw": "custom",
               "new_pw": "testing" })


================================================
FILE: testing/tests/001-main/001-empty/004-mixed/005-accesstoken.py
================================================


# Sign in and create an access token.
with frontend.signin("alice"):
    check_user(alice)

    access_token = frontend.json(
        "users/%d/accesstokens" % alice.id,
        post={ "title": "005-accesstoken" },
        expect={ "id": int,
                 "access_type": "user",
                 "user": alice.id,
                 "title": "005-accesstoken",
                 "part1": str,
                 "part2": str,
                 "profile": { "http": { "rule": "allow",
                                        "exceptions": [] },
                              "repositories": { "rule": "allow",
                                                "exceptions": [] },
                              "extensions": { "rule": "allow",
                                              "exceptions": [] }} })

    token_id = access_token["id"]
    # part1/part2 double as HTTP authentication username/password.
    username = access_token["part1"]
    password = access_token["part2"]

    # Get the access token and its components.
    frontend.json(
        "accesstokens/%d" % token_id,
        expect=access_token)

    frontend.json(
        "users/me/accesstokens/%d" % token_id,
        expect=access_token)

    frontend.json(
        "accesstokens/%d/profile" % token_id,
        expect={ "profile": access_token["profile"] })

    frontend.json(
        "accesstokens/%d/profile/http" % token_id,
        expect={ "profile/http": access_token["profile"]["http"] })

    frontend.json(
        "accesstokens/%d/profile/repositories" % token_id,
        expect={ "profile/repositories":
                     access_token["profile"]["repositories"] })

    frontend.json(
        "accesstokens/%d/profile/extensions" % token_id,
        expect={ "profile/extensions": access_token["profile"]["extensions"] })

check_user(anonymous)

# Check that Alice can't authenticate using the token via the regular login
# page.
frontend.validatelogin(username, password, expect_failure="Invalid username")

check_user(anonymous)

# Check that Alice can authenticate using the token and HTTP authentication.
with frontend.signin(access_token=access_token):
    check_user(alice, "accesstoken")

check_user(anonymous)

# Check that Bob can't access Alice's access tokens.
with frontend.signin("bob"):
    # All four access paths must be rejected for a non-owner, non-admin.
    frontend.json(
        "users/%d/accesstokens" % alice.id,
        expected_http_status=403)

    frontend.json(
        "users/%d/accesstokens/%d" % (alice.id, token_id),
        expected_http_status=403)

    frontend.json(
        "accesstokens",
        expected_http_status=403)

    frontend.json(
        "accesstokens/%d" % token_id,
        expected_http_status=403)

# Check that an administrator can access Alice's access tokens.
with frontend.signin():
    frontend.json(
        "users/%d/accesstokens" % alice.id,
        expect={ "accesstokens": [access_token] })

    frontend.json(
        "users/%d/accesstokens/%d" % (alice.id, token_id),
        expect=access_token)

    frontend.json(
        "accesstokens",
        expect={ "accesstokens": [access_token] })

    frontend.json(
        "accesstokens/%d" % token_id,
        expect=access_token)

check_user(anonymous)

# Sign in and delete the access token.
with frontend.signin("alice"):
    check_user(alice)

    frontend.json(
        "users/%d/accesstokens/%d" % (alice.id, token_id),
        delete=True,
        expected_http_status=204)

check_user(anonymous)

# Check that Alice can no longer authenticate using the token and HTTP
# authentication.
with frontend.signin(access_token=access_token):
    # Using invalid HTTP authentication should trigger a 401 Unauthorized (and
    # not lead to anonymous access.)
    frontend.page(
        "tutorial",
        expected_http_status=401)

check_user(anonymous)

# Sign in as admin and create an access token for anonymous access.
with frontend.signin():
    check_user(admin)

    access_token = frontend.json(
        "accesstokens",
        post={ "access_type": "anonymous",
               "title": "005-accesstoken (anonymous)" },
        expect={ "id": int,
                 "access_type": "anonymous",
                 "user": None,
                 "title": "005-accesstoken (anonymous)",
                 "part1": str,
                 "part2": str,
                 "profile": { "http": { "rule": "allow",
                                        "exceptions": [] },
                              "repositories": { "rule": "allow",
                                                "exceptions": [] },
                              "extensions": { "rule": "allow",
                                              "exceptions": [] }} })

    token_id = access_token["id"]

check_user(anonymous)

# Check that we can authenticate using the token and HTTP authentication, and
# that we're then anonymous.
#
# This is somewhat silly; we were anonymous before, so it's difficult to know if
# authentication succeeded or not. This kind of access token is mostly useful
# in a system that doesn't otherwise allow anonymous access.
with frontend.signin(access_token=access_token):
    check_user(anonymous, "accesstoken")

check_user(anonymous)

# Sign in and delete the access token.
with frontend.signin():
    check_user(admin)

    access_token = frontend.json(
        "accesstokens/%d" % token_id,
        delete=True,
        expected_http_status=204)

# Check that Alice (not an administrator) can't create an anonymous token.
with frontend.signin("alice"):
    check_user(alice)

    access_token = frontend.json(
        "accesstokens",
        post={ "access_type": "anonymous" },
        expected_http_status=403,
        expect={ "error": { "title": "Permission denied",
                            "message": "Must be an administrator" }})


================================================
FILE: testing/tests/001-main/001-empty/004-mixed/006-accesscontrol-http.py
================================================


# Create an access token, and restrict it to not allow loading /home.
with frontend.signin("alice"):
    access_token = frontend.json(
        "users/me/accesstokens",
        post={ "title": "token #1 for 006-accesscontrol-http.py" },
        expect={ "profile": { "http": { "rule": "allow",
                                        "exceptions": [] },
                              "repositories": { "rule": "allow",
                                                "exceptions": [] },
                              "extensions": { "rule": "allow",
                                              "exceptions": [] }},
                 # use lenient checking
                 "*": "*" })

    frontend.json(
        "users/me/accesstokens/%d/profile/http" % access_token["id"],
        put={ "exceptions": [ { "path_pattern": "home" } ] },
        expect={ "profile/http": {
                     "rule": "allow",
                     "exceptions": [ { "id": int,
                                       "request_method": None,
                                       "path_pattern": "home" } ] }})

# Just to make sure: check that Alice can (still) access /home when
# authenticating normally.
frontend.page("home")

with frontend.signin(access_token=access_token):
    # /home should now return "403 Forbidden".
    frontend.page(
        "home",
        expected_http_status=403,
        expect={ "message_title": testing.expect.message(
                     u"Access denied", u"Access denied: GET /home") })

    # A POST request should also return "403 Forbidden" (even though it wouldn't
    # have worked anyway.)
    frontend.page(
        "home",
        post="",
        expected_http_status=403,
        expect={ "message_title": testing.expect.message(
                     u"Access denied", u"Access denied: POST /home") })

    # /dashboard should still work, of course.
    frontend.page(
        "dashboard",
        expect={ "document_title": testing.expect.document_title(u"Dashboard"),
                 "script_user": testing.expect.script_user(
                     instance.user("alice")) })

# Update the access token to deny all requests except "GET /home" instead.
with frontend.signin("alice"):
    result = frontend.json(
        "users/me/accesstokens/%d/profile" % access_token["id"],
        put={ "http": { "rule": "deny",
                        "exceptions": [ { "request_method": "GET",
                                          "path_pattern": "home" } ] }},
        expect={ "profile": { "http": { "rule": "deny",
                                        "exceptions": [
                                            { "id": int,
                                              "request_method": "GET",
                                              "path_pattern": "home" } ] },
                              "repositories": { "rule": "allow",
                                                "exceptions": [] },
                              "extensions": { "rule": "allow",
                                              "exceptions": [] }} })

    # Remember the exception's id so it can be deleted again below.
    home_exception = result["profile"]["http"]["exceptions"][0]

with frontend.signin(access_token=access_token):
    # /home should now be allowed.
    frontend.page(
        "home",
        expect={ "document_title": testing.expect.document_title(
                     u"Alice von Testing's Home"),
                 "script_user": testing.expect.script_user(
                     instance.user("alice")) })

    # A POST request should still return "403 Forbidden" though.
    frontend.page(
        "home",
        post="",
        expected_http_status=403,
        expect={ "message_title": testing.expect.message(
                     u"Access denied", u"Access denied: POST /home") })

    # /dashboard should no longer work.
    frontend.page(
        "dashboard",
        expected_http_status=403,
        expect={ "message_title": testing.expect.message(
                     u"Access denied", u"Access denied: GET /dashboard") })

# Update the access token to also allow access to "GET /dashboard".
with frontend.signin("alice"):
    frontend.json(
        "users/me/accesstokens/%d/profile/http/exceptions"
            % access_token["id"],
        post={ "request_method": "GET",
               "path_pattern": "dashboard" },
        expect={ "profile/http/exceptions": [
                     { "id": int,
                       "request_method": "GET",
                       "path_pattern": "home" },
                     { "id": int,
                       "request_method": "GET",
                       "path_pattern": "dashboard" } ] })

with frontend.signin(access_token=access_token):
    # /dashboard should now work again.
    frontend.page(
        "dashboard",
        expect={ "document_title": testing.expect.document_title(u"Dashboard"),
                 "script_user": testing.expect.script_user(
                     instance.user("alice")) })

# Update the access token by deleting the /home exception.
with frontend.signin("alice"):
    frontend.json(
        ("users/me/accesstokens/%d/profile/http/exceptions/%d"
         % (access_token["id"], home_exception["id"])),
        delete=True,
        expect={ "profile": { "http": { "rule": "deny",
                                        "exceptions": [
                                            { "id": int,
                                              "request_method": "GET",
                                              "path_pattern": "dashboard" } ] },
                              "repositories": { "rule": "allow",
                                                "exceptions": [] },
                              "extensions": { "rule": "allow",
                                              "exceptions": [] }},
                 # use lenient checking
                 "*": "*" })

with frontend.signin(access_token=access_token):
    # /home should now return "403 Forbidden" again.
    frontend.page(
        "home",
        expected_http_status=403,
        expect={ "message_title": testing.expect.message(
                     u"Access denied", u"Access denied: GET /home") })

    # /dashboard should still work.
    frontend.page(
        "dashboard",
        expect={ "document_title": testing.expect.document_title(u"Dashboard"),
                 "script_user": testing.expect.script_user(
                     instance.user("alice")) })

# Create an access token for anonymous access, and restrict it to not allow
# loading /branches.
with frontend.signin():
    # Only administrators may create anonymous access tokens; the restricting
    # profile can be supplied directly in the creation request.
    access_token = frontend.json(
        "accesstokens",
        post={ "title": "token #2 for 006-accesscontrol-http.py",
               "access_type": "anonymous",
               "profile": { "http": { "rule": "allow",
                                      "exceptions": [{
                                          "path_pattern": "branches" }] }},
               })

with frontend.signin(access_token=access_token):
    check_user(anonymous, "accesstoken")

    # /branches should now return "403 Forbidden".
    frontend.page(
        "branches",
        expected_http_status=403,
        expect={ "message_title": testing.expect.message(
                     u"Access denied", u"Access denied: GET /branches") })


================================================
FILE: testing/tests/001-main/001-empty/004-mixed/007-json-session.py
================================================


# Not signed in: the current session is anonymous and describes the login
# form fields.
frontend.json(
    "sessions/current",
    expect={ "user": None,
             "type": None,
             "fields": [{ "identifier": "username",
                          "label": "Username:",
                          "hidden": False,
                          "description": None },
                        { "identifier": "password",
                          "label": "Password:",
                          "hidden": True,
                          "description": None }] })

# Sign in as alice.
with frontend.signin("alice", use_json_api=True):
    check_user(alice)

    # Sign out prematurely, just to make sure the signout actually works as
    # expected.
    #
    # Exiting the frontend.signin() scope will do the same, but will cause the
    # session cookie to be "deleted" regardless of what the server does, so
    # could hide signout failures.
    frontend.json(
        "sessions/current",
        delete=True,
        expected_http_status=204)

check_user(anonymous)

with testing.utils.access_token("alice", profile={}) as access_token:
    with frontend.signin(access_token=access_token):
        check_user(alice, "accesstoken")

# Various invalid requests against the sessions resource.
frontend.json(
    "sessions",
    expected_http_status=400,
    expect={ "error": {
                 "title": "Invalid API request",
                 "message": "Resource requires an argument: v1/sessions" }})

frontend.json(
    "sessions/invalid",
    expected_http_status=400,
    expect={ "error": { "title": "Invalid API request",
                        "message": 'Resource argument must be "current"' }})

frontend.json(
    "sessions",
    post={ "username": "alice",
           "password": "wrong" },
    expected_http_status=403,
    expect={ "error": { "title": "Session error",
                        "message": "Wrong password" }})

frontend.json(
    "sessions",
    post={ "username": "bobsicle",
           "password": "testing" },
    expected_http_status=403,
    expect={ "error": { "title": "Session error",
                        "message": "Invalid username" }})


================================================
FILE: testing/tests/001-main/001-empty/004-mixed/__init__.py
================================================


def check_user(user, session_type=False):
    # Verify that both the rendered dashboard and the JSON API agree on who
    # we are signed in as, and via what kind of session.  The sentinel
    # default session_type=False means "derive from |user|": None for the
    # anonymous user, "normal" otherwise.
    frontend.page(
        "dashboard",
        expect={ "script_user": testing.expect.script_user(user) })

    if session_type is False:
        if user.id is None:
            # Anonymous user.
            session_type = None
        else:
            session_type = "normal"

    frontend.json(
        "sessions/current",
        params={ "fields": "user,type" },
        expect={ "user": user.id,
                 "type": session_type })

anonymous = testing.User.anonymous()
alice = instance.user("alice")
admin = instance.user("admin")

check_user(anonymous)


================================================
FILE: testing/tests/001-main/002-createrepository.py
================================================


import time

# NOTE(review): this definition continues beyond this excerpt; it is
# reproduced here only as far as it is visible.
def check_repository(document):
    rows = document.findAll("tr", attrs=testing.expect.with_class("repository"))

    testing.expect.check(1, len(rows))

    def check_cell(row, class_name, expected_string, inline_element_type=None):
        # Check that |row| has exactly one cell of |class_name| whose text
        # (or, when inline_element_type is given, whose inline element's
        # text) equals |expected_string|.
        cells = row.findAll("td", attrs=testing.expect.with_class(class_name))
        testing.expect.check(1, len(cells))
        if inline_element_type:
            testing.expect.check(1, len(cells[0].findAll(inline_element_type)))
            string = cells[0].findAll("i")[0].string
        else:
            string = cells[0].string
        if string is None:
            string = ""
        testing.expect.check(expected_string, string)

    check_cell(rows[0], "name", "critic")
    check_cell(rows[0], "location", "http://%s/critic.git" % instance.hostname)
    check_cell(rows[0], "upstream", " ")

    rows = document.findAll("tr", attrs=testing.expect.with_class("details"))

    testing.expect.check(1, len(rows))

    tables = rows[0].findAll("table",
                             attrs=testing.expect.with_class("trackedbranches"))

    testing.expect.check(1, len(tables))

    # Would like to use 'tables[0].findAll()' here, but BeautifulSoup apparently
    # doesn't parse nested tables correctly, so these rows aren't actually part
    # of the 'trackedbranches' table according to it.
rows = document.findAll("tr", attrs=testing.expect.with_class("branch")) testing.expect.check(2, len(rows)) check_cell(rows[0], "localname", "Tags", inline_element_type="i") check_cell(rows[0], "remote", repository.url) check_cell(rows[0], "remotename", "N/A", inline_element_type="i") check_cell(rows[0], "enabled", "Yes") check_cell(rows[0], "users", "") check_cell(rows[1], "localname", "master") check_cell(rows[1], "remote", repository.url) check_cell(rows[1], "remotename", "master") check_cell(rows[1], "enabled", "Yes") check_cell(rows[1], "users", "") with frontend.signin(): # Check that this URL isn't handled already. We're using it later to detect # that the repository has been created and the tracked branch fetched, and # if it's already handled for some reason, that check won't be reliable. frontend.page("critic/master", expected_http_status=404) frontend.operation("addrepository", data={ "name": "critic", "path": "critic", "mirror": { "remote_url": repository.url, "remote_branch": "master", "local_branch": "master" } }) instance.synchronize_service("branchtracker") with repository.workcopy(empty=True) as work: REMOTE_URL = instance.repository_url("alice") try: work.run( ["ls-remote", "--exit-code", REMOTE_URL, "refs/heads/master"]) except testing.repository.GitCommandError: logger.error("Repository main branch ('refs/heads/master') " "not fetched as expected.") raise testing.TestFailure # Check that /repositories still loads correctly now that there's a # repository in the system. frontend.page( "repositories", expect={ "document_title": testing.expect.document_title(u"Repositories"), "content_title": testing.expect.paleyellow_title(0, u"Repositories"), "repository": check_repository }) # Add another repository. This time, without a tracking branch, but we'll # actually push the same branch (IOW our current branch of critic.git) to # it, simply because we don't really have another available with anything # useful in it. 
frontend.operation("addrepository", data={ "name": "other", "path": "other" }) repository.run( ["push", instance.repository_url("alice", repository="other"), "HEAD:refs/heads/master"]) frontend.operation("addrepository", data={ "name": "a" * 65, "path": "validpath2" }, expect={ "status": "failure", "code": "paramtoolong:data.name" }) frontend.operation("addrepository", data={ "name": "", "path": "validpath1" }, expect={ "status": "failure", "code": "paramtooshort:data.name" }) frontend.operation("addrepository", data={ "name": "a/b", "path": "validpath3" }, expect={ "status": "failure", "code": "paramcontainsillegalchar:data.name", "message": "invalid input: short name may not contain the character '/'" }) frontend.operation("addrepository", data={ "name": "critic.git", "path": "validpath3" }, expect={ "status": "failure", "code": "badsuffix_name" }) frontend.operation("addrepository", data={ "name": "r", "path": "validpath" }, expect={ "status": "failure", "code": "invalid_name" }) frontend.operation("addrepository", data={ "name": "validname", "path": "" }, expect={ "status": "failure", "code": "paramtooshort:data.path" }) ================================================ FILE: testing/tests/001-main/003-self/001-rulerColumn.py ================================================ import re # This is an arbitrary (and fairly small) commit on master: COMMIT = "927e2ba833cb0c9ce588b5f59c42bbb246e3e20c" def check_rulerColumn(document): for script in document.findAll("script"): # Ignore external scripts. 
if script.has_key("src"): continue if re.match(r"var\s+rulerColumn\s*=\s*0;", script.string): break else: testing.expect.check("<rulerColumn script>", "<expected content not found>") frontend.page("critic/%s" % COMMIT, expect={ "rulerColumn_script": check_rulerColumn }) ================================================ FILE: testing/tests/001-main/003-self/002-emptyfile.py ================================================ # This is the commit that adds testing/input/empty.txt. COMMIT = "47c6cea51af517107c403d96810fce946825aacc" def check_description(document): actual = "<expected content not found>" for row in document.findAll("tr"): cells = row.findAll("td") if len(cells) >= 2 \ and cells[0].has_key("class") \ and cells[0]["class"] == "path" \ and cells[0].a \ and cells[0].a.string \ and cells[0].a.string.endswith("/empty.txt") \ and cells[1].i \ and cells[1].i.string: actual = cells[1].i.string break testing.expect.check(u"empty", actual) with frontend.signin(): frontend.page("showcommit", params={ "repository": "critic", "sha1": COMMIT }, expect={ "description": check_description }) ================================================ FILE: testing/tests/001-main/003-self/003-binaryfile.py ================================================ # This is the commit that adds testing/input/binary. 
COMMIT = "47c6cea51af517107c403d96810fce946825aacc" def check_description(document): actual = "<expected content not found>" for row in document.findAll("tr"): cells = row.findAll("td") if len(cells) >= 2 \ and cells[0].has_key("class") \ and cells[0]["class"] == "path" \ and cells[0].a \ and cells[0].a.string \ and cells[0].a.string.endswith("/binary") \ and cells[1].i \ and cells[1].i.string: actual = cells[1].i.string break testing.expect.check(u"binary", actual) with frontend.signin(): frontend.page("showcommit", params={ "repository": "critic", "sha1": COMMIT }, expect={ "description": check_description }) ================================================ FILE: testing/tests/001-main/003-self/004-createreview.py ================================================ # Scenario: Alice creates a review of a single commit with review filters that # make Bob a reviewer and Dave a watcher, and then pushes a second commit to # that review. # # Checks: Mostly that this doesn't fail completely, and that the expected mails # appear to be sent. import re # Random commit on master: COMMIT_SHA1 = "f771149aba230c4712c9cb9c6af4ccfea2b7967d" COMMIT_SUMMARY = "Minor /dashboard query optimizations" # The next commit on master: FOLLOWUP_SHA1 = "e0892183f38932cec0d33408bdfebb290a13f8f3" def check_summary_input(document): input = document.find("input", attrs={ "id": "summary" }) if not input: testing.expect.check("<review summary input>", "<expected content not found>") testing.expect.check(COMMIT_SUMMARY, input["value"]) with frontend.signin("alice"): # Loading /createreview first is not really necessary, but might as well try # that as well. 
document = frontend.page( "createreview", expect={ "document_title": testing.expect.document_title(u"Create Review") }) document = frontend.page( "createreview", params={ "repository": "critic", "commits": COMMIT_SHA1 }, expect={ "document_title": testing.expect.document_title(u"Create Review"), "summary_input": check_summary_input }) scripts = document.findAll("script") for script in scripts: if script.has_key("src"): continue match = re.search( r"^\s*var review_data\s*=\s*\{\s*commit_ids:\s*\[\s*(\d+)\s*\]", script.string, re.MULTILINE) if match: commit_id = int(match.group(1)) break else: testing.expect.check("<data script>", "<expected content not found>") result = frontend.operation( "submitreview", data={ "repository_id": 1, "commit_ids": [commit_id], "branch": "r/004-createreview", "summary": COMMIT_SUMMARY, "applyfilters": True, "applyparentfilters": True, "reviewfilters": [{ "username": "bob", "type": "reviewer", "path": "/" }, { "username": "dave", "type": "watcher", "path": "/" }, { "username": "erin", "type": "watcher", "path": "/" }], "recipientfilters": { "mode": "opt-out" }}, expect={ "review_id": 1 }) def to(name): return testing.mailbox.ToRecipient("%s@example.org" % name) def check_initial(mail): testing.expect.check("New Review: %s" % COMMIT_SUMMARY, mail.header("Subject")) line = "Commit: %s" % COMMIT_SHA1 if line not in to_alice.lines: testing.expect.check("<%r line>" % line, "<expected content not found>") to_alice = mailbox.pop(accept=to("alice")) check_initial(to_alice) testing.expect.check("owner", to_alice.header("OperaCritic-Association")) to_bob = mailbox.pop(accept=to("bob")) check_initial(to_bob) testing.expect.check("reviewer", to_bob.header("OperaCritic-Association")) to_dave = mailbox.pop(accept=to("dave")) check_initial(to_dave) testing.expect.check("watcher", to_dave.header("OperaCritic-Association")) to_erin = mailbox.pop(accept=to("erin")) check_initial(to_erin) testing.expect.check("watcher", 
to_erin.header("OperaCritic-Association")) mailbox.check_empty() with repository.workcopy() as work: work.run(["checkout", "-q", "-b", "r/004-createreview", COMMIT_SHA1]) work.run(["cherry-pick", FOLLOWUP_SHA1]) followup_sha1 = work.run(["rev-parse", "HEAD"]).strip() SETTINGS = { "email.subjectLine.updatedReview.commitsPushed": "" } with testing.utils.settings("erin", SETTINGS): work.run( ["push", "-q", instance.repository_url("alice"), "HEAD:refs/heads/r/004-createreview"]) def check_followup(mail): testing.expect.check("Updated Review: %s" % COMMIT_SUMMARY, mail.header("Subject")) line = "Commit: %s" % followup_sha1 if line not in mail.lines: testing.expect.check("<%r line>" % line, "<expected content not found>") to_alice = mailbox.pop(accept=to("alice")) check_followup(to_alice) to_bob = mailbox.pop(accept=to("bob")) check_followup(to_bob) to_dave = mailbox.pop(accept=to("dave")) check_followup(to_dave) # Note: Erin is a watcher too, but because of the empty subject line # preference set above, she shouldn't receive this email. mailbox.check_empty() ================================================ FILE: testing/tests/001-main/003-self/004-first-review-created/001-addreviewfilters-bogus.py ================================================ INVALID_USER_ID = 0 with frontend.signin("alice"): frontend.operation( "addreviewfilters", data={ "review_id": 1, "filters": [{ "type": "watcher", "user_ids": [INVALID_USER_ID], "paths": ["/"] }] }, expect={ "status": "failure", "code": "invaliduserid" }) ================================================ FILE: testing/tests/001-main/003-self/004-first-review-created/002-review-archival.py ================================================ with repository.workcopy() as work, frontend.signin("alice"): REMOTE_URL = instance.repository_url("alice") def assert_branch_state(archived): # Check that the branch is or isn't flagged as archived on the review # front-page. 
document = frontend.page("r/1") basic = testing.expect.find_paleyellow(document, 0) branch = basic.find(attrs=testing.expect.with_class("branch")) actual = "archived" in branch["class"].split() testing.expect.check(archived, actual) branch_name = testing.expect.extract_text(branch) # Also check that the branch's ref exists or doesn't exist in the # repository. expected = "ref is missing" if archived else "ref is present" try: work.run(["ls-remote", "--exit-code", REMOTE_URL, "refs/heads/" + branch_name]) actual = "ref is present" except testing.repository.GitCommandError: actual = "ref is missing" testing.expect.check(expected, actual) # Check that the branch isn't already archived (no reason whatsoever it # should be.) assert_branch_state(archived=False) # Check that the operations fail as expected on an open review whose branch # is not already archived. frontend.operation( "archivebranch", data={ "review_id": 1 }, expect={ "status": "failure", "code": "invalidstate" }) frontend.operation( "resurrectbranch", data={ "review_id": 1 }, expect={ "status": "failure", "code": "invalidstate" }) # Drop the review, archive the branch, and check that it became archived. frontend.operation( "dropreview", data={ "review_id": 1 }) frontend.operation( "archivebranch", data={ "review_id": 1 }) assert_branch_state(archived=True) # Check that this operation now fails. frontend.operation( "archivebranch", data={ "review_id": 1 }, expect={ "status": "failure", "code": "invalidstate" }) # Resurrect the branch and check that it becomes not archived again. frontend.operation( "resurrectbranch", data={ "review_id": 1 }) assert_branch_state(archived=False) # Schedule an archival in -1 days (i.e. ASAP), force maintenance to run, and # check that the branch was archived. 
frontend.operation( "schedulebrancharchival", data={ "review_id": 1, "delay": -1 }) assert_branch_state(archived=False) instance.synchronize_service("maintenance", force_maintenance=True) assert_branch_state(archived=True) # Resurrect branch again. frontend.operation( "resurrectbranch", data={ "review_id": 1 }) assert_branch_state(archived=False) # Schedule an archival in 1 day (i.e. not now), force maintenance to run, # and check that the branch wasn't archived. frontend.operation( "schedulebrancharchival", data={ "review_id": 1, "delay": 1 }) instance.synchronize_service("maintenance", force_maintenance=True) assert_branch_state(archived=False) # Schedule another archival in -1 days (i.e. ASAP), then reopen the review, # force maintenance to run, and check that the branch wasn't archived. frontend.operation( "schedulebrancharchival", data={ "review_id": 1, "delay": -1 }) frontend.operation( "reopenreview", data={ "review_id": 1 }) instance.synchronize_service("maintenance", force_maintenance=True) assert_branch_state(archived=False) # Drop the review, archive the branch, and check that it became archived, # then reopen the review, and check that the branch was resurrected # automatically. 
frontend.operation( "dropreview", data={ "review_id": 1 }) frontend.operation( "archivebranch", data={ "review_id": 1 }) assert_branch_state(archived=True) frontend.operation( "reopenreview", data={ "review_id": 1 }) assert_branch_state(archived=False) ================================================ FILE: testing/tests/001-main/003-self/004-first-review-created/__init__.py ================================================ # @dependency 001-main/003-self/004-createreview.py ================================================ FILE: testing/tests/001-main/003-self/005-checkbranch.py ================================================ # Random commit on master: COMMIT_SHA1 = "bc661163b11234e85ec7b0efe1195cce473f234a" document_title = testing.expect.document_title("Check branch review status") content_title = testing.expect.paleyellow_title(0, "Check branch review status") # First load /checkbranch without parameters; this just returns a form. frontend.page( url="checkbranch", expect={ "document_title": document_title, "content_title": content_title }) # Create some branches. The commits on them are not really that relevant, but # they should not be on master. We generate some such commits simply by # reverting some commits that are on master. # # One branch is pushed to Critic's repository, but also to "origin" where it # has one additional commit. # # Another branch is not pushed to Critic's repository, only to "origin". 
# --- Branch setup for /checkbranch tests (005-checkbranch.py) ---
# Build two branches of revert commits on top of COMMIT_SHA1:
#   005-checkbranch-1: 1 commit pushed to Critic, 2 commits pushed to origin
#   005-checkbranch-2: 3 commits pushed only to origin
# so the fetch=no/fetch=yes page loads below see different commit counts.
with repository.workcopy() as work:
    work.run(["checkout", "-q", "-b", "005-checkbranch", COMMIT_SHA1])

    # HEAD, HEAD^ and HEAD^^ are the tip and its first/second ancestors
    # (gitrevisions caret notation); reverting them yields commits that are
    # guaranteed not to be on master.
    first_sha1 = work.run(["rev-parse", "HEAD"]).strip()
    second_sha1 = work.run(["rev-parse", "HEAD^"]).strip()
    third_sha1 = work.run(["rev-parse", "HEAD^^"]).strip()

    work.run(["revert", "--no-edit", first_sha1])
    # One revert commit visible to Critic on 005-checkbranch-1 ...
    work.run(["push", "-q", instance.repository_url("alice"),
              "HEAD:refs/heads/005-checkbranch-1"])

    work.run(["revert", "--no-edit", second_sha1])
    # ... but two on origin's copy of the same branch.
    work.run(["push", "-q", "origin", "HEAD:refs/heads/005-checkbranch-1"])

    work.run(["revert", "--no-edit", third_sha1])
    # 005-checkbranch-2 (three commits) exists only on origin.
    work.run(["push", "-q", "origin", "HEAD:refs/heads/005-checkbranch-2"])

document_title = testing.expect.document_title(
    "Branch review status: 005-checkbranch-1")
content_title = testing.expect.paleyellow_title(0, "Unmerged Commits (1)")

# Load /checkbranch with fetch=no checking the first branch.
# Only the single commit pushed to Critic's repository is seen.
frontend.page(
    url="checkbranch",
    params={ "repository": "critic",
             "commit": "005-checkbranch-1",
             "upstream": "master" },
    expect={ "document_title": document_title,
             "content_title": content_title })

content_title = testing.expect.paleyellow_title(0, "Unmerged Commits (2)")

# Load /checkbranch with fetch=yes checking the first branch.
# Fetching from origin picks up the second revert commit as well.
frontend.page(
    url="checkbranch",
    params={ "repository": "critic",
             "commit": "005-checkbranch-1",
             "fetch": "yes",
             "upstream": "master" },
    expect={ "document_title": document_title,
             "content_title": content_title })

message_title = testing.expect.message_title(
    "Unable to interpret '005-checkbranch-2' as a commit reference.")

# Load /checkbranch with fetch=no checking the second branch. This essentially
# fails, since we didn't push this branch to Critic's repository.
# fetch=no on 005-checkbranch-2 must fail: the branch was never pushed to
# Critic's repository, only to origin.
frontend.page(
    url="checkbranch",
    params={ "repository": "critic",
             "commit": "005-checkbranch-2",
             "upstream": "master" },
    expect={ "message_title": message_title })

document_title = testing.expect.document_title(
    "Branch review status: 005-checkbranch-2")
content_title = testing.expect.paleyellow_title(0, "Unmerged Commits (3)")

# Load /checkbranch with fetch=yes checking the second branch.
# With fetching enabled all three revert commits are found.
frontend.page(
    url="checkbranch",
    params={ "repository": "critic",
             "commit": "005-checkbranch-2",
             "fetch": "yes",
             "upstream": "master" },
    expect={ "document_title": document_title,
             "content_title": content_title })

content_title = testing.expect.paleyellow_title(0, "Unmerged Commits (1)")

# Load /checkbranch checking the second branch, using the first branch as the
# upstream instead of master.  Only the one commit not shared with
# 005-checkbranch-1 remains unmerged.
frontend.page(
    url="checkbranch",
    params={ "repository": "critic",
             "commit": "005-checkbranch-2",
             "upstream": "005-checkbranch-1" },
    expect={ "document_title": document_title,
             "content_title": content_title })

# Load /checkbranchtext checking the first branch.
# The plain-text variant is compared verbatim against the expected report.
document = frontend.page(
    url="checkbranchtext",
    expected_content_type="text/plain",
    params={ "repository": "critic",
             "commit": "005-checkbranch-1",
             "upstream": "master" })

testing.expect.check('Revert "Fix typo s/orderIndeces/orderIndices/": REVIEW STATUS UNKNOWN!\n' +
                     'Revert "Delete unused commented out code": REVIEW STATUS UNKNOWN!\n',
                     str(document))


================================================
FILE: testing/tests/001-main/003-self/006-showreview-reviewfilter.py
================================================
# Scenario: Alice creates a review of a single commit with a review filter with
# empty path. Loading the review front-page after that should not crash, but
# did due to a problem introduced by the filter system rewrite.
import re # Random commit on master: COMMIT_SHA1 = "f771149aba230c4712c9cb9c6af4ccfea2b7967d" REVIEW_SUMMARY = "006-showreview-reviewfilter.py" with frontend.signin("alice"): # Loading /createreview is not really necessary, but might as well try that # as well. document = frontend.page( "createreview", params={ "repository": "critic", "commits": COMMIT_SHA1 }) scripts = document.findAll("script") for script in scripts: if script.has_key("src"): continue match = re.search( r"^\s*var review_data\s*=\s*\{\s*commit_ids:\s*\[\s*(\d+)\s*\]", script.string, re.MULTILINE) if match: commit_id = int(match.group(1)) break else: testing.expect.check("<data script>", "<expected content not found>") result = frontend.operation( "submitreview", data={ "repository": 1, "commit_ids": [commit_id], "branch": "r/006-showreview-reviewfilter", "summary": REVIEW_SUMMARY, "applyfilters": True, "applyparentfilters": True, "reviewfilters": [{ "username": "bob", "type": "reviewer", "path": "" }, { "username": "dave", "type": "watcher", "path": "" }, { "username": "erin", "type": "watcher", "path": "" } ], "recipientfilters": { "mode": "opt-out", "excluded": ["erin"] }}) def to(name): return testing.mailbox.ToRecipient("%s@example.org" % name) mailbox.pop(accept=to("alice")) mailbox.pop(accept=to("bob")) mailbox.pop(accept=to("dave")) mailbox.check_empty() review_id = result["review_id"] document_title = "r/%d (No progress) - %s - Opera Critic" % (review_id, REVIEW_SUMMARY) with frontend.signin("admin"): frontend.page( "r/%d" % review_id, expect={ "document_title": testing.expect.document_title(document_title) }) ================================================ FILE: testing/tests/001-main/003-self/007-http-backend.py ================================================ import os import subprocess import tempfile import shutil environ = os.environ.copy() def git(args, cwd=None): argv = ["git"] argv.extend(args) logger.debug("Running: %s" % " ".join(argv)) output = subprocess.check_output( argv, 
cwd=cwd, env=environ, stderr=subprocess.STDOUT) if output.strip(): logger.debug("Output:\n%s" % output.rstrip()) work_dir = tempfile.mkdtemp() try: # Set invalid password so that authentication (if required) fails. environ["GIT_ASKPASS"] = os.path.abspath("testing/password-invalid") # This should not require a password. try: git(["clone", "--quiet", "--branch", "master", "%s/critic.git" % frontend.prefix("alice")], cwd=work_dir) except subprocess.CalledProcessError as error: logger.error("'git clone' failed: %s\n%s" % (str(error), error.output.rstrip())) # This should require a password. try: git(["push", "--quiet", "origin", "HEAD:007-http-backend-1"], cwd=os.path.join(work_dir, "critic")) logger.error("Unauthenticated push (apparently) accepted!") except subprocess.CalledProcessError: pass # Set valid password so that authentication succeeds. environ["GIT_ASKPASS"] = os.path.abspath("testing/password-testing") # This should require a password. try: git(["push", "--quiet", "origin", "HEAD:007-http-backend-2"], cwd=os.path.join(work_dir, "critic")) except subprocess.CalledProcessError as error: logger.error("'git push' failed: %s\n%s" % (str(error), error.output.rstrip())) finally: shutil.rmtree(work_dir) # Same thing again, only with a repository URL without ".git" suffix. work_dir = tempfile.mkdtemp() try: # Set invalid password so that authentication (if required) fails. environ["GIT_ASKPASS"] = os.path.abspath("testing/password-invalid") # This should not require a password. try: git(["clone", "--quiet", "--branch", "master", "%s/critic" % frontend.prefix("alice")], cwd=work_dir) except subprocess.CalledProcessError as error: logger.error("'git clone' failed: %s\n%s" % (str(error), error.output.rstrip())) # This should require a password. 
try: git(["push", "--quiet", "origin", "HEAD:007-http-backend-3"], cwd=os.path.join(work_dir, "critic")) logger.error("Unauthenticated push (apparently) accepted!") except subprocess.CalledProcessError: pass # Set valid password so that authentication succeeds. environ["GIT_ASKPASS"] = os.path.abspath("testing/password-testing") # This should require a password. try: git(["push", "--quiet", "origin", "HEAD:007-http-backend-4"], cwd=os.path.join(work_dir, "critic")) except subprocess.CalledProcessError as error: logger.error("'git push' failed: %s\n%s" % (str(error), error.output.rstrip())) finally: shutil.rmtree(work_dir) ================================================ FILE: testing/tests/001-main/003-self/008-initial-commit-diff.py ================================================ import os import re def to(name): return testing.mailbox.ToRecipient("%s@example.org" % name) def about(subject): return testing.mailbox.WithSubject(subject) FILENAME = "008-root-commit-pending.txt" SUMMARY = "Added %s" % FILENAME review_id = None commits = {} first_commit = None second_commit = None third_commit = None SETTINGS = { "review.createViaPush": True } with testing.utils.settings("alice", SETTINGS), frontend.signin("alice"): with repository.workcopy(empty=True) as work: REMOTE_URL = instance.repository_url("alice") def commit(fixup_message=None): if fixup_message: full_message = "fixup! %s\n\n%s" % (SUMMARY, fixup_message) message = fixup_message else: full_message = message = SUMMARY work.run(["add", FILENAME]) work.run(["commit", "-m", full_message], GIT_AUTHOR_NAME="Alice von Testing", GIT_AUTHOR_EMAIL="alice@example.org", GIT_COMMITTER_NAME="Alice von Testing", GIT_COMMITTER_EMAIL="alice@example.org") sha1 = work.run(["rev-parse", "HEAD"]).strip() commits[sha1] = message return sha1 def push(): work.run(["push", "-q", REMOTE_URL, "HEAD:refs/heads/r/008-root-commit-pending"]) with open(os.path.join(work.path, FILENAME), "w") as text_file: print >>text_file, "First line." 
first_commit = commit() push() to_alice = mailbox.pop(accept=to("alice")) testing.expect.check("New Review: %s" % SUMMARY, to_alice.header("Subject")) for line in to_alice.lines: match = re.search( r"\bhttp://[^/]+/r/(\d+)\b", line) if match: review_id = int(match.group(1)) break else: testing.expect.check("<review URL in mail>", "<expected content not found>") with open(os.path.join(work.path, FILENAME), "a") as text_file: print >>text_file, "Second line." second_commit = commit("Added second line") with open(os.path.join(work.path, FILENAME), "a") as text_file: print >>text_file, "Third line." third_commit = commit("Added third line") push() to_alice = mailbox.pop(accept=to("alice")) testing.expect.check("Updated Review: %s" % SUMMARY, to_alice.header("Subject")) frontend.operation( "addreviewfilters", data={ "review_id": review_id, "filters": [{ "type": "reviewer", "user_names": ["bob"], "paths": ["/"] }] }) mailbox.pop(accept=(to("bob"), about(r"New\(ish\) Review: %s" % SUMMARY))) mailbox.pop(accept=(to("bob"), about("Updated Review: %s" % SUMMARY))) def check_squashed_history(sha1s): def check(document): table = document.find("table", attrs=testing.expect.with_class("log")) if not table: testing.expect.check("<table class='log'>", "<expected content not found>") links = table.findAll("a", attrs=testing.expect.with_class("commit")) for link in links: testing.expect.check( "%s?review=%d" % (sha1s[-1][:8], review_id), link["href"]) del sha1s[-1] if sha1s: logger.error( "Commits missing from 'Squashed history':\n %s" % ("\n ".join(commits[sha1] for sha1 in sha1s))) return check frontend.page("r/%d" % review_id) frontend.page("showcommit?sha1=%s&review=%d" % (first_commit, review_id)) frontend.page( ("showcommit?first=%s&last=%s&review=%d" % (first_commit, second_commit, review_id)), expect={ "squashed_history": check_squashed_history([first_commit, second_commit]) }) frontend.page( ("showcommit?first=%s&last=%s&review=%d" % (second_commit, third_commit, 
review_id)), expect={ "squashed_history": check_squashed_history([second_commit, third_commit]) }) frontend.page( ("showcommit?first=%s&last=%s&review=%d" % (first_commit, third_commit, review_id)), expect={ "squashed_history": check_squashed_history([first_commit, second_commit, third_commit]) }) def check_path(document): table = document.find("table", attrs=testing.expect.with_class("filter")) if not table: testing.expect.check("<table class='filter'>", "<expected content not found>") for cell in table.findAll("td", attrs=testing.expect.with_class("path")): if cell.string and cell.string == FILENAME: break else: testing.expect.check("<td class='path'>%s</td>" % FILENAME, "<expected content not found>") frontend.page( "filterchanges?review=%d" % review_id, expect={ "path": check_path }) frontend.page( ("filterchanges?first=%s&last=%s&review=%d" % (first_commit, second_commit, review_id)), expect={ "path": check_path }) frontend.page( ("filterchanges?first=%s&last=%s&review=%d" % (second_commit, third_commit, review_id)), expect={ "path": check_path }) frontend.page( ("filterchanges?first=%s&last=%s&review=%d" % (first_commit, third_commit, review_id)), expect={ "path": check_path }) with frontend.signin("bob"): frontend.page( "showcommit?review=%d&filter=pending" % review_id, expect={ "squashed_history": check_squashed_history([first_commit, second_commit, third_commit]) }) frontend.page( "showcommit?review=%d&filter=reviewable" % review_id, expect={ "squashed_history": check_squashed_history([first_commit, second_commit, third_commit]) }) frontend.page( "showcommit?review=%d&filter=relevant" % review_id, expect={ "squashed_history": check_squashed_history([first_commit, second_commit, third_commit]) }) ================================================ FILE: testing/tests/001-main/003-self/009-fetchremotebranch.py ================================================ import os TESTNAME = "009-fetchremotebranch" FILENAME = "%s.txt" % TESTNAME with repository.workcopy() 
as work: upstream_sha1 = work.run(["rev-parse", "HEAD"]).strip() work.run(["branch", "%s/upstream" % TESTNAME]) work.run(["checkout", "-b", "%s/branch" % TESTNAME]) with open(os.path.join(work.path, FILENAME), "w") as text_file: print >>text_file, "This is a text file." work.run(["add", FILENAME]) work.run(["commit", "-m", "Add %s" % FILENAME], GIT_AUTHOR_NAME="Alice von Testing", GIT_AUTHOR_EMAIL="alice@example.org", GIT_COMMITTER_NAME="Alice von Testing", GIT_COMMITTER_EMAIL="alice@example.org") head_sha1 = work.run(["rev-parse", "HEAD"]).strip() work.run(["push", "origin", "%s/upstream" % TESTNAME]) work.run(["push", "origin", "%s/branch" % TESTNAME]) with frontend.signin("alice"): result = frontend.operation( "fetchremotebranch", data={ "repository_name": "critic", "remote": repository.url, "branch": "refs/heads/%s/branch" % TESTNAME, "upstream": "refs/heads/%s/upstream" % TESTNAME }, expect={ "head_sha1": head_sha1, "upstream_sha1": upstream_sha1 }) commit_ids = result["commit_ids"] def check_commit_ids(value): if set(value) != set(commit_ids): return repr(sorted(value)), repr(sorted(commit_ids)) frontend.operation( "fetchremotebranch", data={ "repository_name": "critic", "remote": repository.url, "branch": "%s/branch" % TESTNAME, "upstream": "refs/heads/%s/upstream" % TESTNAME }, expect={ "head_sha1": head_sha1, "upstream_sha1": upstream_sha1, "commit_ids": check_commit_ids }) ================================================ FILE: testing/tests/001-main/003-self/010-linkification.py ================================================ import os TESTNAME = "010-linkification" FILENAME = "%s.txt" % TESTNAME LONG_SHA1_1 = "ca89553db7a2ba22fef70535a65beedf33c97216" SHORT_SHA1_1 = LONG_SHA1_1[:8] LONG_SHA1_2 = "132dbfb7c2ac0f4333fb483a70f1e8cce0333d11" SHORT_SHA1_2 = LONG_SHA1_2[:8] MESSAGE = """\ Add %(FILENAME)s The rest of this commit message contains various things that should be turned into links by the automatic linkification. 
A plain HTTP URL: http://critic-review.org/tutorials. A "wrapped" URL: <URL:mailto:jl@critic-review.org>. A full SHA-1: %(LONG_SHA1_1)s. A shortened SHA-1: %(SHORT_SHA1_1)s. A diff (full SHA-1s): %(LONG_SHA1_2)s..%(LONG_SHA1_1)s. A diff (shortened SHA-1s): %(SHORT_SHA1_2)s..%(SHORT_SHA1_1)s, should work too. A review link: r/123 (it doesn't matter if the review exists or not.) No review link: harrharr/1337 No SHA-1: g%(SHORT_SHA1_1)s Also no SHA-1: %(SHORT_SHA1_1)sg """ % { "FILENAME": FILENAME, "LONG_SHA1_1": LONG_SHA1_1, "SHORT_SHA1_1": SHORT_SHA1_1, "LONG_SHA1_2": LONG_SHA1_2, "SHORT_SHA1_2": SHORT_SHA1_2 } with repository.workcopy() as work: work.run(["checkout", "-b", TESTNAME]) with open(os.path.join(work.path, FILENAME), "w") as text_file: print >>text_file, "This line is not significant." work.run(["add", FILENAME]) work.run(["commit", "-m", MESSAGE]) work.run(["push", instance.repository_url("alice"), "HEAD"]) LINKS = { "A plain HTTP URL": ("http://critic-review.org/tutorials", "http://critic-review.org/tutorials" ), 'A "wrapped" URL': ("mailto:jl@critic-review.org", "<URL:mailto:jl@critic-review.org>"), "A full SHA-1": ("/critic/%s" % LONG_SHA1_1, LONG_SHA1_1), "A shortened SHA-1": ("/critic/%s" % LONG_SHA1_1, SHORT_SHA1_1), "A diff (full SHA-1s)": ("/critic/%s..%s" % (LONG_SHA1_2, LONG_SHA1_1), "%s..%s" % (LONG_SHA1_2, LONG_SHA1_1)), "A diff (shortened SHA-1s)": ("/critic/%s..%s" % (LONG_SHA1_2, LONG_SHA1_1), "%s..%s" % (SHORT_SHA1_2, SHORT_SHA1_1)), "A review link": ("/r/123", "r/123") } def check_link(label, expected_href, expected_string): def check(document): line_attrs = testing.expect.with_class("line", "commit-msg") for line in document.findAll("td", attrs=line_attrs): if not isinstance(line.contents[0], basestring): continue if not line.contents[0].startswith(label + ": "): continue if len(line.contents) < 2: continue link = line.contents[1] try: if link.name != "a": continue except AttributeError: continue break else: testing.expect.check("line: 
'%s: <a ...>...</a>'" % label, "<expected content not found>") testing.expect.check(expected_href, link["href"]) testing.expect.check(expected_string, link.string) return check def check_nonlink(text): def check(document): line_attrs = testing.expect.with_class("line", "commit-msg") for line in document.findAll("td", attrs=line_attrs): if line.string == text: break else: testing.expect.check("line: %r" % text, "<expected content not found>") return check expect = dict((label, check_link(label, href, string)) for label, (href, string) in LINKS.items()) expect["No review link"] = check_nonlink("No review link: harrharr/1337") expect["No SHA-1"] = check_nonlink("No SHA-1: g%s" % SHORT_SHA1_1) expect["Also no SHA-1"] = check_nonlink("Also no SHA-1: %sg" % SHORT_SHA1_1) frontend.page( "critic/%s" % TESTNAME, expect=expect) ================================================ FILE: testing/tests/001-main/003-self/011-linkification-custom.py ================================================ # Need a VM (full installation) to do customizations. # @flag full import os TESTNAME = "010-linkification-custom" FILENAME = "%s.txt" % TESTNAME MESSAGE = """\ Add %(FILENAME)s The rest of this commit message contains some "issue links". At end of line: #1001 Followed by text: #1002 is fixed! Followed by a period: #1003. Followed by a comma: #1004, huh? Within parentheses: (#1005) That's all, folks! """ % { "FILENAME": FILENAME } with repository.workcopy() as work: work.run(["checkout", "-b", TESTNAME]) with open(os.path.join(work.path, FILENAME), "w") as text_file: print >>text_file, "This line is not significant." 
work.run(["add", FILENAME]) work.run(["commit", "-m", MESSAGE]) work.run(["push", instance.repository_url("alice"), "HEAD"]) instance.execute( ["sudo", "mkdir", "-p", "/etc/critic/main/customization", "&&", "sudo", "touch", "/etc/critic/main/customization/__init__.py", "&&", "sudo", "cp", "critic/testing/input/customization/linktypes.py", "/etc/critic/main/customization", "&&", "sudo", "chown", "-R", "critic.critic", "/etc/critic/main/customization"]) instance.restart() def issue(number): return ("https://issuetracker.example.com/showIssue?id=%d" % number, "#%d" % number) LINKS = { "At end of line": issue(1001), "Followed by text": issue(1002), "Followed by a period": issue(1003), "Followed by a comma": issue(1004), "Within parentheses": issue(1005) } def check_link(label, expected_href, expected_string): def check(document): line_attrs = testing.expect.with_class("line", "commit-msg") for line in document.findAll("td", attrs=line_attrs): if not isinstance(line.contents[0], basestring): continue if not line.contents[0].startswith(label + ": "): continue if len(line.contents) < 2: continue link = line.contents[1] try: if link.name != "a": continue except AttributeError: continue break else: testing.expect.check("line: '%s: <a ...>...</a>'" % label, "<expected content not found>") testing.expect.check(expected_href, link["href"]) testing.expect.check(expected_string, link.string) return check frontend.page( "critic/%s" % TESTNAME, expect=dict((label, check_link(label, href, string)) for label, (href, string) in LINKS.items())) ================================================ FILE: testing/tests/001-main/003-self/012-createreview-recipients.py ================================================ # Scenario: Alice creates an opt-in review and includes "bob" as a recipient. 
import re # Random commit on master: COMMIT_SHA1 = "f771149aba230c4712c9cb9c6af4ccfea2b7967d" COMMIT_SUMMARY = "Minor /dashboard query optimizations" with frontend.signin("alice"): # Load /createreview to get commit_id. document = frontend.page( "createreview", params={ "repository": "critic", "commits": COMMIT_SHA1 }) scripts = document.findAll("script") for script in scripts: if script.has_key("src"): continue match = re.search( r"^\s*var review_data\s*=\s*\{\s*commit_ids:\s*\[\s*(\d+)\s*\]", script.string, re.MULTILINE) if match: commit_id = int(match.group(1)) break else: testing.expect.check("<data script>", "<expected content not found>") result = frontend.operation( "submitreview", data={ "repository_name": "critic", "commit_ids": [commit_id], "branch": "r/012-createreview-recipients", "summary": COMMIT_SUMMARY, "applyfilters": True, "applyparentfilters": True, "reviewfilters": [{ "username": "bob", "type": "reviewer", "path": "/" }, { "username": "dave", "type": "watcher", "path": "/" }], "recipientfilters": { "mode": "opt-in", "included": ["bob"] }}) def to(name): return testing.mailbox.ToRecipient("%s@example.org" % name) mailbox.pop(accept=to("alice")) mailbox.pop(accept=to("bob")) mailbox.check_empty() ================================================ FILE: testing/tests/001-main/003-self/012-replayrebase.py ================================================ import os import re import shutil def to(name): return testing.mailbox.ToRecipient("%s@example.org" % name) def about(subject): return testing.mailbox.WithSubject(subject) # The commit we'll be "reviewing." COMMIT_SHA1 = "aca57d0899e5193232dbbea726d94a838a4274ed" # The original parent of that commit. PARENT_SHA1 = "ca89553db7a2ba22fef70535a65beedf33c97216" # An ancestor of the original parent, onto which we'll be rebasing the reviewed # commit. TARGET_SHA1 = "132dbfb7c2ac0f4333fb483a70f1e8cce0333d11" # The subject of the reviewed commit. 
SUMMARY = "Use temporary clones for relaying instead of temporary remotes" SETTINGS = { "review.createViaPush": True, "email.subjectLine.updatedReview.reviewRebased": "Rebased Review: %(summary)s" } with testing.utils.settings("alice", SETTINGS), frontend.signin("alice"): with repository.workcopy() as work: REMOTE_URL = instance.repository_url("alice") work.run(["checkout", "-b", "r/012-replayrebase", PARENT_SHA1]) work.run(["cherry-pick", COMMIT_SHA1], GIT_COMMITTER_NAME="Alice von Testing", GIT_COMMITTER_EMAIL="alice@example.org") output = work.run(["push", REMOTE_URL, "HEAD"]) next_is_review_url = False for line in output.splitlines(): if not line.startswith("remote:"): continue line = line[len("remote:"):].split("\x1b", 1)[0].strip() if line == "Submitted review:": next_is_review_url = True elif next_is_review_url: review_id = int(re.search(r"/r/(\d+)$", line).group(1)) break else: testing.expect.check("<review URL in git hook output>", "<expected content not found>") mailbox.pop(accept=[to("alice"), about("New Review: %s" % SUMMARY)]) frontend.operation( "preparerebase", data={ "review_id": review_id, "new_upstream": TARGET_SHA1 }) work.run(["rebase", "--onto", TARGET_SHA1, PARENT_SHA1]) # Create some new files as part of the rebase. This serves two # purposes: # # 1) Generate some "changes introduced by rebase" for the rebase replay # mechanism to deal with, and # # 2) make sure the push creates a new pack file (a certain amount of new # objects are required to cause this) so that "git receive-pack" # creates a pack-*.keep file, which creates trouble for "git clone". 
# (012-replayrebase.py continued: generate bulk changes, then force-push.)
        source_path = os.path.join(work.path, "testing")

        for index in range(10):
            destination_path = os.path.join(work.path, "testing%d" % index)
            shutil.copytree(source_path, destination_path)
            for path, _, filenames in os.walk(destination_path):
                for filename in filenames:
                    with open(os.path.join(path, filename), "a") as copied:
                        copied.write("%d\n" % index)
            work.run(["add", "testing%d" % index])

        work.run(["commit", "--amend", "--reuse-message=HEAD"])
        work.run(["push", "--force", REMOTE_URL, "HEAD"])

        mailbox.pop(accept=[to("alice"),
                            about("Updated Review: %s" % SUMMARY)])
        mailbox.pop(accept=[to("alice"),
                            about("Rebased Review: %s" % SUMMARY)])


================================================
FILE: testing/tests/001-main/003-self/014-non-ascii-filenames.py
================================================
# coding=utf-8

# Scenario: Alice creates a review for a commit where a file that contains
# non-ascii chars has been added. Critic should not crash.

import os

TC_NAME_PREFIX = "014-non-ascii-filename"
TC_NAME_UTF8 = (u"%s-åäö\x01\ufffd" % TC_NAME_PREFIX).encode("utf-8")
# How Critic is expected to render the name: non-printable/invalid
# characters escaped.
TC_NAME_ESCAPED = u"%s-åäö\\x01\\ufffd" % TC_NAME_PREFIX

def check_filename(class_name):
    # Returns a checker verifying that the cell with |class_name| contains
    # a link whose text is the escaped form of the filename.
    def check(document):
        cells = document.findAll("td", attrs=testing.expect.with_class(class_name))
        for cell in cells:
            anchor = cell.find("a")
            if not anchor:
                continue
            if anchor.string.startswith(TC_NAME_PREFIX):
                testing.expect.check(TC_NAME_ESCAPED, anchor.string)
                break
        else:
            testing.expect.check("<td class=%s><a>%s" % (class_name, TC_NAME_ESCAPED),
                                 "<expected content not found>")
    return check

with frontend.signin("alice"):
    with repository.workcopy(empty=True) as work:
        REMOTE_URL = instance.repository_url("alice")

        def commit():
            work.run(["add", TC_NAME_UTF8])
            work.run(["commit", "-m", TC_NAME_UTF8],
                     GIT_AUTHOR_NAME="Alice von Testing",
                     GIT_AUTHOR_EMAIL="alice@example.org",
                     GIT_COMMITTER_NAME="Alice von Testing",
                     GIT_COMMITTER_EMAIL="alice@example.org")
            return work.run(["rev-parse", "HEAD"]).strip()

        def push():
            work.run(["push", "-q", REMOTE_URL,
                      "HEAD:refs/heads/" + TC_NAME_PREFIX])

        with open(os.path.join(work.path, TC_NAME_UTF8), "w") as text_file:
            print >>text_file, "Content of file " + TC_NAME_UTF8

        sha1 = commit()
        push()

        frontend.page(
            "showcommit",
            params={ "repository": "critic",
                     "sha1": sha1 },
            expect={ "filename": check_filename("path") })

        frontend.page(
            "showtree",
            params={ "repository": "critic",
                     "sha1": sha1,
                     "path": "/" },
            expect={ "filename": check_filename("name") })


================================================
FILE: testing/tests/001-main/003-self/015-non-ascii-line-diff.py
================================================
import os
import json

import BeautifulSoup

TESTNAME = "015-non-ascii-line-diff"

with repository.workcopy() as work:
    REMOTE_URL = instance.repository_url("alice")

    def commit(encoding, content, index):
        # Write |content| encoded as |encoding| and commit it; returns the
        # commit's SHA-1.
        filename = "%s.%s.txt" % (TESTNAME, encoding)
        with open(os.path.join(work.path, filename), "w") as text_file:
            text_file.write(content.encode(encoding))
        work.run(["add", filename])
        work.run(["commit", "-m", "%s (%s #%d)" % (TESTNAME, encoding, index)],
                 GIT_AUTHOR_NAME="Alice von Testing",
                 GIT_AUTHOR_EMAIL="alice@example.org",
                 GIT_COMMITTER_NAME="Alice von Testing",
                 GIT_COMMITTER_EMAIL="alice@example.org")
        return work.run(["rev-parse", "HEAD"]).strip()

    work.run(["checkout", "-b", TESTNAME])

    utf8_from_sha1 = commit("utf-8", u"Non-ascii: \xf6\n", 1)
    utf8_to_sha1 = commit("utf-8", u"Non-ascii: \xf7\n", 2)
    latin1_from_sha1 = commit("latin-1", u"Non-ascii: \xf6\n", 1)
    latin1_to_sha1 = commit("latin-1", u"Non-ascii: \xf7\n", 2)

    work.run(["push", REMOTE_URL, "HEAD"])

def check_line_diff(document):
    # The per-line diff data is embedded as a JSON payload in an HTML
    # comment inside <tbody class="lines">.
    tbody_lines = document.findAll("tbody", attrs={ "class": "lines" })

    testing.expect.check(1, len(tbody_lines))
    testing.expect.check(1, len(tbody_lines[0].contents))

    comment = tbody_lines[0].contents[0]

    testing.expect.check(BeautifulSoup.Comment, comment.__class__)

    try:
        data = json.loads(comment)
    except ValueError:
        testing.expect.check("<valid JSON>", repr(str(comment)))
testing.expect.check(5, len(data)) file_id, sides, old_offset, new_offset, lines = data testing.expect.check(2, sides) testing.expect.check(1, old_offset) testing.expect.check(1, new_offset) testing.expect.check(1, len(lines)) testing.expect.check(3, len(lines[0])) line_type, old_line, new_line = lines[0] # See diff/__init__.py, class Line MODIFIED = 3 testing.expect.check(MODIFIED, line_type) testing.expect.check(u"Non-ascii: <ir>\xf6</i>", old_line) testing.expect.check(u"Non-ascii: <ir>\xf7</i>", new_line) frontend.page( "showcommit", params={ "repository": "critic", "from": utf8_from_sha1, "to": utf8_to_sha1 }, expect={ "utf8_line_diff": check_line_diff }) frontend.page( "showcommit", params={ "repository": "critic", "from": latin1_from_sha1, "to": latin1_to_sha1 }, expect={ "latin1_line_diff": check_line_diff }) ================================================ FILE: testing/tests/001-main/003-self/016-showcommit-ranges.py ================================================ # Scenario: Alice opens the BRANCHES page, switches to the master branch in the # Critic-inside-Critic repository and selects a range of two adjacent non-merge # commits and verifies that there is no error. She then selects a range that # starts with a merge commit and makes sure that the appropriate error message # is shown. with frontend.signin("alice"): # Two adjacent non-merge commits. document = frontend.page( "showcommit", params={ "first": "016f2149c334ff7dabac98700e74a7e9500e702e", "last": "007b4b53a2a8e9561f5143eff27300ea693ca621" }, expect={ "document_title": testing.expect.document_title(u"fa686f55..007b4b53"), "content_title": testing.expect.paleyellow_title(0, u"Squashed History") }) # 57a886e is a merge commit. 
document = frontend.page( "showcommit", params={ "first": "57a886e6352b229991c81e7ba43244ace7e02d76", "last": "b2b78ca013b49c73231bee11674bcdb3edf6d3f2" }, expect={ "message": testing.expect.message_title(u"Invalid parameters; 'first' can not be a merge commit.") }) mailbox.check_empty() ================================================ FILE: testing/tests/001-main/003-self/017-showcommit-merge-replay.py ================================================ # Scenario: Alice opens the BRANCHES page, switches to the master branch in the # Critic-inside-Critic repository and clicks on the merge commit 8ebec44a. # Finally, even though the merge is empty she clicks the link "display conflict # resolution changes" near the top of the page. After that she also views # 030afecd which had some actual conflicts. document_title = testing.expect.document_title(u"Merge pull request #30 from rchl/exception-fixes (8ebec44a)") with frontend.signin("alice"): document = frontend.page( "showcommit", params={ "sha1": "8ebec44af03197c9679f08afc2b19606c839db99", "conflicts": "yes" }, expect={ "document_title": document_title }) document_title = testing.expect.document_title(u"Merge remote-tracking branch 'github/master' into r/molsson/showcommit_sends_no_data (030afecd)") frontend.page( url="showcommit", params={ "sha1": "030afecdfb40235af03faa52a2a193c7d8199b66", "conflicts": "yes" }, expect={ "document_title": document_title }) mailbox.check_empty() ================================================ FILE: testing/tests/001-main/003-self/018-detect-moves-no-moved-code.py ================================================ # Scenario: Bob is viewing a commit that doesn't contain any chunks that Critic # detects as "moved code". Bob is not sure though, so he hits 'm', selects the # appropriate filenames and clicks SEARCH. Critic should not crash. 
COMMIT_WITH_NO_MOVES = 'cc1c1a25'

with frontend.signin("bob"):
    document = frontend.page(
        "critic/%s" % COMMIT_WITH_NO_MOVES,
        params={ "moves": "yes" },
        expect={ "message": testing.expect.message_title(u"No moved code found!") })

mailbox.check_empty()


================================================
FILE: testing/tests/001-main/003-self/019-showtree-showfile-bogus.py
================================================
# Scenario: /showtree or /showfile loaded, with or without a repository
# specifier, with a SHA-1 that is or is not present in that/any repository, or a
# path that is or is not valid.

VALID_SHA1 = "378a00935735431d5408dc8acbca77e6887f91c6"
INVALID_SHA1 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"

VALID_TREE_PATH = "src/page"
INVALID_TREE_PATH = "src/horse"

VALID_FILE_PATH = "src/page/showtree.py"
INVALID_FILE_PATH = "src/page/showpuppy.py"

# Expected error messages for each failure mode.
missing_sha1 = testing.expect.message(
    expected_title="SHA-1 not found",
    expected_body="Couldn't find commit %s in any repository." % INVALID_SHA1)

missing_tree = testing.expect.message(
    expected_title="Directory does not exist",
    expected_body=("There is no directory named /%s in the commit %s."
                   % (INVALID_TREE_PATH, VALID_SHA1[:8])))

missing_file = testing.expect.message(
    expected_title="File does not exist",
    expected_body=("There is no file named /%s in the commit %s."
                   % (INVALID_FILE_PATH, VALID_SHA1[:8])))

invalid_file = testing.expect.message(
    expected_title="Invalid path parameter",
    expected_body="The path must be non-empty and must not end with a /.")

frontend.page(
    "showtree",
    params={ "sha1": VALID_SHA1,
             "path": VALID_TREE_PATH },
    expect={ "message": testing.expect.no_message() })

frontend.page(
    "showtree",
    params={ "sha1": INVALID_SHA1,
             "path": VALID_TREE_PATH },
    expect={ "message": missing_sha1 })

frontend.page(
    "showtree",
    params={ "sha1": VALID_SHA1,
             "path": INVALID_TREE_PATH },
    expect={ "message": missing_tree })

frontend.page(
    "showfile",
    params={ "sha1": VALID_SHA1,
             "path": VALID_FILE_PATH },
    expect={ "message": testing.expect.no_message() })

frontend.page(
    "showfile",
    params={ "sha1": INVALID_SHA1,
             "path": VALID_FILE_PATH },
    expect={ "message": missing_sha1 })

frontend.page(
    "showfile",
    params={ "sha1": VALID_SHA1,
             "path": INVALID_FILE_PATH },
    expect={ "message": missing_file })

frontend.page(
    "showfile",
    params={ "sha1": VALID_SHA1,
             "path": "" },
    expect={ "message": invalid_file })

frontend.page(
    "showfile",
    params={ "sha1": VALID_SHA1,
             "path": VALID_FILE_PATH + "/" },
    expect={ "message": invalid_file })


================================================
FILE: testing/tests/001-main/003-self/020-fixup-review-via-push.py
================================================
import os

def to(name):
    return testing.mailbox.ToRecipient("%s@example.org" % name)

def about(subject):
    return testing.mailbox.WithSubject(subject)

FILENAME = "020-fixup-review-via-push.txt"

SETTINGS = { "review.createViaPush": True }

with testing.utils.settings("alice", SETTINGS), frontend.signin("alice"):
    with repository.workcopy() as work:
        REMOTE_URL = instance.repository_url("alice")

        with open(os.path.join(work.path, FILENAME), "w") as text_file:
            print >>text_file, "Some content."

        work.run(["add", FILENAME])
        # A "fixup!" first line: the review created via push should use the
        # first non-fixup summary line ("Relevant summary") as its summary.
        # NOTE(review): exact blank-line layout of this message reconstructed
        # from a collapsed source — verify against the original file.
        work.run(["commit", "-m", """\
fixup!

Commit reference

Relevant summary
"""],
                 GIT_AUTHOR_NAME="Alice von Testing",
                 GIT_AUTHOR_EMAIL="alice@example.org",
                 GIT_COMMITTER_NAME="Alice von Testing",
                 GIT_COMMITTER_EMAIL="alice@example.org")

        work.run(["push", "-q", REMOTE_URL,
                  "HEAD:refs/heads/r/020-fixup-review-via-push"])

    mailbox.pop(accept=[to("alice"),
                        about("New Review: Relevant summary")])


================================================
FILE: testing/tests/001-main/003-self/020-reviewrebase.py
================================================
import os

FILENAME = "020-reviewrebase.txt"
FILENAME_BASE = "020-reviewrebase.base.txt"

NONSENSE = """\
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Donec ut enim sit amet purus ultricies lobortis.
Pellentesque nisi arcu, convallis sed purus sed, semper ultrices velit.
Ut egestas lorem tortor, vitae lacinia lorem consectetur nec.
Integer tempor ornare ipsum at viverra.
Curabitur nec orci mollis, lacinia sapien eget, ultricies ipsum.
Curabitur a libero tortor.
Curabitur volutpat lacinia erat, ac suscipit enim dignissim nec."""

def lines(*args):
    # Return NONSENSE with the lines whose indices are in |args| upper-cased;
    # used to produce controlled per-line differences between commits.
    return "\n".join((line.upper() if index in args else line)
                     for index, line in enumerate(NONSENSE.splitlines()))

def to(name):
    return testing.mailbox.ToRecipient("%s@example.org" % name)

def about(subject):
    return testing.mailbox.WithSubject(subject)

SETTINGS = { "review.createViaPush": True,
             "email.subjectLine.updatedReview.reviewRebased":
                 "Rebased Review: %(summary)s" }

work = repository.workcopy()
settings = testing.utils.settings("alice", SETTINGS)
signin = frontend.signin("alice")

# One entry per created review: { "id", "branch", "summary" }.
reviews = []

with work, settings, signin:
    REMOTE_URL = instance.repository_url("alice")

    def write(*args, **kwargs):
        """Write the file<tm>, optionally with lines upper-cased."""
        filename = kwargs.get("filename", FILENAME)
        with open(os.path.join(work.path, filename), "w") as target:
            print >>target, lines(*args)

    def commit(message_or_ref="HEAD", generate=None, *args, **kwargs):
        """If called with two or more arguments, create a commit and
           return, otherwise return commit referenced by first argument."""
        if generate is not None:
            generate(*args, **kwargs)
            work.run(["add", kwargs.get("filename", FILENAME)])
            work.run(["commit", "-m", message_or_ref])
            message_or_ref = "HEAD"
        oneline = work.run(["log", "--no-abbrev", "--pretty=oneline", "-1",
                            message_or_ref])
        sha1, summary = oneline.strip().split(" ", 1)
        return { "sha1": sha1,
                 "summary": summary }

    def expectmail(title):
        # Pop the mail "<title>: <summary>" for the most recent review.
        review = reviews[-1]
        mailbox.pop(accept=[to("alice"),
                            about("%s: %s" % (title, review["summary"]))])

    def expecthead(expected):
        """Check that the review branch in Critic's repository is where
           we want it to be."""
        review = reviews[-1]
        actual = work.run(["ls-remote", REMOTE_URL,
                           review["branch"]]).split()[0]
        testing.expect.check(expected["sha1"], actual)

    def createreview(commits):
        """Create a review of the specified commits."""
        index = len(reviews) + 1
        branch = "020-reviewrebase/%d" % index
        summary = "020-reviewrebase, test %d" % index
        work.run(["push", REMOTE_URL,
                  "%s:refs/heads/%s" % (commits[-1]["sha1"], branch)])
        result = frontend.operation(
            "submitreview",
            data={ "repository": "critic",
                   "commit_sha1s": [commit["sha1"] for commit in commits],
                   "branch": "r/" + branch,
                   "frombranch": branch,
                   "summary": summary })
        reviews.append({ "id": result["review_id"],
                         "branch": "r/" + branch,
                         "summary": summary })
        expectmail("New Review")

    def push(new_head, force=False):
        """Push specified commit to the review branch in Critic's
           repository, optionally forced."""
        review = reviews[-1]
        args = ["push"]
        if force:
            args.append("-f")
        args.extend([REMOTE_URL,
                     "%s:refs/heads/%s" % (new_head["sha1"], review["branch"])])
        work.run(args)
        expecthead(new_head)

    def moverebase(new_upstream, new_head):
        """Perform a move rebase."""
        review = reviews[-1]
        work.run(["reset", "--hard", new_head["sha1"]])
        frontend.operation(
            "preparerebase",
            data={ "review_id": review["id"],
                   "new_upstream": new_upstream["sha1"] })
        push(new_head, force=True)
        expectmail("Rebased Review")

    def historyrewrite(new_head):
        """Perform a history rewrite rebase."""
        review = reviews[-1]
        work.run(["reset", "--hard", new_head["sha1"]])
        frontend.operation(
            "preparerebase",
            data={ "review_id": review["id"] })
        push(new_head, force=True)
        expectmail("Rebased Review")

    def expectlog(expected):
        """Fetch the review front-page and check that the commit log
           contains the expected lines. Also fetch a /showcommit page whose
           'Squashed History' log lists everything in the review and check
           that it contains the same lines too."""
        expected = [(item if isinstance(item, str) else item["summary"])
                    for item in expected]
        def checklog(document):
            with_class = testing.expect.with_class
            actual = []
            for tr in document.findAll("tr"):
                if not tr.has_key("class"):
                    continue
                classes = tr["class"].split()
                if "commit" in classes:
                    td = tr.find("td", attrs=with_class("summary"))
                    a = td.find("a", attrs=with_class("commit"))
                    actual.append(a.string)
                elif "rebase" in classes:
                    td = tr.find("td")
                    if td.contents[0].startswith("Branch rebased"):
                        a = td.find("a")
                        sha1 = a["href"].split("/")[-1]
                        actual.append("rebased onto " + sha1)
                    elif td.contents[0].startswith("History rewritten"):
                        actual.append("history rewritten")
            testing.expect.check(expected, actual)
        review = reviews[-1]
        frontend.page(
            "r/%d" % review["id"],
            expect={ "log": checklog })
        frontend.page(
            "showcommit",
            params={ "review": review["id"],
                     "filter": "files",
                     "file": FILENAME },
            expect={ "log": checklog })

    def revertrebase():
        """Revert the most recent rebase."""
        review = reviews[-1]
        document = frontend.page("r/%d" % review["id"])
        # The [revert] link's href is "javascript:revertRebase(<id>)"; the
        # local revertRebase definition lets eval() extract the id.
        for a in document.findAll("a"):
            if a.string == "[revert]":
                def revertRebase(rebase_id):
                    return rebase_id
                rebase_id = eval(a["href"].split(":", 1)[1])
                break
        else:
            logger.error("No [revert] link found!")
        frontend.operation(
            "revertrebase",
            data={ "review_id": review["id"],
                   "rebase_id": rebase_id })

    start_sha1 = work.run(["rev-parse", "HEAD"]).strip()

    # TEST #1: Create a review with three commits, then history rewrite so that
# the branch points to the first commit (i.e. remove the second and third # commit.) Then push another pair of commits, and history rewrite back to # the first commit again. work.run(["checkout", "-b", "020-reviewrebase-1", start_sha1]) commits = [commit("Test #1, commit 1", write), commit("Test #1, commit 2", write, 4, 5), commit("Test #1, commit 3", write)] createreview(commits) historyrewrite(commits[0]) expectlog(["history rewritten", commits[2], commits[1], commits[0]]) commits.extend([commit("Test #1, commit 4", write, 3, 4, 5), commit("Test #1, commit 5", write, 4, 5), commit("Test #1, commit 6", write)]) push(commits[-1]) expectmail("Updated Review") expectlog([commits[5], commits[4], commits[3], "history rewritten", commits[2], commits[1], commits[0]]) historyrewrite(commits[0]) expectlog(["history rewritten", commits[5], commits[4], commits[3], "history rewritten", commits[2], commits[1], commits[0]]) revertrebase() expectlog([commits[5], commits[4], commits[3], "history rewritten", commits[2], commits[1], commits[0]]) # Random extra check for crash fixed in http://critic-review.org/r/207: # Use the [partial] filter to look at the two last commits in the review. # We're only interested in checking that the page loads successfully. frontend.page( "showcommit", params={ "from": commits[3]["sha1"], "to": commits[5]["sha1"], "review": reviews[-1]["id"], "filter": "files", "file": FILENAME }) # TEST #2: First, set up two different commits that we'll be basing our # review branch on. Then create a review with three commits, move rebase # it (ff), rewrite the history, and move rebase it (non-ff) again. 
work.run(["checkout", "-b", "020-reviewrebase-2-base", start_sha1]) base_commits = [commit("Test #2 base, commit 1", write), commit("Test #2 base, commit 2", write, 0)] work.run(["push", REMOTE_URL, "020-reviewrebase-2-base"]) work.run(["checkout", "-b", "020-reviewrebase-2", base_commits[0]["sha1"]]) commits = [commit("Test #2, commit 1", write, 5), commit("Test #2, commit 2", write, 5, 6), commit("Test #2, commit 3", write, 5, 6, 7)] createreview(commits) work.run(["reset", "--hard", base_commits[1]["sha1"]]) commits.extend([commit("Test #2, commit 4", write, 0, 5), commit("Test #2, commit 5", write, 0, 5, 6), commit("Test #2, commit 6", write, 0, 5, 6, 7)]) moverebase(base_commits[1], commits[-1]) expectlog(["rebased onto " + base_commits[1]["sha1"], commits[2], commits[1], commits[0]]) work.run(["reset", "--hard", base_commits[1]["sha1"]]) commits.append(commit("Test #2, commit 7", write, 0, 5, 6, 7)) historyrewrite(commits[-1]) expectlog(["history rewritten", "rebased onto " + base_commits[1]["sha1"], commits[2], commits[1], commits[0]]) work.run(["reset", "--hard", base_commits[0]["sha1"]]) commits.append(commit("Test #2, commit 8", write, 5, 6, 7)) moverebase(base_commits[0], commits[-1]) expectlog(["rebased onto " + base_commits[0]["sha1"], "history rewritten", "rebased onto " + base_commits[1]["sha1"], commits[2], commits[1], commits[0]]) # TEST #3: Like test #2, but the base commits have changes that trigger # "conflicts" and thus equivalent merge commits. 
work.run(["checkout", "-b", "020-reviewrebase-3-base", start_sha1]) base_commits = [commit("Test #3 base, commit 1", write), commit("Test #3 base, commit 2", write, 2)] work.run(["push", REMOTE_URL, "020-reviewrebase-3-base"]) work.run(["checkout", "-b", "020-reviewrebase-3", base_commits[0]["sha1"]]) commits = [commit("Test #3, commit 1", write, 5), commit("Test #3, commit 2", write, 5, 6), commit("Test #3, commit 3", write, 5, 6, 7)] createreview(commits) work.run(["reset", "--hard", base_commits[1]["sha1"]]) commits.extend([commit("Test #3, commit 4", write, 2, 5), commit("Test #3, commit 5", write, 2, 5, 6), commit("Test #3, commit 6", write, 2, 5, 6, 7)]) moverebase(base_commits[1], commits[-1]) expectmail("Updated Review") expectlog(["rebased onto " + base_commits[1]["sha1"], "Merge commit '%s' into %s" % (base_commits[1]["sha1"], reviews[-1]["branch"]), commits[2], commits[1], commits[0]]) work.run(["reset", "--hard", base_commits[1]["sha1"]]) commits.append(commit("Test #3, commit 7", write, 2, 5, 6, 7)) historyrewrite(commits[-1]) expectlog(["history rewritten", "rebased onto " + base_commits[1]["sha1"], "Merge commit '%s' into %s" % (base_commits[1]["sha1"], reviews[-1]["branch"]), commits[2], commits[1], commits[0]]) work.run(["reset", "--hard", base_commits[0]["sha1"]]) commits.append(commit("Test #3, commit 8", write, 5, 6, 7)) moverebase(base_commits[0], commits[-1]) expectlog(["rebased onto " + base_commits[0]["sha1"], "history rewritten", "rebased onto " + base_commits[1]["sha1"], "Merge commit '%s' into %s" % (base_commits[1]["sha1"], reviews[-1]["branch"]), commits[2], commits[1], commits[0]]) # TEST #4: Create a review with three commits based on master~2, then merge # master~1 into the review, and then rebase the review onto master. 
work.run(["fetch", REMOTE_URL, "refs/heads/master"]) base_commits = [commit("FETCH_HEAD~2"), commit("FETCH_HEAD~1"), commit("FETCH_HEAD")] work.run(["checkout", "-b", "020-reviewrebase-4-1", base_commits[0]["sha1"]]) commits = [commit("Test #4, commit 1", write, 7), commit("Test #4, commit 2", write, 6, 7), commit("Test #4, commit 3", write, 5, 6, 7)] createreview(commits) work.run(["checkout", "-b", "020-reviewrebase-4-2", base_commits[1]["sha1"]]) work.run(["merge", "020-reviewrebase-4-1"]) commits.append(commit()) push(commits[-1]) expectmail("Updated Review") work.run(["reset", "--hard", base_commits[2]["sha1"]]) commits.extend([commit("Test #4, commit 4", write, 7), commit("Test #4, commit 5", write, 6, 7), commit("Test #4, commit 6", write, 5, 6, 7)]) moverebase(base_commits[2], commits[-1]) expectlog(["rebased onto " + base_commits[2]["sha1"], commits[3], commits[2], commits[1], commits[0]]) # TEST #5: First, set up two different commits that we'll be basing our # review branch on. Then create a review with three commits, then history # rewrite so that the branch points to the first commit (i.e. remove the # second and third commit.) Then non-ff move-rebase the review. 
work.run(["checkout", "-b", "020-reviewrebase-5-base", start_sha1]) base_commits = [commit("Test #5 base, commit 1", write, filename=FILENAME_BASE), commit("Test #5 base, commit 2", write, 0, filename=FILENAME_BASE)] work.run(["push", REMOTE_URL, "020-reviewrebase-5-base"]) work.run(["checkout", "-b", "020-reviewrebase-5", base_commits[1]["sha1"]]) commits = [commit("Test #5, commit 1", write, 4), commit("Test #5, commit 2", write, 4, 5), commit("Test #5, commit 3", write, 4)] createreview(commits) historyrewrite(commits[0]) expectlog(["history rewritten", commits[2], commits[1], commits[0]]) work.run(["reset", "--hard", base_commits[0]["sha1"]]) commits.append(commit("Test #5, commit 4", write, 4)) moverebase(base_commits[0], commits[-1]) expectlog(["rebased onto " + base_commits[0]["sha1"], "history rewritten", commits[2], commits[1], commits[0]]) # TEST #6: Like test #5, but we revert the rebases afterwards. work.run(["checkout", "-b", "020-reviewrebase-6-base", start_sha1]) base_commits = [commit("Test #6 base, commit 1", write, filename=FILENAME_BASE), commit("Test #6 base, commit 2", write, 0, filename=FILENAME_BASE)] work.run(["push", REMOTE_URL, "020-reviewrebase-6-base"]) work.run(["checkout", "-b", "020-reviewrebase-6", base_commits[1]["sha1"]]) commits = [commit("Test #6, commit 1", write, 4), commit("Test #6, commit 2", write, 4, 5), commit("Test #6, commit 3", write, 4)] createreview(commits) historyrewrite(commits[0]) expectlog(["history rewritten", commits[2], commits[1], commits[0]]) work.run(["reset", "--hard", base_commits[0]["sha1"]]) commits.append(commit("Test #6, commit 4", write, 4)) moverebase(base_commits[0], commits[-1]) expectlog(["rebased onto " + base_commits[0]["sha1"], "history rewritten", commits[2], commits[1], commits[0]]) revertrebase() expectlog(["history rewritten", commits[2], commits[1], commits[0]]) revertrebase() expectlog([commits[2], commits[1], commits[0]]) # TEST #7: Test reverting a ff-move rebase with conflicts. 
# TEST #7: set up two base commits, create a review of three commits on top
# of the first base, then ff-move-rebase the review onto the second base
# (producing conflicts, hence the merge commit) and finally revert the rebase.
work.run(["checkout", "-b", "020-reviewrebase-7-base", start_sha1])
base_commits = [commit("Test #7 base, commit 1", write),
                commit("Test #7 base, commit 2", write, 2)]
work.run(["push", REMOTE_URL, "020-reviewrebase-7-base"])

# Review branch starts from the first base commit.
work.run(["checkout", "-b", "020-reviewrebase-7", base_commits[0]["sha1"]])
commits = [commit("Test #7, commit 1", write, 5),
           commit("Test #7, commit 2", write, 5, 6),
           commit("Test #7, commit 3", write, 5, 6, 7)]
createreview(commits)

# Rebase the same three changes onto the second base commit.
work.run(["reset", "--hard", base_commits[1]["sha1"]])
commits.extend([commit("Test #7, commit 4", write, 2, 5),
                commit("Test #7, commit 5", write, 2, 5, 6),
                commit("Test #7, commit 6", write, 2, 5, 6, 7)])
moverebase(base_commits[1], commits[-1])
expectmail("Updated Review")
expectlog(["rebased onto " + base_commits[1]["sha1"],
           "Merge commit '%s' into %s" % (base_commits[1]["sha1"],
                                          reviews[-1]["branch"]),
           commits[2], commits[1], commits[0]])

# Reverting the rebase should restore the original three-commit log.
revertrebase()
expectlog([commits[2], commits[1], commits[0]])

# TEST #8: Test reverting a non-ff-move rebase with conflicts.
work.run(["checkout", "-b", "020-reviewrebase-8-base", start_sha1]) base_commits = [commit("Test #8 base, commit 1", write), commit("Test #8 base, commit 2", write, 2)] work.run(["push", REMOTE_URL, "020-reviewrebase-8-base"]) work.run(["checkout", "-b", "020-reviewrebase-8", base_commits[1]["sha1"]]) commits = [commit("Test #8, commit 1", write, 2, 3), commit("Test #8, commit 2", write, 2, 3, 6), commit("Test #8, commit 3", write, 2, 3, 6, 7)] createreview(commits) work.run(["reset", "--hard", base_commits[0]["sha1"]]) commits.extend([commit("Test #8, commit 4", write, 3), commit("Test #8, commit 5", write, 3, 6), commit("Test #8, commit 6", write, 3, 6, 7)]) moverebase(base_commits[0], commits[-1]) expectmail("Updated Review") expectlog(["rebased onto " + base_commits[0]["sha1"], "Changes introduced by rebase", commits[2], commits[1], commits[0]]) revertrebase() expectlog([commits[2], commits[1], commits[0]]) ================================================ FILE: testing/tests/001-main/003-self/021-updatereview-bogus.py ================================================ with frontend.signin("alice"): frontend.operation( "updatereview", data={ "review_id": -1, "new_owners": ["alice"] }, expect={ "status": "failure", "code": "nosuchreview" }) ================================================ FILE: testing/tests/001-main/003-self/022-removereviewfilter-bogus.py ================================================ with frontend.signin("alice"): frontend.operation( "removereviewfilter", data={ "filter_id": -1 }, expect={ "status": "failure", "code": "nosuchfilter" }) ================================================ FILE: testing/tests/001-main/003-self/024-customizations.githook.py ================================================ # Need a VM (full installation) to do customizations. # @flag full import re import json # Install the githook customization. 
instance.execute( ["sudo", "mkdir", "-p", "/etc/critic/main/customization", "&&", "sudo", "touch", "/etc/critic/main/customization/__init__.py", "&&", "sudo", "cp", "critic/testing/input/customization/githook.py", "/etc/critic/main/customization", "&&", "sudo", "chown", "-R", "critic.critic", "/etc/critic/main/customization"]) # Note: no need to restart, since the githook background service effectively # re-imports the 'index' module, which imports the 'customizations.githook' # module. with repository.workcopy() as work: REMOTE_URL = instance.repository_url("alice") def lsremote(ref_name): try: output = work.run(["ls-remote", "--exit-code", REMOTE_URL, ref_name]) except testing.repository.GitCommandError: return None lines = output.splitlines() testing.expect.check(1, len(lines)) testing.expect.check("[0-9a-f]{40}\t" + ref_name, lines[0], equal=re.match) return lines[0][:40] def push(new_value, ref_name, expected_result): old_value = lsremote(ref_name) if new_value is not None: new_value = work.run(["rev-parse", new_value]).strip() try: output = work.run(["push", "--quiet", REMOTE_URL, "%s:%s" % (new_value or "", ref_name)]) testing.expect.check("ACCEPT", expected_result) except testing.repository.GitCommandError as error: output = error.output testing.expect.check("REJECT", expected_result) from_hook = [] for line in output.splitlines(): line = line.partition("\x1b")[0] if line.startswith("remote: "): from_hook.append(line[len("remote: "):].strip()) testing.expect.check("^%s:" % expected_result, from_hook[0], equal=re.match) testing.expect.check({ "repository_path": "/var/git/critic.git", "ref_name": ref_name, "old_value": old_value, "new_value": new_value }, json.loads(from_hook[0][7:])) if expected_result == "ACCEPT": testing.expect.check(new_value, lsremote(ref_name)) else: testing.expect.check(old_value, lsremote(ref_name)) push("HEAD", "refs/heads/reject-create", "REJECT") push("HEAD^", "refs/heads/reject-delete", "ACCEPT") push("HEAD", 
"refs/heads/reject-delete", "ACCEPT") push(None, "refs/heads/reject-delete", "REJECT") push("HEAD^", "refs/heads/reject-update", "ACCEPT") push("HEAD", "refs/heads/reject-update", "REJECT") push("HEAD^", "refs/heads/reject-nothing", "ACCEPT") push("HEAD", "refs/heads/reject-nothing", "ACCEPT") push(None, "refs/heads/reject-nothing", "ACCEPT") # Remove the githook customization again. instance.execute( ["sudo", "rm", "-f", "/etc/critic/main/customization/githook.py", "/etc/critic/main/customization/githook.pyc"]) # And again, no need to restart. ================================================ FILE: testing/tests/001-main/003-self/025-trackedbranch.py ================================================ import time BRANCH_NAME = "025-trackedbranch" with repository.workcopy() as work, frontend.signin(): REMOTE_URL = instance.repository_url("alice") def wait_for_branch(branch_name, value): instance.synchronize_service("branchtracker") try: output = work.run(["ls-remote", "--exit-code", REMOTE_URL, "refs/heads/" + branch_name]) if output.startswith(value): return except testing.repository.GitCommandError: logger.error("Tracked branch %s not updated as expected." 
% branch_name) raise testing.TestFailure def get_branch_log(branch_id, expected_length): result = frontend.operation( "trackedbranchlog", data={ "branch_id": branch_id }) branch_log = result["items"] testing.expect.check(expected_length, len(branch_log)) return branch_log def check_log_item(branch_log_item, from_sha1, to_sha1, hook_output, successful): testing.expect.check(from_sha1, branch_log_item["from_sha1"]) testing.expect.check(to_sha1, branch_log_item["to_sha1"]) testing.expect.check(hook_output, branch_log_item["hook_output"]) testing.expect.check(successful, branch_log_item["successful"]) work.run(["push", "origin", "HEAD:refs/heads/" + BRANCH_NAME]) sha1s = { "HEAD": work.run(["rev-parse", "HEAD"]).strip(), "HEAD^": work.run(["rev-parse", "HEAD^"]).strip() } result = frontend.operation( "addtrackedbranch", data={ "repository_id": 1, "source_location": repository.url, "source_name": BRANCH_NAME, "target_name": BRANCH_NAME, "users": ["alice"], "forced": False }) branch_id = result["branch_id"] wait_for_branch(BRANCH_NAME, sha1s["HEAD"]) branch_log = get_branch_log(branch_id, expected_length=1) check_log_item(branch_log[0], from_sha1="0" * 40, to_sha1=sha1s["HEAD"], hook_output="", successful=True) work.run(["push", "origin", "-f", "HEAD^:refs/heads/" + BRANCH_NAME]) frontend.operation( "triggertrackedbranchupdate", data={ "branch_id": branch_id }) instance.synchronize_service("branchtracker") log_entries = instance.filter_service_log("branchtracker", "error") testing.expect.check(1, len(log_entries)) testing.expect.check("ERROR - update of branch 025-trackedbranch from " "025-trackedbranch in %s failed" % repository.url, log_entries[0].splitlines()[0]) to_system = testing.mailbox.ToRecipient("system@example.org") system_subject = testing.mailbox.WithSubject( "branchtracker.log: update of branch %s from %s in %s failed" % (BRANCH_NAME, BRANCH_NAME, repository.url)) mailbox.pop(accept=[to_system, system_subject]) to_alice = 
testing.mailbox.ToRecipient("alice@example.org") alice_subject = testing.mailbox.WithSubject( "%s: update from %s in %s" % (BRANCH_NAME, BRANCH_NAME, repository.url)) mailbox.pop(accept=[to_alice, alice_subject]) branch_log = get_branch_log(branch_id, expected_length=2) check_log_item(branch_log[0], from_sha1="0" * 40, to_sha1=sha1s["HEAD"], hook_output="", successful=True) check_log_item(branch_log[1], from_sha1=sha1s["HEAD"], to_sha1=sha1s["HEAD^"], hook_output="""\ Rejecting non-fast-forward update of branch. To perform the update, you can delete the branch using git push critic :%s first, and then repeat this push. """ % BRANCH_NAME, successful=False) work.run(["push", "origin", "HEAD:refs/heads/%s-forced" % BRANCH_NAME]) result = frontend.operation( "addtrackedbranch", data={ "repository_id": 1, "source_location": repository.url, "source_name": BRANCH_NAME + "-forced", "target_name": BRANCH_NAME + "-forced", "users": ["alice"], "forced": True }) branch_id = result["branch_id"] wait_for_branch(BRANCH_NAME + "-forced", sha1s["HEAD"]) branch_log = get_branch_log(branch_id, expected_length=1) check_log_item(branch_log[0], from_sha1="0" * 40, to_sha1=sha1s["HEAD"], hook_output="", successful=True) work.run(["push", "origin", "-f", "HEAD^:refs/heads/%s-forced" % BRANCH_NAME]) frontend.operation( "triggertrackedbranchupdate", data={ "branch_id": branch_id }) wait_for_branch(BRANCH_NAME + "-forced", sha1s["HEAD^"]) branch_log = get_branch_log(branch_id, expected_length=2) check_log_item(branch_log[0], from_sha1="0" * 40, to_sha1=sha1s["HEAD"], hook_output="", successful=True) check_log_item(branch_log[1], from_sha1=sha1s["HEAD"], to_sha1=sha1s["HEAD^"], hook_output="""\ Non-fast-forward update detected; deleting and recreating branch. 
""", successful=True) mailbox.check_empty() ================================================ FILE: testing/tests/001-main/003-self/026-searchreview.py ================================================ import re REVIEWS = { "giraffe": { "sha1": "5360e5d734e3b990c0dc67496c7a83f94013d01d", "branch": "r/026-searchreview/giraffe", "owners": ["alice"], "summary": "Make sure TH element lives inside a TR element", "paths": ["src/page/repositories.py"] }, "elephant": { "sha1": "18db724faccfb2f8d04c81309feadf05b48ec9e3", "branch": "r/026-searchreview/elephant", "owners": ["alice", "bob"], "reviewers": ["alice"], "watchers": ["erin"], "summary": "URL escape shortname in repository SELECT", "description": """\ Before this fix, a repository shortname such as "a&b" meant that the user would be forwarded to /branches?repository=a&b and Critic would say "No such repository: a". Same problem existed for shortname "a#b". Also shortname "a+b" hit the error "No such repository: a b".""", "paths": ["src/resources/branches.js", "src/resources/config.js"] }, "tiger": { "sha1": "95e52c53a4a183c9f0eada7401e1da174353e00e", "branch": "r/026-searchreview/cat", "owners": ["dave"], "reviewers": ["dave", "erin"], "summary": "Extend testing.tools.upgrade: support custom maintenance and reboot", "paths": ["testing/tools/upgrade.py", "testing/virtualbox.py"] }, "ashtray": { "sha1": "94391a18858c05b2619dfea4b58507d08d932bd3", "branch": "r/026-searchreview/ashtray", "owners": ["dave", "alice"], "reviewers": ["dave"], "summary": "Add support for installing packages in instance", "description": """\ Extend the testing.tools.upgrade tool to support installing extra packages in the instance and retake the snapshot afterwards.""", "paths": ["testing/tools/upgrade.py"], "dropped": True } } FAILED = False SETTINGS = { "review.createViaPush": True } def to(name): return testing.mailbox.ToRecipient("%s@example.org" % name) def about(subject): return testing.mailbox.WithSubject(subject) with 
repository.workcopy() as work: for review in REVIEWS.values(): primary_owner = review["owners"][0] with testing.utils.settings(primary_owner, SETTINGS), \ frontend.signin(primary_owner): REMOTE_URL = instance.repository_url(primary_owner) output = work.run( ["push", REMOTE_URL, "%(sha1)s:refs/heads/%(branch)s" % review]) next_is_review_url = False for line in output.splitlines(): if not line.startswith("remote:"): continue line = line[len("remote:"):].split("\x1b", 1)[0].strip() if line == "Submitted review:": next_is_review_url = True elif next_is_review_url: logger.debug(line) review["id"] = int(re.search(r"/r/(\d+)$", line).group(1)) break else: testing.expect.check("<review URL in git hook output>", "<expected content not found>") mailbox.pop( accept=[to(primary_owner), about("New Review: %s" % review["summary"])]) updatereview_data = {} if len(review["owners"]) > 1: updatereview_data["new_owners"] = review["owners"] if "description" in review: updatereview_data["new_description"] = review["description"] if updatereview_data: updatereview_data["review_id"] = review["id"] frontend.operation( "updatereview", data=updatereview_data) recipients = set() if "reviewers" in review: frontend.operation( "addreviewfilters", data={ "review_id": review["id"], "filters": [{ "type": "reviewer", "user_names": review["reviewers"], "paths": ["/"] }] }) recipients.update(review["reviewers"]) if "watchers" in review: frontend.operation( "addreviewfilters", data={ "review_id": review["id"], "filters": [{ "type": "watcher", "user_names": review["watchers"], "paths": ["/"] }] }) recipients.update(review["watchers"]) for username in recipients: if username not in review["owners"]: mailbox.pop(accept=[to(username), about(r"^New\(ish\) Review:")]) if username != primary_owner: mailbox.pop(accept=[to(username), about(r"^Updated Review:")]) if "closed" in review: frontend.operation( "closereview", data={ "review_id": review["id"] }) if "dropped" in review: frontend.operation( 
"dropreview", data={ "review_id": review["id"] }) def search(query, expected): global FAILED if isinstance(query, list): for q in query: search(q, expected) return try: result = frontend.operation( "searchreview", data={ "query": query }) except testing.TestFailure: # Continue testing instead of aborting. The error will have # been logged by frontend.operation() already. FAILED = True return actual = dict((review["id"], review["summary"]) for review in result["reviews"]) # Note: We only check that reviews we just created are included (or not) in # the search result. We specifically don't check that the search result # doesn't contain other reviews (not created above) since that typically # depends on which other tests have run, on which we don't want to depend. for key in expected: expected_review = REVIEWS[key] if expected_review["id"] not in actual: logger.error("r/<%s>: not found by query %r as expected" % (key, query)) FAILED = True else: if actual[expected_review["id"]] != expected_review["summary"]: logger.error("r/<%s>: wrong summary %r reported" % (key, actual[expected_review["id"]])) FAILED = True for key in REVIEWS.keys(): if key not in expected: if REVIEWS[key]["id"] in actual: logger.error("r/<%s>: incorrectly found by query %r" % (key, query)) FAILED = True def invalid(query, code, title): global FAILED try: frontend.operation( "searchreview", data={ "query": query }, expect={ "status": "failure", "code": code, "title": title }) except testing.TestFailure: # Continue testing instead of aborting. The error will have # been logged by frontend.operation() already. 
FAILED = True return search(query="existentialism", expected=[]) search(query="support", expected=["tiger", "ashtray"]) search(query="support for", expected=["ashtray"]) search(query="'support for'", expected=["ashtray"]) search(query='"support for"', expected=["ashtray"]) search(query="support owner:dave", expected=["tiger", "ashtray"]) search(query="support owner:alice", expected=["ashtray"]) search(query="support owner:bob", expected=[]) search(query="support installing", expected=["ashtray"]) search(query="'support installing'", expected=["ashtray"]) search(query="summary:'support installing'", expected=[]) search(query="description:'support installing'", expected=["ashtray"]) search(query="r/026-searchreview/*", expected=["giraffe", "elephant", "tiger", "ashtray"]) search(query=["b:r/026-searchreview/*", "branch:r/026-searchreview/*"], expected=["giraffe", "elephant", "tiger", "ashtray"]) search(query="path:r/026-searchreview/*", expected=[]) search(query="r/026-searchreview/elephant", expected=["elephant"]) search(query="r/026-searchreview/* upgrade.py", expected=["tiger", "ashtray"]) search(query="branch:r/026-searchreview/* path:upgrade.py", expected=["tiger", "ashtray"]) search(query=["p:upgrade.py", "path:upgrade.py"], expected=["tiger", "ashtray"]) search(query="branch:upgrade.py", expected=[]) search(query="user:alice", expected=["giraffe", "elephant", "ashtray"]) search(query="owner:alice", expected=["giraffe", "elephant", "ashtray"]) search(query="reviewer:alice", expected=["elephant"]) search(query="user:bob", expected=["elephant"]) search(query="owner:bob", expected=["elephant"]) search(query="reviewer:bob", expected=[]) search(query="user:dave", expected=["tiger", "ashtray"]) search(query="owner:dave", expected=["tiger", "ashtray"]) search(query="reviewer:dave", expected=["tiger", "ashtray"]) search(query=["u:erin", "user:erin"], expected=["elephant", "tiger"]) search(query=["o:erin", "owner:erin"], expected=[]) search(query=["reviewer:erin"], 
expected=["tiger"]) search(query="owner:alice reviewer:bob user:erin", expected=[]) search(query="reviewer:alice owner:bob", expected=["elephant"]) search(query=["s:open", "state:open"], expected=["giraffe", "elephant", "tiger"]) # It would be nice if we could make one of the reviews accepted, but doing that # is a lot of work. In practice, a "pending" search is almost the same as an # "accepted" search; it's just inverted. So we're at least quite close to # testing an "accepted" search. search(query=["s:pending", "state:pending"], expected=["giraffe", "elephant", "tiger"]) search(query=["s:accepted", "state:accepted"], expected=[]) # It would be nice if we could close a review too, but again, that depends on # making a review accepted, and that's a lot of work. search(query=["s:closed", "state:closed"], expected=[]) search(query=["s:dropped", "state:dropped"], expected=["ashtray"]) # A bit boring since there's only one repository. search(query=["r:critic", "repo:critic", "repository:critic"], expected=["giraffe", "elephant", "tiger", "ashtray"]) invalid(query="overlord:admin", code="invalidkeyword", title="Invalid keyword: 'overlord'") invalid(query="user:nosuchuser", code="invalidterm", title="No such user: 'nosuchuser'") invalid(query="owner:nosuchuser", code="invalidterm", title="No such user: 'nosuchuser'") invalid(query="reviewer:nosuchuser", code="invalidterm", title="No such user: 'nosuchuser'") invalid(query="state:limbo", code="invalidterm", title="Invalid review state: 'limbo'") if FAILED: raise testing.TestFailure ================================================ FILE: testing/tests/001-main/003-self/027-whitespace-filenames.py ================================================ import os BRANCH = "027-whitespace-filename" FILENAME = "filename with spaces.txt" def check_filename(class_name): def check(document): cells = document.findAll("td", attrs=testing.expect.with_class(class_name)) for cell in cells: anchor = cell.find("a") if not anchor: continue 
testing.expect.check(FILENAME, anchor.string) break else: testing.expect.check("<td class=%s><a>%s" % (class_name, FILENAME), "<expected content not found>") return check with frontend.signin("alice"): with repository.workcopy(empty=True) as work: REMOTE_URL = instance.repository_url("alice") def commit(): work.run(["add", FILENAME]) work.run(["commit", "-m", FILENAME], GIT_AUTHOR_NAME="Alice von Testing", GIT_AUTHOR_EMAIL="alice@example.org", GIT_COMMITTER_NAME="Alice von Testing", GIT_COMMITTER_EMAIL="alice@example.org") return work.run(["rev-parse", "HEAD"]).strip() def push(): work.run(["push", "-q", REMOTE_URL, "HEAD:refs/heads/" + BRANCH]) with open(os.path.join(work.path, FILENAME), "w") as text_file: print >>text_file, "Content of file " + FILENAME sha1 = commit() push() frontend.page( "showcommit", params={ "repository": "critic", "sha1": sha1 }, expect={ "filename": check_filename("path") }) frontend.page( "showtree", params={ "repository": "critic", "sha1": sha1, "path": "/" }, expect={ "filename": check_filename("name") }) ================================================ FILE: testing/tests/001-main/003-self/028-gitemails.py ================================================ # Test summary # ============ # # Alice, Bob and Dave adds a bunch of filters making them reviewers of # a directory we add a bunch of different files to. Alice adds a filter with # Erin as delegate. # # They all also set their Git emails, Alice and Bob sharing common@example.org # and dave having two different addresses. # # Then we make a bunch of commits with different authors (all involved Git email # addresses, plus nobody@example.org.) Each commits adds one file. # # We then catch the "New Review" and "Updated Review" emails sent, and make sure # those emails claim that the right set of files is assigned to be reviewed by # the expected users. 
import os import re REMOTE_URL = instance.repository_url("alice") to_recipient = testing.mailbox.ToRecipient with_subject = testing.mailbox.WithSubject showfilters_output = frontend.page( "showfilters", params={ "repository": "critic" }, expected_content_type="text/plain") testing.expect.check( "Path: /\n\nNo matching filters found.\n", showfilters_output) showfilters_output = frontend.page( "showfilters", params={ "repository": "critic", "path": "028-gitemails/" }, expected_content_type="text/plain") testing.expect.check( "Path: 028-gitemails/\n\nNo matching filters found.\n", showfilters_output) with repository.workcopy() as workcopy: def commit(filename, author): path = os.path.join(workcopy.path, "028-gitemails", filename) with open(path, "w") as the_file: the_file.write("This is '%s' by %s.\n" % (filename, author)) workcopy.run(["add", "028-gitemails/" + filename]) workcopy.run(["commit", "-m", "Edited " + filename], GIT_AUTHOR_NAME="Anonymous Coward", GIT_AUTHOR_EMAIL=author + "@example.org", GIT_COMMITTER_NAME="Anonymous Coward", GIT_COMMITTER_EMAIL=author + "@example.org") return workcopy.run(["rev-parse", "HEAD"]).strip() def expect_mail(recipient, expected_files): mail = mailbox.pop( accept=[to_recipient(recipient + "@example.org"), with_subject("(New|Updated) Review: Edited cat.txt")]) assigned_files = [] try: marker_index = mail.lines.index( "These changes were assigned to you:") except ValueError: pass else: for line in mail.lines[marker_index + 1:]: if not line.strip(): break filename, counts = line.strip().split(None, 1) if assigned_files: filename = filename.replace(".../", "028-gitemails/") assigned_files.append((filename, counts)) for filename, counts in assigned_files: if not expected_files: testing.expect.check("<no more assigned files>", filename) else: testing.expect.check( "028-gitemails/" + expected_files.pop(0), filename) testing.expect.check("+1", counts) if expected_files: for filename in expected_files: testing.expect.check(filename, 
"<no more assigned files>") with frontend.signin("alice"): frontend.operation( "addfilter", data={ "filter_type": "reviewer", "repository_name": "critic", "path": "028-gitemails/", "delegates": ["erin"] }) frontend.operation( "setgitemails", data={ "subject_id": instance.userid("alice"), "value": ["alice@example.org", "common@example.org"] }) with frontend.signin("bob"): frontend.operation( "addfilter", data={ "filter_type": "reviewer", "repository_name": "critic", "path": "028-gitemails/", "delegates": [] }) frontend.operation( "setgitemails", data={ "subject_name": "bob", "value": ["bob@example.org", "common@example.org"] }) with frontend.signin("dave"): frontend.operation( "addfilter", data={ "filter_type": "reviewer", "repository_name": "critic", "path": "028-gitemails/", "delegates": [] }) frontend.operation( "setgitemails", data={ "subject": instance.userid("dave"), "value": ["dave@example.org", "dave@example.com"] }) workcopy.run(["checkout", "-b", "r/028-gitemails"]) os.mkdir(os.path.join(workcopy.path, "028-gitemails")) commits = [] commits.append(commit("cat.txt", "alice")) with testing.utils.settings("alice", { "review.createViaPush": True }): workcopy.run(["push", REMOTE_URL, "HEAD"]) expect_mail("alice", []) expect_mail("bob", ["cat.txt"]) expect_mail("dave", ["cat.txt"]) expect_mail("erin", ["cat.txt"]) commits.append(commit("dog.txt", "bob")) commits.append(commit("mouse.txt", "dave")) commits.append(commit("snake.txt", "dave")) commits.append(commit("bird.txt", "common")) commits.append(commit("fish.txt", "nobody")) workcopy.run(["push", REMOTE_URL, "HEAD"]) expect_mail("alice", ["dog.txt", "fish.txt", "mouse.txt", "snake.txt"]) expect_mail("bob", ["fish.txt", "mouse.txt", "snake.txt"]) expect_mail("dave", ["bird.txt", "dog.txt", "fish.txt"]) expect_mail("erin", ["bird.txt"]) showfilters_output = frontend.page( "showfilters", params={ "repository": "critic" }, expected_content_type="text/plain") testing.expect.check( "Path: /\n\nNo matching filters 
found.\n", showfilters_output) showfilters_output = frontend.page( "showfilters", params={ "repository": "critic", "path": "028-gitemails/" }, expected_content_type="text/plain") testing.expect.check( """\ Path: 028-gitemails/ Reviewers: Alice von Testing <alice@example.org> Bob von Testing <bob@example.org> Dave von Testing <dave@example.org> """, showfilters_output) ================================================ FILE: testing/tests/001-main/003-self/029-log-bogus.py ================================================ expected = testing.expect.message("'notabranch' doesn't name a branch!", None) frontend.page( url="log", params={ "repository": "critic", "branch": "notabranch" }, expect={ "message": expected }) expected = testing.expect.message("Missing URI Parameter!", "Expected 'repository' parameter.") frontend.page( url="log", params={ "branch": "branch_that_does_not_exist" }, expect={ "message": expected }, expected_http_status=400) expected = testing.expect.message("'nyetvetka' doesn't name a branch!", None) frontend.page( url="log", params={ "repository": "critic", "branch": "master", "base": "nyetvetka" }, expect={ "message": expected }) ================================================ FILE: testing/tests/001-main/003-self/030-trackingreview.py ================================================ TEST_NAME = "030-trackingreview" BRANCH_NAME = [TEST_NAME + "-1", TEST_NAME + "-2"] UPSTREAM_NAME = [name + "-upstream" for name in BRANCH_NAME] SUMMARY = TEST_NAME ORIGINAL_SHA1 = "37bfd1ee7d301b364d0a8c716e9bca36efd5d139" REVIEWED_SHA1 = [] UPSTREAM_SHA1 = ["22afd9377add956e1e8d8dd6efa378fad9237532", "702c1b1a4043d8837e788317698cfc88c5570ff8"] def to(name): return testing.mailbox.ToRecipient("%s@example.org" % name) def about(subject): return testing.mailbox.WithSubject(subject) repository.run(["branch", UPSTREAM_NAME[0], UPSTREAM_SHA1[0]]) repository.run(["branch", UPSTREAM_NAME[1], UPSTREAM_SHA1[1]]) with repository.workcopy() as work: work.run(["checkout", "-b", 
BRANCH_NAME[0], UPSTREAM_SHA1[0]]) work.run(["cherry-pick", ORIGINAL_SHA1]) work.run(["push", "origin", "HEAD"]) REVIEWED_SHA1.append(work.run(["rev-parse", "HEAD"]).strip()) work.run(["checkout", "-b", BRANCH_NAME[1], UPSTREAM_SHA1[1]]) work.run(["cherry-pick", ORIGINAL_SHA1]) work.run(["push", "origin", "HEAD"]) REVIEWED_SHA1.append(work.run(["rev-parse", "HEAD"]).strip()) with_class = testing.expect.with_class extract_text = testing.expect.extract_text def check_tracking(branch_name, disabled=False): def check(document): class_names = ["tracking"] if disabled: class_names.append("disabled") p_tracking = document.find("p", attrs=with_class(*class_names)) testing.expect.check("tracking", extract_text(p_tracking)) if not disabled: testing.expect.check("tracking", p_tracking["class"]) code_branch = document.findAll("code", attrs=with_class("branch")) testing.expect.check(2, len(code_branch)) testing.expect.check(branch_name, extract_text(code_branch[1])) code_repository = document.findAll("code", attrs=with_class("repository")) testing.expect.check(2, len(code_repository)) testing.expect.check(repository.url, extract_text(code_repository[1])) return check SETTINGS = { "email.subjectLine.updatedReview.reviewRebased": "Rebased Review: %(summary)s" } with testing.utils.settings("alice", SETTINGS), frontend.signin("alice"): result = frontend.operation( "fetchremotebranch", data={ "repository_name": "critic", "remote": repository.url, "branch": BRANCH_NAME[0], "upstream": "refs/heads/" + UPSTREAM_NAME[0] }, expect={ "head_sha1": REVIEWED_SHA1[0], "upstream_sha1": UPSTREAM_SHA1[0] }) # Run a GC to make sure the objects fetched by /fetchremotebranch are # referenced and thus usable by the subsequent /submitreview operation. 
instance.gc("critic.git")

# Submit a review of the fetched commits, with a tracked branch attached.
commit_ids = result["commit_ids"]
result = frontend.operation(
    "submitreview",
    data={ "repository": "critic",
           "branch": "r/" + TEST_NAME,
           "summary": SUMMARY,
           "commit_ids": commit_ids,
           "trackedbranch": { "remote": repository.url,
                              "name": BRANCH_NAME[0] }})
review_id = result["review_id"]
trackedbranch_id = result["trackedbranch_id"]

mailbox.pop(
    accept=[to("alice"), about("New Review: " + SUMMARY)])

# Wait for the immediate fetch of the tracked branch that /submitreview
# schedules.
instance.synchronize_service("branchtracker")

# Emulate a review rebase via /rebasetrackingreview.
frontend.page(
    "r/%d" % review_id,
    expect={ "tracking": check_tracking(BRANCH_NAME[0]) })
frontend.page(
    "rebasetrackingreview",
    params={ "review": review_id })

result = frontend.operation(
    "fetchremotebranch",
    data={ "repository_name": "critic",
           "remote": repository.url,
           "branch": BRANCH_NAME[1],
           "upstream": "refs/heads/" + UPSTREAM_NAME[1] },
    expect={ "head_sha1": REVIEWED_SHA1[1],
             "upstream_sha1": UPSTREAM_SHA1[1] })

# Run a GC to make sure the objects fetched by /fetchremotebranch are
# referenced and thus usable by the subsequent /rebasetrackingreview
# operation.
instance.gc("critic.git")

frontend.page(
    "rebasetrackingreview",
    params={ "review": review_id,
             "newbranch": BRANCH_NAME[1],
             "upstream": UPSTREAM_NAME[1],
             "newhead": REVIEWED_SHA1[1],
             "newupstream": UPSTREAM_SHA1[1] })

# The new head should apply cleanly on top of the new upstream.
frontend.operation(
    "checkconflictsstatus",
    data={ "review_id": review_id,
           "new_head_sha1": REVIEWED_SHA1[1],
           "new_upstream_sha1": UPSTREAM_SHA1[1] },
    expect={ "has_changes": False,
             "has_conflicts": False })
frontend.operation(
    "rebasereview",
    data={ "review_id": review_id,
           "new_head_sha1": REVIEWED_SHA1[1],
           "new_upstream_sha1": UPSTREAM_SHA1[1],
           "new_trackedbranch": BRANCH_NAME[1] })

# The review page should now report tracking of the second branch.
frontend.page(
    "r/%d" % review_id,
    expect={ "tracking": check_tracking(BRANCH_NAME[1]) })

mailbox.pop(
    accept=[to("alice"), about("Rebased Review: " + SUMMARY)])

# Disable and enable the tracking.
frontend.operation( "disabletrackedbranch", data={ "branch_id": trackedbranch_id }) frontend.page( "r/%d" % review_id, expect={ "tracking": check_tracking(BRANCH_NAME[1], disabled=True) }) frontend.operation( "enabletrackedbranch", data={ "branch_id": trackedbranch_id }) frontend.page( "r/%d" % review_id, expect={ "tracking": check_tracking(BRANCH_NAME[1]) }) ================================================ FILE: testing/tests/001-main/003-self/031-fetchlines-bom.py ================================================ import os UTF8_BOM = "\xEF\xBB\xBF" with frontend.signin("alice"), repository.workcopy(empty=True) as work: REMOTE_URL = instance.repository_url("alice") def commit(filename): work.run(["add", filename]) work.run(["commit", "-m", "Add " + filename], GIT_AUTHOR_NAME="Alice von Testing", GIT_AUTHOR_EMAIL="alice@example.org", GIT_COMMITTER_NAME="Alice von Testing", GIT_COMMITTER_EMAIL="alice@example.org") sha1 = work.run(["ls-tree", "HEAD", filename]).strip().split()[2] return sha1 def push(): return work.run(["push", "-q", REMOTE_URL, "HEAD:refs/heads/031-fetchlines"]) filename_cc = "031-fetchlines.cc" with open(os.path.join(work.path, filename_cc), "w") as text_file: print >>text_file, UTF8_BOM print >>text_file, "\n"*42 print >>text_file, "hello world" file_sha1_cc = commit(filename_cc) filename_py = "031-fetchlines.py" with open(os.path.join(work.path, filename_py), "w") as text_file: print >>text_file, UTF8_BOM print >>text_file, "\n"*42 print >>text_file, "hello world" file_sha1_py = commit(filename_py) push() frontend.operation( "fetchlines", data={ "repository_id": 1, "path": filename_cc, "sha1": file_sha1_cc, "ranges": [{ "offset": 1, "count": 40, "context": True }], "tabify": False }) frontend.operation( "fetchlines", data={ "repository_id": 1, "path": filename_py, "sha1": file_sha1_py, "ranges": [{ "offset": 1, "count": 40, "context": True }], "tabify": False }) ================================================ FILE: 
testing/tests/001-main/003-self/032-download.py ================================================ # Download the README file in the root directory in the initial released commit. expected_README = """\ Critic ====== This is the code review system, Critic. """ actual_README = frontend.page( "download/README", params={ "repository": "critic", "sha1": "f4c6e5fc09de47f7eb1a623cbc8820f67967d558" }, expected_content_type="text/plain") testing.expect.check(expected_README, actual_README) # Download the resources/basic.js file in the initial released commit. We won't # bother to check that the content is correct (it's too big to inline in this # test,) so just check that the content type is correctly guessed. frontend.page( "download/resources/basic.js", params={ "repository": "critic", "sha1": "2c7d6f87c11670f3c371cca0580553f01ec94340" }, expected_content_type="text/javascript") # Download the resources/basic.js file in the initial released commit, this time # with an abbreviated SHA-1 sum. frontend.page( "download/resources/basic.js", params={ "repository": "critic", "sha1": "2c7d6f87c11" }, expected_content_type="text/javascript") # Attempt to download the README file in the root directory but specify a SHA-1 # that isn't a blob, but rather the initial released commit's SHA-1. This # should fail. frontend.page( "download/README", params={ "repository": "critic", "sha1": "aa15bc746d3340bda912a1cc4759b332b9adff55" }, expected_http_status=404) # Attempt to download the README file in the root directory but specify a SHA-1 # that doesn't exist at all in the repository. frontend.page( "download/README", params={ "repository": "critic", "sha1": "0000000000000000000000000000000000000000" }, expected_http_status=404) # Attempt to download the README file in the root directory but specify a SHA-1 # that isn't a valid SHA-1. 
frontend.page( "download/README", params={ "repository": "critic", "sha1": "0123456789abcdefghijklmnopqrstuvwxzy" }, expected_http_status=404) # Use a bogus repository parameter. frontend.page( "download/README", params={ "repository": "notcritic", "sha1": "f4c6e5fc09de47f7eb1a623cbc8820f67967d558" }, expected_http_status=404) # Omit the sha1 parameter. frontend.page( "download/README", params={ "repository": "critic" }, expected_http_status=400) # Omit the repository parameter. frontend.page( "download/README", params={ "sha1": "f4c6e5fc09de47f7eb1a623cbc8820f67967d558" }, expected_http_status=400) ================================================ FILE: testing/tests/001-main/003-self/033-propagation-vs-rebase.py ================================================ import os FILENAME = "033-propagation-vs-rebase.txt" FILENAME_BASE = "033-propagation-vs-rebase.base.txt" NONSENSE = """\ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec ut enim sit amet purus ultricies lobortis. Pellentesque nisi arcu, convallis sed purus sed, semper ultrices velit. Ut egestas lorem tortor, vitae lacinia lorem consectetur nec. Integer tempor ornare ipsum at viverra. Curabitur nec orci mollis, lacinia sapien eget, ultricies ipsum. Curabitur a libero tortor. 
Curabitur volutpat lacinia erat, ac suscipit enim dignissim nec.""" def lines(*args): return "\n".join((line.upper() if index in args else line) for index, line in enumerate(NONSENSE.splitlines())) def to(name): return testing.mailbox.ToRecipient("%s@example.org" % name) def about(subject): return testing.mailbox.WithSubject(subject) SETTINGS = { "review.createViaPush": True, "email.subjectLine.updatedReview.reviewRebased": "Rebased Review: %(summary)s" } work = repository.workcopy() settings = testing.utils.settings("alice", SETTINGS) signin = frontend.signin("alice") reviews = [] with work, settings, signin: REMOTE_URL = instance.repository_url("alice") def write(*args, **kwargs): """Write the file<tm>, optionally with lines upper-cased.""" filename = kwargs.get("filename", FILENAME) with open(os.path.join(work.path, filename), "w") as target: print >>target, lines(*args) def commit(message, *args, **kwargs): """Create a commit and return its SHA-1""" write(*args, **kwargs) work.run(["add", kwargs.get("filename", FILENAME)]) work.run(["commit", "-m", message]) return work.run(["rev-parse", "HEAD"]).strip() def expectmail(title): review = reviews[-1] mailbox.pop(accept=[to("alice"), about("%s: %s" % (title, review["summary"]))]) def expecthead(expected): """Check that the review branch in Critic's repository is where we want it to be.""" review = reviews[-1] actual = work.run(["ls-remote", REMOTE_URL, review["branch"]]).split()[0] testing.expect.check(expected, actual) def createreview(commits): """Create a review of the specified commits.""" index = len(reviews) + 1 branch = "033-propagation-vs-rebase/%d" % index summary = "033-propagation-vs-rebase, test %d" % index work.run(["push", REMOTE_URL, "%s:refs/heads/%s" % (commits[-1], branch)]) result = frontend.operation( "submitreview", data={ "repository": "critic", "commit_sha1s": [sha1 for sha1 in commits], "branch": "r/" + branch, "frombranch": branch, "summary": summary }) reviews.append({ "id": 
result["review_id"], "branch": "r/" + branch, "summary": summary }) expectmail("New Review") def push(new_head, force=False): """Push specified commit to the review branch in Critic's repository, optionally forced.""" review = reviews[-1] args = ["push"] if force: args.append("-f") args.extend([REMOTE_URL, "%s:refs/heads/%s" % (new_head, review["branch"])]) work.run(args) expecthead(new_head) def moverebase(new_upstream, new_head): """Perform a move rebase.""" review = reviews[-1] work.run(["reset", "--hard", new_head]) frontend.operation( "preparerebase", data={ "review_id": review["id"], "new_upstream": new_upstream }) push(new_head, force=True) expectmail("Rebased Review") def historyrewrite(new_head): """Perform a history rewrite rebase.""" review = reviews[-1] work.run(["reset", "--hard", new_head]) frontend.operation( "preparerebase", data={ "review_id": review["id"] }) push(new_head, force=True) expectmail("Rebased Review") def createcomment(parent_sha1, child_sha1, offset, count, verdict): frontend.operation( "validatecommentchain", data={ "review_id": reviews[-1]["id"], "origin": "new", "parent_sha1": parent_sha1, "child_sha1": child_sha1, "file_path": FILENAME, "offset": offset, "count": count }, expect={ "verdict": verdict }) frontend.operation( "createcommentchain", data={ "review_id": reviews[-1]["id"], "chain_type": "issue", "file_context": { "origin": "new", "parent_sha1": parent_sha1, "child_sha1": child_sha1, "file_path": FILENAME, "offset": offset, "count": count }, "text": ("Issue at lines %d-%d" % (offset, offset + count - 1)) }) start_sha1 = work.run(["rev-parse", "HEAD"]).strip() # TEST #1: Create a review with one commit, then do a fast-forward move # rebase, and then create some comments in the diff of the original, # pre-rebase commit. 
    # Build a three-commit base branch; commits 2 and 3 progressively
    # upper-case lines 0 and 1 of the file.
    work.run(["checkout", "-b", "033-propagation-vs-rebase-base", start_sha1])
    base_commits = [commit("Base commit 1"),
                    commit("Base commit 2", 0),
                    commit("Base commit 3", 0, 1)]
    work.run(["push", REMOTE_URL, "HEAD"])

    work.run(["checkout", "-b", "033-propagation-vs-rebase-1",
              base_commits[0]])
    original_commits = [commit("Test #1, commit 1", 6, 7)]
    createreview(original_commits)

    # Fast-forward move rebase: new upstream is base commit 3, which also
    # touches lines 0-1.
    work.run(["reset", "--hard", base_commits[2]])
    rebased_commits = [commit("Test #1, commit 1 (rebased)", 0, 1, 6, 7)]
    moverebase(base_commits[2], rebased_commits[0])

    # Lines modified in the rebase.
    createcomment(base_commits[0], original_commits[0], 1, 2, "modified")
    # Lines not modified at all.
    createcomment(base_commits[0], original_commits[0], 4, 2, "transferred")
    # Lines modified in the review (but not in the rebase).
    createcomment(base_commits[0], original_commits[0], 7, 2, "transferred")

    # TEST #2: Create a review with one commit, then do a non-fast-forward move
    # rebase, and then create some comments in the diff of the original,
    # pre-rebase commit.

    work.run(["checkout", "-b", "033-propagation-vs-rebase-2",
              base_commits[2]])
    original_commits = [commit("Test #2, commit 1", 0, 1, 6, 7)]
    createreview(original_commits)

    # Non-fast-forward move rebase: new upstream is base commit 1, i.e. the
    # review moves "backwards" past the edits to lines 0-1.
    work.run(["reset", "--hard", base_commits[0]])
    rebased_commits = [commit("Test #2, commit 1 (rebased)", 6, 7)]
    moverebase(base_commits[0], rebased_commits[0])

    # Lines modified in the rebase.
    createcomment(base_commits[0], original_commits[0], 1, 2, "modified")
    # Lines not modified at all.
    createcomment(base_commits[0], original_commits[0], 4, 2, "transferred")
    # Lines modified in the review (but not in the rebase).
    createcomment(base_commits[0], original_commits[0], 7, 2, "transferred")


================================================
FILE: testing/tests/001-main/003-self/100-reviewing/001-comments.basic.py
================================================
# End-to-end test of comment chains: creation of all chain types, replies,
# morphing (issue<->note), resolving/reopening, and the notification mails
# generated for each batch of submitted changes.

import os
import re
import pprint  # NOTE(review): imported but not used anywhere in this file.

def to(name):
    return testing.mailbox.ToRecipient("%s@example.org" % name)

def about(subject):
    return testing.mailbox.WithSubject(subject)

BASE = "100-reviewing/"
TEST = BASE + "001-comment.basic"
BRANCH = "r/" + TEST
FILENAME = TEST + ".txt"
SUMMARY = "Added " + FILENAME
NEW_SUBJECT = "New Review: " + SUMMARY
NEWISH_SUBJECT = r"New\(ish\) Review: " + SUMMARY
UPDATED_SUBJECT = "Updated Review: " + SUMMARY

LINES = ["First line", "Second line", "Third line", "Fourth line",
         "Fifth line", "Sixth line", "Seventh line", "Eighth line",
         "Ninth line", "Tenth line"]

################################################################################
#
# Some utility stuff.
#
################################################################################

class CommentChain(object):
    # In-test model of a comment chain: mirrors what the mails generated by
    # Critic are expected to contain.
    def __init__(self, chain_id, chain_type, author, text, lines=None):
        self.id = chain_id
        self.type = chain_type
        self.author = author
        self.text = text
        self.lines = lines
        self.replies = []

    def add_reply(self, author):
        # Record the canonical reply text created by createComment() below.
        self.replies.append((author, ("This is a reply from %s."
                                      % author.capitalize())))

def findChainInMail(mail, chain_id):
    # Scan the mail (a list of lines) for the rendering of chain `chain_id`,
    # parse out its type, author, text, quoted lines, replies and trailer,
    # and delete the matched span from `mail` (so repeated calls consume
    # chains one at a time).  Fails the test if the chain is not found.
    chain_type = author = text = lines = trailer = reply_author = None
    replies = []
    last_comment_seen = False
    first_line_index = None
    # The trailing [None] sentinel lets the final chain terminate the scan.
    for index, line in enumerate(mail + [None]):
        if last_comment_seen:
            if line is None or line:
                del mail[first_line_index:index]
                return chain_type, author, text, lines, replies, trailer
        if not line:
            continue
        match = re.match("(?:> )?General (issue|note)", line)
        if match:
            chain_type = "general " + match.group(1)
            continue
        match = re.match("(?:> )?(Issue|Note) in commit", line)
        if match:
            chain_type = "commit " + match.group(1).lower()
            continue
        match = re.match("(?:> )?(Issue|Note) in", line)
        if match:
            chain_type = "file " + match.group(1).lower()
            continue
        if chain_type is None:
            continue
        # The showcomment URL identifies which chain this rendering is for;
        # reset chain_type if it is not the one we are looking for.
        match = re.match(r"(?:> )? http://.*/showcomment\?chain=(\d+)", line)
        if match:
            first_line_index = index - 1
            if int(match.group(1)) != chain_id:
                chain_type = None
            continue
        match = re.match("(?:> )?([^ ]+) von Testing at", line)
        if match:
            # First attribution line names the chain author; subsequent ones
            # name reply authors.
            if author is None:
                author = match.group(1).lower()
            else:
                reply_author = match.group(1).lower()
            continue
        if re.match(r"(?:> )?-+$", line):
            # A dashed rule starts the quoted code/commit-message block
            # (general chains have no quoted lines).
            if lines is None and not chain_type.startswith("general "):
                lines = []
            continue
        if lines is not None and author is None:
            if chain_type.startswith("file "):
                match = re.match(r"(?:> )?\s*(\d+)\|(.*)$", line)
                lines.append((int(match.group(1)), match.group(2)))
            else:
                match = re.match(r"(?:> )? (.*)$", line)
                lines.append((None, match.group(1)))
            continue
        # All-caps (or parenthesized) lines are state trailers such as
        # "ISSUE RESOLVED!".
        if line and (line.lower() != line == line.upper()
                     or re.match(r"\(.*\)", line)):
            trailer = line
            last_comment_seen = True
            continue
        match = re.match(r"(> )? (.+)$", line)
        last_comment_seen = match.group(1) is None
        if reply_author:
            replies.append((reply_author, match.group(2)))
        else:
            text = match.group(2)
    testing.expect.check("<chain %d in mail>" % chain_id,
                         "<expected content not found>")

def checkSubmitter(mails, expected_submitter):
    # Verify that each mail names `expected_submitter` as the user who
    # submitted the changes.
    for mail in mails:
        for line in mail:
            match = re.match("(.*) von Testing has submitted", line)
            if match:
                testing.expect.check(expected_submitter,
                                     match.group(1).lower())
                return
    testing.expect.check("<'$USER has submitted' line in mail>",
                         "<expected content not found>")

def checkChain(mails, chain, expected_trailer=None):
    # Check that every mail contains a correct rendering of `chain`.
    # NOTE(review): `expected_trailer` and the parsed `actual_trailer` are
    # never compared here, so the trailer values passed by callers (e.g.
    # "ISSUE RESOLVED!") are not actually enforced -- confirm whether this
    # is intentional.
    for mail in mails:
        (actual_type, actual_author, actual_text, actual_lines,
         actual_replies, actual_trailer) = findChainInMail(mail, chain.id)
        testing.expect.check(chain.type, actual_type)
        testing.expect.check(chain.author, actual_author)
        testing.expect.check(chain.text, actual_text)
        testing.expect.check(chain.lines, actual_lines)
        testing.expect.check(chain.replies, actual_replies)

def checkNoMoreChains(mails):
    # After all expected chains were consumed by findChainInMail(), no mail
    # should mention any further chain.
    for mail in mails:
        for index, line in enumerate(mail):
            if re.match("(?:> )?General (issue|note)", line) \
                    or re.match("(?:> )?(Issue|Note) in commit", line) \
                    or re.match("(?:> )?(Issue|Note) in", line):
                testing.logger.error(
                    "Unexpected comment chain mentioned in mail:\n %s\n %s"
                    % (mail[index], mail[index + 1]))

def receiveMails(subject):
    # Pop one "Updated Review" mail per involved user; returns copies of each
    # mail's lines.
    return [mailbox.pop(accept=[to(whom), about(subject)]).lines[:]
            for whom in ["alice", "bob", "dave", "erin"]]

def createComment(chain, author):
    frontend.operation(
        "createcomment",
        data={ "chain_id": chain.id,
               "text": "This is a reply from %s." % author.capitalize() })

def resolveCommentChain(chain):
    frontend.operation(
        "resolvecommentchain",
        data={ "chain_id": chain.id })

def reopenResolvedCommentChain(chain):
    frontend.operation(
        "reopenresolvedcommentchain",
        data={ "chain_id": chain.id })

def morphCommentChain(chain, new_type):
    frontend.operation(
        "morphcommentchain",
        data={ "chain_id": chain.id,
               "new_type": new_type })

def submitChanges():
    # Submit the signed-in user's draft changes; also load the batch preview
    # and (when available) the resulting batch page as smoke tests.
    if instance.has_flag("fixed-batch-preview"):
        frontend.page(
            "showbatch",
            params={ "review": str(review_id) })
    result = frontend.operation(
        "submitchanges",
        data={ "review_id": review_id })
    if "batch_id" in result:
        frontend.page(
            "showbatch",
            params={ "batch": result["batch_id"] })

with repository.workcopy() as work:
    ############################################################################
    #
    # As Alice, create a commit that adds a file, and a review of that commit,
    # with Bob, Dave and Erin as associated users.
    #
    ############################################################################

    parent_sha1 = work.run(["rev-parse", "HEAD"]).strip()

    work.run(["checkout", "-b", BRANCH, "--no-track", "origin/master"])

    os.mkdir(os.path.join(work.path, "100-reviewing"))

    with open(os.path.join(work.path, FILENAME), "w") as review_file:
        review_file.write("\n".join(LINES) + "\n")

    work.run(["add", FILENAME])
    work.run(["commit", "-m", "\n".join([SUMMARY, ""] + LINES[:3])])

    child_sha1 = work.run(["rev-parse", "HEAD"]).strip()

    review_id = testing.utils.createReviewViaPush(work, "alice")

    mailbox.pop(accept=[to("alice"), about(NEW_SUBJECT)])

    with frontend.signin("alice"):
        frontend.operation(
            "addreviewfilters",
            data={ "review_id": review_id,
                   "filters": [{ "type": "reviewer",
                                 "user_names": ["bob"],
                                 "paths": [BASE] },
                               { "type": "watcher",
                                 "user_names": ["dave"],
                                 "paths": ["/"] },
                               { "type": "watcher",
                                 "user_names": ["erin"],
                                 "paths": ["src/"] }]})

    for whom in ["bob", "dave", "erin"]:
        mailbox.pop(accept=[to(whom), about(NEWISH_SUBJECT)])
        mailbox.pop(accept=[to(whom), about(UPDATED_SUBJECT)])

    ############################################################################
    #
    # Create one each of the different types of comment chain:
    # { general, commit, file } x { issue, note}
    #
    # Submit, and check that everyone involved received a mail with each comment
    # chain included (and correctly rendered.)
    #
    ############################################################################

    with frontend.signin("alice"):
        result = frontend.operation(
            "createcommentchain",
            data={ "review_id": review_id,
                   "chain_type": "issue",
                   "text": "This is a general issue." })
        general_issue = CommentChain(
            chain_id=result["chain_id"],
            chain_type="general issue",
            author="alice",
            text="This is a general issue.")

        result = frontend.operation(
            "createcommentchain",
            data={ "review_id": review_id,
                   "chain_type": "note",
                   "text": "This is a general note." })
        general_note = CommentChain(
            chain_id=result["chain_id"],
            chain_type="general note",
            author="alice",
            text="This is a general note.")

        # Commit chains quote lines from the commit message (offset/count are
        # line positions in the message).
        result = frontend.operation(
            "createcommentchain",
            data={ "review_id": review_id,
                   "chain_type": "issue",
                   "commit_context": { "commit": child_sha1,
                                       "offset": 0,
                                       "count": 3 },
                   "text": "This is a commit issue." })
        commit_issue = CommentChain(
            chain_id=result["chain_id"],
            chain_type="commit issue",
            author="alice",
            text="This is a commit issue.",
            lines=[(None, SUMMARY),
                   (None, ""),
                   (None, "First line")])

        result = frontend.operation(
            "createcommentchain",
            data={ "review_id": review_id,
                   "chain_type": "note",
                   "commit_context": { "commit": child_sha1,
                                       "offset": 4,
                                       "count": 1 },
                   "text": "This is a commit note." })
        commit_note = CommentChain(
            chain_id=result["chain_id"],
            chain_type="commit note",
            author="alice",
            text="This is a commit note.",
            lines=[(None, "Third line")])

        # File chains quote numbered lines from the file diff.
        result = frontend.operation(
            "createcommentchain",
            data={ "review_id": review_id,
                   "chain_type": "issue",
                   "file_context": { "origin": "new",
                                     "parent": parent_sha1,
                                     "child": child_sha1,
                                     "file": FILENAME,
                                     "offset": 2,
                                     "count": 3 },
                   "text": "This is a file issue." })
        file_issue = CommentChain(
            chain_id=result["chain_id"],
            chain_type="file issue",
            author="alice",
            text="This is a file issue.",
            lines=[(2, "Second line"),
                   (3, "Third line"),
                   (4, "Fourth line")])

        result = frontend.operation(
            "createcommentchain",
            data={ "review_id": review_id,
                   "chain_type": "note",
                   "file_context": { "origin": "new",
                                     "parent": parent_sha1,
                                     "child": child_sha1,
                                     "file": FILENAME,
                                     "offset": 10,
                                     "count": 1 },
                   "text": "This is a file note." })
        file_note = CommentChain(
            chain_id=result["chain_id"],
            chain_type="file note",
            author="alice",
            text="This is a file note.",
            lines=[(10, "Tenth line")])

        # All six chains above are still unsubmitted drafts at this point.
        testing.expect.check(6, result["draft_status"]["writtenComments"])

        submitChanges()

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "alice")
    checkChain(mails, general_issue)
    checkChain(mails, general_note)
    checkChain(mails, commit_issue)
    checkChain(mails, commit_note)
    checkChain(mails, file_issue)
    checkChain(mails, file_note)
    checkNoMoreChains(mails)

    ############################################################################
    #
    # Verify that we have some basic correctness checks on comment creation,
    # such as the commented lines existing.
    #
    ############################################################################

    with frontend.signin("alice"):
        # These don't work since the comment text is empty or contains only
        # white-space characters.
        for text in ("", " ", "\t", "\n", "\r"):
            frontend.operation(
                "createcommentchain",
                data={ "review_id": review_id,
                       "chain_type": "note",
                       "commit_context": { "commit": parent_sha1,
                                           "offset": 0,
                                           "count": 1 },
                       "text": text },
                expect={ "status": "failure",
                         "title": "Empty comment!" })

        # These don't work since we're trying to comment lines that don't
        # exist in the commit message. (We tried offset=4/count=1 above.)
        frontend.operation(
            "createcommentchain",
            data={ "review_id": review_id,
                   "chain_type": "note",
                   "commit_context": { "commit": child_sha1,
                                       "offset": 5,
                                       "count": 1 },
                   "text": "This won't stick." },
            expect={ "status": "failure",
                     "message": "It's not possible to create a comment here." })
        frontend.operation(
            "createcommentchain",
            data={ "review_id": review_id,
                   "chain_type": "note",
                   "commit_context": { "commit": child_sha1,
                                       "offset": 4,
                                       "count": 2 },
                   "text": "This won't stick." },
            expect={ "status": "failure",
                     "message": "It's not possible to create a comment here." })

        # This doesn't work since we're trying to comment the "old" side of the
        # commit that added the commented file.
        frontend.operation(
            "createcommentchain",
            data={ "review_id": review_id,
                   "chain_type": "note",
                   "file_context": { "origin": "old",
                                     "parent": parent_sha1,
                                     "child": child_sha1,
                                     "file": FILENAME,
                                     "offset": 3,
                                     "count": 3 },
                   "text": "This won't stick." },
            expect={ "status": "failure",
                     "message": "It's not possible to create a comment here." })

        # These don't work, since we're trying to comment lines that don't
        # exist in the file. (We tried offset=10/count=1 above.)
        frontend.operation(
            "createcommentchain",
            data={ "review_id": review_id,
                   "chain_type": "note",
                   "file_context": { "origin": "old",
                                     "parent": parent_sha1,
                                     "child": child_sha1,
                                     "file": FILENAME,
                                     "offset": 11,
                                     "count": 1 },
                   "text": "This won't stick." },
            expect={ "status": "failure",
                     "message": "It's not possible to create a comment here." })
        frontend.operation(
            "createcommentchain",
            data={ "review_id": review_id,
                   "chain_type": "note",
                   "file_context": { "origin": "old",
                                     "parent": parent_sha1,
                                     "child": child_sha1,
                                     "file": FILENAME,
                                     "offset": 10,
                                     "count": 2 },
                   "text": "This won't stick." },
            expect={ "status": "failure",
                     "message": "It's not possible to create a comment here." })

    ############################################################################
    #
    # Reply to some of the comment chains, and morph, resolve and reopen them,
    # as multiple users.
    #
    ############################################################################

    # Bob replies to some issues, but before he submits, Dave also replies to
    # one of them, and submits. Checks that Dave's reply appears before Bob's,
    # even though Bob created his first.
    with frontend.signin("bob"):
        createComment(general_issue, "bob")
        createComment(commit_issue, "bob")
        createComment(file_issue, "bob")

    with frontend.signin("dave"):
        createComment(general_issue, "dave")
        submitChanges()

    general_issue.add_reply("dave")

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "dave")
    checkChain(mails, general_issue)
    checkNoMoreChains(mails)

    with frontend.signin("bob"):
        submitChanges()

    general_issue.add_reply("bob")
    commit_issue.add_reply("bob")
    file_issue.add_reply("bob")

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "bob")
    checkChain(mails, general_issue)
    checkChain(mails, commit_issue)
    checkChain(mails, file_issue)
    checkNoMoreChains(mails)

    # Erin replies to an issue too.
    with frontend.signin("erin"):
        createComment(commit_issue, "erin")
        submitChanges()

    commit_issue.add_reply("erin")

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "erin")
    checkChain(mails, commit_issue)
    checkNoMoreChains(mails)

    # Alice replies to the general note and converts it to an issue (in the same
    # batch.)
    with frontend.signin("alice"):
        createComment(general_note, "alice")
        morphCommentChain(general_note, "issue")
        submitChanges()

    general_note.add_reply("alice")
    general_note.type = "general issue"

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "alice")
    checkChain(mails, general_note, "CONVERTED TO ISSUE!")
    checkNoMoreChains(mails)

    # Alice converts the general note back to a note, without replying.
    with frontend.signin("alice"):
        morphCommentChain(general_note, "note")
        submitChanges()

    general_note.type = "general note"

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "alice")
    checkChain(mails, general_note, "CONVERTED TO NOTE!")
    checkNoMoreChains(mails)

    # Bob replies to and converts the general note to an issue, but before he
    # submits, Dave also converts it to an issue and submits. Checks that Bob's
    # converting of the issue has no effect, but his reply remains.
    with frontend.signin("bob"):
        createComment(general_note, "bob")
        morphCommentChain(general_note, "issue")

    with frontend.signin("dave"):
        morphCommentChain(general_note, "issue")
        submitChanges()

    general_note.type = "general issue"

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "dave")
    checkChain(mails, general_note, "CONVERTED TO ISSUE!")
    checkNoMoreChains(mails)

    with frontend.signin("bob"):
        submitChanges()

    general_note.add_reply("bob")

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "bob")
    checkChain(mails, general_note)
    checkNoMoreChains(mails)

    # Alice resolves the general issue.
    with frontend.signin("alice"):
        resolveCommentChain(general_issue)
        submitChanges()

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "alice")
    checkChain(mails, general_issue, "ISSUE RESOLVED!")
    checkNoMoreChains(mails)

    # Erin replies to the now resolved general issue.
    with frontend.signin("erin"):
        createComment(general_issue, "erin")
        submitChanges()

    general_issue.add_reply("erin")

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "erin")
    checkChain(mails, general_issue, "(This issue is resolved.)")
    checkNoMoreChains(mails)

    # Alice replies to and reopens the general issue, but before she submits,
    # Bob also replies to and reopens it, and submits. Checks that Alice's
    # reopening of the issue has no effect, but her reply remains.
    with frontend.signin("alice"):
        createComment(general_issue, "alice")
        reopenResolvedCommentChain(general_issue)

    with frontend.signin("bob"):
        createComment(general_issue, "bob")
        reopenResolvedCommentChain(general_issue)
        submitChanges()

    general_issue.add_reply("bob")

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "bob")
    checkChain(mails, general_issue, "ISSUE REOPENED!")
    checkNoMoreChains(mails)

    with frontend.signin("alice"):
        submitChanges()

    general_issue.add_reply("alice")

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "alice")
    checkChain(mails, general_issue)
    checkNoMoreChains(mails)

    # Alice replies to and resolved the commit issue, as does Dave, but before
    # either submit, Bob swoops in and converts the issue to a note, and
    # submits. Then Alice submits, which checks (again) that her resolving of
    # the issue has no effect, but her reply remains. Then Bob converts the
    # issue back to an issue, and submits. Finally, Dave submits, which checks
    # that his (old) resolving of the issue still remains and takes effect.
    with frontend.signin("alice"):
        createComment(commit_issue, "alice")
        resolveCommentChain(commit_issue)

    with frontend.signin("dave"):
        resolveCommentChain(commit_issue)

    with frontend.signin("bob"):
        morphCommentChain(commit_issue, "note")
        submitChanges()

    commit_issue.type = "commit note"

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "bob")
    checkChain(mails, commit_issue, "CONVERTED TO NOTE!")
    checkNoMoreChains(mails)

    with frontend.signin("alice"):
        submitChanges()

    commit_issue.add_reply("alice")

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "alice")
    checkChain(mails, commit_issue)
    checkNoMoreChains(mails)

    with frontend.signin("bob"):
        morphCommentChain(commit_issue, "issue")
        submitChanges()

    commit_issue.type = "commit issue"

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "bob")
    checkChain(mails, commit_issue, "CONVERTED TO ISSUE!")
    checkNoMoreChains(mails)

    with frontend.signin("dave"):
        submitChanges()

    mails = receiveMails(UPDATED_SUBJECT)

    checkSubmitter(mails, "dave")
    checkChain(mails, commit_issue, "ISSUE RESOLVED!")
    checkNoMoreChains(mails)


================================================
FILE: testing/tests/001-main/003-self/100-reviewing/__init__.py
================================================
# @dependency 001-main/002-createrepository.py


================================================
FILE: testing/tests/001-main/003-self/101-keepalives.py
================================================
# @dependency 001-main/002-createrepository.py

# Run the "keepalives" unit tests of the gitutils module inside the instance.
instance.unittest("gitutils", ["keepalives"])


================================================
FILE: testing/tests/001-main/003-self/200-json/001-users.py
================================================
# Tests the /json "users" resource: listing (with status/sort/offset/count
# parameters), single-user lookup, emails, filters and delegates, plus the
# error responses for invalid requests.
# (`user_json`, `critic_json`, `frontend` and `instance` are provided by the
# test runner.)

# @dependency 001-main/001-empty/003-criticctl/002-adduser-deluser.py
# @dependency 001-main/001-empty/004-mixed/003-oauth.py
# @dependency 001-main/001-empty/004-mixed/004-password.py
# @dependency 001-main/003-self/028-gitemails.py

frontend.json(
    "users",
    expect={ "users": [user_json("admin", "Testing Administrator"),
                       user_json("alice"),
                       user_json("bob"),
                       user_json("dave"),
                       user_json("erin"),
                       user_json("howard"),
                       user_json("extra", status="retired"),
                       user_json("carol"),
                       user_json("felix"),
                       user_json("gina", no_email=True),
                       user_json("iris")] })

frontend.json(
    "users",
    params={ "status": "current" },
    expect={ "users": [user_json("admin", "Testing Administrator"),
                       user_json("alice"),
                       user_json("bob"),
                       user_json("dave"),
                       user_json("erin"),
                       user_json("howard"),
                       user_json("carol"),
                       user_json("felix"),
                       user_json("gina", no_email=True),
                       user_json("iris")] })

frontend.json(
    "users",
    params={ "status": "retired" },
    expect={ "users": [user_json("extra", status="retired")] })

frontend.json(
    "users",
    params={ "sort": "fullname" },
    expect={ "users": [user_json("alice"),
                       user_json("bob"),
                       user_json("carol"),
                       user_json("dave"),
                       user_json("erin"),
                       user_json("extra", status="retired"),
                       user_json("felix"),
                       user_json("gina", no_email=True),
                       user_json("howard"),
                       user_json("iris"),
                       user_json("admin", "Testing Administrator")] })

frontend.json(
    "users",
    params={ "sort": "fullname",
             "count": "4" },
    expect={ "users": [user_json("alice"),
                       user_json("bob"),
                       user_json("carol"),
                       user_json("dave")] })

frontend.json(
    "users",
    params={ "sort": "fullname",
             "offset": "2",
             "count": "4" },
    expect={ "users": [user_json("carol"),
                       user_json("dave"),
                       user_json("erin"),
                       user_json("extra", status="retired")] })

frontend.json(
    "users",
    params={ "sort": "fullname",
             "offset": "6" },
    expect={ "users": [user_json("felix"),
                       user_json("gina", no_email=True),
                       user_json("howard"),
                       user_json("iris"),
                       user_json("admin", "Testing Administrator")] })

frontend.json(
    "users/%d" % instance.userid("alice"),
    expect=user_json("alice"))

frontend.json(
    "users/%d" % instance.userid("alice"),
    params={ "fields": "id" },
    expect={ "id": instance.userid("alice") })

frontend.json(
    "users",
    params={ "name": "alice" },
    expect=user_json("alice"))

frontend.json(
    "users/%d/emails" % instance.userid("alice"),
    expect={ "emails": [{ "address": "alice@example.org",
                          "selected": True,
                          "verified": None }] })

frontend.json(
    "users/%d/emails/1" % instance.userid("alice"),
    expect={ "address": "alice@example.org",
             "selected": True,
             "verified": None })

# Expected shape of Alice's single filter (created by 028-gitemails.py); the
# `int` value means "any integer" to the expectation checker.
filter_json = { "id": int,
                "type": "reviewer",
                "path": "028-gitemails/",
                "repository": 1,
                "delegates": [instance.userid("erin")] }

frontend.json(
    "users/%d/filters" % instance.userid("alice"),
    expect={ "filters": [filter_json] })

frontend.json(
    "users/%d/filters" % instance.userid("alice"),
    params={ "repository": "critic" },
    expect={ "filters": [filter_json] })

result = frontend.json(
    "users/%d/filters" % instance.userid("alice"),
    params={ "repository": "1" },
    expect={ "filters": [filter_json] })

frontend.json(
    "users/%d/filters" % instance.userid("alice"),
    params={ "include": "users,repositories" },
    expect={ "filters": [{ "id": int,
                           "type": "reviewer",
                           "path": "028-gitemails/",
                           "repository": 1,
                           "delegates": [instance.userid("erin")] }],
             "linked": { "repositories": [critic_json],
                         "users": [user_json("erin")] }})

frontend.json(
    "users/%d/filters/%d" % (instance.userid("alice"),
                             result["filters"][0]["id"]),
    expect={ "id": result["filters"][0]["id"],
             "type": "reviewer",
             "path": "028-gitemails/",
             "repository": 1,
             "delegates": [instance.userid("erin")] })

# Test asking for just the list of delegates.
frontend.json(
    "users/%d/filters/%d/delegates" % (instance.userid("alice"),
                                       result["filters"][0]["id"]),
    expect={ "delegates": [instance.userid("erin")] })

# Check that the repository is not linked when we ask for just delegates.
# NOTE(review): this chunk was extracted with newlines collapsed; statement
# boundaries below are reconstructed.  Runtime strings are unchanged.

# Delegates sub-resource with include=users,repositories: the repository list
# is intentionally empty when only delegates were requested.
frontend.json(
    "users/%d/filters/%d/delegates" % (instance.userid("alice"),
                                       result["filters"][0]["id"]),
    params={ "include": "users,repositories" },
    expect={ "delegates": [instance.userid("erin")],
             "linked": { "repositories": [],
                         "users": [user_json("erin")] }})

# Multiple user ids in one request.
frontend.json(
    "users/%d,%d,%d" % (instance.userid("alice"),
                        instance.userid("bob"),
                        instance.userid("dave")),
    expect={ "users": [user_json("alice"),
                       user_json("bob"),
                       user_json("dave")] })

# Same request, restricted to one field via fields[users].
frontend.json(
    "users/%d,%d,%d" % (instance.userid("alice"),
                        instance.userid("bob"),
                        instance.userid("dave")),
    params={ "fields[users]": "name" },
    expect={ "users": [{ "name": "alice" },
                       { "name": "bob" },
                       { "name": "dave" }] })

# Error handling: unknown id, non-numeric id, unknown name, invalid status
# values, invalid sort parameter.
frontend.json(
    "users/4711",
    expect={ "error": {
        "title": "No such resource",
        "message": "Resource not found: Invalid user id: 4711" }},
    expected_http_status=404)

frontend.json(
    "users/alice",
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Invalid numeric id: 'alice'" }},
    expected_http_status=400)

frontend.json(
    "users",
    params={ "name": "nosuchuser" },
    expect={ "error": {
        "title": "No such resource",
        "message": "Resource not found: Invalid user name: 'nosuchuser'" }},
    expected_http_status=404)

frontend.json(
    "users",
    params={ "status": "clown" },
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Invalid user status values: 'clown'" }},
    expected_http_status=400)

frontend.json(
    "users",
    params={ "status": "current,clown,president" },
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Invalid user status values: 'clown', 'president'" }},
    expected_http_status=400)

frontend.json(
    "users",
    params={ "sort": "age" },
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Invalid user sort parameter: 'age'" }},
    expected_http_status=400)

# Signed-in behavior: users/me resolves, and the user can PUT a new fullname
# and then restore the original one.
with frontend.signin("alice"):
    frontend.json(
        "users/me",
        expect=user_json("alice"))
    frontend.json(
        "users/%d" % instance.userid("alice"),
        put={ "fullname": "Alice has a new name" },
        expect=user_json("alice", fullname="Alice has a new name"))
    frontend.json(
        "users/%d" % instance.userid("alice"),
        expect=user_json("alice", fullname="Alice has a new name"))
    frontend.json(
        "users/%d" % instance.userid("alice"),
        put={ "fullname": user_json("alice")["fullname"] },
        expect=user_json("alice"))
    frontend.json(
        "users/%d" % instance.userid("alice"),
        expect=user_json("alice"))

# Anonymous users/me is a 404, not a 400.
frontend.json(
    "users/me",
    expected_http_status=404,
    expect={ "error": { "title": "No such resource",
                        "message": "Resource not found: 'users/me' (not signed in)" } })

================================================
FILE: testing/tests/001-main/003-self/200-json/002-branches.py
================================================
# @dependency 001-main/002-createrepository.py

# Branches collected from the first listing, re-used to test the various
# single-branch access paths below.
stored_branches = []

def check_branches(path, branches, check):
    # Custom checker callback: validates the shape of each branch object and
    # stashes it for the follow-up per-branch requests.
    if not check(path, expected=list, actual=branches):
        return
    for index, branch in enumerate(branches):
        if check("%s[%d]" % (path, index),
                 expected={ "id": int,
                            "name": str,
                            "repository": 1,
                            "head": int },
                 actual=branch):
            stored_branches.append(branch)

frontend.json(
    "repositories/1/branches",
    expect={ "branches": check_branches })

# Each stored branch must be reachable via all four equivalent paths.
for branch in stored_branches:
    frontend.json(
        "branches/%d" % branch["id"],
        expect=branch)
    frontend.json(
        "branches",
        params={ "name": branch["name"],
                 "repository": branch["repository"] },
        expect=branch)
    frontend.json(
        "repositories/%d/branches/%d" % (branch["repository"], branch["id"]),
        expect=branch)
    frontend.json(
        "repositories/%d/branches" % branch["repository"],
        params={ "name": branch["name"] },
        expect=branch)

stored_branch_heads_by_name = {}
stored_commit_sha1s_by_id = {}

def store_branches(path, branches, check):
    # Record each branch's head commit id, keyed by branch name.
    stored_branch_heads_by_name.update({ branch["name"]: branch["head"]
                                         for branch in branches })

def store_commits(path, commits, check):
    # Record each linked commit's SHA-1, keyed by commit id.
    stored_commit_sha1s_by_id.update({ commit["id"]: commit["sha1"]
                                       for commit in commits })

frontend.json(
    "repositories/1/branches",
    params={ "include": "commits" },
    expect={ "branches": store_branches,
             "linked": { "commits": store_commits }})

# Cross-check the linked head commits against `git ls-remote` on the actual
# repository.
for name, head_id in stored_branch_heads_by_name.items():
    if head_id not in stored_commit_sha1s_by_id:
        logger.error("linked head of branch %s (commit id=%d) not included"
                     % (name, head_id))
        continue
    expected_sha1 = repository.run(
        ["ls-remote", instance.repository_url("alice"),
         "refs/heads/" + name]).split()[0]
    actual_sha1 = stored_commit_sha1s_by_id[head_id]
    testing.expect.check(expected_sha1, actual_sha1)

def check_commits(path, commits, check):
    # Checker callback: every commit must match the generic commit shape.
    if not check(path, expected=list, actual=commits):
        return
    for index, commit in enumerate(commits):
        check("%s[%d]" % (path, index),
              expected=generic_commit_json,
              actual=commit)

frontend.json(
    "branches/%d/commits" % stored_branches[0]["id"],
    expect={ "commits": check_commits })

# Pagination/sorting: offset+count of a topological sort must slice the same
# way as the first page did.
first10 = frontend.json(
    "branches/%d/commits" % stored_branches[0]["id"],
    params={ "sort": "topological",
             "fields": "id",
             "count": 10 },
    expect={ "commits": list })

frontend.json(
    "branches/%d/commits" % stored_branches[0]["id"],
    params={ "sort": "date",
             "fields": "id",
             "count": 10 },
    expect={ "commits": list })

frontend.json(
    "branches/%d/commits" % stored_branches[0]["id"],
    params={ "sort": "topological",
             "fields": "id",
             "offset": 5,
             "count": 5 },
    expect={ "commits": first10["commits"][5:] })

# Error handling.
frontend.json(
    "branches/4711",
    expect={ "error": {
        "title": "No such resource",
        "message": "Resource not found: Invalid branch id: 4711" }},
    expected_http_status=404)

frontend.json(
    "branches/master",
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Invalid numeric id: 'master'" }},
    expected_http_status=400)

frontend.json(
    "branches",
    params={ "name": "nosuchbranch",
             "repository": "critic" },
    expect={ "error": {
        "title": "No such resource",
        "message": "Resource not found: Invalid branch name: 'nosuchbranch'" }},
    expected_http_status=404)

# NOTE(review): the next message string contained a collapsed line break in
# the extracted dump; reconstructed with a single space.
frontend.json(
    "branches",
    params={ "name": "nosuchbranch" },
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Named branch access must have repository specified." }},
    expected_http_status=400)

frontend.json(
    "branches",
    params={ "name": "master",
             "repository": "4711" },
    expect={ "error": {
        "title": "No such resource",
        "message": "Resource not found: Invalid repository id: 4711" }},
    expected_http_status=404)

frontend.json(
    "branches",
    params={ "name": "master",
             "repository": "nosuchrepository" },
    expect={ "error": {
        "title": "No such resource",
        "message": "Resource not found: Invalid repository name: 'nosuchrepository'" }},
    expected_http_status=404)

================================================
FILE: testing/tests/001-main/003-self/200-json/003-repositories.py
================================================
# @dependency 001-main/002-createrepository.py

# Listing and the equivalent by-id / by-name access paths for both test
# repositories.
frontend.json(
    "repositories",
    expect={ "repositories": [critic_json, other_json] })

frontend.json(
    "repositories/1",
    expect=critic_json)

frontend.json(
    "repositories",
    params={ "name": "critic" },
    expect=critic_json)

frontend.json(
    "repositories/2",
    expect=other_json)

frontend.json(
    "repositories",
    params={ "name": "other" },
    expect=other_json)

# Error handling.
frontend.json(
    "repositories/4711",
    expect={ "error": {
        "title": "No such resource",
        "message": "Resource not found: Invalid repository id: 4711" }},
    expected_http_status=404)

frontend.json(
    "repositories/critic",
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Invalid numeric id: 'critic'" }},
    expected_http_status=400)

frontend.json(
    "repositories",
    params={ "name": "nosuchrepository" },
    expect={ "error": {
        "title": "No such resource",
        "message": "Resource not found: Invalid repository name: 'nosuchrepository'" }},
    expected_http_status=404)

frontend.json(
    "repositories",
    params={ "filter": "interesting" },
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Invalid repository filter parameter: 'interesting'" }},
    expected_http_status=400)

# Test with an access control profile that restricts access to other.git.
# NOTE(review): newlines were collapsed in the extracted dump; statement and
# indentation structure below is reconstructed.  Runtime strings unchanged.

# Access-control profile denying other.git only.
no_other = { "repositories": { "rule": "allow",
                               "exceptions": [{ "repository": "other" }] } }

with testing.utils.access_token("alice", no_other) as access_token:
    with frontend.signin(access_token=access_token):
        # Check that we can still access critic.git.
        frontend.json(
            "repositories",
            params={ "name": "critic" },
            expect=critic_json)
        # Check that we can't access other.git.
        frontend.json(
            "repositories",
            params={ "name": "other" },
            expected_http_status=403)
        # Check that we can still list all repositories, but that other.git is
        # not included.
        frontend.json(
            "repositories",
            expect={ "repositories": [critic_json] })

================================================
FILE: testing/tests/001-main/003-self/200-json/004-review.py
================================================
# @dependency 001-main/003-self/020-reviewrebase.py
# @dependency 001-main/003-self/100-reviewing/001-comments.basic.py

# Fetch the id of a review which contains some comments.
result = frontend.operation(
    "searchreview",
    data={ "query": "branch:r/100-reviewing/001-comment.basic" })
testing.expect.check(1, len(result["reviews"]))
review_id = result["reviews"][0]["id"]

# Expected shape of the review created by the dependency tests above.
review_json = { "id": review_id,
                "state": "open",
                "summary": "Added 100-reviewing/001-comment.basic.txt",
                "description": None,
                "repository": 1,
                "branch": int,
                "owners": [instance.userid("alice")],
                "assigned_reviewers": [instance.userid("bob")],
                "active_reviewers": [],
                "progress": 0,
                "progress_per_commit": list,
                "watchers": [instance.userid("dave"), instance.userid("erin")],
                "partitions": [{ "commits": [int], "rebase": None }],
                "issues": [int, int, int, int],
                "notes": [int, int],
                "pending_rebase": None }

frontend.json(
    "reviews/%d" % review_id,
    expect=review_json)

# Same resource with linked users and commits included.
frontend.json(
    "reviews/%d" % review_id,
    params={ "include": "users,commits" },
    expect={ "id": review_id,
             "state": "open",
             "summary": "Added 100-reviewing/001-comment.basic.txt",
             "description": None,
             "repository": 1,
             "branch": int,
             "owners": [instance.userid("alice")],
             "assigned_reviewers": [instance.userid("bob")],
             "active_reviewers": [],
             "progress": 0,
             "progress_per_commit": list,
             "watchers": [instance.userid("dave"), instance.userid("erin")],
             "partitions": [{ "commits": [int], "rebase": None }],
             "issues": [int, int, int, int],
             "notes": [int, int],
             "pending_rebase": None,
             "linked": { "users": [user_json("alice"),
                                   user_json("bob"),
                                   user_json("dave"),
                                   user_json("erin")],
                         "commits": [generic_commit_json] }})

frontend.json(
    "reviews/%d/commits" % review_id,
    expect={ "commits": [generic_commit_json] })

def check_description(path, description, check):
    # Description is nullable; only type-check it when present.
    if description is not None:
        check(path, expected=str, actual=description)

def check_reviews(expected_state=str):
    # Returns a checker that validates the generic shape of each review in a
    # listing.  NOTE(review): `expected_state` is accepted but not used in the
    # per-review expectation below (state is checked against plain `str`) —
    # looks intentional-but-loose in the original; left as-is.
    def checker(path, reviews, check):
        if not check(path, expected=list, actual=reviews):
            return
        for index, review in enumerate(reviews):
            check("%s[%d]" % (path, index),
                  expected={ "id": int,
                             "state": str,
                             "summary": str,
                             "description": check_description,
                             "repository": 1,
                             "branch": int,
                             "owners": list,
                             "assigned_reviewers": list,
                             "active_reviewers": list,
                             "progress": 0,
                             "progress_per_commit": list,
                             "watchers": list,
                             "partitions": list,
                             "issues": list,
                             "notes": list,
                             "pending_rebase": None },
                  actual=review)
    return checker

# Listings filtered by repository and by state; the known review must appear
# only where its state matches.
all_reviews = frontend.json(
    "reviews",
    expect={ "reviews": check_reviews() })

if not any(review["id"] == review_id for review in all_reviews["reviews"]):
    logger.error("/api/v1/reviews did not contain r/%d" % review_id)

frontend.json(
    "reviews",
    params={ "repository": "critic" },
    expect={ "reviews": check_reviews() })

open_reviews = frontend.json(
    "reviews",
    params={ "state": "open" },
    expect={ "reviews": check_reviews("open") })

if not any(review["id"] == review_id for review in open_reviews["reviews"]):
    logger.error("/api/v1/reviews?state=open did not contain r/%d" % review_id)

closed_reviews = frontend.json(
    "reviews",
    params={ "state": "closed" },
    expect={ "reviews": check_reviews("closed") })

if any(review["id"] == review_id for review in closed_reviews["reviews"]):
    logger.error("/api/v1/reviews?state=closed contained r/%d" % review_id)

dropped_reviews = frontend.json(
    "reviews",
    params={ "state": "dropped" },
    expect={ "reviews": check_reviews("dropped") })

if any(review["id"] == review_id for review in dropped_reviews["reviews"]):
    logger.error("/api/v1/reviews?state=dropped contained r/%d" % review_id)

# Error handling.
frontend.json(
    "reviews/4711",
    expect={ "error": {
        "title": "No such resource",
        "message": "Resource not found: Invalid review id: 4711" }},
    expected_http_status=404)

frontend.json(
    "reviews/mypatch",
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Invalid numeric id: 'mypatch'" }},
    expected_http_status=400)

frontend.json(
    "reviews",
    params={ "repository": "nosuchrepository" },
    expect={ "error": {
        "title": "No such resource",
        "message": "Resource not found: Invalid repository name: 'nosuchrepository'" }},
    expected_http_status=404)

frontend.json(
    "reviews",
    params={ "state": "rejected" },
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Invalid review state values: 'rejected'" }},
    expected_http_status=400)

# Access-control profile denying all repository access.
no_repository_access = { "repositories": { "rule": "deny",
                                           "exceptions": [] } }

with testing.utils.access_token("alice", no_repository_access) as access_token:
    with frontend.signin(access_token=access_token):
        # Check that this review is inaccessible now.
        frontend.json(
            "reviews/%d" % review_id,
            expected_http_status=403)
        # Check that we can still list "all" reviews successfully.
        # NOTE(review): this statement is the body of the
        # `with frontend.signin(access_token=access_token):` suite opened
        # immediately above (indentation reconstructed from the collapsed
        # dump).  With all repository access denied, the listing is empty.
        frontend.json(
            "reviews",
            expect={ "reviews": [] })

================================================
FILE: testing/tests/001-main/003-self/200-json/005-commits.py
================================================
# @dependency 001-main/002-createrepository.py

SHA1 = "78d7849db854f3544d7291cce96a0a4fa6d6843d"

# NOTE(review): the triple-quoted commit message below had its internal line
# breaks collapsed in the extracted dump; reconstructed as a conventional
# commit message (summary, blank line, body) — TODO confirm against the
# original file.
commit_json = { "id": int,
                "sha1": SHA1,
                "summary": "High-level testing framework",
                "message": """\
High-level testing framework

Framework for automated installation and "black-box" testing of Critic
running in a VirtualBox instance.
""",
                "parents": [int],
                "author": { "name": "Jens Lindstrom",
                            "email": "jl@opera.com",
                            "timestamp": float },
                "committer": { "name": "Jens Lindstrom",
                               "email": "jl@opera.com",
                               "timestamp": float },
                }

# The same commit via the four equivalent access paths.
result = frontend.json(
    "commits",
    params={ "sha1": SHA1,
             "repository": "critic" },
    expect=commit_json)

frontend.json(
    "commits/%d" % result["id"],
    params={ "repository": "critic" },
    expect=commit_json)

result = frontend.json(
    "repositories/1/commits",
    params={ "sha1": SHA1 },
    expect=commit_json)

frontend.json(
    "repositories/1/commits/%d" % result["id"],
    expect=commit_json)

# Error handling.
frontend.json(
    "commits/47114711",
    params={ "repository": "critic" },
    expect={ "error": {
        "title": "No such resource",
        "message": "Resource not found: Invalid commit id: 47114711" }},
    expected_http_status=404)

frontend.json(
    "commits/47114711",
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Commit reference must have repository specified." }},
    expected_http_status=400)

frontend.json(
    "commits",
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Missing required SHA-1 parameter." }},
    expected_http_status=400)

frontend.json(
    "commits",
    params={ "sha1": SHA1 },
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Commit reference must have repository specified." }},
    expected_http_status=400)

frontend.json(
    "commits",
    params={ "sha1": "00",
             "repository": "critic" },
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Invalid SHA-1 parameter: '00'" }},
    expected_http_status=400)

frontend.json(
    "commits",
    params={ "sha1": "invalid SHA-1",
             "repository": "critic" },
    expect={ "error": {
        "title": "Invalid API request",
        "message": "Invalid SHA-1 parameter: 'invalid SHA-1'" }},
    expected_http_status=400)

frontend.json(
    "commits",
    params={ "sha1": "47114711",
             "repository": "critic" },
    expect={ "error": {
        "title": "No such resource",
        "message": "Resource not found: Invalid commit SHA-1: '47114711'" }},
    expected_http_status=404)

================================================
FILE: testing/tests/001-main/003-self/200-json/006-changesets.py
================================================
# @dependency 001-main/002-createrepository.py

FROM_SHA1 = "573c5ff15ad95cfbc3e2f2efb0a638a4a78c17a7"
FROM_SINGLE_SHA1 = "aabc2b10c930a9e72fe9587a6e8634087bb3efe1"
TO_SHA1 = "6dc8e9c2d952028286d4b83475947bd0b1410860"
ROOT_SHA1 = "ee37c47f6f6a14afa6912c1cc58a9f49d2a29acd"

# Changeset for single commit.  The first request triggers asynchronous
# changeset creation (202), hence the service synchronization before re-asking.
frontend.json(
    "changesets",
    params={ "repository": 1, "commit": TO_SHA1},
    expected_http_status=202)

instance.synchronize_service("changeset")  # wait for changeset creation to finish

single_changeset = frontend.json(
    "changesets",
    params={ "repository": 1, "commit": TO_SHA1},
    expect={ "files": [int, int, int],
             "type": "direct",
             "to_commit": int,
             "id": int,
             "from_commit": int,
             "contributing_commits": [int],
             "review_state": None })

# A from/to request where "from" is the commit's parent must yield the same
# changeset as the single-commit form.
equiv_changeset = frontend.json(
    "changesets",
    params={ "repository": 1, "from": FROM_SINGLE_SHA1, "to": TO_SHA1},
    expect={ "files": [int, int, int],
             "type": "direct",
             "to_commit": int,
             "id": int,
             "from_commit": int,
             "contributing_commits": [int],
             "review_state": None })

assert (single_changeset == equiv_changeset),\
    "single changeset should equal equivalent changeset"

# Changeset between two commits
frontend.json(
    "changesets",
    params={ "repository": "critic", "from": FROM_SHA1, "to": TO_SHA1 },
    expected_http_status=202)

instance.synchronize_service("changeset")  # wait for changeset creation to finish

frontend.json(
    "changesets",
    params={ "repository": "critic", "from": FROM_SHA1, "to": TO_SHA1 },
    expect={ "files": [int, int, int, int, int, int, int, int],
             "type": "custom",
             "to_commit": int,
             "id": int,
             "from_commit": int,
             "contributing_commits": [int, int, int],
             "review_state": None })

# Changeset from id
frontend.json(
    "changesets/" + str(single_changeset["id"]),
    params={ "repository": 1 },
    expect={ "files": [int, int, int],
             "type": "direct",
             "to_commit": int,
             "id": single_changeset["id"],
             "from_commit": int,
             "contributing_commits": [int],
             "review_state": None })

# Changeset from partial SHA1
frontend.json(
    "changesets",
    params={ "repository": 1, "commit": TO_SHA1[:8]},
    expect={ "files": [int, int, int],
             "type": "direct",
             "to_commit": int,
             "id": int,
             "from_commit": int,
             "contributing_commits": [int],
             "review_state": None })

# Missing changeset id and commit refs
frontend.json(
    "changesets",
    params={ "repository": 1 },
    expect={ "error": {
        "message": "Missing required parameters from and to, or commit",
        "title": "Invalid API request" } },
    expected_http_status=400)

# Missing repository.  NOTE(review): this message string contained a
# collapsed line break in the extracted dump; reconstructed with a single
# space.
frontend.json(
    "changesets",
    params={ "commit": TO_SHA1 },
    expect={ "error": {
        "message": "repository needs to be specified, ex. &repository=<id>",
        "title": "Invalid API request" } },
    expected_http_status=400)

# Missing to
frontend.json(
    "changesets",
    params={ "repository": 1, "from": FROM_SHA1 },
    expect={ "error": {
        "message": "Missing required parameters from and to, only one supplied",
        "title": "Invalid API request" } },
    expected_http_status=400)

# Invalid SHA1
frontend.json(
    "changesets",
    params={ "repository": 1, "commit": "00g0"},
    expect={ "error": {
        "message": "Invalid parameter: commit=00g0: Invalid ref: '00g0^{commit}'",
        "title": "No such resource" } },
    expected_http_status=404)

# Changeset between a commit and itself
frontend.json(
    "changesets",
    params={ "repository": 1, "from": FROM_SHA1, "to": FROM_SHA1},
    expect={ "error": {
        "message": "from and to can't be the same commit",
        "title": "Invalid API request" } },
    expected_http_status=400)

================================================
FILE: testing/tests/001-main/003-self/200-json/006-comments.py
================================================
# @dependency 001-main/003-self/004-createreview.py
# @dependency 001-main/003-self/100-reviewing/001-comments.basic.py

import os

# Fetch the id of a review which contains some comments.
# NOTE(review): statement boundaries reconstructed from the collapsed dump;
# runtime strings unchanged.
result = frontend.operation(
    "searchreview",
    data={ "query": "branch:r/100-reviewing/001-comment.basic" })
testing.expect.check(1, len(result["reviews"]))
review_id = result["reviews"][0]["id"]

# The dependency tests left this review with four issues and two notes.
result = frontend.json(
    "reviews/%d" % review_id,
    params={ "fields": "issues,notes" },
    expect={ "issues": [int, int, int, int],
             "notes": [int, int] })

# General issue: no location.
frontend.json(
    "comments/%d" % result["issues"][0],
    expect={ "id": result["issues"][0],
             "type": "issue",
             "is_draft": False,
             "state": "open",
             "review": review_id,
             "author": instance.userid("alice"),
             "location": None,
             "resolved_by": None,
             "addressed_by": None,
             "timestamp": float,
             "text": "This is a general issue.",
             "replies": [int, int, int, int, int],
             "draft_changes": None })

frontend.json(
    "comments/%d" % result["issues"][1],
    expect={ "id": result["issues"][1],
             "type": "issue",
             "is_draft": False,
             "state": "open",
             "review": review_id,
             "author": instance.userid("alice"),
             "location": None,
             "resolved_by": None,
             "addressed_by": None,
             "timestamp": float,
             "text": "This is a general note.",
             "replies": [int, int],
             "draft_changes": None })

# Commit-message issue, already resolved by Dave.
frontend.json(
    "comments/%d" % result["issues"][2],
    expect={ "id": result["issues"][2],
             "type": "issue",
             "is_draft": False,
             "state": "resolved",
             "review": review_id,
             "author": instance.userid("alice"),
             "location": { "type": "commit-message",
                           "first_line": int,
                           "last_line": int,
                           "commit": int },
             "resolved_by": instance.userid("dave"),
             "addressed_by": None,
             "timestamp": float,
             "text": "This is a commit issue.",
             "replies": [int, int, int],
             "draft_changes": None })

# File-version issue anchored to the new side of a changeset.
frontend.json(
    "comments/%d" % result["issues"][3],
    expect={ "id": result["issues"][3],
             "type": "issue",
             "is_draft": False,
             "state": "open",
             "review": review_id,
             "author": instance.userid("alice"),
             "location": { "type": "file-version",
                           "first_line": int,
                           "last_line": int,
                           "file": int,
                           "changeset": int,
                           "side": "new",
                           "commit": None,
                           "is_translated": False },
             "resolved_by": None,
             "addressed_by": None,
             "timestamp": float,
             "text": "This is a file issue.",
             "replies": [int],
             "draft_changes": None })

# Notes carry no state.
frontend.json(
    "comments/%d" % result["notes"][0],
    expect={ "id": result["notes"][0],
             "type": "note",
             "is_draft": False,
             "state": None,
             "review": review_id,
             "author": instance.userid("alice"),
             "location": { "type": "commit-message",
                           "first_line": int,
                           "last_line": int,
                           "commit": int },
             "resolved_by": None,
             "addressed_by": None,
             "timestamp": float,
             "text": "This is a commit note.",
             "replies": [],
             "draft_changes": None })

frontend.json(
    "comments/%d" % result["notes"][1],
    expect={ "id": result["notes"][1],
             "type": "note",
             "is_draft": False,
             "state": None,
             "review": review_id,
             "author": instance.userid("alice"),
             "location": { "type": "file-version",
                           "first_line": int,
                           "last_line": int,
                           "file": int,
                           "changeset": int,
                           "side": "new",
                           "commit": None,
                           "is_translated": False },
             "resolved_by": None,
             "addressed_by": None,
             "timestamp": float,
             "text": "This is a file note.",
             "replies": [],
             "draft_changes": None })

# Review-wide comment listing preserves a specific (chronological) order that
# interleaves issues and notes.
frontend.json(
    "reviews/%d/comments" % review_id,
    params={ "fields": "id" },
    expect={ "comments": [{ "id": result["issues"][0] },
                          { "id": result["issues"][1] },
                          { "id": result["issues"][2] },
                          { "id": result["notes"][0] },
                          { "id": result["issues"][3] },
                          { "id": result["notes"][1] }] })

# Linked users and replies for the first issue.
frontend.json(
    "comments/%d" % result["issues"][0],
    params={ "include": "users,replies" },
    expect={ "id": result["issues"][0],
             "type": "issue",
             "is_draft": False,
             "state": "open",
             "review": review_id,
             "author": instance.userid("alice"),
             "location": None,
             "resolved_by": None,
             "addressed_by": None,
             "timestamp": float,
             "text": "This is a general issue.",
             "replies": [int, int, int, int, int],
             "draft_changes": None,
             "linked": { "users": [user_json("alice"),
                                   user_json("bob"),
                                   user_json("dave"),
                                   user_json("erin")],
                         "replies": [reply_json("bob"),
                                     reply_json("dave"),
                                     reply_json("erin"),
                                     reply_json("alice"),
                                     reply_json("bob")] }})

with frontend.signin("alice"):
    # Create comment with review specified via query parameter.
created_issue_id_1 = frontend.json( "comments", params={ "review": review_id }, post={ "type": "issue", "text": "JSON general issue #1" }, expect={ "id": int, "type": "issue", "is_draft": True, "state": "open", "review": review_id, "author": instance.userid("alice"), "location": None, "resolved_by": None, "addressed_by": None, "timestamp": float, "text": "JSON general issue #1", "replies": [], "draft_changes": draft_changes_json("alice", is_draft=True), })["id"] # Create comment with review specified via POST data. Also specify author # explicitly. created_note_id_1 = frontend.json( "comments", post={ "type": "note", "review": review_id, "author": instance.userid("alice"), "text": "JSON general note #1" }, expect={ "id": int, "type": "note", "is_draft": True, "state": None, "review": review_id, "author": instance.userid("alice"), "location": None, "resolved_by": None, "addressed_by": None, "timestamp": float, "text": "JSON general note #1", "replies": [], "draft_changes": draft_changes_json("alice", is_draft=True), })["id"] # Create issue with review specified in the path. created_issue_id_2 = frontend.json( "reviews/%d/issues" % review_id, post={ "text": "JSON general issue #2" }, expect={ "id": int, "type": "issue", "is_draft": True, "state": "open", "review": review_id, "author": instance.userid("alice"), "location": None, "resolved_by": None, "addressed_by": None, "timestamp": float, "text": "JSON general issue #2", "replies": [], "draft_changes": draft_changes_json("alice", is_draft=True), })["id"] # Create note with review specified in the path. 
created_note_id_2 = frontend.json( "reviews/%d/notes" % review_id, post={ "text": "JSON general note #2" }, expect={ "id": int, "type": "note", "is_draft": True, "state": None, "review": review_id, "author": instance.userid("alice"), "location": None, "resolved_by": None, "addressed_by": None, "timestamp": float, "text": "JSON general note #2", "replies": [], "draft_changes": draft_changes_json("alice", is_draft=True), })["id"] review_data = frontend.json( "reviews/%d" % review_id, params={ "fields": "issues,notes" }, expect={ "issues": [int, int, int, int, int, int], "notes": [int, int, int, int] }) testing.expect.true( created_issue_id_1 in review_data["issues"], "created issue #1 in reviews/N/issues") testing.expect.true( created_note_id_1 in review_data["notes"], "created note #1 in reviews/N/notes") testing.expect.true( created_issue_id_2 in review_data["issues"], "created issue #2 in reviews/N/issues") testing.expect.true( created_note_id_2 in review_data["notes"], "created note #2 in reviews/N/notes") with frontend.signin("bob"): # Check that Bob doesn't see Alice's draft comments. review_data = frontend.json( "reviews/%d" % review_id, params={ "fields": "issues,notes" }, expect={ "issues": [int, int, int, int], "notes": [int, int] }) published_issue_ids = review_data["issues"] published_note_ids = review_data["notes"] testing.expect.false( created_issue_id_1 in review_data["issues"], "created issue #1 in reviews/N/issues") testing.expect.false( created_note_id_1 in review_data["notes"], "created note #1 in reviews/N/notes") testing.expect.false( created_issue_id_2 in review_data["issues"], "created issue #2 in reviews/N/issues") testing.expect.false( created_note_id_2 in review_data["notes"], "created note #2 in reviews/N/notes") # Find another review. 
result = frontend.operation( "searchreview", data={ "query": "branch:r/004-createreview" }) testing.expect.check(1, len(result["reviews"])) other_review_id = result["reviews"][0]["id"] with frontend.signin("alice"): frontend.json( "comments/%d" % created_issue_id_1, put={ "text": "JSON general issue #1 (edited)" }, expect={ "id": int, "type": "issue", "is_draft": True, "state": "open", "review": review_id, "author": instance.userid("alice"), "location": None, "resolved_by": None, "addressed_by": None, "timestamp": float, "text": "JSON general issue #1 (edited)", "replies": [], "draft_changes": draft_changes_json("alice", is_draft=True), }) frontend.json( "comments/%d" % created_note_id_1, delete=True, expected_http_status=204) review_data = frontend.json( "reviews/%d" % review_id, params={ "fields": "issues,notes" }, expect={ "issues": [int, int, int, int, int, int], "notes": [int, int, int] }) testing.expect.true( created_issue_id_1 in review_data["issues"], "created issue #1 in reviews/N/issues") testing.expect.false( created_note_id_1 in review_data["notes"], "created note #1 in reviews/N/notes") testing.expect.true( created_issue_id_2 in review_data["issues"], "created issue #2 in reviews/N/issues") testing.expect.true( created_note_id_2 in review_data["notes"], "created note #2 in reviews/N/notes") frontend.json( "comments/%d,%d,%d" % (created_issue_id_1, created_issue_id_2, created_note_id_2), put={ "text": "Common text (edited)" }, expect={ "comments": [ { "id": created_issue_id_1, "text": "Common text (edited)", "*": "*" }, { "id": created_issue_id_2, "text": "Common text (edited)", "*": "*" }, { "id": created_note_id_2, "text": "Common text (edited)", "*": "*" } ] }) # Error handling. # Create comment without specifying a review. 
frontend.json( "comments", post={ "type": "issue", "text": "Invalid issue" }, expected_http_status=400, expect={ "error": { "title": "Invalid API request", "message": "No review specified" } }) # Create comment without specifying conflicting reviews. frontend.json( "comments", params={ "review": review_id }, post={ "type": "issue", "review": other_review_id, "text": "Invalid issue" }, expected_http_status=400, expect={ "error": { "title": "Invalid API request", "message": "Conflicting reviews specified" } }) # Create comment as another user. frontend.json( "comments", post={ "type": "issue", "review": review_id, "author": instance.userid("bob"), "text": "Invalid issue" }, expected_http_status=403, expect={ "error": { "title": "Permission denied", "message": "Must be an administrator" } }) # Try to edit text of published comment. frontend.json( "comments/%d" % published_note_ids[0], put={ "text": "Invalid edit" }, expected_http_status=400, expect={ "error": { "title": "Invalid API request", "message": "Published comments cannot be edited" } }) # Try to delete a published comment. frontend.json( "comments/%d" % published_note_ids[0], delete=True, expected_http_status=400, expect={ "error": { "title": "Invalid API request", "message": "Published comments cannot be deleted" } }) frontend.operation( "abortchanges", data={ "review_id": review_id, "what": { "approval": False, "comments": True, "metacomments": False } }) # # Create review which modifies a file a couple of times. 
# NOTE(review): indentation and statement boundaries reconstructed from the
# collapsed dump.  Runtime strings unchanged.
with repository.workcopy() as work:
    review = Review(work, "alice", "200-json/006-comments")
    review.addFile(the_file="200-json/006-comments.txt")
    review.commit("reference commit", reference=True,
                  the_file=["1st line", "2nd line", "3rd line", "4th line",
                            "5th line", "6th line", "7th line", "8th line"])
    review.commit("first reviewed commit",
                  the_file=["1st line", "2nd line (edited)",
                            "3rd line (edited)", "4th line", "5th line",
                            "6th line", "7th line", "8th line"])
    # NOTE(review): the original statement below ends with a stray trailing
    # comma (making the expression a 1-tuple); harmless, preserved as-is.
    review.commit("second reviewed commit",
                  the_file=["1st line", "2nd line (edited)",
                            "3rd line (edited)", "4th line",
                            " 1st added line", " 2nd added line",
                            " 3rd added line", "5th line",
                            "6th line (edited)", "7th line (edited)",
                            "8th line"]),
    review.commit("third reviewed commit",
                  the_file=["1st line", "2nd line (edited)",
                            "3rd line (edited)", "4th line",
                            " 1st added line", " 2nd added line",
                            " 3rd added line", "5th line (edited)",
                            "6th line (edited) (edited)",
                            "7th line (edited)", "8th line"])
    review.submit()

# NOTE(review): whether these two assignments were inside or outside the
# `with` suite is not recoverable from the dump; either placement is
# semantically equivalent here.
review_id = review.id
sha1s = review.sha1s

with frontend.signin("alice"):
    # Issues anchored at various changesets/commits of the file; the listing
    # requests below check how each location is translated to other commits.
    issue_1 = frontend.json(
        "reviews/%d/issues" % review_id,
        post={ "text": "Issue on 1st line",
               "location": { "type": "file-version",
                             "changeset": fetch_changeset(
                                 { "from": sha1s[0], "to": sha1s[1] })["id"],
                             "file": "200-json/006-comments.txt",
                             "first_line": 1,
                             "last_line": 1,
                             "side": "new",
                             },
               })["id"]

    issue_2 = frontend.json(
        "reviews/%d/issues" % review_id,
        post={ "text": "Issue on 1st-3rd line",
               "location": { "type": "file-version",
                             "changeset": fetch_changeset(
                                 { "from": sha1s[0], "to": sha1s[2] })["id"],
                             "file": "200-json/006-comments.txt",
                             "first_line": 1,
                             "last_line": 3,
                             "side": "new",
                             },
               })["id"]

    issue_3 = frontend.json(
        "reviews/%d/issues" % review_id,
        post={ "text": "Issue on 8th line",
               "location": { "type": "file-version",
                             "changeset": fetch_changeset(
                                 { "from": sha1s[1], "to": sha1s[3] })["id"],
                             "file": "200-json/006-comments.txt",
                             "first_line": 11,
                             "last_line": 11,
                             "side": "new",
                             },
               })["id"]

    # This one is anchored to a commit instead of a changeset (no "side").
    issue_4 = frontend.json(
        "reviews/%d/issues" % review_id,
        post={ "text": "Issue on 6th-7th line",
               "location": { "type": "file-version",
                             "commit": sha1s[2],
                             "file": "200-json/006-comments.txt",
                             "first_line": 9,
                             "last_line": 10,
                             },
               })["id"]

    # Locations translated to the first reviewed commit: issue_3 maps back to
    # line 8 (the added lines don't exist yet); issue_4 is not visible.
    frontend.json(
        "reviews/%d/comments" % review_id,
        params={ "commit": sha1s[1],
                 "fields": "id,location.first_line,location.last_line",
                 },
        expect={ "comments": [{ "id": issue_1,
                                "location": { "first_line": 1,
                                              "last_line": 1, } },
                              { "id": issue_2,
                                "location": { "first_line": 1,
                                              "last_line": 3, } },
                              { "id": issue_3,
                                "location": { "first_line": 8,
                                              "last_line": 8, } }],
                 })

    frontend.json(
        "reviews/%d/comments" % review_id,
        params={ "commit": sha1s[2],
                 "fields": "id,location.first_line,location.last_line",
                 },
        expect={ "comments": [{ "id": issue_1,
                                "location": { "first_line": 1,
                                              "last_line": 1, } },
                              { "id": issue_2,
                                "location": { "first_line": 1,
                                              "last_line": 3, } },
                              { "id": issue_3,
                                "location": { "first_line": 11,
                                              "last_line": 11, } },
                              { "id": issue_4,
                                "location": { "first_line": 9,
                                              "last_line": 10, } }],
                 })

    frontend.json(
        "reviews/%d/comments" % review_id,
        params={ "commit": sha1s[3],
                 "fields": "id,location.first_line,location.last_line",
                 },
        expect={ "comments": [{ "id": issue_1,
                                "location": { "first_line": 1,
                                              "last_line": 1, } },
                              { "id": issue_2,
                                "location": { "first_line": 1,
                                              "last_line": 3, } },
                              { "id": issue_3,
                                "location": { "first_line": 11,
                                              "last_line": 11, } }],
                 })

    # Same translations, but filtered by changeset instead of by commit.
    frontend.json(
        "reviews/%d/comments" % review_id,
        params={ "changeset": fetch_changeset(
                     { "from": sha1s[0], "to": sha1s[2] })["id"],
                 "fields": "id,location.first_line,location.last_line",
                 },
        expect={ "comments": [{ "id": issue_1,
                                "location": { "first_line": 1,
                                              "last_line": 1, } },
                              { "id": issue_2,
                                "location": { "first_line": 1,
                                              "last_line": 3, } },
                              { "id": issue_3,
                                "location": { "first_line": 11,
                                              "last_line": 11, } },
                              { "id": issue_4,
                                "location": { "first_line": 9,
                                              "last_line": 10, } }],
                 })

    frontend.json(
        "reviews/%d/comments" % review_id,
        params={ "changeset": fetch_changeset(
                     { "from": sha1s[1], "to": sha1s[3] })["id"],
                 "fields": "id,location.first_line,location.last_line",
                 },
        expect={ "comments": [{ "id": issue_1,
                                "location": { "first_line": 1,
                                              "last_line": 1, } },
                              { "id": issue_2,
                                "location": { "first_line": 1,
                                              "last_line": 3, } },
                              { "id": issue_3,
                                "location": { "first_line": 11,
                                              "last_line": 11, } }],
                 })

    frontend.json(
        "reviews/%d/comments" % review_id,
        params={ "changeset": fetch_changeset(
                     { "from": sha1s[0], "to": sha1s[3] })["id"],
                 "fields": "id,location.first_line,location.last_line",
                 },
        expect={ "comments": [{ "id": issue_1,
                                "location": { "first_line": 1,
                                              "last_line": 1, } },
                              { "id": issue_2,
                                "location": { "first_line": 1,
                                              "last_line": 3, } },
                              { "id": issue_3,
                                "location": { "first_line": 11,
                                              "last_line": 11, } }],
                 })

# end of file

================================================
FILE: testing/tests/001-main/003-self/200-json/007-filechanges.py
================================================
# @dependency 001-main/002-createrepository.py
# @dependency 001-main/003-self/200-json/006-changesets.py

FROM_SHA1 = "573c5ff15ad95cfbc3e2f2efb0a638a4a78c17a7"
FROM_SINGLE_SHA1 = "aabc2b10c930a9e72fe9587a6e8634087bb3efe1"
TO_SHA1 = "6dc8e9c2d952028286d4b83475947bd0b1410860"
ROOT_SHA1 = "ee37c47f6f6a14afa6912c1cc58a9f49d2a29acd"

# Shape of any filechange object (modes are None for regular edits).
GENERIC_FILECHANGE = { "file": int,
                       "changeset": int,
                       "old_sha1": str,
                       "new_sha1": str,
                       "new_mode": None,
                       "old_mode": None }

# Path -> file id, filled in by fetch_file().
files = {}

def fetch_file(path):
    # Resolve a path via the files resource and remember its numeric id.
    result = frontend.json(
        "files",
        params={ "path": path },
        expect={ "id": int,
                 "path": path })
    files[path] = result["id"]

fetch_file("testing/__init__.py")
fetch_file("testing/repository.py")
fetch_file("testing/virtualbox.py")

# Filechanges for changeset from single commit
single_changeset = fetch_changeset({ "commit": TO_SHA1 })

frontend.json(
    "filechanges",
    params={ "repository": 1,
             "changeset": single_changeset["id"] },
    expect={"filechanges": [
        {"changeset": single_changeset["id"],
         "old_sha1": "a2ffb3a6cd3b021c34592f4bd8f32905e4dd5830",
         "new_sha1": "2d06e47848827d8d8312542f3687f0380ebbc3ed",
         "file": files["testing/__init__.py"],
         "new_mode": None,
         "old_mode": None},
        {"changeset": single_changeset["id"],
         "old_sha1": "e285e7c535dd8eee185d71c5adec1a328e586a58",
         "new_sha1": "ac6fe72b7ffefb9d5d4c6637aa94c02e756b2665",
         "file": files["testing/repository.py"],
         "new_mode": None,
         "old_mode": None},
        {"changeset": single_changeset["id"],
         "old_sha1": "0f5b7b313b6152f9c4f342c151fa1038a83e03f4",
         "new_sha1": "c2e9ee01afb2b0cdde940532f93a6823013c8a91",
         "file": files["testing/virtualbox.py"],
         "new_mode": None,
         "old_mode": None}]})

# Single filechange for changeset from two commits
custom_changeset = fetch_changeset({ "from": FROM_SHA1, "to": TO_SHA1 })

frontend.json(
    "filechanges/" + str(custom_changeset["files"][0]),
    params={ "repository": 1,
             "changeset": custom_changeset["id"] },
    expect=GENERIC_FILECHANGE)

# Invalid filechange id
frontend.json(
    "filechanges/-1",
    params={ "repository": 1,
             "changeset": custom_changeset["id"] },
    expect={ "error": { "message": "Invalid numeric id: '-1'",
                        "title": "Invalid API request" } },
    expected_http_status=400)

================================================
FILE: testing/tests/001-main/003-self/200-json/007-replies.py
================================================
# @dependency 001-main/003-self/100-reviewing/001-comments.basic.py

# Fetch the id of a review which contains some comments.
# NOTE(review): extraction artifact — original line breaks collapsed; apply
# changes against the upstream file. This section locates the review created by
# 100-reviewing/001-comments.basic.py, verifies the reply listing of each of
# its comments, then exercises draft-reply CRUD (create/read/update/delete) and
# the related error responses through the JSON API.
# NOTE(review): in check_with_reply() below, the loop variable `reply_json`
# shadows the module-level reply_json() helper defined in 200-json/__init__.py;
# rename the loop variable (e.g. `reply`) upstream to avoid confusion.
result = frontend.operation( "searchreview", data={ "query": "branch:r/100-reviewing/001-comment.basic" }) testing.expect.check(1, len(result["reviews"])) review_id = result["reviews"][0]["id"] result = frontend.json( "reviews/%d" % review_id, params={ "fields": "issues,notes" }, expect={ "issues": [int, int, int, int], "notes": [int, int] }) issue0 = frontend.json( "comments/%d/replies" % result["issues"][0], expect={ "replies": [reply_json("dave"), reply_json("bob"), reply_json("erin"), reply_json("bob"), reply_json("alice")] }) issue1 = frontend.json( "comments/%d/replies" % result["issues"][1], expect={ "replies": [reply_json("alice"), reply_json("bob")] }) issue2 = frontend.json( "comments/%d/replies" % result["issues"][2], expect={ "replies": [reply_json("bob"), reply_json("erin"), reply_json("alice")] }) note0 = frontend.json( "comments/%d/replies" % result["notes"][0], expect={ "replies": [] }) issue3 = frontend.json( "comments/%d/replies" % result["issues"][3], expect={ "replies": [reply_json("bob")] }) note1 = frontend.json( "comments/%d/replies" % result["notes"][1], expect={ "replies": [] }) def check_with_reply(comment_id, replies): for reply_json in replies: frontend.json( "comments", params={ "with_reply": reply_json["id"], "fields": "id" }, expect={ "id": comment_id }) check_with_reply(result["issues"][0], issue0["replies"]) check_with_reply(result["issues"][1], issue1["replies"]) check_with_reply(result["issues"][2], issue2["replies"]) check_with_reply(result["issues"][3], issue3["replies"]) check_with_reply(result["notes"][0], note0["replies"]) check_with_reply(result["notes"][1], note1["replies"]) with frontend.signin("alice"): # Use a comment with no replies to test with. 
comment_id = result["notes"][1] published_reply_id = issue0["replies"][-1]["id"] reply_id = frontend.json( "comments/%d/replies" % comment_id, post={ "text": "JSON reply #1", }, expect={ "id": int, "is_draft": True, "author": instance.userid("alice"), "timestamp": float, "text": "JSON reply #1" })["id"] frontend.json( "replies/%d" % reply_id, expect={ "id": reply_id, "is_draft": True, "author": instance.userid("alice"), "timestamp": float, "text": "JSON reply #1" }) frontend.json( "comments/%d" % comment_id, expect={ "id": comment_id, "type": "note", "is_draft": False, "state": None, "review": review_id, "author": instance.userid("alice"), "location": { "type": "file-version", "first_line": int, "last_line": int, "file": int, "changeset": int, "side": "new", "commit": None, "is_translated": False }, "resolved_by": None, "addressed_by": None, "timestamp": float, "text": str, "replies": [], "draft_changes": draft_changes_json("alice", reply=reply_id), }) with frontend.signin("bob"): # Check that Bob doesn't see Alice's draft reply. 
frontend.json( "comments/%d" % comment_id, expect={ "id": comment_id, "type": "note", "is_draft": False, "state": None, "review": review_id, "author": instance.userid("alice"), "location": { "type": "file-version", "first_line": int, "last_line": int, "file": int, "changeset": int, "side": "new", "commit": None, "is_translated": False }, "resolved_by": None, "addressed_by": None, "timestamp": float, "text": str, "replies": [], "draft_changes": None, }) with frontend.signin("alice"): frontend.json( "replies/%d" % reply_id, put={ "text": "JSON reply (edited)" }, expect={ "id": reply_id, "is_draft": True, "author": instance.userid("alice"), "timestamp": float, "text": "JSON reply (edited)" }) frontend.json( "replies/%d" % reply_id, delete=True, expected_http_status=204) reply_id = frontend.json( "replies", params={ "comment": comment_id }, post={ "text": "JSON reply #2" }, expect={ "id": int, "is_draft": True, "author": instance.userid("alice"), "timestamp": float, "text": "JSON reply #2" })["id"] frontend.json( "replies/%d" % reply_id, delete=True, expected_http_status=204) reply_id = frontend.json( "replies", post={ "comment": comment_id, "text": "JSON reply #3" }, expect={ "id": int, "is_draft": True, "author": instance.userid("alice"), "timestamp": float, "text": "JSON reply #3" })["id"] frontend.json( "replies", post={ "comment": comment_id, "text": "JSON reply (invalid)" }, expected_http_status=400, expect={ "error": { "title": "Invalid API request", "message": "Comment already has a draft reply" } }) frontend.json( "replies/%d" % reply_id, delete=True, expected_http_status=204) frontend.json( "comments/%d/replies" % comment_id, post={ "text": " " }, expected_http_status=400, expect={ "error": { "title": "Invalid API request", "message": "Empty reply" } }) frontend.json( "replies/%d" % published_reply_id, put={ "text": "Invalid edit" }, expected_http_status=400, expect={ "error": { "title": "Invalid API request", "message": "Published replies cannot be edited" } 
}) frontend.json( "replies/%d" % published_reply_id, delete=True, expected_http_status=400, expect={ "error": { "title": "Invalid API request", "message": "Published replies cannot be deleted" } }) # end of file ================================================ FILE: testing/tests/001-main/003-self/200-json/008-batches.py ================================================ # @dependency 001-main/002-createrepository.py with repository.workcopy() as work: review = Review(work, "alice", "200-json/008-batches") review.addFile(first="200-json/008-batches/first.txt", second="200-json/008-batches/second.txt", third="200-json/008-batches/third.txt") review.commit("Reference commit", reference=True, first=["First", "=====", "Initial line"], second=["Second", "======", "Initial line"], third=["Third", "=====", "Initial line"]) review.commit("First commit", first=["First", "=====", "Initial line", "Added line"]) review.commit("Second commit", second=["Second", "======", "Initial line", "Added line"]) review.commit("Third commit", third=["Third", "=====", "Initial line", "Added line"]) review.addFilter("bob", "reviewer", "200-json/008-batches/") review.addFilter("dave", "reviewer", "200-json/008-batches/") review.submit() changesets = { "first": fetch_changeset({ "from": review.sha1s[0], "to": review.sha1s[1], }), "second": fetch_changeset({ "from": review.sha1s[1], "to": review.sha1s[2], }), "third": fetch_changeset({ "from": review.sha1s[2], "to": review.sha1s[3], }), "all": fetch_changeset({ "from": review.sha1s[0], "to": review.sha1s[3], }), } issues = { "alice": [], "bob": [], "dave": [] } changes = {} def fetch_changes(key): changes[key] = frontend.json( ("reviews/%d/changesets/%d/reviewablefilechanges" % (review.id, changesets[key]["id"])), expect={ "reviewablefilechanges": [{ "id": int, "review": review.id, "changeset": changesets[key]["id"], "file": review.getFileId(key), "deleted_lines": int, "inserted_lines": int, "is_reviewed": False, "reviewed_by": None, 
"assigned_reviewers": [instance.userid("bob"), instance.userid("dave")], "draft_changes": None, }], })["reviewablefilechanges"] fetch_changes("first") fetch_changes("second") fetch_changes("third") with frontend.signin("alice"): frontend.json( "reviews/%d/batches" % review.id, params={ "unpublished": "yes", }, expect=batch_json(review.id, "alice", "draft")) issues["alice"].append( frontend.json( "reviews/%d/issues" % review.id, post={ "text": "Alice's issue #1", "location": { "type": "file-version", "changeset": changesets["first"]["id"], "side": "new", "file": review.getFilename("first"), "first_line": 1, "last_line": 4, } })["id"]) frontend.json( "reviews/%d/batches" % review.id, params={ "unpublished": "yes", }, expect=batch_json(review.id, "alice", "draft", created_comments=[issues["alice"][0]])) issues["alice"].append( frontend.json( "reviews/%d/issues" % review.id, post={ "text": "Alice's issue #2", "location": { "type": "file-version", "changeset": changesets["second"]["id"], "side": "new", "file": review.getFilename("second"), "first_line": 1, "last_line": 2, } })["id"]) frontend.json( "reviews/%d/batches" % review.id, params={ "unpublished": "yes", }, expect=batch_json(review.id, "alice", "draft", created_comments=[issues["alice"][0], issues["alice"][1]])) frontend.json( "reviews/%d/batches" % review.id, post={}, expect=batch_json(review.id, "alice", "published", created_comments=[issues["alice"][0], issues["alice"][1]])) frontend.json( "reviews/%d/batches" % review.id, params={ "unpublished": "yes", }, expect=batch_json(review.id, "alice", "draft")) with frontend.signin("bob"): frontend.json( "reviews/%d/batches" % review.id, params={ "unpublished": "yes", }, expect=batch_json(review.id, "bob", "draft")) issues["bob"].append( frontend.json( "reviews/%d/issues" % review.id, post={ "text": "Bob's issue #1", "location": { "type": "file-version", "changeset": changesets["second"]["id"], "side": "new", "file": review.getFilename("second"), "first_line": 3, 
"last_line": 4, } })["id"]) frontend.json( "comments/%d" % issues["alice"][0], put={ "draft_changes": { "new_state": "resolved", }, }, expect={ "id": issues["alice"][0], "state": "open", "draft_changes": draft_changes_json( "bob", new_state="resolved"), "*": "*", }) frontend.json( ("reviews/%d/changesets/%d/reviewablefilechanges" % (review.id, changesets["second"]["id"])), put={ "draft_changes": { "new_is_reviewed": True, } }, expect={ "reviewablefilechanges": [{ "id": int, "review": review.id, "changeset": changesets["second"]["id"], "file": review.getFileId("second"), "deleted_lines": int, "inserted_lines": int, "is_reviewed": False, "reviewed_by": None, "assigned_reviewers": [instance.userid("bob"), instance.userid("dave")], "draft_changes": { "author": instance.userid("bob"), "new_is_reviewed": True, "new_reviewed_by": instance.userid("bob"), }, }], }) frontend.json( "reviews/%d/batches" % review.id, params={ "unpublished": "yes", }, expect=batch_json(review.id, "bob", "draft", created_comments=[issues["bob"][0]], resolved_issues=[issues["alice"][0]], reviewed_changes=[changes["second"][0]["id"]])) with frontend.signin("dave"): frontend.json( "reviews/%d/batches" % review.id, params={ "unpublished": "yes", }, expect=batch_json(review.id, "dave", "draft")) issues["dave"].append( frontend.json( "reviews/%d/issues" % review.id, post={ "text": "Dave's issue #1", "location": { "type": "file-version", "changeset": changesets["all"]["id"], "side": "new", "file": review.getFilename("third"), "first_line": 1, "last_line": 4, } })["id"]) frontend.json( "comments/%d" % issues["alice"][0], put={ "draft_changes": { "new_state": "resolved", }, }, expect={ "id": issues["alice"][0], "state": "open", "draft_changes": draft_changes_json( "dave", new_state="resolved"), "*": "*", }) frontend.json( "comments/%d" % issues["alice"][1], put={ "draft_changes": { "new_state": "resolved", }, }, expect={ "id": issues["alice"][1], "state": "open", "draft_changes": draft_changes_json( 
"dave", new_state="resolved"), "*": "*", }) frontend.json( "reviewablefilechanges/%d,%d" % (changes["second"][0]["id"], changes["third"][0]["id"]), put={ "draft_changes": { "new_is_reviewed": True, } }, expect={ "reviewablefilechanges": [{ "id": int, "review": review.id, "changeset": changesets["second"]["id"], "file": review.getFileId("second"), "deleted_lines": int, "inserted_lines": int, "is_reviewed": False, "reviewed_by": None, "assigned_reviewers": [instance.userid("bob"), instance.userid("dave")], "draft_changes": { "author": instance.userid("dave"), "new_is_reviewed": True, "new_reviewed_by": instance.userid("dave"), }, }, { "id": int, "review": review.id, "changeset": changesets["third"]["id"], "file": review.getFileId("third"), "deleted_lines": int, "inserted_lines": int, "is_reviewed": False, "reviewed_by": None, "assigned_reviewers": [instance.userid("bob"), instance.userid("dave")], "draft_changes": { "author": instance.userid("dave"), "new_is_reviewed": True, "new_reviewed_by": instance.userid("dave"), }, }], }) frontend.json( "reviews/%d/batches" % review.id, params={ "unpublished": "yes", }, expect=batch_json(review.id, "dave", "draft", created_comments=[issues["dave"][0]], resolved_issues=[issues["alice"][0], issues["alice"][1]], reviewed_changes=[changes["second"][0]["id"], changes["third"][0]["id"]])) with frontend.signin("bob"): frontend.json( "reviews/%d/batches" % review.id, params={ "unpublished": "yes", }, expect=batch_json(review.id, "bob", "draft", created_comments=[issues["bob"][0]], resolved_issues=[issues["alice"][0]], reviewed_changes=[changes["second"][0]["id"]])) frontend.json( "reviews/%d/batches" % review.id, post={ "comment": "This looks good!", }, expect=batch_json(review.id, "bob", "published", comment=int, created_comments=[issues["bob"][0]], resolved_issues=[issues["alice"][0]], reviewed_changes=[changes["second"][0]["id"]])) frontend.json( "reviews/%d/batches" % review.id, params={ "unpublished": "yes", }, 
# NOTE(review): extraction artifact — original line breaks collapsed; apply
# changes against the upstream files. This span covers the tail of the
# 008-batches test, the 200-json/__init__.py expectation helpers (user_json,
# reply_json, batch_json, fetch_changeset, draft_changes_json), and the shared
# Review test-fixture class from 003-self/__init__.py.
expect=batch_json(review.id, "bob", "draft")) with frontend.signin("dave"): frontend.json( "reviews/%d/batches" % review.id, params={ "unpublished": "yes", }, expect=batch_json(review.id, "dave", "draft", created_comments=[issues["dave"][0]], resolved_issues=[issues["alice"][1]], reviewed_changes=[changes["third"][0]["id"]])) # eof ================================================ FILE: testing/tests/001-main/003-self/200-json/__init__.py ================================================ critic_json = { "id": 1, "name": "critic", "path": instance.repository_path(), "relative_path": "critic.git", "url": str } other_json = { "id": 2, "name": "other", "path": instance.repository_path("other"), "relative_path": "other.git", "url": str } def user_json(name, fullname=None, status="current", no_email=False): if fullname is None: fullname = name.capitalize() + " von Testing" if no_email: email = None else: email = name + "@example.org" return { "id": instance.userid(name), "name": name, "fullname": fullname, "status": status, "email": email } generic_commit_json = { "id": int, "sha1": str, "summary": str, "message": str, "parents": list, "author": { "name": str, "email": str, "timestamp": float }, "committer": { "name": str, "email": str, "timestamp": float }, } def reply_json(author): return { "id": int, "is_draft": bool, "author": instance.userid(author), "timestamp": float, "text": "This is a reply from %s." 
% author.capitalize() } def batch_json(review_id, author, batch_type, **fields): expected = { "id": int, "is_empty": not fields, "review": review_id, "author": instance.userid(author), "comment": None, "timestamp": float, "created_comments": [], "written_replies": [], "resolved_issues": [], "reopened_issues": [], "morphed_comments": [], "reviewed_changes": [], "unreviewed_changes": [], } if batch_type == "draft": expected.update({ "id": None, "timestamp": None, }) expected.update(fields) return expected def fetch_changeset(params, repository="critic"): params.setdefault("repository", repository) result = frontend.json( "changesets", params=params, expected_http_status=[200, 202]) if "error" in result: instance.synchronize_service("changeset") result = frontend.json( "changesets", params=params, expect={ "id": int, "*": "*" }) return result def draft_changes_json(author, **kwargs): result = { "author": instance.userid(author), "is_draft": False, "reply": None, "new_type": None, "new_state": None, "new_location": None, } result.update(kwargs) return result # eof ================================================ FILE: testing/tests/001-main/003-self/__init__.py ================================================ import os class Review(object): def __init__(self, workcopy, as_user, work_branch_name): self.workcopy = workcopy self.work_branch_name = work_branch_name self.review_branch_name = "r/" + work_branch_name self.summary = work_branch_name self.as_user = as_user self.sha1s = [] self.reference_commits = 0 self.pushed_commits = 0 self.files = {} self.review_id = None self.filters = [] self.users = set([as_user]) self.workcopy.run(["checkout", "-b", self.work_branch_name]) class File(object): def __init__(self, filename): self.filename = filename self.content = None def write(self, path): filename = os.path.join(path, self.filename) if self.content is None: if os.path.isfile(filename): os.unlink(filename) else: if isinstance(self.content, list): content = 
"\n".join(self.content) + "\n" else: content = self.content if not os.path.isdir(os.path.dirname(filename)): os.makedirs(os.path.dirname(filename)) with open(filename, "w") as fileobj: fileobj.write(content) def addFile(self, **files): for key, filename in files.items(): self.files[key] = Review.File(filename) def getFilename(self, key): return self.files[key].filename def getFileId(self, key): result = frontend.json( "files", params={ "path": self.files[key].filename, }, expect={ "id": int, "path": self.files[key].filename, }) return result["id"] def commit(self, message, **files): if files.pop("reference", False): assert len(self.sha1s) == self.reference_commits self.reference_commits += 1 for key, content in files.items(): self.files[key].content = content for file_ in self.files.values(): file_.write(self.workcopy.path) self.workcopy.run(["add", file_.filename]) self.workcopy.run(["commit", "-m", message]) self.sha1s.append(self.workcopy.run(["rev-parse", "HEAD"]).strip()) def addFilter(self, username, filter_type, path): assert filter_type in ("reviewer", "watcher") self.users.add(username) self.filters.append({ "username": username, "type": filter_type, "path": path }) def submit(self): assert len(self.sha1s) > self.reference_commits self.workcopy.run( ["push", instance.repository_url(self.as_user), "HEAD"]) self.pushed_commits = len(self.sha1s) with frontend.signin(self.as_user): result = frontend.operation( "submitreview", data={ "repository": "critic", "branch": self.review_branch_name, "summary": self.work_branch_name, "commit_sha1s": self.sha1s[self.reference_commits:], "reviewfilters": self.filters, "frombranch": self.work_branch_name, }) self.id = result["review_id"] for username in self.users: mailbox.pop(accept=[ testing.mailbox.ToRecipient(username + "@example.org"), testing.mailbox.WithSubject("New Review: " + self.summary) ]) def push(self): assert self.review_id is not None, "call review.submit() first!" 
# NOTE(review): likely bug — submit() stores the new review's id in self.id,
# while self.review_id is only ever set to None (in __init__). As written,
# this assertion therefore fails even after a successful submit(); either
# submit() should also set self.review_id, or the assert should test self.id.
# TODO confirm whether any test currently calls push().
self.workcopy.run( ["push", instance.repository_url(self.as_user), "HEAD:%s" % self.review_branch_name]) for username in self.users: mailbox.pop(accept=[ testing.mailbox.ToRecipient(username + "@example.org"), testing.mailbox.WithSubject("Updated Review: " + self.summary) ]) ================================================ FILE: testing/tests/001-main/004-extensions/001-enable.py ================================================ # Enable extensions. instance.extend(repository) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/001-tutorial.py ================================================ frontend.page("tutorial", expect={ "document_title": testing.expect.document_title(u"Tutorials"), "content_title": testing.expect.paleyellow_title(0, u"Tutorials"), "pageheader_links": testing.expect.pageheader_links("anonymous", "extensions"), "script_user": testing.expect.script_no_user() }) frontend.page("tutorial", params={ "item": "extensions" }, expect={ "document_title": testing.expect.document_title(u"Critic Extensions"), "content_title": testing.expect.paleyellow_title(0, u"Critic Extensions"), "pageheader_links": testing.expect.pageheader_links("anonymous", "extensions"), "script_user": testing.expect.script_no_user() }) frontend.page("tutorial", params={ "item": "extensions-api" }, expect={ "document_title": testing.expect.document_title(u"Critic Extensions API"), "content_title": testing.expect.paleyellow_title(0, u"Critic Extensions API"), "pageheader_links": testing.expect.pageheader_links("anonymous", "extensions"), "script_user": testing.expect.script_no_user() }) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/002-manageextensions.py ================================================ frontend.page( "manageextensions", expect={ "document_title": testing.expect.document_title(u"Manage Extensions"), "content_title": testing.expect.paleyellow_title(0, u"Available 
Extensions"), "pageheader_links": testing.expect.pageheader_links("anonymous", "extensions"), "script_user": testing.expect.script_anonymous_user() }) frontend.page( "manageextensions", params={ "what": "available" }, expect={ "document_title": testing.expect.document_title(u"Manage Extensions"), "content_title": testing.expect.paleyellow_title(0, u"Available Extensions"), "pageheader_links": testing.expect.pageheader_links("anonymous", "extensions"), "script_user": testing.expect.script_anonymous_user() }) frontend.page( "manageextensions", params={ "what": "installed" }, expect={ "document_title": testing.expect.document_title(u"Manage Extensions"), "content_title": testing.expect.paleyellow_title(0, u"Installed Extensions"), "pageheader_links": testing.expect.pageheader_links("anonymous", "extensions"), "script_user": testing.expect.script_anonymous_user() }) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/003-install-TestExtension.py ================================================ def check_extension(installed): def check(document): tr_item = document.find("tr", attrs={ "class": "item" }) td_name = tr_item.find("td", attrs={ "class": "name" }) testing.expect.check("Extension:", td_name.string) td_value = tr_item.find("td", attrs={ "class": "value" }) span_name = td_value.find("span", attrs={ "class": "name" }) testing.expect.check("TestExtension", span_name.contents[0].string) testing.expect.check(" hosted by Alice von Testing", span_name.contents[1]) span_installed = td_value.find("span", attrs={ "class": "installed" }) if installed: testing.expect.check(" [installed]", span_installed.string) elif span_installed: testing.expect.check("<no installed indicator>", "<found installed indicator>") return check try: instance.execute( ["sudo", "mkdir", "~alice/CriticExtensions", "&&", "sudo", "cp", "-R", "~/critic/testing/input/TestExtension", "~alice/CriticExtensions", "&&", "sudo", "chown", "-R", 
"alice.critic", "~alice/CriticExtensions", "&&", "sudo", "chmod", "-R", "u+rwX,go+rX", "~alice/CriticExtensions"]) instance.execute( ["sudo", "-H", "-u", "alice", "git", "init", "&&", "sudo", "-H", "-u", "alice", "git", "add", ".", "&&", "sudo", "-H", "-u", "alice", "git", "commit", "-mInitial", "&&", "sudo", "-H", "-u", "alice", "git", "checkout", "-b", "version/stable", "&&", "sudo", "su", "-c", "'echo stable:1 > version.txt'", "alice", "&&", "sudo", "-H", "-u", "alice", "git", "add", "version.txt", "&&", "sudo", "-H", "-u", "alice", "git", "commit", "-mStable:1", "&&", "sudo", "su", "-c", "'echo stable:2 > version.txt'", "alice", "&&", "sudo", "-H", "-u", "alice", "git", "commit", "-mStable:2", "version.txt", "&&", "sudo", "-H", "-u", "alice", "git", "checkout", "master", "&&", "sudo", "su", "-c", "'echo live > version.txt'", "alice"], cwd="~alice/CriticExtensions/TestExtension") except testing.InstanceError as error: raise testing.TestFailure(error.message) with frontend.signin("alice"): frontend.page( "manageextensions", expect={ "test_extension": check_extension(False) }) frontend.operation( "installextension", data={ "author_name": "alice", "extension_name": "TestExtension" }) frontend.page( "manageextensions", expect={ "test_extension": check_extension(True) }) frontend.page( "manageextensions", expect={ "test_extension": check_extension(False) }) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/001-echo.py ================================================ import json def check_arguments(expected): def check(document): try: result = json.loads(document) except ValueError: testing.expect.check("<valid json>", repr(document)) else: actual = result["arguments"] testing.expect.check(expected, actual) return check def check_json(expected): def check(actual): try: return expected, json.loads(actual) except ValueError: return "<valid JSON>", actual return check with frontend.signin("alice"): 
frontend.page( "echo", expected_content_type="text/json", expect={ "json": check_arguments( ["GET", "echo", None]) }) frontend.page( "echo?foo=bar", expected_content_type="text/json", expect={ "json": check_arguments( ["GET", "echo", { "raw": "foo=bar", "params": { "foo": "bar" }}]) }) frontend.page( "echo?foo=bar&x=10&y=20", expected_content_type="text/json", expect={ "json": check_arguments( ["GET", "echo", { "raw": "foo=bar&x=10&y=20", "params": { "foo": "bar", "x": "10", "y": "20" }}]) }) frontend.operation( "echo", data={}, expect={ "arguments": ["POST", "echo", None], "stdin": check_json({}) }) frontend.operation( "echo", data={ "foo": "bar", "positions": [{ "x": 10, "y": 20 }, { "x": 11, "y": 21 }]}, expect={ "arguments": ["POST", "echo", None], "stdin": check_json({ "foo": "bar", "positions": [{ "x": 10, "y": 20 }, { "x": 11, "y": 21 }]}) }) # Verify that Alice's extension install doesn't affect Bob. with frontend.signin("bob"): frontend.page("echo", expected_http_status=404) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/002-nothandled.py ================================================ with frontend.signin("alice"): frontend.page("nothandled", expected_http_status=404) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/003-empty.py ================================================ def empty(document): if document != "": testing.expect.check("<empty string>", document) with frontend.signin("alice"): frontend.page( "empty", expected_content_type="text/plain", expect={ "empty": empty }) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/004-Review.list.py ================================================ with frontend.signin("alice"): result = frontend.operation("Review.list", data={}) for failed in result["failed"]: logger.error("Review.list: 
%(test)s: %(message)s" % failed) for passed in result["passed"]: logger.debug("Review.list: %(test)s: passed (%(result)s)" % passed) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/005-MailTransaction.py ================================================ mailbox.check_empty() with frontend.signin("alice"): to_alice = testing.mailbox.ToRecipient("alice@example.org") to_bob = testing.mailbox.ToRecipient("bob@example.org") frontend.operation( "MailTransaction", data={ "mails": [{ "to": ["alice", "bob"], "subject": "MailTransaction test #1", "body": "This is the mail body.\n\nBye, bye." }] }, expect={ "message": None }) def recipients_equal(expected, actual): return set(expected) == set(map(str.strip, actual.split(","))) def check_mail1(mail): testing.expect.check("Alice von Testing <alice@example.org>", mail.header("From")) testing.expect.check(["Alice von Testing <alice@example.org>", "Bob von Testing <bob@example.org>"], mail.header("To"), equal=recipients_equal) testing.expect.check("MailTransaction test #1", mail.header("Subject")) testing.expect.check(["This is the mail body.", "", "Bye, bye."], mail.lines) check_mail1(mailbox.pop(to_alice)) check_mail1(mailbox.pop(to_bob)) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/006-inject.py ================================================ import json from BeautifulSoup import Comment def check_injected(key, expected): def check(document): comments = document.findAll(text=lambda text: isinstance(text, Comment)) for comment in comments: if comment.strip().startswith("[alice/TestExtension] Extension error:"): logger.error(comment.strip()) return for script in document.findAll("script"): if script.has_key("src"): src = script["src"] if src.startswith("data:text/javascript,var %s=" % key) \ and src[-1] == ";": injected = src[len("data:text/javascript,var %s=" % key):-1] break else: 
testing.expect.check("<injected script>", "<expected content not found>") try: actual = json.loads(injected) except ValueError: testing.expect.check("<valid json>", repr(injected)) else: testing.expect.check(expected, actual) return check def check_not_injected(key): def check(document): comments = document.findAll(text=lambda text: isinstance(text, Comment)) for comment in comments: if comment.strip().startswith("[alice/TestExtension] Extension error:"): logger.error(comment.strip()) return for script in document.findAll("script"): if script.has_key("src"): src = script["src"] if src.startswith("data:text/javascript,var %s=" % key) \ and src[-1] == ";": testing.expect.check("<no injected script>", "<injected script found>") break return check def check_error(message): def check(document): comments = document.findAll(text=lambda text: isinstance(text, Comment)) for comment in comments: comment = comment.strip() if comment.startswith("[alice/TestExtension] Extension error:"): comment = comment[len("[alice/TestExtension] Extension error:"):] testing.expect.check(message, comment.strip()) return testing.expect.check("<error message>", "<expected content not found>") return check with frontend.signin("alice"): frontend.page( "home", expect={ "injected": check_injected( "injected", ["home", None]) }) frontend.page( "home?foo=bar", expect={ "injected": check_injected( "injected", ["home", { "raw": "foo=bar", "params": { "foo": "bar" }}]) }) frontend.page( "home?foo=bar&x=10&y=20", expect={ "injected": check_injected( "injected", ["home", { "raw": "foo=bar&x=10&y=20", "params": { "foo": "bar", "x": "10", "y": "20" }}]) }) sha1 = repository.run(["rev-parse", "master"]).strip() frontend.page( "critic/master", expect={ "showcommitShort": check_injected( "showcommitShort", ["critic/master", None]), "showcommitLong": check_injected( "showcommitLong", ["showcommit", { "raw": "repository=critic&sha1=" + sha1, "params": { "repository": "critic", "sha1": sha1 }}]) }) 
frontend.page( "showcommit?repository=critic&sha1=master", expect={ "showcommitShort": check_not_injected( "showcommitShort"), "showcommitLong": check_injected( "showcommitLong", ["showcommit", { "raw": "repository=critic&sha1=master", "params": { "repository": "critic", "sha1": "master" }}]) }) frontend.page( "home?expr=path", expect={ "injected": check_injected("injectedCustom", "home") }) frontend.page( "home", params={ "expr": "while(true){}" }, expect={ "injected": check_error("Process timed out after 5 seconds") }) # Verify that Alice's extension install doesn't affect Bob. with frontend.signin("bob"): frontend.page( "home", expect={ "injected": check_not_injected("injected") }) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/007-version.py ================================================ def check_version(expected): def check(actual): testing.expect.check(expected, actual.strip()) return check with frontend.signin("alice"): frontend.page( "version", expected_content_type="text/plain", expect={ "version": check_version("live") }) with frontend.signin("bob"): frontend.page( "version", expected_http_status=404) frontend.operation( "installextension", data={ "author_name": "alice", "extension_name": "TestExtension", "version": "version/stable" }) frontend.page( "version", expected_content_type="text/plain", expect={ "version": check_version("stable:2") }) frontend.operation( "uninstallextension", data={ "author_name": "alice", "extension_name": "TestExtension" }) frontend.operation( "installextension", data={ "author_name": "alice", "extension_name": "TestExtension" }) frontend.page( "version", expected_content_type="text/plain", expect={ "version": check_version("live") }) frontend.operation( "uninstallextension", data={ "author_name": "alice", "extension_name": "TestExtension" }) frontend.operation( "installextension", data={ "author_name": "alice", "extension_name": "TestExtension", 
"version": "version/stable", "universal": True }, expect={ "status": "failure", "code": "notallowed" }) with frontend.signin("admin"): frontend.operation( "installextension", data={ "author_name": "alice", "extension_name": "TestExtension", "version": "version/stable", "universal": True }) with frontend.signin("bob"): frontend.page( "version", expected_content_type="text/plain", expect={ "version": check_version("stable:2") }) frontend.operation( "installextension", data={ "author_name": "alice", "extension_name": "TestExtension" }) frontend.page( "version", expected_content_type="text/plain", expect={ "version": check_version("live") }) frontend.operation( "uninstallextension", data={ "author_name": "alice", "extension_name": "TestExtension" }) frontend.page( "version", expected_content_type="text/plain", expect={ "version": check_version("stable:2") }) with frontend.signin("admin"): frontend.operation( "uninstallextension", data={ "author_name": "alice", "extension_name": "TestExtension", "universal": True }) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/008-processcommits.py ================================================ import os import re def to(name): return testing.mailbox.ToRecipient("%s@example.org" % name) def about(subject): return testing.mailbox.WithSubject(subject) FILENAME = "008-processcommits.txt" SUMMARY = "Added %s" % FILENAME SETTINGS = { "review.createViaPush": True } review_id = None with testing.utils.settings("alice", SETTINGS), frontend.signin("alice"): with repository.workcopy() as work: base_sha1 = work.run(["rev-parse", "HEAD"]).strip() work.run(["remote", "add", "critic", "alice@%s:/var/git/critic.git" % instance.hostname]) def commit(fixup_message=None): if fixup_message: full_message = "fixup! 
%s\n\n%s" % (SUMMARY, fixup_message) else: full_message = SUMMARY work.run(["add", FILENAME]) work.run(["commit", "-m", full_message], GIT_AUTHOR_NAME="Alice von Testing", GIT_AUTHOR_EMAIL="alice@example.org", GIT_COMMITTER_NAME="Alice von Testing", GIT_COMMITTER_EMAIL="alice@example.org") return work.run(["rev-parse", "HEAD"]).strip() def push(): output = work.run( ["push", "-q", "critic", "HEAD:refs/heads/r/008-processcommits"]) all_lines = [] for line in output.splitlines(): if not line.startswith("remote:"): continue all_lines.append(line[len("remote:"):].split("\x1b", 1)[0].strip()) extension_lines = [] for line in all_lines: if line.startswith("[TestExtension] "): extension_lines.append(line[len("[TestExtension] "):]) return all_lines, extension_lines with open(os.path.join(work.path, FILENAME), "w") as text_file: print >>text_file, "First line." first_commit = commit() all_lines, extension_lines = push() next_is_review_url = False for line in all_lines: if line == "Submitted review:": next_is_review_url = True elif next_is_review_url: review_id = int(re.search(r"/r/(\d+)$", line).group(1)) break testing.expect.check(["processcommits.js::processcommits()", "===================================", "r/%d" % review_id, "%s..%s" % (base_sha1[:8], first_commit[:8]), "%s" % first_commit[:8]], extension_lines) mailbox.pop(accept=[to("alice"), about("New Review: %s" % SUMMARY)]) with open(os.path.join(work.path, FILENAME), "a") as text_file: print >>text_file, "Second line." second_commit = commit("Added second line") with open(os.path.join(work.path, FILENAME), "a") as text_file: print >>text_file, "Third line." third_commit = commit("Added third line") with open(os.path.join(work.path, FILENAME), "a") as text_file: print >>text_file, "Fourth line." 
fourth_commit = commit("Added fourth line") all_lines, extension_lines = push() testing.expect.check(["processcommits.js::processcommits()", "===================================", "r/%d" % review_id, "%s..%s" % (first_commit[:8], fourth_commit[:8]), "%s,%s,%s" % (fourth_commit[:8], third_commit[:8], second_commit[:8])], extension_lines) mailbox.pop(accept=[to("alice"), about("Updated Review: %s" % SUMMARY)]) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/009-error-messages.py ================================================ import re def check_compilation(document): testing.expect.check( expected="""\ Extension failure: returned 1 Failed to load 'error\\.compilation\\.js': SyntaxError: Duplicate parameter name not allowed in this context""", actual=document, equal=re.match) def check_runtime(document): testing.expect.check( expected="""\ Extension failure: returned 1 Failed to call 'error\\.runtime\\.js::test\\(\\)': CriticError: nosuchuser: no such user new CriticUser\\(\\) at <Library>/critic-user\\.js:\\d+""", actual=document, equal=re.match) with frontend.signin("alice"): frontend.page( "error.compilation", expected_content_type="text/plain", expected_http_status=500, expect={ "message": check_compilation }) frontend.page( "error.runtime", expected_content_type="text/plain", expected_http_status=500, expect={ "message": check_runtime }) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/010-restrictions.py ================================================ with frontend.signin("alice"): frontend.operation( "restrictions", data={}, expect={ "database_connection": "PostgreSQL is not defined" }) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/011-User.py ================================================ DUMP_USER = """\ var user = %s; return [user.name, 
user.email, user.fullname, user.isAnonymous];""" def dump_user(user): return DUMP_USER % user with frontend.signin("alice"): frontend.operation( "evaluate", data={ "source": dump_user("critic.User.current") }, expect={ "result": ["alice", "alice@example.org", "Alice von Testing", False] }) frontend.operation( "evaluate", data={ "source": dump_user("new critic.User(%d)" % instance.userid("alice")) }, expect={ "result": ["alice", "alice@example.org", "Alice von Testing", False] }) frontend.operation( "evaluate", data={ "source": dump_user("new critic.User({ id: %d })" % instance.userid("alice")) }, expect={ "result": ["alice", "alice@example.org", "Alice von Testing", False] }) frontend.operation( "evaluate", data={ "source": dump_user("new critic.User('alice')") }, expect={ "result": ["alice", "alice@example.org", "Alice von Testing", False] }) frontend.operation( "evaluate", data={ "source": dump_user("new critic.User({ name: 'alice' })") }, expect={ "result": ["alice", "alice@example.org", "Alice von Testing", False] }) frontend.operation( "evaluate", data={ "source": dump_user("new critic.User({ id: %d, name: 'alice' })" % instance.userid("alice")) }, expect={ "result": ["alice", "alice@example.org", "Alice von Testing", False] }) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/012-resources.py ================================================ with frontend.signin("alice"): frontend.page( "extension-resource/TestExtension/helloworld.html", expected_content_type="text/html") frontend.page( "extension-resource/TestExtension/helloworld.css", expected_content_type="text/css") # This resource has an extra period in its name, the check that this doesn't # interfere with the content type guessing. 
frontend.page( "extension-resource/TestExtension/hello.world.js", expected_content_type="text/javascript") frontend.page( "extension-resource/TestExtension/helloworld.txt", expected_http_status=404) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/013-storage.py ================================================ with frontend.signin("alice"): frontend.operation( "evaluate", data={ "source": "return critic.storage.has('the key');" }, expect={ "result": False }) frontend.operation( "evaluate", data={ "source": "return critic.storage.get('the key');" }, expect={ "result": None }) frontend.operation( "evaluate", data={ "source": "critic.storage.set('the key', 'the value');" }, expect={ "result": None }) frontend.operation( "evaluate", data={ "source": "return critic.storage.has('the key');" }, expect={ "result": True }) frontend.operation( "evaluate", data={ "source": "return critic.storage.has('the other key');" }, expect={ "result": False }) frontend.operation( "evaluate", data={ "source": "return critic.storage.get('the key');" }, expect={ "result": "the value" }) frontend.operation( "clearextensionstorage", data={ "author_name": "alice", "extension_name": "TestExtension" }) frontend.operation( "evaluate", data={ "source": "return critic.storage.has('the key');" }, expect={ "result": False }) frontend.operation( "evaluate", data={ "source": "return critic.storage.get('the key');" }, expect={ "result": None }) frontend.operation( "evaluate", data={ "source": """ critic.storage.set('a', '1'); critic.storage.set('b', '4'); critic.storage.set('aa', '2'); critic.storage.set('bb', '5'); critic.storage.set('aaa', '3');""" }, expect={ "result": None }) frontend.operation( "evaluate", data={ "source": "return critic.storage.list();" }, expect={ "result": ["a", "aa", "aaa", "b", "bb"] }) frontend.operation( "evaluate", data={ "source": "return critic.storage.list({ like: 'a%' });" }, expect={ "result": 
["a", "aa", "aaa"] }) frontend.operation( "evaluate", data={ "source": "return critic.storage.list({ like: 'aa%' });" }, expect={ "result": ["aa", "aaa"] }) frontend.operation( "evaluate", data={ "source": "return critic.storage.list({ regexp: 'a+' });" }, expect={ "result": ["a", "aa", "aaa"] }) frontend.operation( "evaluate", data={ "source": "return critic.storage.list({ regexp: '[ab]*' });" }, expect={ "result": ["a", "aa", "aaa", "b", "bb"] }) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/014-Repository.run.py ================================================ import re RE_NAME_EMAIL = re.compile(r"(author|committer)\s+(.*?)\s+<(.*?)>") with frontend.signin("alice"): result = frontend.operation( "evaluate", data={ "source": """\ var repository = new critic.Repository("critic"); var tree_sha1 = repository.revparse("HEAD^{tree}"); var parent_sha1 = repository.revparse("HEAD^"); var workcopy = repository.getWorkCopy(); var sha1 = workcopy.run("commit-tree", tree_sha1, "-p", parent_sha1, { stdin: "Fix some stuff!\\n\\nFTW!\\n", GIT_AUTHOR_NAME: "Bob von Testing", GIT_AUTHOR_EMAIL: "bob@example.com", GIT_COMMITTER_NAME: "Alice von Testing", GIT_COMMITTER_EMAIL: "alice@example.com" }); sha1 = sha1.trim(); // includes a line-break workcopy.run("push", "origin", sha1 + ":refs/heads/014-Repository.run-1"); return sha1;""" }) with repository.workcopy() as work: REMOTE_URL = instance.repository_url("alice") work.run(["fetch", REMOTE_URL, "refs/heads/014-Repository.run-1"]) message = None for line in work.run(["cat-file", "commit", "FETCH_HEAD"]).splitlines(): if message is None: if not line: message = [] continue match = RE_NAME_EMAIL.match(line) if match: field, name, email = match.groups() if field == "author": testing.expect.check("Bob von Testing", name) testing.expect.check("bob@example.com", email) else: testing.expect.check("Alice von Testing", name) testing.expect.check("alice@example.com", 
email) elif not (line.startswith("tree") or line.startswith("parent")): testing.logger.error("Unexpected line: %r" % line) else: message.append(line) testing.expect.check(["Fix some stuff!", "", "FTW!"], message) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/015-filterhook.py ================================================ import os with_class = testing.expect.with_class extract_text = testing.expect.extract_text def check_echo_filter_type(document): filterdialog = document.find("div", attrs=with_class("filterdialog")) type_select = filterdialog.find("select", attrs={ "name": "type" }) type_options = type_select.findAll("option") testing.expect.check(4, len(type_options)) testing.expect.check("echo", extract_text(type_options[-1])) testing.expect.check("extensionhook", type_options[-1]["value"]) testing.expect.check("echo", type_options[-1]["data-filterhook-name"]) try: int(type_options[-1]["data-extension-id"]) except (KeyError, ValueError): testing.logger.error("invalid or missing data-extension-id attribute") def check_echo_filter(filter_id): def check(document): filters = document.find("table", attrs=with_class("filters")) for tr in filters.findAll("tr"): path_td = tr.find("td", attrs=with_class("path")) if not path_td or extract_text(path_td) != "015-filterhook/include/": continue title_td = tr.find("td", attrs=with_class("title")) testing.expect.check("echo", extract_text(title_td)) data_td = tr.find("td", attrs=with_class("data")) testing.expect.check("this is the data", extract_text(data_td)) break else: testing.logger.error("echo extension hook filter not found") return check with frontend.signin("alice"): frontend.page( "home", expect={ "check echo filter type": check_echo_filter_type }) result = frontend.operation( "addextensionhookfilter", data={ "subject": "alice", "extension": "alice/TestExtension", "repository": "critic", "filterhook_name": "echo", "path": 
"015-filterhook/include/", "data": "this is the data" }) filter_id = result["filter_id"] frontend.page( "home", expect={ "check echo filter": check_echo_filter(filter_id) }) with repository.workcopy() as work: work.run(["checkout", "-b", "r/015-filterhook"]) include = os.path.join(work.path, "015-filterhook", "include") exclude = os.path.join(work.path, "015-filterhook", "exclude") os.makedirs(include) os.makedirs(exclude) def make(directory, filename): with open(os.path.join(directory, filename), "w") as file: print >>file, filename make(include, "file1") make(include, "file2") make(exclude, "file3") work.run(["add", "015-filterhook"]) work.run(["commit", "-mFirst"]) review_id = testing.utils.createReviewViaPush(work, "alice") instance.synchronize_service("extensiontasks") instance.synchronize_service("maildelivery") # Ignore this mail; not very interesting in this context. mailbox.pop( [testing.mailbox.ToRecipient("alice@example.org"), testing.mailbox.WithSubject("New Review: First")]) to_alice = mailbox.pop( [testing.mailbox.ToRecipient("alice@example.org"), testing.mailbox.WithSubject("filterhook.js::filterhook")]) testing.expect.check( ['data: "this is the data"', 'review.id: %d' % review_id, 'user.name: alice', 'commits: ["First\\n"]', 'files: ["015-filterhook/include/file1","015-filterhook/include/file2"]'], to_alice.lines) mailbox.check_empty() make(exclude, "file4") work.run(["add", "015-filterhook"]) work.run(["commit", "-mSecond"]) make(include, "file5") make(include, "file6") work.run(["add", "015-filterhook"]) work.run(["commit", "-mThird"]) work.run(["push", "bob@%s:/var/git/critic.git" % instance.hostname, "HEAD"]) instance.synchronize_service("extensiontasks") instance.synchronize_service("maildelivery") # Ignore this mail; not very interesting in this context. 
mailbox.pop( [testing.mailbox.ToRecipient("alice@example.org"), testing.mailbox.WithSubject("Updated Review: First")]) to_alice = mailbox.pop( [testing.mailbox.ToRecipient("alice@example.org"), testing.mailbox.WithSubject("filterhook.js::filterhook")]) testing.expect.check( ['data: "this is the data"', 'review.id: %d' % review_id, 'user.name: bob', 'commits: ["Third\\n","Second\\n"]', 'files: ["015-filterhook/include/file5","015-filterhook/include/file6"]'], to_alice.lines) mailbox.check_empty() make(include, "explode") work.run(["add", "015-filterhook"]) work.run(["commit", "-mExplode"]) explode_sha1 = work.run(["rev-parse", "HEAD"]).strip() work.run(["push", "bob@%s:/var/git/critic.git" % instance.hostname, "HEAD"]) instance.synchronize_service("extensiontasks") instance.synchronize_service("maildelivery") # Ignore this mail; not very interesting in this context. mailbox.pop( [testing.mailbox.ToRecipient("alice@example.org"), testing.mailbox.WithSubject("Updated Review: First")]) to_alice = mailbox.pop( [testing.mailbox.ToRecipient("alice@example.org"), testing.mailbox.WithSubject("Failed: echo")]) testing.expect.check( ['An error occurred while processing an extension hook filter event!', '', 'Filter details:', '', ' Extension: TestExtension hosted by Alice von Testing', ' Filter hook: echo', ' Repository: critic', ' Path: 015-filterhook/include/', ' Data: "this is the data"', '', 'Event details:', '', ' Review: r/%d "First"' % review_id, ' Commits: %s "Explode"' % explode_sha1[:8], '', 'Error details:', '', ' Error: Process returned non-zero exit status 1', ' Output:', '', " Failed to call 'filterhook.js::filterhook()':", ' Error: Boom!', ' Error: Boom!', ' at filterhook.js:9:15', ' at filterhook (filterhook.js:6:9)', '', '-- critic'], to_alice.lines) mailbox.check_empty() with frontend.signin("bob"): frontend.operation( "deleteextensionhookfilter", data={ "subject": "bob", "filter_id": filter_id }, expect={ "status": "failure", "message": ("Filter to delete 
does not exist " "or belongs to another user!") }) frontend.operation( "deleteextensionhookfilter", data={ "subject": "alice", "filter_id": filter_id }, expect={ "status": "failure", "message": ("Operation not permitted, user " "that lacks role 'administrator'.") }) with frontend.signin("admin"): frontend.operation( "deleteextensionhookfilter", data={ "subject": "alice", "filter_id": filter_id }) result = frontend.operation( "addextensionhookfilter", data={ "subject": "alice", "extension": "alice/TestExtension", "repository": "critic", "filterhook_name": "echo", "path": "015-filterhook/include/", "data": "this is the data" }) filter_id = result["filter_id"] with frontend.signin("alice"): result = frontend.operation( "addextensionhookfilter", data={ "subject": "alice", "extension": "alice/TestExtension", "repository": "critic", "filterhook_name": "echo", "path": "015-filterhook/include/", "data": "this is the data", "replaced_filter_id": filter_id }) filter_id = result["filter_id"] frontend.operation( "deleteextensionhookfilter", data={ "subject": "alice", "filter_id": filter_id }) frontend.operation( "addextensionhookfilter", data={ "subject": "alice", "extension": "alice/TestExtension", "repository": "critic", "filterhook_name": "explode", "path": "/" }, expect={ "status": "failure", "code": "invalidrequest", "message": ("The extension doesn't have a filter " "hook role named 'explode'!") }) frontend.operation( "addextensionhookfilter", data={ "subject": "alice", "extension": "alice/WrongExtension", "repository": "critic", "filterhook_name": "echo", "path": "/" }, expect={ "status": "failure", "code": "invalidextension", "message": ("Invalid or inaccessible extension dir: " "/home/alice/CriticExtensions/WrongExtension") }) frontend.operation( "addextensionhookfilter", data={ "subject": "alice", "extension": 4711, "repository": "critic", "filterhook_name": "echo", "path": "/" }, expect={ "status": "failure", "code": "invalidextension", "message": "Invalid extension 
id: 4711" }) frontend.operation( "uninstallextension", data={ "author_name": "alice", "extension_name": "TestExtension" }) frontend.operation( "addextensionhookfilter", data={ "subject": "alice", "extension": "alice/TestExtension", "repository": "critic", "filterhook_name": "echo", "path": "/" }, expect={ "status": "failure", "code": "invalidrequest", "message": ("The extension \"TestExtension hosted by Alice " "von Testing\" must be installed first!") }) frontend.operation( "installextension", data={ "author_name": "alice", "extension_name": "TestExtension" }) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/016-accesscontrol.py ================================================ # Create an access token, and restrict it to not allow execution of # Alice's TestExtension. with frontend.signin("alice"): access_token = frontend.json( "users/me/accesstokens", post={ "title": "token for 016-accesscontrol.py" }) extension = frontend.json( "extensions", params={ "key": "alice/TestExtension" }) frontend.json( ("users/me/accesstokens/%d/profile/extensions/exceptions" % access_token["id"]), post={ "access_type": "execute", "extension": "alice/TestExtension" }, expect={ "profile/extensions/exceptions": [{ "id": int, "access_type": "execute", "extension": extension["id"] }] }) with frontend.signin(access_token=access_token): # Trying to execute an extension role should give a "403 Forbidden". frontend.page( "echo", expected_http_status=403) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/004-TestExtension/999-missing.py ================================================ document_title = testing.expect.document_title def check_version(expected): def check(actual): testing.expect.check(expected, actual.strip()) return check with frontend.signin("alice"): # Check that alice still has the LIVE version installed. 
frontend.page( "version", expected_content_type="text/plain", expect={ "version": check_version("live") }) instance.execute(["sudo", "rm", "-rf", "~alice/CriticExtensions"]) with frontend.signin("alice"): # Check that the extension is ignored, and that /version just returns 404. frontend.page( "version", expected_http_status=404) # Check that /home, where the extension injects things, loads as expected. frontend.page( "home", expect={ "title": document_title(u"Alice von Testing's Home") }) # Check that /manageextensions also loads as expected. frontend.page( "manageextensions", expect={ "title": document_title(u"Manage Extensions") }) # ... even with what=installed. frontend.page( "manageextensions", params={ "what": "installed" }, expect={ "title": document_title(u"Manage Extensions") }) # Check that the extension can be uninstalled. frontend.operation( "uninstallextension", data={ "author_name": "alice", "extension_name": "TestExtension" }) # Check that /manageextensions still loads. frontend.page( "manageextensions", params={ "what": "installed" }, expect={ "title": document_title(u"Manage Extensions") }) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/005-install-SystemExtension.py ================================================ def check_extension(installed): def check(document): tr_items = document.findAll("tr", attrs={ "class": "item" }) for tr_item in tr_items: td_name = tr_item.find("td", attrs={ "class": "name" }) testing.expect.check("Extension:", td_name.string) td_value = tr_item.find("td", attrs={ "class": "value" }) span_name = td_value.find("span", attrs={ "class": "name" }) if span_name.contents[0].string != "SystemExtension": # Wrong extension. 
continue testing.expect.check(1, len(span_name.contents)) span_installed = td_value.find("span", attrs={ "class": "installed" }) if installed: testing.expect.check(" [installed]", span_installed.string) elif span_installed: testing.expect.check("<no installed indicator>", "<found installed indicator>") return else: testing.expect.check("<SystemExtension entry>", "<expected content not found>") return check def check_helloworld(document): testing.expect.check("Hello world!\n", document) try: instance.execute( ["sudo", "mkdir", "/var/lib/critic/extensions", "&&", "sudo", "cp", "-R", "~/critic/testing/input/SystemExtension", "/var/lib/critic/extensions", "&&", "sudo", "chown", "-R", "critic.critic", "/var/lib/critic/extensions", "&&", "sudo", "chmod", "-R", "u+rwX,go+rX", "/var/lib/critic/extensions"]) except testing.InstanceError as error: raise testing.TestFailure(error.message) with frontend.signin("alice"): frontend.page( "manageextensions", expect={ "system_extension": check_extension(installed=False) }) frontend.operation( "installextension", data={ "extension_name": "SystemExtension" }) frontend.page( "manageextensions", expect={ "system_extension": check_extension(installed=True) }) frontend.operation( "check", data={}) frontend.page( "extension-resource/SystemExtension/HelloWorld.txt", expected_content_type="text/plain", expect={ "hello_world": check_helloworld }) frontend.page( "manageextensions", expect={ "system_extension": check_extension(False) }) ================================================ FILE: testing/tests/001-main/004-extensions/002-tests/006-manifest-checks.py ================================================ import os import tempfile instance.execute( ["mkdir", "-p", "~/CriticExtensions/InvalidExtension", "&&", "chmod", "u+rwX,go+rX", "~/CriticExtensions"], as_user="alice") EXTENSION_PATH = "/home/alice/CriticExtensions/InvalidExtension" MANIFEST_PATH = os.path.join(EXTENSION_PATH, "MANIFEST") class TransferredFile(object): def __init__(self, 
target_name, source): self.target_path = os.path.join(EXTENSION_PATH, target_name) self.source = source def __enter__(self, *args): source_file = tempfile.NamedTemporaryFile() source_file.write(self.source) source_file.flush() instance.copyto(source_file.name, self.target_path, as_user="alice") instance.execute(["chmod", "g+r", self.target_path], as_user="alice") source_file.close() return self def __exit__(self, *args): instance.execute(["rm", "-f", self.target_path], as_user="alice") def error_message(linenr, message): expected = "%s:%d: manifest error: %s" % (MANIFEST_PATH, linenr, message) def check(actual): testing.expect.check(expected, actual) return check def injected_script(document): scripts = document.findAll("script") expected = "<injected script>" actual = "<expected content not found>" for script in scripts: if script["src"] == "injected": actual = expected testing.expect.check(expected, actual) script_js = TransferredFile("script.js", """\ function page(method, path, query) { writeln("200"); writeln("Content-Type: text/json"); writeln(); writeln("%r", { status: 'ok', method: method, path: path, query: query }); } function inject(path, query) { writeln("script %r", "injected"); } """) with frontend.signin("alice"): with TransferredFile("MANIFEST", """\ Author = Alice von Testing <alice@example.org> Description = Extension with invalid MANIFEST [Page /foo] Description = Page role with invalid pattern Script = script.js Function = page """): frontend.page( "loadmanifest", params={ "key": "alice/InvalidExtension" }, expected_content_type="text/plain", expect={ "error_message": error_message(4, "path pattern should not start with a '/'") }) with script_js, TransferredFile("MANIFEST", """\ Author = Alice von Testing <alice@example.org> Description = Extension with soon to be missing MANIFEST [Page foo] Description = Dummy page role Script = script.js Function = page [Inject tutorial] Description = Dummy page role Script = script.js Function = inject """): 
frontend.operation( "installextension", data={ "extension_name": "InvalidExtension", "author_name": "alice" }) frontend.operation( "foo", data={}, expect={ "method": "POST", "path": "foo" }) frontend.page( "tutorial", expect={ "injected_script": injected_script }) frontend.page( "foo", expected_http_status=404) frontend.page("tutorial") frontend.operation( "uninstallextension", data={ "extension_name": "InvalidExtension", "author_name": "alice" }) with TransferredFile("MANIFEST", """\ Author = Alice von Testing <alice@example.org> Description = Soon to be inaccessible extension [Page foo] Description = Dummy page role Script = script.js Function = page """): frontend.operation( "installextension", data={ "extension_name": "InvalidExtension", "author_name": "alice" }) instance.execute( ["chmod", "go-rx", "~/CriticExtensions/InvalidExtension"], as_user="alice") frontend.page( "foo", expected_http_status=404) frontend.operation( "uninstallextension", data={ "extension_name": "InvalidExtension", "author_name": "alice" }) ================================================ FILE: testing/tests/001-main/004-extensions/__init__.py ================================================ # @flag extensions ================================================ FILE: testing/tests/001-main/005-unittests/001-local/001-independence.py ================================================ # These tests simply check that some modules can be imported. 
instance.unittest("base", ["independence"]) instance.unittest("dbutils", ["independence"]) instance.unittest("textutils", ["independence"]) instance.unittest("htmlutils", ["independence"]) instance.unittest("operation", ["independence"]) instance.unittest("extensions", ["independence"]) ================================================ FILE: testing/tests/001-main/005-unittests/001-local/002-operation.py ================================================ # @dependency 001-main/005-unittests/001-local/001-independence.py # @flag local # These tests simply check that some modules can be imported. instance.unittest("operation.basictypes", ["basic"]) instance.unittest("operation.typechecker", ["basic"]) ================================================ FILE: testing/tests/001-main/005-unittests/001-local/005-dbutils.database.py ================================================ instance.unittest("dbutils.database", ["analyzeQuery"]) ================================================ FILE: testing/tests/001-main/005-unittests/001-local/__init__.py ================================================ # @dependency none # @flag local ================================================ FILE: testing/tests/001-main/005-unittests/002-api/001-commit.py ================================================ # @dependency 001-main/002-createrepository.py instance.unittest("api.commit", ["basic"]) ================================================ FILE: testing/tests/001-main/005-unittests/002-api/002-review.py ================================================ # @dependency 001-main/003-self/004-createreview.py instance.unittest("api.review", ["basic"]) ================================================ FILE: testing/tests/001-main/005-unittests/002-api/003-user.py ================================================ # @dependency 001-main/001-empty/003-criticctl/002-adduser-deluser.py # @dependency 001-main/001-empty/004-mixed/003-oauth.py # @dependency 001-main/001-empty/004-mixed/004-password.py # 
@dependency 001-main/003-self/028-gitemails.py args = [] if not testing.has_flag(instance.install_commit, "reliable-git-emails"): args.append("--unreliable-git-emails") if not testing.has_flag(instance.install_commit, "reliable-admin-newswriter"): args.append("--unreliable-admin-newswriter") instance.unittest("api.user", ["basic"], args) settings_per_user = testing.utils.settings( "alice", { "commit.diff.visualTabs": True }) settings_per_repository = testing.utils.settings( "alice", { "commit.expandAllFiles": True }, repository="critic") with settings_per_user, settings_per_repository: instance.unittest("api.user", ["preferences"]) ================================================ FILE: testing/tests/001-main/005-unittests/002-api/004-config.py ================================================ # @dependency 001-main/000-install.py instance.unittest("api.config", ["basic"]) ================================================ FILE: testing/tests/001-main/005-unittests/002-api/005-log.partition.py ================================================ # @dependency 001-main/003-self/020-reviewrebase.py instance.unittest("api.log.partition", ["basic"]) ================================================ FILE: testing/tests/001-main/005-unittests/002-api/006-log.rebase.py ================================================ # @dependency 001-main/003-self/012-replayrebase.py # @dependency 001-main/003-self/020-reviewrebase.py instance.unittest("api.log.rebase", ["basic"]) ================================================ FILE: testing/tests/001-main/005-unittests/002-api/007-repository.py ================================================ # @dependency 001-main/003-self/028-gitemails.py HEAD = repository.run(["rev-parse", "HEAD"]).strip() SHA1 = "66f25ae79dcc5e200b136388771b5924a1b5ae56" with repository.workcopy() as work: REMOTE_URL = instance.repository_url("alice") work.run(["tag", "007-repository/simple-tag"]) work.run(["push", REMOTE_URL, "007-repository/simple-tag"]) work.run(["tag", 
"-mAnnotated", "007-repository/annotated-tag"]) work.run(["push", REMOTE_URL, "007-repository/annotated-tag"]) try: instance.unittest("api.repository", ["basic"], args=["--head=" + HEAD, "--sha1=" + SHA1, "--path=" + instance.repository_path()]) finally: work.run(["push", REMOTE_URL, ":007-repository/simple-tag", ":007-repository/annotated-tag"]) ================================================ FILE: testing/tests/001-main/005-unittests/002-api/008-branch.py ================================================ # @dependency 001-main/002-createrepository.py SHA1 = "66f25ae79dcc5e200b136388771b5924a1b5ae56" with repository.workcopy() as work: REMOTE_URL = instance.repository_url("alice") work.run(["checkout", "-b", "008-branch", SHA1]) work.run(["rebase", "--force-rebase", "HEAD~5"]) work.run(["push", REMOTE_URL, "008-branch"]) sha1 = work.run(["rev-parse", "HEAD"]).strip() try: instance.unittest("api.branch", ["basic"], args=["--sha1=" + sha1, "--name=008-branch"]) finally: work.run(["push", REMOTE_URL, ":008-branch"]) ================================================ FILE: testing/tests/001-main/005-unittests/002-api/009-commitset.py ================================================ # @dependency 001-main/002-createrepository.py import os import time with repository.workcopy() as work: REMOTE_URL = instance.repository_url("alice") os.mkdir(os.path.join(work.path, "009-commitset")) # Generate this set of commits: # # (X) # | # A # / \ # C B (Y) # | |\ / # D | G # \ / | # E H # | |\ # F K \ # \ / I # M | # | J # N # | # L # # Commits are named in (committer date) chronological order; A is oldest, M # is youngest. X and Y are the tails of the set. 
def commit(letter, delta=0):
    """Create and commit a one-line file named `letter`.

    The committer date is derived from the module-level `timestamp` (plus
    `delta` seconds) so the test's chronological commit order is fully
    deterministic.  The resulting SHA-1 is recorded in `commits[letter]`.
    """
    global timestamp
    relative_path = os.path.join("009-commitset", letter)
    # Write and close the file before staging it, so the content is flushed
    # when "git add" reads it.
    with open(os.path.join(work.path, relative_path), "w") as new_file:
        new_file.write(letter + "\n")
    work.run(["add", relative_path])
    work.run(["commit", "-m" + letter],
             GIT_COMMITTER_DATE="%d +0000" % (timestamp + delta))
    timestamp += 10
    commits[letter] = work.run(["rev-parse", "HEAD"]).strip()
testing/tests/001-main/005-unittests/002-api/011-reply.py ================================================ # @dependency 001-main/003-self/100-reviewing/001-comments.basic.py args = ["--review=r/100-reviewing/001-comment.basic"] instance.unittest("api.reply", ["basic"], args) ================================================ FILE: testing/tests/001-main/005-unittests/002-api/012-changeset.py ================================================ instance.unittest("api.changeset", ["pre"]) instance.synchronize_service("changeset") # wait for changeset creation to finish instance.unittest("api.changeset", ["post"]) ================================================ FILE: testing/tests/001-main/005-unittests/002-api/013-filechange.py ================================================ instance.unittest("api.filechange", ["pre"]) instance.synchronize_service("changeset") # wait for changeset creation to finish instance.unittest("api.filechange", ["post"]) ================================================ FILE: testing/tests/001-main/005-unittests/002-api/014-filediff.py ================================================ instance.unittest("api.filediff", ["pre1"]) instance.synchronize_service("changeset") # wait for changeset creation to finish instance.unittest("api.filediff", ["pre2"]) instance.synchronize_service("highlight") # wait for syntax highlighting to finish instance.unittest("api.filediff", ["post"]) ================================================ FILE: testing/tests/001-main/005-unittests/003-other/001-dbutils.database.py ================================================ instance.unittest("dbutils.database", ["cursors"]) ================================================ FILE: testing/tests/001-main/005-unittests/003-other/__init__.py ================================================ # @dependency 001-main/000-install.py ================================================ FILE: testing/tests/001-main/900-uninstall-reinstall.py ================================================ # 
@flag uninstall # Uninstall Critic. instance.uninstall() # Delete the repository clone (the install() call recreates it.) instance.execute(["rm", "-rf", "critic"]) # Install (and upgrade, optionally) Critic with the default arguments. instance.install(repository, other_cwd=True) instance.upgrade() ================================================ FILE: testing/tools/__init__.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. ================================================ FILE: testing/tools/install.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import argparse import subprocess import testing def main(): parser = argparse.ArgumentParser( description="Critic testing framework: Quick install utility") parser.add_argument("--debug", help="Enable DEBUG level logging", action="store_true") parser.add_argument("--quiet", help="Disable INFO level logging", action="store_true") parser.add_argument("--commit", default="HEAD", help="Commit (symbolic ref or SHA-1) to test [default=HEAD]") parser.add_argument("--upgrade-from", help="Commit (symbolic ref or SHA-1) to install first and upgrade from") parser.add_argument("--vm-identifier", required=True, help="VirtualBox instance name or UUID") parser.add_argument("--vm-hostname", help="VirtualBox instance hostname [default=VM_IDENTIFIER]") parser.add_argument("--vm-snapshot", default="clean", help="VirtualBox snapshot (name or UUID) to upgrade [default=clean]") parser.add_argument("--vm-ssh-port", type=int, default=22, help="VirtualBox instance SSH port [default=22]") parser.add_argument("--git-daemon-port", type=int, help="Port to tell 'git daemon' to bind to") parser.add_argument("--interactive", "-i", action="store_true", help="Install interactively (without arguments)") arguments = parser.parse_args() logger = testing.configureLogging(arguments) logger.info("Critic testing framework: Quick install") tested_commit = subprocess.check_output( ["git", "rev-parse", "--verify", arguments.commit]).strip() if arguments.upgrade_from: install_commit = subprocess.check_output( ["git", "rev-parse", "--verify", arguments.upgrade_from]).strip() upgrade_commit = tested_commit else: install_commit = tested_commit upgrade_commit = None install_commit_description = subprocess.check_output( ["git", "log", "--oneline", "-1", install_commit]).strip() if upgrade_commit: upgrade_commit_description = subprocess.check_output( ["git", "log", "--oneline", "-1", upgrade_commit]).strip() else: upgrade_commit_description = None instance = testing.virtualbox.Instance( arguments, 
install_commit=(install_commit, install_commit_description), upgrade_commit=(upgrade_commit, upgrade_commit_description)) repository = testing.repository.Repository( arguments.git_daemon_port, install_commit, arguments.vm_hostname) mailbox = testing.mailbox.Mailbox() with repository, mailbox, instance: if not repository.export(): return instance.mailbox = mailbox instance.start() if arguments.interactive: print """ Note: To use the simple SMTP server built into the Critic testing framework, enter "host" as the SMTP host and "%d" as the SMTP port. Also note: The administrator user's password will be "testing" (password input doesn't work over this channel.)""" % mailbox.port instance.install(repository, quick=True, interactive=arguments.interactive) instance.upgrade(interactive=arguments.interactive) testing.pause("Press ENTER to stop VM: ") try: while True: mail = mailbox.pop() logger.info("Mail to <%s>:\n%s" % (mail.recipient, mail)) except testing.mailbox.MissingMail: pass if __name__ == "__main__": main() ================================================ FILE: testing/tools/upgrade.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2013 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
import argparse import time import testing def main(): parser = argparse.ArgumentParser( description="Critic testing framework: instance upgrade utility") parser.add_argument("--debug", help="Enable DEBUG level logging", action="store_true") parser.add_argument("--quiet", help="Disable INFO level logging", action="store_true") parser.add_argument("--vm-identifier", help="VirtualBox instance name or UUID", required=True) parser.add_argument("--vm-hostname", help="VirtualBox instance hostname [default=VM_IDENTIFIER]") parser.add_argument("--vm-snapshot", help="VirtualBox snapshot (name or UUID) to upgrade", default="clean") parser.add_argument("--vm-ssh-port", help="VirtualBox instance SSH port [default=22]", type=int, default=22) parser.add_argument("--pause-before-upgrade", help="Pause before upgrading", action="store_true") parser.add_argument("--pause-after-upgrade", help="Pause after upgrading", action="store_true") parser.add_argument("--no-upgrade", action="store_true", help="Do not upgrade installed packages") parser.add_argument("--install", action="append", help="Install named package") parser.add_argument("--custom", action="store_true", help="Stop for custom maintenance, and always retake snapshot") parser.add_argument("--reboot", action="store_true", help="Reboot VM before retaking snapshot") arguments = parser.parse_args() logger = testing.configureLogging(arguments) logger.info("Critic Testing Framework: Instance Upgrade") instance = testing.virtualbox.Instance(arguments) with instance: instance.start() if not arguments.no_upgrade: logger.debug("Upgrading guest OS ...") update_output = instance.execute( ["sudo", "DEBIAN_FRONTEND=noninteractive", "apt-get", "-q", "-y", "update"]) logger.debug("Output from 'apt-get -q -y update':\n" + update_output) upgrade_output = instance.execute( ["sudo", "DEBIAN_FRONTEND=noninteractive", "apt-get", "-q", "-y", "upgrade"]) logger.debug("Output from 'apt-get -q -y upgrade':\n" + upgrade_output) retake_snapshot = False 
if "The following packages will be upgraded:" in upgrade_output.splitlines(): retake_snapshot = True if arguments.install: install_output = instance.execute( ["sudo", "DEBIAN_FRONTEND=noninteractive", "apt-get", "-q", "-y", "install"] + arguments.install) logger.debug("Output from 'apt-get -q -y install':\n" + install_output) retake_snapshot = True if arguments.custom: testing.pause() retake_snapshot = True if retake_snapshot: if arguments.reboot: instance.execute(["sudo", "reboot"]) logger.debug("Sleeping 10 seconds ...") time.sleep(10) instance.wait() logger.debug("Sleeping 10 seconds ...") time.sleep(10) logger.info("Rebooted VM: %s" % arguments.vm_identifier) logger.info("Upgraded guest OS") logger.debug("Retaking snapshot ...") instance.retake_snapshot(arguments.vm_snapshot) logger.info("Snapshot '%s' upgraded!" % arguments.vm_snapshot) else: logger.info("No packages upgraded in guest OS") if __name__ == "__main__": main() ================================================ FILE: testing/utils.py ================================================ import re import contextlib instance = None frontend = None RE_REVIEW_URL = re.compile(r"^remote:\s+http://.*/r/(\d+)\s*$") @contextlib.contextmanager def settings(user, settings, repository=None): data = { "settings": [{ "item": item, "value": value } for item, value in settings.items()] } if repository: data["repository"] = repository # Set requested settings. with frontend.signin(user): frontend.operation("savesettings", data=data) try: yield finally: data = { "settings": [{ "item": item } for item, value in settings.items()] } if repository: data["repository"] = repository # Reset settings back to the default. 
def createReviewViaPush(work, owner, commit="HEAD"):
    """Create a review by pushing HEAD from work copy `work` as `owner`.

    Temporarily enables the owner's `review.createViaPush` preference,
    pushes HEAD to the instance's repository, and scans the push output for
    the review URL emitted by the remote hook (matched by RE_REVIEW_URL).

    Returns the numeric review id; fails the test via testing.expect.check()
    if no review URL appears in the output.
    """
    # BUGFIX: this module only imports `re` and `contextlib`, so the
    # original failure path raised a NameError on `testing` instead of
    # reporting the expected test failure.  Import it locally.
    import testing.expect

    with settings(owner, { "review.createViaPush": True }):
        remote_url = instance.repository_url(owner)
        # TERM=dumb keeps the remote side from emitting terminal control
        # sequences that would break the line-by-line scan below.
        output = work.run(["push", remote_url, "HEAD"], TERM="dumb")

        for line in output.splitlines():
            match = RE_REVIEW_URL.match(line)
            if match:
                return int(match.group(1))
        else:
            testing.expect.check("<review URL in 'git push' output>",
                                 "<no review URL found>")
def setnonblocking(fd):
    """Switch the file descriptor `fd` into non-blocking mode.

    Fetches the descriptor's current status flags and ORs in O_NONBLOCK,
    leaving every other flag untouched.
    """
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
self.__started = False self.__installed = False self.__upgraded = False self.resetusers() self.registeruser("admin") def __enter__(self): return self def __exit__(self, *args): if self.__started: self.stop() self.__reset() return False def __vmcommand(self, command, *arguments): argv = ["VBoxManage", command, self.identifier] + list(arguments) try: testing.logger.debug("Running: " + " ".join(argv)) return subprocess.check_output(argv, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as error: raise HostCommandError(argv, error.output) def __isincluded(self, output): name = '"%s"' % self.identifier uuid = '{%s}' % self.identifier for line in output.splitlines(): if name in line or uuid in line: return True else: return False def isrunning(self): output = subprocess.check_output( ["VBoxManage", "list", "runningvms"], stderr=subprocess.STDOUT) return self.__isincluded(output) def state(self): output = self.__vmcommand("showvminfo", "--machinereadable") for line in output.splitlines(): if line.startswith("VMState="): return eval(line[len("VMState="):]) return "<not found>" def count_snapshots(self, identifier): try: output = subprocess.check_output( ["VBoxManage", "snapshot", self.identifier, "list"], stderr=subprocess.STDOUT) except subprocess.CalledProcessError: # Assuming we've already checked that 'self.identifier' is a valid # VM identifier, the most likely cause of this failure is that the # VM has no snapshots. return 0 else: name = "Name: %s (" % identifier uuid = "(UUID: %s)" % identifier count = 0 for line in output.splitlines(): if name in line or uuid in line: count += 1 return count def wait(self): testing.logger.debug("Waiting for VM to come online ...") while True: try: self.execute(["true"], timeout=1) except GuestCommandError: time.sleep(0.5) else: break def start(self): testing.logger.debug("Starting VM: %s ..." 
% self.identifier) self.__vmcommand("snapshot", "restore", self.snapshot) self.__vmcommand("startvm", "--type", "headless") self.__started = True self.wait() # Set the guest system's clock to match the host system's. Since we're # restoring the same snapshot over and over, the guest system's clock is # probably quite far from the truth. now = datetime.datetime.utcnow().strftime("%m%d%H%M%Y.%S") self.execute(["sudo", "date", "--utc", now]) testing.logger.info("Started VM: %s" % self.identifier) def stop(self): testing.logger.debug("Stopping VM: %s ..." % self.identifier) self.__vmcommand("controlvm", "poweroff") while self.state() != "poweroff": time.sleep(0.1) # It appears the VirtualBox "session" can be locked for a while after # the "controlvm poweroff" command, and possibly after the VM state # changes to "poweroff", so sleep a little longer to avoid problems. time.sleep(0.5) testing.logger.info("Stopped VM: %s" % self.identifier) def retake_snapshot(self, name): index = 1 while True: temporary_name = "%s-%d" % (name, index) if self.count_snapshots(temporary_name) == 0: break index += 1 self.__vmcommand("snapshot", "take", temporary_name, "--pause") self.__vmcommand("snapshot", "delete", name) self.__vmcommand("snapshot", "edit", temporary_name, "--name", name) def execute(self, argv, cwd=None, timeout=None, interactive=False, as_user=None, log_stdout=True, log_stderr=True): guest_argv = list(argv) if cwd is not None: guest_argv[:0] = ["cd", cwd, "&&"] host_argv = ["ssh"] if self.ssh_port != 22: host_argv.extend(["-p", str(self.ssh_port)]) if timeout is not None: host_argv.extend(["-o", "ConnectTimeout=%d" % timeout]) if not interactive: host_argv.append("-n") if as_user is not None: host_argv.extend(["-l", as_user]) host_argv.append(self.hostname) testing.logger.debug("Running: " + " ".join(host_argv + guest_argv)) process = subprocess.Popen( host_argv + guest_argv, stdout=subprocess.PIPE if not interactive else None, stderr=subprocess.PIPE if not interactive 
else None) class BufferedLineReader(object): def __init__(self, source): self.source = source self.buffer = "" def readline(self): try: while self.source is not None: try: line, self.buffer = self.buffer.split("\n", 1) except ValueError: pass else: return line + "\n" data = self.source.read(1024) if not data: self.source = None break self.buffer += data line = self.buffer self.buffer = "" return line except IOError as error: if error.errno == errno.EAGAIN: return None raise stdout_data = "" stdout_reader = BufferedLineReader(process.stdout) stderr_data = "" stderr_reader = BufferedLineReader(process.stderr) if not interactive: setnonblocking(process.stdout) setnonblocking(process.stderr) poll = select.poll() poll.register(process.stdout) poll.register(process.stderr) stdout_done = False stderr_done = False while not (stdout_done and stderr_done): poll.poll() while not stdout_done: line = stdout_reader.readline() if line is None: break elif not line: poll.unregister(process.stdout) stdout_done = True break else: stdout_data += line if log_stdout: testing.logger.log(testing.STDOUT, line.rstrip("\n")) while not stderr_done: line = stderr_reader.readline() if line is None: break elif not line: poll.unregister(process.stderr) stderr_done = True break else: stderr_data += line if log_stderr: testing.logger.log(testing.STDERR, line.rstrip("\n")) process.wait() if process.returncode != 0: raise GuestCommandError(argv, stdout_data, stderr_data) return stdout_data def copyto(self, source, target, as_user=None): target = "%s:%s" % (self.hostname, target) if as_user: target = "%s@%s" % (as_user, target) argv = ["scp", "-q", "-P", str(self.ssh_port), source, target] try: testing.logger.debug("Running: " + " ".join(argv)) return subprocess.check_output(argv, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as error: raise GuestCommandError(argv, error.output) def copyfrom(self, source, target, as_user=None): source = "%s:%s" % (self.hostname, source) if as_user: 
source = "%s@%s" % (as_user, source) argv = ["scp", "-q", "-P", str(self.ssh_port), source, target] try: testing.logger.debug("Running: " + " ".join(argv)) return subprocess.check_output(argv, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as error: raise GuestCommandError(argv, error.output) def criticctl(self, argv): try: return self.execute(["sudo", "criticctl"] + argv) except GuestCommandError as error: raise testing.CriticctlError(error.command, error.stdout, error.stderr) def adduser(self, name, email=None, fullname=None, password=None): if email is None: email = "%s@example.org" % name if fullname is None: fullname = "%s von Testing" % name.capitalize() if password is None: password = "testing" self.execute([ "sudo", "criticctl", "adduser", "--name", name, "--email", email, "--fullname", "'%s'" % fullname, "--password", password, "&&", "sudo", "adduser", "--ingroup", "critic", "--disabled-password", "--gecos", "''", name]) # Running all commands with a single self.execute() call is just an # optimization; SSH sessions are fairly expensive to start. 
self.execute([ "sudo", "mkdir", ".ssh", "&&", "sudo", "cp", "$HOME/.ssh/authorized_keys", ".ssh/", "&&", "sudo", "chown", "-R", name, ".ssh/", "&&", "sudo", "-H", "-u", name, "git", "config", "--global", "user.name", "'%s'" % fullname, "&&", "sudo", "-H", "-u", name, "git", "config", "--global", "user.email", email], cwd="/home/%s" % name) self.registeruser(name) def has_flag(self, flag): if self.upgrade_commit and self.__upgraded: check_commit = self.upgrade_commit else: check_commit = self.install_commit return testing.has_flag(check_commit, flag) def repository_path(self, repository="critic"): return "/var/git/%s.git" % repository def repository_url(self, name=None, repository="critic"): if name is None: user_prefix = "" else: user_prefix = name + "@" return "%s%s:/var/git/%s.git" % (user_prefix, self.hostname, repository) def restrict_access(self): if not self.strict_fs_permissions: return # Set restrictive access bits on home directory of the installing user # and of root, to make sure that no part of Critic's installation # process, or the background processes started by it, depend on being # able to access them as the Critic system user. self.execute(["sudo", "chmod", "-R", "go-rwx", "$HOME", "/root"]) # Running install.py may have left files owned by root in $HOME. The # command above will have made them inaccessible for sure, so change # the ownership back to us. 
self.execute(["sudo", "chown", "-R", "$LOGNAME", "$HOME"]) def install(self, repository, override_arguments={}, other_cwd=False, quick=False, interactive=False): testing.logger.debug("Installing Critic ...") if not interactive: use_arguments = { "--headless": True, "--system-hostname": self.hostname, "--auth-mode": "critic", "--session-type": "cookie", "--admin-username": "admin", "--admin-email": "admin@example.org", "--admin-fullname": "'Testing Administrator'", "--admin-password": "testing", "--smtp-host": self.vboxhost, "--smtp-port": str(self.mailbox.port), "--smtp-no-ssl-tls": True, "--skip-testmail-check": True } if self.mailbox.credentials: use_arguments["--smtp-username"] = self.mailbox.credentials["username"] use_arguments["--smtp-password"] = self.mailbox.credentials["password"] if self.coverage: use_arguments["--coverage-dir"] = COVERAGE_DIR if self.has_flag("system-recipients"): use_arguments["--system-recipient"] = "system@example.org" else: use_arguments = { "--admin-password": "testing" } if self.has_flag("minimum-password-hash-time"): use_arguments["--minimum-password-hash-time"] = "0.01" if self.has_flag("is-testing"): use_arguments["--is-testing"] = True if self.has_flag("web-server-integration") and self.arguments.vm_web_server: use_arguments["--web-server-integration"] = self.arguments.vm_web_server for name, value in override_arguments.items(): if value is None: if name in use_arguments: del use_arguments[name] else: use_arguments[name] = value arguments = [] for name, value in use_arguments.items(): arguments.append(name) if value is not True: arguments.append(value) # First install (if necessary) Git. 
try: self.execute(["git", "--version"]) except GuestCommandError: testing.logger.debug("Installing Git ...") self.execute(["sudo", "DEBIAN_FRONTEND=noninteractive", "apt-get", "-qq", "update"]) self.execute(["sudo", "DEBIAN_FRONTEND=noninteractive", "apt-get", "-qq", "-y", "install", "git-core"]) testing.logger.info("Installed Git: %s" % self.execute(["git", "--version"]).strip()) self.execute(["git", "clone", repository.url, "critic"]) self.execute(["git", "fetch", "--quiet", "&&", "git", "checkout", "--quiet", self.install_commit], cwd="critic") if self.upgrade_commit: output = subprocess.check_output( ["git", "log", "--oneline", self.install_commit, "--", "background/servicemanager.py"]) for line in output.splitlines(): sha1, subject = line.split(" ", 1) if subject == "Make sure background services run with correct $HOME": self.restrict_access() break else: self.restrict_access() if other_cwd and self.has_flag("pwd-independence"): install_py = "critic/install.py" cwd = None else: install_py = "install.py" cwd = "critic" self.execute( ["sudo", "python", "-u", install_py] + arguments, cwd=cwd, interactive="--headless" not in use_arguments) if not quick: try: testmail = self.mailbox.pop( testing.mailbox.WithSubject("Test email from Critic")) if not testmail: testing.expect.check("<test email>", "<no test email received>") else: testing.expect.check("admin@example.org", testmail.header("To")) testing.expect.check("This is the configuration test email from Critic.", "\n".join(testmail.lines)) self.mailbox.check_empty() self.check_service_logs() except testing.TestFailure as error: if error.message: testing.logger.error("Basic test: %s" % error.message) # If basic tests fail, there's no reason to further test this # instance; it seems to be properly broken. raise testing.InstanceError # Add "developer" role to get stacktraces in error messages. self.execute(["sudo", "criticctl", "addrole", "--name", "admin", "--role", "developer"]) # Add some regular users. 
for name in ("alice", "bob", "dave", "erin"): self.adduser(name) self.adduser("howard") self.execute(["sudo", "criticctl", "addrole", "--name", "howard", "--role", "newswriter"]) self.current_commit = self.install_commit if not quick: try: self.frontend.run_basic_tests() self.mailbox.check_empty() except testing.TestFailure as error: if error.message: testing.logger.error("Basic test: %s" % error.message) # If basic tests fail, there's no reason to further test this # instance; it seems to be properly broken. raise testing.InstanceError testing.logger.info("Installed Critic: %s" % self.install_commit_description) self.__installed = True def check_upgrade(self): if not self.upgrade_commit: raise testing.NotSupported("--upgrade-from argument not given") def upgrade(self, override_arguments={}, other_cwd=False, quick=False, interactive=False, is_after_test=False): if self.upgrade_commit \ and self.upgrade_commit != self.current_commit \ and (is_after_test or not self.arguments.upgrade_after): testing.logger.debug("Upgrading Critic ...") self.restrict_access() if not interactive: use_arguments = { "--headless": True } else: use_arguments = {} if not self.has_flag("minimum-password-hash-time"): use_arguments["--minimum-password-hash-time"] = "0.01" if not self.has_flag("is-testing"): use_arguments["--is-testing"] = True if not self.has_flag("system-recipients"): use_arguments["--system-recipient"] = "system@example.org" for name, value in override_arguments.items(): if value is None: if name in use_arguments: del use_arguments[name] else: use_arguments[name] = value arguments = [] for name, value in use_arguments.items(): arguments.append(name) if value is not True: arguments.append(value) self.execute(["git", "checkout", self.upgrade_commit], cwd="critic") self.execute(["git", "submodule", "update", "--recursive"], cwd="critic") # Setting this will make has_flag() from now on (including when used # in the rest of this function) check the upgraded-to commit rather # 
than the initially installed commit. self.__upgraded = True if other_cwd and self.has_flag("pwd-independence"): upgrade_py = "critic/upgrade.py" cwd = None else: upgrade_py = "upgrade.py" cwd = "critic" self.execute(["sudo", "python", "-u", upgrade_py] + arguments, cwd=cwd, interactive="--headless" not in use_arguments) self.current_commit = self.upgrade_commit if not quick: self.frontend.run_basic_tests() testing.logger.info("Upgraded Critic: %s" % self.upgrade_commit_description) def check_extend(self, repository, pre_upgrade=False): commit = self.install_commit if pre_upgrade else self.tested_commit if not testing.exists_at(commit, "extend.py"): raise testing.NotSupported("tested commit lacks extend.py") if not self.arguments.test_extensions: raise testing.NotSupported("--test-extensions argument not given") if not repository.v8_jsshell_path: raise testing.NotSupported("v8-jsshell sub-module not initialized") def extend(self, repository): self.check_extend(repository) testing.logger.debug("Extending Critic ...") def internal(action, extra_argv=None): argv = ["sudo", "python", "-u", "extend.py", "--headless", "--%s" % action] if extra_argv: argv.extend(extra_argv) self.execute(argv, cwd="critic") internal("prereqs", ["--libcurl-flavor=gnutls"]) submodule_path = "installation/externals/v8-jsshell" v8_jsshell_sha1 = testing.repository.submodule_sha1( os.getcwd(), self.current_commit, submodule_path) cached_executable = os.path.join(self.arguments.cache_dir, self.identifier, "v8-jsshell", v8_jsshell_sha1 + "-gnutls") if self.upgrade_commit is not None \ and self.install_commit == self.current_commit \ and self.upgrade_commit != self.current_commit: # We're extending before upgrading. Don't use a cached executable # now if the upgrade changes the sub-module reference, since this # breaks upgrade.py's automatic invocation of extend.py. 
upgraded_v8_jsshell_sha1 = testing.repository.submodule_sha1( os.getcwd(), self.upgrade_commit, submodule_path) if upgraded_v8_jsshell_sha1 != v8_jsshell_sha1: testing.logger.debug("Caching of v8-jsshell disabled") cached_executable = None if cached_executable and os.path.isfile(cached_executable): self.execute(["mkdir", "installation/externals/v8-jsshell/out"], cwd="critic") self.copyto(cached_executable, "critic/installation/externals/v8-jsshell/out/jsshell") testing.logger.debug("Copied cached v8-jsshell executable to instance") else: if repository.v8_url: extra_argv = ["--with-v8=%s" % repository.v8_url] else: extra_argv = None internal("fetch", extra_argv) # v8_sha1 = subprocess.check_output( # ["git", "ls-tree", "HEAD", "v8"], # cwd="installation/externals/v8-jsshell").split()[2] # cached_v8deps = os.path.join(self.arguments.cache_dir, # "v8-dependencies", # "%s.tar.bz2" % v8_sha1) # if os.path.isfile(cached_v8deps): # self.copyto(cached_v8deps, "v8deps.tar.bz2") # internal("import-v8-dependencies=~/v8deps.tar.bz2") # else: # internal("export-v8-dependencies=~/v8deps.tar.bz2") # if not os.path.isdir(os.path.dirname(cached_v8deps)): # os.makedirs(os.path.dirname(cached_v8deps)) # self.copyfrom("v8deps.tar.bz2", cached_v8deps) internal("build") if cached_executable: if not os.path.isdir(os.path.dirname(cached_executable)): os.makedirs(os.path.dirname(cached_executable)) self.copyfrom("critic/installation/externals/v8-jsshell/out/jsshell", cached_executable) testing.logger.debug("Copied built v8-jsshell executable from instance") internal("install") internal("enable") self.frontend.run_basic_tests() testing.logger.info("Extensions enabled") def restart(self): self.execute(["sudo", "criticctl", "restart"]) def uninstall(self): self.execute( ["sudo", "python", "uninstall.py", "--headless", "--keep-going"], cwd="critic") # Delete the regular users. 
for name in ("alice", "bob", "dave", "erin"): self.execute(["sudo", "deluser", "--remove-home", name]) self.execute(["sudo", "deluser", "--remove-home", "howard"]) self.__installed = False self.__upgraded = False def finish(self): if not self.__started: return if self.__installed: self.execute(["sudo", "criticctl", "stop"]) if self.coverage: sys.stdout.write(self.execute( ["sudo", "python", "coverage.py", "--coverage-dir", COVERAGE_DIR, "--critic-dir", "/etc/critic/main", "--critic-dir", "/usr/share/critic"], cwd="/usr/share/critic")) # Check that we didn't leave any files owned by root anywhere in the # directory we installed from. self.execute(["chmod", "-R", "a+r", "critic"]) self.execute(["rm", "-r", "critic"]) def run_unittest(self, args): if self.coverage: args = ["--coverage"] + args return self.execute( ["cd", "/usr/share/critic", "&&", "sudo", "-H", "-u", "critic", "PYTHONPATH=/etc/critic/main:/usr/share/critic", "python", "-u", "-m", "run_unittest"] + args, log_stderr=False) def gc(self, repository): self.execute(["git", "gc", "--prune=now"], cwd=os.path.join("/var/git", repository), as_user="alice") def synchronize_service(self, service_name, force_maintenance=False, timeout=30): helper = "testing/input/service_synchronization_helper.py" if not (self.__upgraded or testing.exists_at(self.install_commit, helper)): # We're upgrading from a commit where background services don't # support synchronization, and haven't upgraded yet. Sleep a (long) # while and pray that the service is idle when we wake up. 
testing.logger.debug("Synchronizing service: %s (sleeping %d seconds)" % (service_name, timeout)) time.sleep(timeout) return testing.logger.debug("Synchronizing service: %s" % service_name) pidfile_path = os.path.join("/var/run/critic/main", service_name + ".pid") if force_maintenance: signum = signal.SIGUSR2 else: signum = signal.SIGUSR1 before = time.time() self.execute( ["sudo", "python", "critic/" + helper, pidfile_path, str(signum), str(timeout)]) after = time.time() testing.logger.debug("Synchronized service: %s in %.2f seconds" % (service_name, after - before)) def filter_service_logs(self, level, service_names): helper = "testing/input/service_log_filter.py" if not (self.__upgraded or testing.exists_at(self.install_commit, helper)): # We're upgrading from a commit where the helper for filtering # service logs isn't supported, and haven't upgraded yet. return logfile_paths = { os.path.join("/var/log/critic/main", service_name + ".log"): service_name for service_name in service_names } try: data = json.loads(self.execute( ["sudo", "python", "critic/" + helper, level] + logfile_paths.keys(), log_stdout=False)) return { logfile_paths[logfile_path]: entries for logfile_path, entries in sorted(data.items()) } except GuestCommandError: return None ================================================ FILE: uninstall.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Martin Olsson # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations under # the License. import os import grp import sys import argparse import subprocess import installation def enum(*sequential, **named): enums = dict(zip(sequential, range(len(sequential))), **named) return type('Enum', (), enums) ExitStatus = enum('EXIT_SUCCESS', 'MUST_RUN_AS_ROOT', 'INVALID_ETC_DIR', 'UNEXPECTED_ERROR') def check(value): if value.strip() != "deletemydata": return "to continue with uninstall, enter 'deletemydata', to abort uninstall press CTRL-C" def abort_if_no_keep_going_param(arguments, error_msg): if not arguments.keep_going: print error_msg print "Unexpected error encountered. Critic uninstall aborted." print "Re-run with --keep-going to ignore errors." sys.exit(ExitStatus.UNEXPECTED_ERROR) def get_all_configurations(arguments): configurations = [] etc_dir = arguments.etc_dir original_sys_path = list(sys.path) for critic_instance in os.listdir(etc_dir): etc_path = os.path.join(etc_dir, critic_instance) if not os.path.isdir(etc_path): abort_if_no_keep_going_param(arguments, "ERROR: %s is not a directory." % etc_path) sys.path = list(original_sys_path) sys.path.insert(0, etc_path) try: import configuration configurations.append(configuration) except ImportError: abort_if_no_keep_going_param(arguments, "ERROR: Failed to load Critic instance configuration from %s." 
% etc_path) sys.path = list(original_sys_path) return configurations def run_command(arguments, command_parts): try: subprocess.check_output(command_parts) except: abort_if_no_keep_going_param(arguments, "Error while running command: " + ' '.join(command_parts)) def rmdir_if_empty(directories): for dir in directories: try: os.rmdir(dir) except OSError: pass def main(): parser = argparse.ArgumentParser(description="Critic uninstall script") parser.add_argument("--headless", help=argparse.SUPPRESS, action="store_true") parser.add_argument("--etc-dir", default="/etc/critic", help="root directory for Critic system configurations i.e. specifying /etc/critic will read configuration data from /etc/critic/*/configuration/*.py", action="store") parser.add_argument("--keep-going", help="keep going even if errors are encountered (useful for purging broken installations)", action="store_true") arguments = parser.parse_args() if os.getuid() != 0: print """ ERROR: This script must be run as root. """ sys.exit(ExitStatus.MUST_RUN_AS_ROOT) if not arguments.headless: print """\ !!!! WARNING !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! This uninstall script will delete Critic, all Critic logs, caches and configuration files, and it will also DELETE ALL DATA related to Critic. It will drop the entire Critic database from postgresql and it will permanently delete the Critic git repositories. If there are multiple instances of Critic on this system, all of them will be removed. !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! This step cannot be undone! To abort the uninstall script, press CTRL-C now. """ installation.input.string("To continue the uninstall script and DELETE ALL YOUR DATA, enter 'deletemydata' here:", default="", check=check) if not os.path.isdir(arguments.etc_dir): print "%s: no such directory. Invalid --etc-dir parameter." 
% arguments.etc_dir sys.exit(ExitStatus.INVALID_ETC_DIR) run_command(arguments, ["service", "apache2", "stop"]) # Sets of system users/groups to delete will be collected (to avoid trying to delete the same user/group twice). users_to_delete = set() groups_to_delete = set() for configuration in get_all_configurations(arguments): users_to_delete.add(configuration.base.SYSTEM_USER_NAME) groups_to_delete.add(configuration.base.SYSTEM_GROUP_NAME) run_command(arguments, ["service", "critic-%s" % configuration.base.SYSTEM_IDENTITY, "stop"]) run_command(arguments, ["rm", "-rf", configuration.paths.DATA_DIR, configuration.paths.LOG_DIR]) run_command(arguments, ["rm", "-rf", configuration.paths.CACHE_DIR, configuration.paths.RUN_DIR]) run_command(arguments, ["rm", "-rf", configuration.paths.INSTALL_DIR, configuration.paths.GIT_DIR]) if configuration.base.WEB_SERVER_INTEGRATION == "apache": run_command(arguments, ["rm", "-f", "/etc/apache2/sites-available/critic-%s" % configuration.base.SYSTEM_IDENTITY]) run_command(arguments, ["rm", "-f", "/etc/apache2/sites-enabled/critic-%s" % configuration.base.SYSTEM_IDENTITY]) elif configuration.base.WEB_SERVER_INTEGRATION == "nginx+uwsgi": run_command(arguments, ["rm", "-f", "/etc/nginx/sites-available/critic-%s" % configuration.base.SYSTEM_IDENTITY]) run_command(arguments, ["rm", "-f", "/etc/nginx/sites-enabled/critic-%s" % configuration.base.SYSTEM_IDENTITY]) else: run_command(arguments, ["rm", "-f", "/etc/uwsgi/apps-available/critic-frontend-%s.ini" % configuration.base.SYSTEM_IDENTITY]) run_command(arguments, ["rm", "-f", "/etc/uwsgi/apps-enabled/critic-frontend-%s.ini" % configuration.base.SYSTEM_IDENTITY]) if configuration.base.WEB_SERVER_INTEGRATION in ("nginx+uwsgi", "uwsgi"): run_command(arguments, ["rm", "-f", "/etc/uwsgi/apps-available/critic-backend-%s.ini" % configuration.base.SYSTEM_IDENTITY]) run_command(arguments, ["rm", "-f", "/etc/uwsgi/apps-enabled/critic-backend-%s.ini" % configuration.base.SYSTEM_IDENTITY]) 
run_command(arguments, ["rm", "-f", "/etc/init.d/critic-%s" % configuration.base.SYSTEM_IDENTITY]) run_command(arguments, ["update-rc.d", "critic-%s" % configuration.base.SYSTEM_IDENTITY, "remove"]) # Typically the postgres user does not have access to the cwd during uninstall so we use "-i" # with sudo which makes the command run with the postgres user's homedir as cwd instead. # This avoids a harmless but pointless error message "could not change directory to X" when the # /usr/bin/psql perl script tries to chdir back to the previous cwd after doing some stuff. run_command(arguments, ["sudo", "-u", "postgres", "-i", "psql", "-v", "ON_ERROR_STOP=1", "-c", "DROP DATABASE IF EXISTS %s;" % configuration.database.PARAMETERS["database"]]) run_command(arguments, ["sudo", "-u", "postgres", "-i", "psql", "-v", "ON_ERROR_STOP=1", "-c", "DROP ROLE IF EXISTS %s;" % configuration.database.PARAMETERS["user"]]) for user in users_to_delete: run_command(arguments, ["deluser", "--system", user]) for group in groups_to_delete: try: # Revoke push rights for all users that have been added to the Critic system group. # delgroup doesn't do this automatically and we want to avoid users gettings errors like: # "groups: cannot find name for group ID 132" for group_member in grp.getgrnam(group).gr_mem: subprocess.check_output(["gpasswd", "-d", group_member, group]) except KeyError: abort_if_no_keep_going_param(arguments, "ERROR: Could not find group '%s'." % group) run_command(arguments, ["delgroup", "--system", group]) # Delete non-instance specific parts. 
run_command(arguments, ["rm", "-rf", arguments.etc_dir, "/usr/bin/criticctl"]) if configuration.base.WEB_SERVER_INTEGRATION == "apache": run_command(arguments, ["service", "apache2", "restart"]) elif configuration.base.WEB_SERVER_INTEGRATION == "nginx+uwsgi": run_command(arguments, ["service", "nginx", "restart"]) if configuration.base.WEB_SERVER_INTEGRATION in ("nginx+uwsgi", "uwsgi"): run_command(arguments, ["service", "uwsgi", "restart"]) # When default paths are used in install.py we put some extra effort into # completely cleaning the system on uninstall, with custom paths it's # trickier to know if the user really wants to delete empty parent dirs. rmdir_if_empty(["/var/log/critic", "/var/run/critic", "/var/cache/critic"]) run_command(arguments, ["rm", "-f", os.path.join(installation.root_dir, ".installed")]) print print "SUCCESS: Uninstall complete." print return ExitStatus.EXIT_SUCCESS if __name__ == "__main__": sys.exit(main()) ================================================ FILE: upgrade.py ================================================ # -*- mode: python; encoding: utf-8 -*- # # Copyright 2012 Jens Lindström, Opera Software ASA # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import os import sys import traceback import subprocess # To avoid accidentally creating files owned by root. sys.dont_write_bytecode = True # Python version check is done before imports below so # that python 2.6/2.5 users can see the error message. 
import pythonversion pythonversion.check() if sys.flags.optimize > 0: print """ ERROR: Please run this script without -O or -OO options. """ sys.exit(1) import argparse sys.path.insert(0, os.path.join(os.getcwd(), "src")) import installation parser = argparse.ArgumentParser(description="Critic upgrade script") # Uses default values for everything that has a default value (and isn't # overridden by other command-line arguments) and signals an error for anything # that doesn't have a default value and isn't set by a command-line argument. parser.add_argument("--headless", help=argparse.SUPPRESS, action="store_true") parser.add_argument("--etc-dir", default="/etc/critic", help="directory where the Critic system configuration is stored", action="store") parser.add_argument("--identity", "-i", default="main", help="system identity to upgrade", action="store") parser.add_argument("--dry-run", "-n", help="produce output but don't modify the system at all", action="store_true") parser.add_argument("--force", "-f", help="force upgrade even if same commit is checked out", action="store_true") for module in installation.modules: if hasattr(module, "add_arguments"): module.add_arguments("upgrade", parser) arguments = parser.parse_args() if arguments.headless: installation.input.headless = True if os.getuid() != 0: print """ ERROR: This script must be run as root. """ sys.exit(1) def abort(): print print "ERROR: Upgrade aborted." 
print for module in reversed(installation.modules): try: if hasattr(module, "undo"): module.undo() except: print >>sys.stderr, "FAILED: %s.undo()" % module.__name__ traceback.print_exc() sys.exit(1) data = installation.utils.read_install_data(arguments) print """ Critic Upgrade ============== """ git = data["installation.prereqs.git"] if "sha1" not in data: try: guess_sha1 = installation.utils.run_git([git, "rev-parse", "HEAD@{1}"], cwd=installation.root_dir).strip() except: guess_sha1 = None print """ The SHA-1 of the commit you initially installed was not recorded. This means you installed a version before the install.py script was changed to record the SHA-1 currently checked out.""" if guess_sha1: print """ A reasonable guess is HEAD@{1}, or "where HEAD was before the last operation that changed HEAD". Otherwise, please figure out what you installed. If you need to guess, guessing on something too old (i.e. a commit that is an ancestor of the actual commit) is safer than guessing on something too recent.""" default = "HEAD@{1}" else: print """ Please figure out what you installed. If you need to guess, guessing on something too old (i.e. a commit that is an ancestor of the actual commit) is safer than guessing on something too recent.""" default = None print """ The commit can be specified as a SHA-1 or any symbolic ref understood by "git rev-parse". 
""" def revparse(value): return installation.utils.run_git([git, "rev-parse", "--verify", value], cwd=installation.root_dir).strip() def valid_commit(value): try: sha1 = revparse(value) except subprocess.CalledProcessError: return "not a valid ref (checked with \"git rev-parse --verify\")" try: installation.utils.run_git([git, "cat-file", "commit", sha1], cwd=installation.root_dir) except subprocess.CalledProcessError: return "not a commit" sha1 = revparse(installation.input.string(prompt="What commit was originally installed?", default=default, check=valid_commit)) data["sha1"] = sha1 old_critic_sha1 = data["sha1"] new_critic_sha1 = installation.utils.run_git([git, "rev-parse", "HEAD"], cwd=installation.root_dir).strip() old_lifecycle = installation.utils.read_lifecycle(git, old_critic_sha1) new_lifecycle = installation.utils.read_lifecycle() if old_lifecycle["stable"] != new_lifecycle["stable"]: if old_lifecycle["stable"]: print """ WARNING: You're about to switch to an unstable development version of Critic! If this is a production system, you are most likely better off staying on the current branch, or switching to the latest stable branch, if there the current branch isn't it. The latest stable branch is the default branch (i.e. HEAD) in Critic's GitHub repository at https://github.com/jensl/critic.git To interrogate it from the command-line, run $ git ls-remote --symref https://github.com/jensl/critic.git HEAD HINT: If you installed from the 'master' branch prior to October 2017, then it was at that time a stable branch (and also the only available option.) At this point in time, a stable branch 'stable/1' was branched off, and 'master' became an unstable development branch. In other words, if you are currently on 'master', you most likely want to switch to 'stable/1', or the latest stable branch (see above,) now. 
""" if not installation.input.yes_or_no( "Do you want to continue upgrading to the unstable version?", default=arguments.headless): print print "Installation aborted." print sys.exit(1) else: print """ NOTE: Switching from an unstable version to a stable version. """ print """ Previously installed version: %s Will now upgrade to version: %s """ % (old_critic_sha1, new_critic_sha1) if old_critic_sha1 == new_critic_sha1 and not arguments.force: print "Old and new commit are the same, nothing to do." sys.exit(0) status_output = installation.utils.run_git([git, "status", "--porcelain"], cwd=installation.root_dir).strip() if status_output: print """\ ERROR: This Git repository has local modifications.""" if len(status_output.splitlines()) \ and "installation/externals/v8-jsshell" in status_output: print """\ HINT: You might just need to run "git submodule update --recursive".""" print """ Installing from a Git repository with local changes is not supported. Please commit or stash the changes and then try again. """ sys.exit(1) try: for module in installation.modules: try: if hasattr(module, "prepare") and not module.prepare("upgrade", arguments, data): abort() except KeyboardInterrupt: abort() except SystemExit: raise except: print >>sys.stderr, "FAILED: %s.upgrade()" % module.__name__ traceback.print_exc() abort() if not arguments.dry_run: if not installation.httpd.stop(): abort() if not installation.initd.stop(): abort() for module in installation.modules: try: if hasattr(module, "upgrade") and not module.upgrade(arguments, data): abort() except KeyboardInterrupt: abort() except SystemExit: raise except: print >>sys.stderr, "FAILED: %s.upgrade()" % module.__name__ traceback.print_exc() abort() import configuration if not arguments.dry_run: # Before bugfix "Fix recreation of /var/run/critic/IDENTITY after reboot" # it was possible that /var/run/critic/IDENTITY was accidentally # recreated owned by root:root instead of critic:critic (on reboot). 
# If this had happened the service manager restart that is done during # upgrade would fail so upgrades always failed. Further, it was not # possible to write a migration script for this because migrations # execute after the service manager restart. Because of this the # following 3 line workaround was necessary: if os.path.exists(configuration.paths.RUN_DIR): os.chown(configuration.paths.RUN_DIR, installation.system.uid, installation.system.gid) if not installation.initd.start(): abort() if not installation.httpd.start(): abort() data["sha1"] = new_critic_sha1 with installation.utils.as_critic_system_user(): import dbaccess db = dbaccess.connect() cursor = db.cursor() cursor.execute("UPDATE systemidentities SET installed_sha1=%s, installed_at=NOW() WHERE name=%s", (new_critic_sha1, arguments.identity)) if not arguments.dry_run: db.commit() for module in installation.modules: try: if hasattr(module, "finish"): module.finish("upgrade", arguments, data) except: print >>sys.stderr, "WARNING: %s.finish() failed" % module.__name__ traceback.print_exc() installation.utils.write_install_data(arguments, data) installation.utils.clean_root_pyc_files() print print "SUCCESS: Upgrade complete!" print if configuration.extensions.ENABLED: try: installation.utils.run_git( [git, "diff", "--quiet", "%s..%s" % (old_critic_sha1, new_critic_sha1), "--", "installation/externals/v8-jsshell"]) except subprocess.CalledProcessError: # Non-zero exit status means there were changes. print """ Updated v8-jsshell submodule ============================ The v8-jsshell program used to run extensions has been updated and needs to be rebuilt. If this is not done, the extensions mechanism may malfunction. 
It can be done manually later by running this command as root: python extend.py """ rebuild_v8_jsshell = installation.input.yes_or_no( "Do you want to rebuild the v8-jsshell program now?", default=True) if rebuild_v8_jsshell: try: args = [] if arguments.headless: args.append("--headless") subprocess.check_call([sys.executable, "extend.py"] + args) except subprocess.CalledProcessError: # We have already finished the main upgrade, so just # propagate the exit status if extend.py failed. It will # have output enough error messages, for sure. sys.exit(1) except SystemExit: raise except: traceback.print_exc() abort()