Repository: jamiesun/SublimeEvernote
Branch: master
Commit: 62eeccabca6c
Files: 67
Total size: 1.7 MB
Directory structure:
gitextract__f99c86o/
├── .gitattributes
├── .gitignore
├── Context.sublime-menu
├── Default (Linux).sublime-keymap
├── Default (OSX).sublime-keymap
├── Default (Windows).sublime-keymap
├── Default.sublime-commands
├── Readme.md
├── SublimeEvernoteMetadata.sublime-snippet
├── lib/
│ ├── __init__.py
│ ├── evernote/
│ │ ├── __init__.py
│ │ ├── api/
│ │ │ ├── __init__.py
│ │ │ └── client.py
│ │ └── edam/
│ │ ├── __init__.py
│ │ ├── error/
│ │ │ ├── __init__.py
│ │ │ ├── constants.py
│ │ │ └── ttypes.py
│ │ ├── limits/
│ │ │ ├── __init__.py
│ │ │ ├── constants.py
│ │ │ └── ttypes.py
│ │ ├── notestore/
│ │ │ ├── NoteStore-remote
│ │ │ ├── NoteStore.py
│ │ │ ├── __init__.py
│ │ │ ├── constants.py
│ │ │ └── ttypes.py
│ │ ├── type/
│ │ │ ├── __init__.py
│ │ │ ├── constants.py
│ │ │ └── ttypes.py
│ │ └── userstore/
│ │ ├── UserStore-remote
│ │ ├── UserStore.py
│ │ ├── __init__.py
│ │ ├── constants.py
│ │ └── ttypes.py
│ ├── httplib.py
│ ├── httplib2/
│ │ ├── __init__.py
│ │ ├── cacerts.txt
│ │ ├── iri2uri.py
│ │ └── socks.py
│ ├── markdown2.py
│ ├── oauth2/
│ │ ├── __init__.py
│ │ ├── _version.py
│ │ └── clients/
│ │ ├── __init__.py
│ │ ├── imap.py
│ │ └── smtp.py
│ └── thrift/
│ ├── TSCons.py
│ ├── TSerialization.py
│ ├── Thrift.py
│ ├── __init__.py
│ ├── protocol/
│ │ ├── TBase.py
│ │ ├── TBinaryProtocol.py
│ │ ├── TCompactProtocol.py
│ │ ├── TProtocol.py
│ │ ├── __init__.py
│ │ └── fastbinary.c
│ ├── server/
│ │ ├── THttpServer.py
│ │ ├── TNonblockingServer.py
│ │ ├── TProcessPoolServer.py
│ │ ├── TServer.py
│ │ └── __init__.py
│ └── transport/
│ ├── THttpClient.py
│ ├── TSSLSocket.py
│ ├── TSocket.py
│ ├── TTransport.py
│ ├── TTwisted.py
│ ├── TZlibTransport.py
│ └── __init__.py
└── sublime_evernote.py
================================================
FILE CONTENTS
================================================
================================================
FILE: .gitattributes
================================================
# Auto detect text files and perform LF normalization
* text=auto
# Custom for Visual Studio
*.cs diff=csharp
*.sln merge=union
*.csproj merge=union
*.vbproj merge=union
*.fsproj merge=union
*.dbproj merge=union
# Standard to msysgit
*.doc diff=astextplain
*.DOC diff=astextplain
*.docx diff=astextplain
*.DOCX diff=astextplain
*.dot diff=astextplain
*.DOT diff=astextplain
*.pdf diff=astextplain
*.PDF diff=astextplain
*.rtf diff=astextplain
*.RTF diff=astextplain
================================================
FILE: .gitignore
================================================
#################
## Eclipse
#################
*.pydevproject
.project
.metadata
bin/
tmp/
*.tmp
*.bak
*.swp
*~.nib
local.properties
.classpath
.settings/
.loadpath
# External tool builders
.externalToolBuilders/
# Locally stored "Eclipse launch configurations"
*.launch
# CDT-specific
.cproject
# PDT-specific
.buildpath
#################
## Visual Studio
#################
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
# User-specific files
*.suo
*.user
*.sln.docstates
# Build results
[Dd]ebug/
[Rr]elease/
*_i.c
*_p.c
*.ilk
*.meta
*.obj
*.pch
*.pdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.vspscc
.builds
*.dotCover
## TODO: If you have NuGet Package Restore enabled, uncomment this
#packages/
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opensdf
*.sdf
# Visual Studio profiler
*.psess
*.vsp
# ReSharper is a .NET coding add-in
_ReSharper*
# Installshield output folder
[Ee]xpress
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish
# Others
[Bb]in
[Oo]bj
sql
TestResults
*.Cache
ClientBin
stylecop.*
~$*
*.dbmdl
Generated_Code #added for RIA/Silverlight projects
# Backup & report files from converting an old project file to a newer
# Visual Studio version. Backup files are not needed, because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
############
## Windows
############
# Windows image file caches
Thumbs.db
# Folder config file
Desktop.ini
#############
## Python
#############
*.py[co]
# Packages
*.egg
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
# Installer logs
pip-log.txt
# Unit test / coverage reports
.coverage
.tox
#Translations
*.mo
#Mr Developer
.mr.developer.cfg
# Mac crap
.DS_Store
sftp-config.json
sftp-config-alt.json
logs
sessions
================================================
FILE: Context.sublime-menu
================================================
[
{ "caption": "-" },
{
"command":"send_to_evernote",
"caption":"Send to Evernote"
},
{
"caption": "Evernote Settings",
"command": "open_file", "args":
{
"file": "${packages}/User/SublimeEvernote.sublime-settings"
}
},
{ "caption": "-" }
]
================================================
FILE: Default (Linux).sublime-keymap
================================================
[
{ "keys": ["ctrl+alt+e"], "command": "send_to_evernote" }
]
================================================
FILE: Default (OSX).sublime-keymap
================================================
[
{ "keys": ["super+alt+e"], "command": "send_to_evernote" }
]
================================================
FILE: Default (Windows).sublime-keymap
================================================
[
{ "keys": ["ctrl+alt+e"], "command": "send_to_evernote" }
]
================================================
FILE: Default.sublime-commands
================================================
[
{ "command": "send_to_evernote", "caption": "Send to evernote" },
{
"caption": "Evernote Settings",
"command": "open_file", "args":
{
"file": "${packages}/User/SublimeEvernote.sublime-settings"
}
}
]
================================================
FILE: Readme.md
================================================
SublimeEvernote
===============
[Sublime Text 2](http://www.sublimetext.com/2) plugin for [Evernote](http://www.evernote.com)
### Install
Through [Package Control](http://wbond.net/sublime_packages/package_control)
`Command Palette` > `Package Control: Install Package` > `SublimeEvernote`
or
`Command Palette` > `Package Control: Add Repository`, then enter `http://github.com/jamiesun/SublimeEvernote`
`Command Palette` > `Package Control: Install Package` > `SublimeEvernote`
or clone this repository in
* Windows: `%APPDATA%/Sublime Text 2/Packages/` (`%APPDATA%` already expands to the `Roaming` folder)
* OSX: `~/Library/Application Support/Sublime Text 2/Packages/`
* Linux: `~/.Sublime Text 2/Packages/`
* Portable Installation: `Sublime Text 2/Data/`
### Usage
`Command Palette` > `Send to evernote`
`Context menu` > `Send to Evernote`
`Context menu` > `Evernote settings`
#### Markdown Support ####
Write notes in Markdown and they will be processed when they are sent to Evernote.
This:

Turns into this:

#### Authenticating with Evernote ####
In order to send notes you need to authenticate and allow the plugin permissions via Evernote's oauth.
This is a bit of a manual process now as there are no callbacks to Sublime to handle this process automatically.
Here are a collection of screenshots to step you through the process.
##### Step 1 - Sublime text2 open your browser,you need login:

##### Step 2 - Authorize plugin with Evernote:

##### Step 3 - Copy oauth verifier


##### Step 4 - Verify token on Sublime

##### Step 5 - Rejoice!

#### Metadata ####
Use metadata block to specify title and tags.
---
title: My Note
tags: tag1,tag2
---
================================================
FILE: SublimeEvernoteMetadata.sublime-snippet
================================================
Looks for a user account with the provided userId on this NoteStore shard and determines whether that account contains a public notebook with the given URI. If the account is not found, or no public notebook exists with this URI, this will throw an EDAMNotFoundException, otherwise this will return the information for that Notebook.
If a notebook is visible on the web with a full URL like http://www.evernote.com/pub/sethdemo/api Then 'sethdemo' is the username that can be used to look up the userId, and 'api' is the publicUri.
@param userId The numeric identifier for the user who owns the public notebook. To find this value based on a username string, you can invoke UserStore.getPublicUserInfo @param publicUri The uri string for the public notebook, from Notebook.publishing.uri. @throws EDAMNotFoundExceptionLooks for a user account with the provided userId on this NoteStore shard and determines whether that account contains a public notebook with the given URI. If the account is not found, or no public notebook exists with this URI, this will throw an EDAMNotFoundException, otherwise this will return the information for that Notebook.
If a notebook is visible on the web with a full URL like http://www.evernote.com/pub/sethdemo/api Then 'sethdemo' is the username that can be used to look up the userId, and 'api' is the publicUri.
@param userId The numeric identifier for the user who owns the public notebook. To find this value based on a username string, you can invoke UserStore.getPublicUserInfo @param publicUri The uri string for the public notebook, from Notebook.publishing.uri. @throws EDAMNotFoundExceptionincludeContainingNotebooks is set to true
in the RelatedResultSpec, return the list of notebooks to
to which the returned related notes belong. The notebooks in this
list will occur once per notebook GUID and are represented as
NotebookDescriptor objects.true, return the containingNotebooks field
in the RelatedResult, which will contain the list of notebooks to
to which the returned related notes belong.To safely add or modify your application's entry in the map, use NoteStore.setResourceApplicationDataEntry. To safely remove your application's entry from the map, use NoteStore.unsetResourceApplicationDataEntry.
Minimum length of a name (key): EDAM_APPLICATIONDATA_NAME_LEN_MINApplications should set contentClass only when they are creating notes that contain structured information that needs to be maintained in order for the user to be able to use the note within that application. Setting contentClass makes a note read-only in other applications, so there is a trade-off when an application chooses to use contentClass. Applications that set contentClass when creating notes must use a contentClass string of the form CompanyName.ApplicationName to ensure uniqueness.
Length restrictions: EDAM_NOTE_CONTENT_CLASS_LEN_MIN, EDAM_NOTE_CONTENT_CLASS_LEN_MAXTo safely add or modify your application's entry in the map, use NoteStore.setNoteApplicationDataEntry. To safely remove your application's entry from the map, use NoteStore.unsetNoteApplicationDataEntry.
Minimum length of a name (key): EDAM_APPLICATIONDATA_NAME_LEN_MINSpecifies the set of notes that should be included in the search, if possible.
Clients are expected to search as much of the desired scope as possible, with the understanding that a given client may not be able to cover the full specified scope. For example, when executing a search that includes notes in both the owner's account and business notebooks, a mobile client may choose to only search within the user's account because it is not capable of searching both scopes simultaneously. When a search across multiple scopes is not possible, a client may choose which scope to search based on the current application context. If a client cannot search any of the desired scopes, it should refuse to execute the search.
The UserStore service is primarily used by EDAM clients to establish authentication via username and password over a trusted connection (e.g. SSL). A client's first call to this interface should be checkVersion() to ensure that the client's software is up to date.
All calls which require an authenticationToken may throw an EDAMUserException for the following reasons:The result of the authentication. If the authentication was successful, the AuthenticationResult.user field will be set with the full information about the User.
If the user has two-factor authentication enabled, AuthenticationResult.secondFactorRequired will be set and AuthenticationResult.authenticationToken will contain a short-lived token that may only be used to complete the two-factor authentication process by calling UserStore.completeTwoFactorAuthentication.
@throws EDAMUserExceptionThe result of the authentication. The level of detail provided in the returned AuthenticationResult.User structure depends on the access level granted by calling application's API key.
If the user has two-factor authentication enabled, AuthenticationResult.secondFactorRequired will be set and AuthenticationResult.authenticationToken will contain a short-lived token that may only be used to complete the two-factor authentication process by calling UserStore.completeTwoFactorAuthentication.
@throws EDAMUserExceptionThe UserStore service is primarily used by EDAM clients to establish authentication via username and password over a trusted connection (e.g. SSL). A client's first call to this interface should be checkVersion() to ensure that the client's software is up to date.
All calls which require an authenticationToken may throw an EDAMUserException for the following reasons:The result of the authentication. If the authentication was successful, the AuthenticationResult.user field will be set with the full information about the User.
If the user has two-factor authentication enabled, AuthenticationResult.secondFactorRequired will be set and AuthenticationResult.authenticationToken will contain a short-lived token that may only be used to complete the two-factor authentication process by calling UserStore.completeTwoFactorAuthentication.
@throws EDAMUserExceptionThe result of the authentication. The level of detail provided in the returned AuthenticationResult.User structure depends on the access level granted by calling application's API key.
If the user has two-factor authentication enabled, AuthenticationResult.secondFactorRequired will be set and AuthenticationResult.authenticationToken will contain a short-lived token that may only be used to complete the two-factor authentication process by calling UserStore.completeTwoFactorAuthentication.
@throws EDAMUserException tags.
"""
yield 0, ""
for tup in inner:
yield tup
yield 0, ""
def wrap(self, source, outfile):
    """Wrap *source* in <code>, then <pre>, then <div> (innermost first).

    `outfile` is accepted for Pygments formatter-API compatibility but
    is not used here.
    """
    wrapped = self._wrap_code(source)
    wrapped = self._wrap_pre(wrapped)
    return self._wrap_div(wrapped)
formatter_opts.setdefault("cssclass", "codehilite")
formatter = HtmlCodeFormatter(**formatter_opts)
return pygments.highlight(codeblock, lexer, formatter)
def _code_block_sub(self, match, is_fenced_code_block=False):
    """Render one matched code block (indented or ```-fenced) to HTML.

    For fenced blocks, group(1) is the optional lexer name and group(2)
    is the block body; for indented blocks group(1) is the raw indented
    body. Colorizes via Pygments when a lexer name is known; otherwise
    emits an escaped <pre><code> block.
    """
    lexer_name = None
    if is_fenced_code_block:
        lexer_name = match.group(1)
        if lexer_name:
            formatter_opts = self.extras['fenced-code-blocks'] or {}
        codeblock = match.group(2)
        codeblock = codeblock[:-1]  # drop one trailing newline
    else:
        codeblock = match.group(1)
        codeblock = self._outdent(codeblock)
        codeblock = self._detab(codeblock)
        codeblock = codeblock.lstrip('\n')  # trim leading newlines
        codeblock = codeblock.rstrip()      # trim trailing whitespace

        # Note: "code-color" extra is DEPRECATED.
        if "code-color" in self.extras and codeblock.startswith(":::"):
            lexer_name, rest = codeblock.split('\n', 1)
            lexer_name = lexer_name[3:].strip()
            codeblock = rest.lstrip("\n")   # Remove lexer declaration line.
            formatter_opts = self.extras['code-color'] or {}

    if lexer_name:
        lexer = self._get_pygments_lexer(lexer_name)
        if lexer:
            colored = self._color_with_pygments(codeblock, lexer,
                                                **formatter_opts)
            return "\n\n%s\n\n" % colored

    codeblock = self._encode_code(codeblock)
    pre_class_str = self._html_class_str_from_tag("pre")
    code_class_str = self._html_class_str_from_tag("code")
    # Restored: extraction had stripped the <pre>/<code> tags from this
    # literal, leaving an unterminated string (see upstream markdown2).
    return "\n\n<pre%s><code%s>%s\n</code></pre>\n\n" % (
        pre_class_str, code_class_str, codeblock)
def _html_class_str_from_tag(self, tag):
"""Get the appropriate ' class="..."' string (note the leading
space), if any, for the given tag.
"""
if "html-classes" not in self.extras:
return ""
try:
html_classes_from_tag = self.extras["html-classes"]
except TypeError:
return ""
else:
if tag in html_classes_from_tag:
return ' class="%s"' % html_classes_from_tag[tag]
return ""
def _do_code_blocks(self, text):
"""Process Markdown `` blocks."""
code_block_re = re.compile(r'''
(?:\n\n|\A\n?)
( # $1 = the code block -- one or more lines, starting with a space/tab
(?:
(?:[ ]{%d} | \t) # Lines must start with a tab or a tab-width of spaces
.*\n+
)+
)
((?=^[ ]{0,%d}\S)|\Z) # Lookahead for non-space at line-start, or end of doc
''' % (self.tab_width, self.tab_width),
re.M | re.X)
return code_block_re.sub(self._code_block_sub, text)
_fenced_code_block_re = re.compile(r'''
(?:\n\n|\A\n?)
^```([\w+-]+)?[ \t]*\n # opening fence, $1 = optional lang
(.*?) # $2 = code block content
^```[ \t]*\n # closing fence
''', re.M | re.X | re.S)
def _fenced_code_block_sub(self, match):
return self._code_block_sub(match, is_fenced_code_block=True);
def _do_fenced_code_blocks(self, text):
"""Process ```-fenced unindented code blocks ('fenced-code-blocks' extra)."""
return self._fenced_code_block_re.sub(self._fenced_code_block_sub, text)
# Rules for a code span:
# - backslash escapes are not interpreted in a code span
# - to include one or or a run of more backticks the delimiters must
# be a longer run of backticks
# - cannot start or end a code span with a backtick; pad with a
# space and that space will be removed in the emitted HTML
# See `test/tm-cases/escapes.text` for a number of edge-case
# examples.
_code_span_re = re.compile(r'''
(?%s" % c
def _do_code_spans(self, text):
    """Convert backtick-delimited code spans to <code> spans.

    A run of multiple backticks may delimit a span containing literal
    backticks (``foo `bar` baz``), and a single space of padding inside
    the delimiters is stripped, so `` `bar` `` yields a span whose text
    is `bar`. There is no limit on delimiter length -- to include three
    consecutive backticks, delimit with four, and so on.
    """
    return self._code_span_re.sub(self._code_span_sub, text)
def _encode_code(self, text):
    """Encode/escape certain characters inside Markdown code runs.

    The point is that in code, these characters are literals, and lose
    their special Markdown meanings.  The escaped text is stashed in
    self._escape_table under a hash token and the token is returned, so
    later span-level processing cannot touch the contents.
    """
    replacements = [
        # Encode all ampersands; HTML entities are not
        # entities within a Markdown code span.
        # Restored: extraction had stripped the entity names from these
        # literals, turning them into identity replacements.
        ('&', '&amp;'),
        # Do the angle bracket song and dance:
        ('<', '&lt;'),
        ('>', '&gt;'),
    ]
    for before, after in replacements:
        text = text.replace(before, after)
    hashed = _hash_text(text)
    self._escape_table[text] = hashed
    return hashed
_strong_re = re.compile(r"(\*\*|__)(?=\S)(.+?[*_]*)(?<=\S)\1", re.S)
_em_re = re.compile(r"(\*|_)(?=\S)(.+?)(?<=\S)\1", re.S)
_code_friendly_strong_re = re.compile(r"\*\*(?=\S)(.+?[*_]*)(?<=\S)\*\*", re.S)
_code_friendly_em_re = re.compile(r"\*(?=\S)(.+?)(?<=\S)\*", re.S)
def _do_italics_and_bold(self, text):
# must go first:
if "code-friendly" in self.extras:
text = self._code_friendly_strong_re.sub(r"\1", text)
text = self._code_friendly_em_re.sub(r"\1", text)
else:
text = self._strong_re.sub(r"\2", text)
text = self._em_re.sub(r"\2", text)
return text
# "smarty-pants" extra: Very liberal in interpreting a single prime as an
# apostrophe; e.g. ignores the fact that "round", "bout", "twer", and
# "twixt" can be written without an initial apostrophe. This is fine because
# using scare quotes (single quotation marks) is rare.
_apostrophe_year_re = re.compile(r"'(\d\d)(?=(\s|,|;|\.|\?|!|$))")
_contractions = ["tis", "twas", "twer", "neath", "o", "n",
"round", "bout", "twixt", "nuff", "fraid", "sup"]
def _do_smart_contractions(self, text):
text = self._apostrophe_year_re.sub(r"’\1", text)
for c in self._contractions:
text = text.replace("'%s" % c, "’%s" % c)
text = text.replace("'%s" % c.capitalize(),
"’%s" % c.capitalize())
return text
# Substitute double-quotes before single-quotes.
_opening_single_quote_re = re.compile(r"(?
See "test/tm-cases/smarty_pants.text" for a full discussion of the
support here and
for a
discussion of some diversion from the original SmartyPants.
"""
if "'" in text: # guard for perf
text = self._do_smart_contractions(text)
text = self._opening_single_quote_re.sub("‘", text)
text = self._closing_single_quote_re.sub("’", text)
if '"' in text: # guard for perf
text = self._opening_double_quote_re.sub("“", text)
text = self._closing_double_quote_re.sub("”", text)
text = text.replace("---", "—")
text = text.replace("--", "–")
text = text.replace("...", "…")
text = text.replace(" . . . ", "…")
text = text.replace(". . .", "…")
return text
_block_quote_re = re.compile(r'''
( # Wrap whole match in \1
(
^[ \t]*>[ \t]? # '>' at the start of a line
.+\n # rest of the first line
(.+\n)* # subsequent consecutive lines
\n* # blanks
)+
)
''', re.M | re.X)
_bq_one_level_re = re.compile('^[ \t]*>[ \t]?', re.M);
_html_pre_block_re = re.compile(r'(\s*.+?
)', re.S)
def _dedent_two_spaces_sub(self, match):
return re.sub(r'(?m)^ ', '', match.group(1))
def _block_quote_sub(self, match):
    """Render one matched '>'-quoted group to <blockquote> HTML.

    Restored: extraction had stripped the <blockquote> tags from the
    return literal (leaving an unterminated string) and collapsed the
    two-space indent string to one space.
    """
    bq = match.group(1)
    bq = self._bq_one_level_re.sub('', bq)  # trim one level of quoting
    bq = self._ws_only_line_re.sub('', bq)  # trim whitespace-only lines
    bq = self._run_block_gamut(bq)          # recurse
    bq = re.sub('(?m)^', '  ', bq)
    # These leading spaces screw with <pre> content, so we need to fix that:
    bq = self._html_pre_block_re.sub(self._dedent_two_spaces_sub, bq)
    return "<blockquote>\n%s\n</blockquote>\n\n" % bq
def _do_block_quotes(self, text):
if '>' not in text:
return text
return self._block_quote_re.sub(self._block_quote_sub, text)
def _form_paragraphs(self, text):
    """Wrap remaining bare text runs in <p> tags.

    Restored: extraction had stripped the "<p>"/"</p>" wrapper literals
    and the "<ul>"/"<ol>" prefixes in the cuddled-list assertion,
    leaving unterminated strings.
    """
    # Strip leading and trailing lines:
    text = text.strip('\n')

    # Wrap <p> tags.
    grafs = []
    for i, graf in enumerate(re.split(r"\n{2,}", text)):
        if graf in self.html_blocks:
            # Unhashify HTML blocks
            grafs.append(self.html_blocks[graf])
        else:
            cuddled_list = None
            if "cuddled-lists" in self.extras:
                # Need to put back trailing '\n' for `_list_item_re`
                # match at the end of the paragraph.
                li = self._list_item_re.search(graf + '\n')
                # Two of the same list marker in this paragraph: a likely
                # candidate for a list cuddled to preceding paragraph
                # text (issue 33). Note the `[-1]` is a quick way to
                # consider numeric bullets (e.g. "1." and "2.") to be
                # equal.
                if (li and len(li.group(2)) <= 3 and li.group("next_marker")
                        and li.group("marker")[-1] == li.group("next_marker")[-1]):
                    start = li.start()
                    cuddled_list = self._do_lists(graf[start:]).rstrip("\n")
                    assert cuddled_list.startswith("<ul>") or cuddled_list.startswith("<ol>")
                    graf = graf[:start]

            # Wrap <p> tags.
            graf = self._run_span_gamut(graf)
            grafs.append("<p>" + graf.lstrip(" \t") + "</p>")

            if cuddled_list:
                grafs.append(cuddled_list)
    return "\n\n".join(grafs)
def _add_footnotes(self, text):
    """Append accumulated footnotes, if any, as an HTML footer div.

    Restored per upstream markdown2: extraction had stripped the HTML
    tags from every footer literal, leaving broken strings.
    """
    if self.footnotes:
        footer = [
            '<div class="footnotes">',
            '<hr' + self.empty_element_suffix,
            '<ol>',
        ]
        for i, id in enumerate(self.footnote_ids):
            if i != 0:
                footer.append('')
            footer.append('<li id="fn-%s">' % id)
            footer.append(self._run_block_gamut(self.footnotes[id]))
            backlink = ('<a href="#fnref-%s" '
                        'class="footnoteBackLink" '
                        'title="Jump back to footnote %d in the text.">'
                        '&#8617;</a>' % (id, i + 1))
            if footer[-1].endswith("</p>"):
                # Tuck the backlink inside the final paragraph.
                footer[-1] = footer[-1][:-len("</p>")] \
                    + '&nbsp;' + backlink + "</p>"
            else:
                footer.append("\n<p>%s</p>" % backlink)
            footer.append('</li>')
        footer.append('</ol>')
        footer.append('</div>')
        return text + '\n\n' + '\n'.join(footer)
    else:
        return text
# Ampersand-encoding based entirely on Nat Irons's Amputator MT plugin:
# http://bumppo.net/projects/amputator/
_ampersand_re = re.compile(r'&(?!#?[xX]?(?:[0-9a-fA-F]+|\w+);)')
_naked_lt_re = re.compile(r'<(?![a-z/?\$!])', re.I)
_naked_gt_re = re.compile(r'''(?''', re.I)
def _encode_amps_and_angles(self, text):
# Smart processing for ampersands and angle brackets that need
# to be encoded.
text = self._ampersand_re.sub('&', text)
# Encode naked <'s
text = self._naked_lt_re.sub('<', text)
# Encode naked >'s
# Note: Other markdown implementations (e.g. Markdown.pl, PHP
# Markdown) don't do this.
text = self._naked_gt_re.sub('>', text)
return text
def _encode_backslash_escapes(self, text):
for ch, escape in list(self._escape_table.items()):
text = text.replace("\\"+ch, escape)
return text
_auto_link_re = re.compile(r'<((https?|ftp):[^\'">\s]+)>', re.I)
def _auto_link_sub(self, match):
g1 = match.group(1)
return '%s' % (g1, g1)
_auto_email_link_re = re.compile(r"""
<
(?:mailto:)?
(
[-.\w]+
\@
[-\w]+(\.[-\w]+)*\.[a-z]+
)
>
""", re.I | re.X | re.U)
def _auto_email_link_sub(self, match):
return self._encode_email_address(
self._unescape_special_chars(match.group(1)))
def _do_auto_links(self, text):
text = self._auto_link_re.sub(self._auto_link_sub, text)
text = self._auto_email_link_re.sub(self._auto_email_link_sub, text)
return text
def _encode_email_address(self, addr):
    """Return *addr* as an obfuscated mailto anchor tag.

    Each character of "mailto:" + addr is encoded as a random decimal or
    hex entity, in the hopes of foiling most address-harvesting spam
    bots. Based on a filter by Matthew Wickline, posted to the
    BBEdit-Talk mailing list.

    Restored: extraction had reduced the anchor template to '%s' with
    two arguments -- a guaranteed TypeError.
    """
    chars = [_xml_encode_email_char_at_random(ch)
             for ch in "mailto:" + addr]
    # Strip the mailto: from the visible part.
    addr = '<a href="%s">%s</a>' \
           % (''.join(chars), ''.join(chars[7:]))
    return addr
def _do_link_patterns(self, text):
    """Caveat emptor: there isn't much guarding against link
    patterns being formed inside other standard Markdown links, e.g.
    inside a [link def][like this].

    Dev Notes: *Could* consider prefixing regexes with a negative
    lookbehind assertion to attempt to guard against this.

    Restored: extraction had stripped '&quot;' from the attribute escape
    and the anchor markup from the link template.
    """
    link_from_hash = {}
    for regex, repl in self.link_patterns:
        replacements = []
        for match in regex.finditer(text):
            if hasattr(repl, "__call__"):
                href = repl(match)
            else:
                href = match.expand(repl)
            replacements.append((match.span(), href))
        # Apply right-to-left so earlier spans stay valid.
        for (start, end), href in reversed(replacements):
            escaped_href = (
                href.replace('"', '&quot;')  # b/c of attr quote
                    # To avoid markdown <em>/<strong>:
                    .replace('*', self._escape_table['*'])
                    .replace('_', self._escape_table['_']))
            link = '<a href="%s">%s</a>' % (escaped_href, text[start:end])
            hash = _hash_text(link)
            link_from_hash[hash] = link
            text = text[:start] + hash + text[end:]
    for hash, link in list(link_from_hash.items()):
        text = text.replace(hash, link)
    return text
def _unescape_special_chars(self, text):
# Swap back in all the special characters we've hidden.
for ch, hash in list(self._escape_table.items()):
text = text.replace(hash, ch)
return text
def _outdent(self, text):
# Remove one level of line-leading tabs or spaces
return self._outdent_re.sub('', text)
class MarkdownWithExtras(Markdown):
    """Markdown converter with the most broadly useful extras enabled.

    Enabled: "footnotes" and "code-color" (the latter only does anything
    when the 'pygments' module is importable).

    Deliberately left out:
    - "pyshell": specific to Python-related documenting
    - "code-friendly": it *disables* part of the syntax
    - "link-patterns": it requires caller-supplied patterns anyway
    """
    extras = ["footnotes", "code-color"]
#---- internal support functions
class UnicodeWithAttrs(unicode):
    """A subclass of unicode used for the return value of conversion to
    possibly attach some attributes. E.g. the "toc_html" attribute when
    the "toc" extra is used.

    NOTE(review): the `unicode` base confirms this vendored file targets
    Python 2.
    """
    metadata = None  # dict set by the "metadata" extra, if used
    _toc = None      # list of (level, id, name) set by the "toc" extra

    def toc_html(self):
        """Return a nested-<ul> HTML rendering of the current TOC.

        This expects the `_toc` attribute to have been set on this
        instance. Restored per upstream markdown2: extraction had
        stripped the <ul>/<li>/<a> literals, leaving broken strings.
        """
        if self._toc is None:
            return None

        def indent():
            return '  ' * (len(h_stack) - 1)
        lines = []
        h_stack = [0]  # stack of header-level numbers
        for level, id, name in self._toc:
            if level > h_stack[-1]:
                lines.append("%s<ul>" % indent())
                h_stack.append(level)
            elif level == h_stack[-1]:
                lines[-1] += "</li>"
            else:
                while level < h_stack[-1]:
                    h_stack.pop()
                    if not lines[-1].endswith("</li>"):
                        lines[-1] += "</li>"
                    lines.append("%s</ul></li>" % indent())
            lines.append('%s<li><a href="#%s">%s</a>' % (
                indent(), id, name))
        while len(h_stack) > 1:
            h_stack.pop()
            if not lines[-1].endswith("</li>"):
                lines[-1] += "</li>"
            lines.append("%s</ul>" % indent())
        return '\n'.join(lines) + '\n'
    toc_html = property(toc_html)
## {{{ http://code.activestate.com/recipes/577257/ (r1)
_slugify_strip_re = re.compile(r'[^\w\s-]')
_slugify_hyphenate_re = re.compile(r'[-\s]+')
def _slugify(value):
"""
Normalizes string, converts to lowercase, removes non-alpha characters,
and converts spaces to hyphens.
From Django's "django/template/defaultfilters.py".
"""
try:
import unicodedata
value = unicodedata.normalize('NFKD', value)
except ImportError:
pass
value = value.encode('ascii', 'ignore').decode()
value = _slugify_strip_re.sub('', value).strip().lower()
return _slugify_hyphenate_re.sub('-', value)
## end of http://code.activestate.com/recipes/577257/ }}}
# From http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52549
def _curry(*args, **kwargs):
function, args = args[0], args[1:]
def result(*rest, **kwrest):
combined = kwargs.copy()
combined.update(kwrest)
return function(*args + rest, **combined)
return result
# Recipe: regex_from_encoded_pattern (1.0)
def _regex_from_encoded_pattern(s):
"""'foo' -> re.compile(re.escape('foo'))
'/foo/' -> re.compile('foo')
'/foo/i' -> re.compile('foo', re.I)
"""
if s.startswith('/') and s.rfind('/') != 0:
# Parse it: /PATTERN/FLAGS
idx = s.rfind('/')
pattern, flags_str = s[1:idx], s[idx+1:]
flag_from_char = {
"i": re.IGNORECASE,
"l": re.LOCALE,
"s": re.DOTALL,
"m": re.MULTILINE,
"u": re.UNICODE,
}
flags = 0
for char in flags_str:
try:
flags |= flag_from_char[char]
except KeyError:
raise ValueError("unsupported regex flag: '%s' in '%s' "
"(must be one of '%s')"
% (char, s, ''.join(list(flag_from_char.keys()))))
return re.compile(s[1:idx], flags)
else: # not an encoded regex
return re.compile(re.escape(s))
# Recipe: dedent (0.1.2)
def _dedentlines(lines, tabsize=8, skip_first_line=False):
    """_dedentlines(lines, tabsize=8, skip_first_line=False) -> dedented lines

    "lines" is a list of lines to dedent.
    "tabsize" is the tab width to use for indent width calculations.
    "skip_first_line" is a boolean indicating if the first line should
    be skipped for calculating the indent width and for dedenting.
    This is sometimes useful for docstrings and similar.

    Same as dedent() except operates on a sequence of lines. Note: the
    lines list is modified **in-place**.
    """
    DEBUG = False
    if DEBUG:
        print("dedent: dedent(..., tabsize=%d, skip_first_line=%r)"\
              % (tabsize, skip_first_line))
    indents = []  # NOTE(review): appears unused; kept as-is
    margin = None
    # First pass: find the smallest indent (the margin) across all
    # non-blank lines, counting tabs as advancing to the next tab stop.
    for i, line in enumerate(lines):
        if i == 0 and skip_first_line: continue
        indent = 0
        for ch in line:
            if ch == ' ':
                indent += 1
            elif ch == '\t':
                indent += tabsize - (indent % tabsize)
            elif ch in '\r\n':
                continue  # skip all-whitespace lines
            else:
                break
        else:
            continue  # skip all-whitespace lines
        if DEBUG: print("dedent: indent=%d: %r" % (indent, line))
        if margin is None:
            margin = indent
        else:
            margin = min(margin, indent)
    if DEBUG: print("dedent: margin=%r" % margin)
    # Second pass: strip `margin` columns of leading whitespace from
    # each line, in place. A tab may overshoot the margin, in which
    # case the excess is padded back with spaces.
    if margin is not None and margin > 0:
        for i, line in enumerate(lines):
            if i == 0 and skip_first_line: continue
            removed = 0
            for j, ch in enumerate(line):
                if ch == ' ':
                    removed += 1
                elif ch == '\t':
                    removed += tabsize - (removed % tabsize)
                elif ch in '\r\n':
                    # All-whitespace line: drop everything before the EOL.
                    if DEBUG: print("dedent: %r: EOL -> strip up to EOL" % line)
                    lines[i] = lines[i][j:]
                    break
                else:
                    raise ValueError("unexpected non-whitespace char %r in "
                                     "line %r while removing %d-space margin"
                                     % (ch, line, margin))
                if DEBUG:
                    print("dedent: %r: %r -> removed %d/%d"\
                          % (line, ch, removed, margin))
                if removed == margin:
                    lines[i] = lines[i][j+1:]
                    break
                elif removed > margin:
                    # Tab overshot the margin: pad back the difference.
                    lines[i] = ' '*(removed-margin) + lines[i][j+1:]
                    break
            else:
                if removed:
                    lines[i] = lines[i][removed:]
    return lines
def _dedent(text, tabsize=8, skip_first_line=False):
    """_dedent(text, tabsize=8, skip_first_line=False) -> dedented text
    "text" is the text to dedent.
    "tabsize" is the tab width to use for indent width calculations.
    "skip_first_line" is a boolean indicating if the first line should
    be skipped for calculating the indent width and for dedenting.
    This is sometimes useful for docstrings and similar.
    textwrap.dedent(s), but don't expand tabs to spaces
    """
    # Split keeping line endings, dedent the pieces in place, rejoin.
    pieces = text.splitlines(1)
    _dedentlines(pieces, tabsize=tabsize, skip_first_line=skip_first_line)
    return ''.join(pieces)
class _memoized(object):
"""Decorator that caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned, and
not re-evaluated.
http://wiki.python.org/moin/PythonDecoratorLibrary
"""
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
try:
return self.cache[args]
except KeyError:
self.cache[args] = value = self.func(*args)
return value
except TypeError:
# uncachable -- for instance, passing a list as an argument.
# Better to not cache than to blow up entirely.
return self.func(*args)
def __repr__(self):
"""Return the function's docstring."""
return self.func.__doc__
def _xml_oneliner_re_from_tab_width(tab_width):
    """Standalone XML processing instruction regex."""
    # Matches an XML processing instruction or a namespaced single tag
    # that sits on its own "paragraph": preceded by a blank line (or the
    # start of the document) and followed by a blank line (or EOF). The
    # %d interpolation permits up to tab_width-1 leading spaces.
    return re.compile(r"""
        (?:
            (?<=\n\n)       # Starting after a blank line
            |               # or
            \A\n?           # the beginning of the doc
        )
        (                           # save in $1
            [ ]{0,%d}
            (?:
                <\?\w+\b\s+.*?\?>   # XML processing instruction
                |
                <\w+:\w+\b\s+.*?/>  # namespaced single tag
            )
            [ \t]*
            (?=\n{2,}|\Z)           # followed by a blank line or end of document
        )
        """ % (tab_width - 1), re.X)
# Rebinding through _memoized caches the compiled regex per tab_width.
_xml_oneliner_re_from_tab_width = _memoized(_xml_oneliner_re_from_tab_width)
def _hr_tag_re_from_tab_width(tab_width):
    # Matches a standalone <hr> tag on its own "paragraph", indented by
    # at most tab_width-1 spaces.
    return re.compile(r"""
        (?:
            (?<=\n\n)       # Starting after a blank line
            |               # or
            \A\n?           # the beginning of the doc
        )
        (                       # save in \1
            [ ]{0,%d}
            <(hr)               # start tag = \2
            \b                  # word break
            ([^<>])*?           #
            /?>                 # the matching end tag
            [ \t]*
            (?=\n{2,}|\Z)       # followed by a blank line or end of document
        )
        """ % (tab_width - 1), re.X)
# Rebinding through _memoized caches the compiled regex per tab_width.
_hr_tag_re_from_tab_width = _memoized(_hr_tag_re_from_tab_width)
def _xml_escape_attr(attr, skip_single_quote=True):
"""Escape the given string for use in an HTML/XML tag attribute.
By default this doesn't bother with escaping `'` to `'`, presuming that
the tag attribute is surrounded by double quotes.
"""
escaped = (attr
.replace('&', '&')
.replace('"', '"')
.replace('<', '<')
.replace('>', '>'))
if not skip_single_quote:
escaped = escaped.replace("'", "'")
return escaped
def _xml_encode_email_char_at_random(ch):
r = random()
# Roughly 10% raw, 45% hex, 45% dec.
# '@' *must* be encoded. I [John Gruber] insist.
# Issue 26: '_' must be encoded.
if r > 0.9 and ch not in "@_":
return ch
elif r < 0.45:
# The [1:] is to drop leading '0': 0x63 -> x63
return '%s;' % hex(ord(ch))[1:]
else:
return '%s;' % ord(ch)
#---- mainline
class _NoReflowFormatter(optparse.IndentedHelpFormatter):
"""An optparse formatter that does NOT reflow the description."""
def format_description(self, description):
return description or ""
def _test():
import doctest
doctest.testmod()
def main(argv=None):
    """Command-line driver: convert each PATH (or stdin, '-') from
    Markdown to HTML and write the result to stdout. Relies on the
    module-level `markdown`, `log`, `cmdln_desc`, `py3`, `MarkdownError`
    and `_regex_from_encoded_pattern` names.
    """
    if argv is None:
        argv = sys.argv
    if not logging.root.handlers:
        logging.basicConfig()
    usage = "usage: %prog [PATHS...]"
    version = "%prog "+__version__
    parser = optparse.OptionParser(prog="markdown2", usage=usage,
        version=version, description=cmdln_desc,
        formatter=_NoReflowFormatter())
    parser.add_option("-v", "--verbose", dest="log_level",
                      action="store_const", const=logging.DEBUG,
                      help="more verbose output")
    parser.add_option("--encoding",
                      help="specify encoding of text content")
    parser.add_option("--html4tags", action="store_true", default=False,
                      help="use HTML 4 style for empty element tags")
    parser.add_option("-s", "--safe", metavar="MODE", dest="safe_mode",
                      help="sanitize literal HTML: 'escape' escapes "
                           "HTML meta chars, 'replace' replaces with an "
                           "[HTML_REMOVED] note")
    parser.add_option("-x", "--extras", action="append",
                      help="Turn on specific extra features (not part of "
                           "the core Markdown spec). See above.")
    # NOTE(review): the help text below ends with an empty string where a
    # "<URL>" appears to have been lost; the option itself still works.
    parser.add_option("--use-file-vars",
                      help="Look for and use Emacs-style 'markdown-extras' "
                           "file var to turn on extras. See "
                           "")
    parser.add_option("--link-patterns-file",
                      help="path to a link pattern file")
    parser.add_option("--self-test", action="store_true",
                      help="run internal self-tests (some doctests)")
    parser.add_option("--compare", action="store_true",
                      help="run against Markdown.pl as well (for testing)")
    parser.set_defaults(log_level=logging.INFO, compare=False,
                        encoding="utf-8", safe_mode=None, use_file_vars=False)
    opts, paths = parser.parse_args()
    log.setLevel(opts.log_level)
    if opts.self_test:
        return _test()
    # Parse "-x name[=intarg]" options into an extras dict
    # (value None when no '=arg' is given).
    if opts.extras:
        extras = {}
        for s in opts.extras:
            splitter = re.compile("[,;: ]+")
            for e in splitter.split(s):
                if '=' in e:
                    ename, earg = e.split('=', 1)
                    try:
                        earg = int(earg)
                    except ValueError:
                        pass
                else:
                    ename, earg = e, None
                extras[ename] = earg
    else:
        extras = None
    # Each non-blank, non-comment line of the link-patterns file is
    # "<encoded-regex> <href>" (split on the last whitespace run).
    if opts.link_patterns_file:
        link_patterns = []
        f = open(opts.link_patterns_file)
        try:
            for i, line in enumerate(f.readlines()):
                if not line.strip(): continue
                if line.lstrip().startswith("#"): continue
                try:
                    pat, href = line.rstrip().rsplit(None, 1)
                except ValueError:
                    raise MarkdownError("%s:%d: invalid link pattern line: %r"
                                        % (opts.link_patterns_file, i+1, line))
                link_patterns.append(
                    (_regex_from_encoded_pattern(pat), href))
        finally:
            f.close()
    else:
        link_patterns = None
    from os.path import join, dirname, abspath, exists
    markdown_pl = join(dirname(dirname(abspath(__file__))), "test",
                       "Markdown.pl")
    if not paths:
        paths = ['-']
    for path in paths:
        if path == '-':
            text = sys.stdin.read()
        else:
            fp = codecs.open(path, 'r', opts.encoding)
            text = fp.read()
            fp.close()
        # --compare: also pipe the input through the reference Markdown.pl.
        if opts.compare:
            from subprocess import Popen, PIPE
            print("==== Markdown.pl ====")
            p = Popen('perl %s' % markdown_pl, shell=True, stdin=PIPE, stdout=PIPE, close_fds=True)
            p.stdin.write(text.encode('utf-8'))
            p.stdin.close()
            perl_html = p.stdout.read().decode('utf-8')
            if py3:
                sys.stdout.write(perl_html)
            else:
                sys.stdout.write(perl_html.encode(
                    sys.stdout.encoding or "utf-8", 'xmlcharrefreplace'))
            print("==== markdown2.py ====")
        html = markdown(text,
            html4tags=opts.html4tags,
            safe_mode=opts.safe_mode,
            extras=extras, link_patterns=link_patterns,
            use_file_vars=opts.use_file_vars)
        if py3:
            sys.stdout.write(html)
        else:
            sys.stdout.write(html.encode(
                sys.stdout.encoding or "utf-8", 'xmlcharrefreplace'))
        if extras and "toc" in extras:
            log.debug("toc_html: " +
                html.toc_html.encode(sys.stdout.encoding or "utf-8", 'xmlcharrefreplace'))
        # Normalize both outputs (when the test helpers are available)
        # before comparing ours against Markdown.pl's.
        if opts.compare:
            test_dir = join(dirname(dirname(abspath(__file__))), "test")
            if exists(join(test_dir, "test_markdown2.py")):
                sys.path.insert(0, test_dir)
                from test_markdown2 import norm_html_from_html
                norm_html = norm_html_from_html(html)
                norm_perl_html = norm_html_from_html(perl_html)
            else:
                norm_html = html
                norm_perl_html = perl_html
            print("==== match? %r ====" % (norm_perl_html == norm_html))
if __name__ == "__main__":
sys.exit( main(sys.argv) )
================================================
FILE: lib/oauth2/__init__.py
================================================
"""
The MIT License
Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import base64
import urllib
import time
import random
import urlparse
import hmac
import binascii
import httplib2
try:
from urlparse import parse_qs
parse_qs # placate pyflakes
except ImportError:
# fall back for Python 2.5
from cgi import parse_qs
try:
from hashlib import sha1
sha = sha1
except ImportError:
# hashlib was added in Python 2.5
import sha
# import _version
# __version__ = _version.__version__
OAUTH_VERSION = '1.0' # Hi Blaine!
HTTP_METHOD = 'GET'
SIGNATURE_METHOD = 'PLAINTEXT'
class Error(RuntimeError):
    """Base exception for OAuth failures."""
    def __init__(self, message='OAuth error occurred.'):
        # Stored privately; BaseException.message triggers deprecation
        # warnings on Python 2.6, so it is exposed via the read-only
        # property below instead.
        self._message = message

    @property
    def message(self):
        """A hack to get around the deprecation errors in 2.6."""
        return self._message

    def __str__(self):
        return self.message
class MissingSignature(Error):
    """Raised when a request is missing its oauth_signature parameter."""
def build_authenticate_header(realm=''):
    """Optional WWW-Authenticate header (401 error)"""
    challenge = 'OAuth realm="%s"' % realm
    return {'WWW-Authenticate': challenge}
def build_xoauth_string(url, consumer, token=None):
    """Build an XOAUTH string for use in SMTP/IMAP authentication."""
    # Sign a GET request for `url`, then render the signed oauth
    # parameters, sorted by key, as comma-joined key="escaped-value"
    # pairs. (Python 2 only: uses dict.iteritems().)
    request = Request.from_consumer_and_token(consumer, token,
        "GET", url)
    signing_method = SignatureMethod_HMAC_SHA1()
    request.sign_request(signing_method, consumer, token)
    params = []
    for k, v in sorted(request.iteritems()):
        if v is not None:
            params.append('%s="%s"' % (k, escape(v)))
    return "%s %s %s" % ("GET", url, ','.join(params))
def to_unicode(s):
    """ Convert to unicode, raise exception with instructive error
    message if s is not unicode, ascii, or utf-8. """
    # Python 2 only: relies on the `unicode` builtin and the
    # `except ExcType, name` syntax, neither of which exists on Python 3.
    if not isinstance(s, unicode):
        if not isinstance(s, str):
            raise TypeError('You are required to pass either unicode or string here, not: %r (%s)' % (type(s), s))
        try:
            s = s.decode('utf-8')
        except UnicodeDecodeError, le:
            raise TypeError('You are required to pass either a unicode object or a utf-8 string here. You passed a Python string object which contained non-utf-8: %r. The UnicodeDecodeError that resulted from attempting to interpret it as utf-8 was: %s' % (s, le,))
    return s
def to_utf8(s):
    # Round-trips through to_unicode() so that non-text or non-utf-8
    # input raises that function's TypeError.
    return to_unicode(s).encode('utf-8')
def to_unicode_if_string(s):
    # Coerce str/unicode values; pass every other type through untouched.
    # (Python 2 only: `basestring`.)
    if isinstance(s, basestring):
        return to_unicode(s)
    else:
        return s
def to_utf8_if_string(s):
    # Encode str/unicode values to utf-8 bytes; pass other types through.
    if isinstance(s, basestring):
        return to_utf8(s)
    else:
        return s
def to_unicode_optional_iterator(x):
    """
    Raise TypeError if x is a str containing non-utf8 bytes or if x is
    an iterable which contains such a str.
    """
    if isinstance(x, basestring):
        return to_unicode(x)
    try:
        l = list(x)
    except TypeError, e:
        # Non-iterable (e.g. int): return unchanged.
        assert 'is not iterable' in str(e)
        return x
    else:
        # NOTE(review): unlike to_utf8_optional_iterator below, this maps
        # to_unicode (not to_unicode_if_string) over the elements, so a
        # non-string element raises TypeError here -- confirm intended.
        return [ to_unicode(e) for e in l ]
def to_utf8_optional_iterator(x):
    """
    Raise TypeError if x is a str or if x is an iterable which
    contains a str.
    """
    if isinstance(x, basestring):
        return to_utf8(x)
    try:
        l = list(x)
    except TypeError, e:
        # Non-iterable (e.g. int): return unchanged.
        assert 'is not iterable' in str(e)
        return x
    else:
        # String elements are utf-8 encoded; other element types pass through.
        return [ to_utf8_if_string(e) for e in l ]
def escape(s):
    """Escape a URL including any /."""
    # safe='~' overrides urllib.quote's default safe='/', so slashes are
    # percent-encoded too; only '~' is left bare. (Python 2 urllib.)
    return urllib.quote(s.encode('utf-8'), safe='~')
def generate_timestamp():
    """Get seconds since epoch (UTC)."""
    now = time.time()
    return int(now)
def generate_nonce(length=8):
    """Generate pseudorandom number."""
    digits = [str(random.randint(0, 9)) for _ in range(length)]
    return ''.join(digits)
def generate_verifier(length=8):
    """Generate pseudorandom number."""
    chosen = []
    for _ in range(length):
        chosen.append(str(random.randint(0, 9)))
    return ''.join(chosen)
class Consumer(object):
    """A consumer of OAuth-protected services.
    The OAuth consumer is a "third-party" service that wants to access
    protected resources from an OAuth service provider on behalf of an end
    user. It's kind of the OAuth client.
    The service provider registers the consumer and issues it a *key* and a
    *secret*: the key identifies the consumer in every request, while the
    secret is used only when signing requests, to prove the request really
    comes from that registered consumer. With those credentials the consumer
    can ask the provider for a request token and kick off the OAuth
    authorization process.
    """
    key = None
    secret = None

    def __init__(self, key, secret):
        self.key = key
        self.secret = secret
        # Both credentials are mandatory.
        if self.key is None or self.secret is None:
            raise ValueError("Key and secret must be set.")

    def __str__(self):
        # Serialize as form-encoded credentials (Python 2 urllib).
        return urllib.urlencode({'oauth_consumer_key': self.key,
                                 'oauth_consumer_secret': self.secret})
class Token(object):
    """An OAuth credential used to request authorization or a protected
    resource.
    Tokens in OAuth comprise a *key* and a *secret*. The key is included in
    requests to identify the token being used, but the secret is used only in
    the signature, to prove that the requester is who the server gave the
    token to.
    When first negotiating the authorization, the consumer asks for a *request
    token* that the live user authorizes with the service provider. The
    consumer then exchanges the request token for an *access token* that can
    be used to access protected resources.
    (Python 2 only: uses the urlparse/urllib modules and parse_qs helper.)
    """
    key = None
    secret = None
    callback = None
    callback_confirmed = None
    verifier = None
    def __init__(self, key, secret):
        self.key = key
        self.secret = secret
        # Both credentials are mandatory.
        if self.key is None or self.secret is None:
            raise ValueError("Key and secret must be set.")
    def set_callback(self, callback):
        # Marks the callback as confirmed with the literal string 'true'
        # (the wire format used by oauth_callback_confirmed).
        self.callback = callback
        self.callback_confirmed = 'true'
    def set_verifier(self, verifier=None):
        # Use the provided verifier, or generate a pseudorandom one.
        if verifier is not None:
            self.verifier = verifier
        else:
            self.verifier = generate_verifier()
    def get_callback_url(self):
        # Returns the callback URL with oauth_verifier appended to its
        # query string (only when both callback and verifier are set).
        if self.callback and self.verifier:
            # Append the oauth_verifier.
            parts = urlparse.urlparse(self.callback)
            scheme, netloc, path, params, query, fragment = parts[:6]
            if query:
                query = '%s&oauth_verifier=%s' % (query, self.verifier)
            else:
                query = 'oauth_verifier=%s' % self.verifier
            return urlparse.urlunparse((scheme, netloc, path, params,
                query, fragment))
        return self.callback
    def to_string(self):
        """Returns this token as a plain string, suitable for storage.
        The resulting string includes the token's secret, so you should never
        send or store this string where a third party can read it.
        """
        data = {
            'oauth_token': self.key,
            'oauth_token_secret': self.secret,
        }
        if self.callback_confirmed is not None:
            data['oauth_callback_confirmed'] = self.callback_confirmed
        return urllib.urlencode(data)
    @staticmethod
    def from_string(s):
        """Deserializes a token from a string like one returned by
        `to_string()`."""
        if not len(s):
            raise ValueError("Invalid parameter string.")
        params = parse_qs(s, keep_blank_values=False)
        if not len(params):
            raise ValueError("Invalid parameter string.")
        try:
            key = params['oauth_token'][0]
        except Exception:
            raise ValueError("'oauth_token' not found in OAuth request.")
        try:
            secret = params['oauth_token_secret'][0]
        except Exception:
            raise ValueError("'oauth_token_secret' not found in "
                "OAuth request.")
        token = Token(key, secret)
        try:
            token.callback_confirmed = params['oauth_callback_confirmed'][0]
        except KeyError:
            pass # 1.0, no callback confirmed.
        return token
    def __str__(self):
        return self.to_string()
def setter(attr):
    """Decorator that builds a property from a setter function `attr`:
    the generated getter/deleter read and remove self.__dict__[attr.__name__],
    raising AttributeError when the value has not been set.
    """
    name = attr.__name__

    def getter(self):
        if name in self.__dict__:
            return self.__dict__[name]
        raise AttributeError(name)

    def deleter(self):
        del self.__dict__[name]

    return property(getter, attr, deleter)
class Request(dict):
    """The parameters and information for an HTTP request, suitable for
    authorizing with OAuth credentials.
    When a consumer wants to access a service's protected resources, it does
    so using a signed HTTP request identifying itself (the consumer) with its
    key, and providing an access token authorized by the end user to access
    those resources.
    The dict contents are the oauth (and any extra) request parameters.
    (Python 2 only: uses dict.iteritems(), `basestring`, urlparse/urllib and
    the `except ExcType, name` syntax.)
    """
    version = OAUTH_VERSION
    def __init__(self, method=HTTP_METHOD, url=None, parameters=None,
                 body='', is_form_encoded=False):
        # Assigning self.url goes through the `url` property below, which
        # also computes self.normalized_url.
        if url is not None:
            self.url = to_unicode(url)
        self.method = method
        if parameters is not None:
            for k, v in parameters.iteritems():
                k = to_unicode(k)
                v = to_unicode_optional_iterator(v)
                self[k] = v
        self.body = body
        self.is_form_encoded = is_form_encoded
    @setter
    def url(self, value):
        self.__dict__['url'] = value
        if value is not None:
            scheme, netloc, path, params, query, fragment = urlparse.urlparse(value)
            # Exclude default port numbers.
            if scheme == 'http' and netloc[-3:] == ':80':
                netloc = netloc[:-3]
            elif scheme == 'https' and netloc[-4:] == ':443':
                netloc = netloc[:-4]
            if scheme not in ('http', 'https'):
                raise ValueError("Unsupported URL %s (%s)." % (value, scheme))
            # Normalized URL excludes params, query, and fragment.
            self.normalized_url = urlparse.urlunparse((scheme, netloc, path, None, None, None))
        else:
            self.normalized_url = None
            self.__dict__['url'] = None
    @setter
    def method(self, value):
        # Stored upper-cased; comparisons elsewhere assume "GET"/"POST".
        self.__dict__['method'] = value.upper()
    def _get_timestamp_nonce(self):
        # Raises KeyError if either oauth parameter is missing.
        return self['oauth_timestamp'], self['oauth_nonce']
    def get_nonoauth_parameters(self):
        """Get any non-OAuth parameters."""
        return dict([(k, v) for k, v in self.iteritems()
                    if not k.startswith('oauth_')])
    def to_header(self, realm=''):
        """Serialize as a header for an HTTPAuth request."""
        oauth_params = ((k, v) for k, v in self.items()
                            if k.startswith('oauth_'))
        stringy_params = ((k, escape(str(v))) for k, v in oauth_params)
        header_params = ('%s="%s"' % (k, v) for k, v in stringy_params)
        params_header = ', '.join(header_params)
        auth_header = 'OAuth realm="%s"' % realm
        if params_header:
            auth_header = "%s, %s" % (auth_header, params_header)
        return {'Authorization': auth_header}
    def to_postdata(self):
        """Serialize as post data for a POST request."""
        d = {}
        for k, v in self.iteritems():
            d[k.encode('utf-8')] = to_utf8_optional_iterator(v)
        # tell urlencode to deal with sequence values and map them correctly
        # to resulting querystring. for example self["k"] = ["v1", "v2"] will
        # result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D
        return urllib.urlencode(d, True).replace('+', '%20')
    def to_url(self):
        """Serialize as a URL for a GET request."""
        base_url = urlparse.urlparse(self.url)
        try:
            query = base_url.query
        except AttributeError:
            # must be python <2.5
            query = base_url[4]
        query = parse_qs(query)
        # Merge this request's parameters into the URL's existing query.
        for k, v in self.items():
            query.setdefault(k, []).append(v)
        try:
            scheme = base_url.scheme
            netloc = base_url.netloc
            path = base_url.path
            params = base_url.params
            fragment = base_url.fragment
        except AttributeError:
            # must be python <2.5
            scheme = base_url[0]
            netloc = base_url[1]
            path = base_url[2]
            params = base_url[3]
            fragment = base_url[5]
        url = (scheme, netloc, path, params,
               urllib.urlencode(query, True), fragment)
        return urlparse.urlunparse(url)
    def get_parameter(self, parameter):
        # Like dict.get() but raises the module's Error when absent/None.
        ret = self.get(parameter)
        if ret is None:
            raise Error('Parameter not found: %s' % parameter)
        return ret
    def get_normalized_parameters(self):
        """Return a string that contains the parameters that must be signed."""
        items = []
        for key, value in self.iteritems():
            if key == 'oauth_signature':
                continue
            # 1.0a/9.1.1 states that kvp must be sorted by key, then by value,
            # so we unpack sequence values into multiple items for sorting.
            if isinstance(value, basestring):
                items.append((to_utf8_if_string(key), to_utf8(value)))
            else:
                try:
                    value = list(value)
                except TypeError, e:
                    assert 'is not iterable' in str(e)
                    items.append((to_utf8_if_string(key), to_utf8_if_string(value)))
                else:
                    items.extend((to_utf8_if_string(key), to_utf8_if_string(item)) for item in value)
        # Include any query string parameters from the provided URL
        query = urlparse.urlparse(self.url)[4]
        url_items = self._split_url_string(query).items()
        url_items = [(to_utf8(k), to_utf8(v)) for k, v in url_items if k != 'oauth_signature' ]
        items.extend(url_items)
        items.sort()
        encoded_str = urllib.urlencode(items)
        # Encode signature parameters per Oauth Core 1.0 protocol
        # spec draft 7, section 3.6
        # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6)
        # Spaces must be encoded with "%20" instead of "+"
        return encoded_str.replace('+', '%20').replace('%7E', '~')
    def sign_request(self, signature_method, consumer, token):
        """Set the signature parameter to the result of sign."""
        if not self.is_form_encoded:
            # according to
            # http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html
            # section 4.1.1 "OAuth Consumers MUST NOT include an
            # oauth_body_hash parameter on requests with form-encoded
            # request bodies."
            self['oauth_body_hash'] = base64.b64encode(sha(self.body).digest())
        if 'oauth_consumer_key' not in self:
            self['oauth_consumer_key'] = consumer.key
        if token and 'oauth_token' not in self:
            self['oauth_token'] = token.key
        self['oauth_signature_method'] = signature_method.name
        self['oauth_signature'] = signature_method.sign(self, consumer, token)
    @classmethod
    def make_timestamp(cls):
        """Get seconds since epoch (UTC)."""
        return str(int(time.time()))
    @classmethod
    def make_nonce(cls):
        """Generate pseudorandom number."""
        return str(random.randint(0, 100000000))
    @classmethod
    def from_request(cls, http_method, http_url, headers=None, parameters=None,
            query_string=None):
        """Combines multiple parameter sources."""
        if parameters is None:
            parameters = {}
        # Headers
        if headers and 'Authorization' in headers:
            auth_header = headers['Authorization']
            # Check that the authorization header is OAuth.
            if auth_header[:6] == 'OAuth ':
                auth_header = auth_header[6:]
                try:
                    # Get the parameters from the header.
                    header_params = cls._split_header(auth_header)
                    parameters.update(header_params)
                except:
                    raise Error('Unable to parse OAuth parameters from '
                        'Authorization header.')
        # GET or POST query string.
        if query_string:
            query_params = cls._split_url_string(query_string)
            parameters.update(query_params)
        # URL parameters.
        param_str = urlparse.urlparse(http_url)[4] # query
        url_params = cls._split_url_string(param_str)
        parameters.update(url_params)
        if parameters:
            return cls(http_method, http_url, parameters)
        return None
    @classmethod
    def from_consumer_and_token(cls, consumer, token=None,
            http_method=HTTP_METHOD, http_url=None, parameters=None,
            body='', is_form_encoded=False):
        # Caller-supplied parameters override the generated defaults.
        if not parameters:
            parameters = {}
        defaults = {
            'oauth_consumer_key': consumer.key,
            'oauth_timestamp': cls.make_timestamp(),
            'oauth_nonce': cls.make_nonce(),
            'oauth_version': cls.version,
        }
        defaults.update(parameters)
        parameters = defaults
        if token:
            parameters['oauth_token'] = token.key
            if token.verifier:
                parameters['oauth_verifier'] = token.verifier
        return Request(http_method, http_url, parameters, body=body,
                       is_form_encoded=is_form_encoded)
    @classmethod
    def from_token_and_callback(cls, token, callback=None,
            http_method=HTTP_METHOD, http_url=None, parameters=None):
        if not parameters:
            parameters = {}
        parameters['oauth_token'] = token.key
        if callback:
            parameters['oauth_callback'] = callback
        return cls(http_method, http_url, parameters)
    @staticmethod
    def _split_header(header):
        """Turn Authorization: header into parameters."""
        params = {}
        parts = header.split(',')
        for param in parts:
            # Ignore realm parameter.
            if param.find('realm') > -1:
                continue
            # Remove whitespace.
            param = param.strip()
            # Split key-value.
            param_parts = param.split('=', 1)
            # Remove quotes and unescape the value.
            params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
        return params
    @staticmethod
    def _split_url_string(param_str):
        """Turn URL string into parameters."""
        parameters = parse_qs(param_str.encode('utf-8'), keep_blank_values=True)
        # parse_qs returns lists; keep only the first value for each key.
        for k, v in parameters.iteritems():
            parameters[k] = urllib.unquote(v[0])
        return parameters
class Client(httplib2.Http):
    """OAuthClient is a worker to attempt to execute a request.
    Subclasses httplib2.Http: request() signs the outgoing request with
    the consumer/token credentials before delegating to the parent.
    """
    def __init__(self, consumer, token=None, cache=None, timeout=None,
        proxy_info=None):
        if consumer is not None and not isinstance(consumer, Consumer):
            raise ValueError("Invalid consumer.")
        if token is not None and not isinstance(token, Token):
            raise ValueError("Invalid token.")
        self.consumer = consumer
        self.token = token
        # Default signing method; replace via set_signature_method().
        self.method = SignatureMethod_HMAC_SHA1()
        httplib2.Http.__init__(self, cache=cache, timeout=timeout, proxy_info=proxy_info)
    def set_signature_method(self, method):
        if not isinstance(method, SignatureMethod):
            raise ValueError("Invalid signature method.")
        self.method = method
    def request(self, uri, method="GET", body='', headers=None,
        redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None):
        DEFAULT_POST_CONTENT_TYPE = 'application/x-www-form-urlencoded'
        if not isinstance(headers, dict):
            headers = {}
        if method == "POST":
            headers['Content-Type'] = headers.get('Content-Type',
                DEFAULT_POST_CONTENT_TYPE)
        is_form_encoded = \
            headers.get('Content-Type') == 'application/x-www-form-urlencoded'
        # Form-encoded bodies contribute their fields to the signature.
        if is_form_encoded and body:
            parameters = parse_qs(body)
        else:
            parameters = None
        req = Request.from_consumer_and_token(self.consumer,
            token=self.token, http_method=method, http_url=uri,
            parameters=parameters, body=body, is_form_encoded=is_form_encoded)
        req.sign_request(self.method, self.consumer, self.token)
        # Build the realm as scheme://host (Python 2 urllib.splittype/splithost).
        schema, rest = urllib.splittype(uri)
        if rest.startswith('//'):
            hierpart = '//'
        else:
            hierpart = ''
        host, rest = urllib.splithost(rest)
        realm = schema + ':' + hierpart + host
        # Where the oauth parameters travel depends on the request shape:
        # form-encoded POST -> body; GET -> query string; otherwise -> header.
        if is_form_encoded:
            body = req.to_postdata()
        elif method == "GET":
            uri = req.to_url()
        else:
            headers.update(req.to_header(realm=realm))
        return httplib2.Http.request(self, uri, method=method, body=body,
            headers=headers, redirections=redirections,
            connection_type=connection_type)
class Server(object):
    """A skeletal implementation of a service provider, providing protected
    resources to requests from authorized consumers.
    This class implements the logic to check requests for authorization. You
    can use it with your web server or web framework to protect certain
    resources with OAuth.
    """
    timestamp_threshold = 300 # In seconds, five minutes.
    version = OAUTH_VERSION
    signature_methods = None
    def __init__(self, signature_methods=None):
        self.signature_methods = signature_methods or {}
    def add_signature_method(self, signature_method):
        # Registered under the method's wire name (e.g. 'HMAC-SHA1').
        self.signature_methods[signature_method.name] = signature_method
        return self.signature_methods
    def verify_request(self, request, consumer, token):
        """Verifies an api call and checks all the parameters."""
        # Raises (via the helpers) when the version or signature is bad;
        # on success returns the request's non-oauth parameters.
        self._check_version(request)
        self._check_signature(request, consumer, token)
        parameters = request.get_nonoauth_parameters()
        return parameters
    def build_authenticate_header(self, realm=''):
        """Optional support for the authenticate header."""
        return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
    def _check_version(self, request):
        """Verify the correct version of the request for this server."""
        version = self._get_version(request)
        if version and version != self.version:
            raise Error('OAuth version %s not supported.' % str(version))
    def _get_version(self, request):
        """Return the version of the request for this server."""
        try:
            version = request.get_parameter('oauth_version')
        except:
            # Missing oauth_version: treat as the default version.
            version = OAUTH_VERSION
        return version
    def _get_signature_method(self, request):
        """Figure out the signature with some defaults."""
        try:
            signature_method = request.get_parameter('oauth_signature_method')
        except:
            signature_method = SIGNATURE_METHOD
        try:
            # Get the signature method object.
            signature_method = self.signature_methods[signature_method]
        except:
            signature_method_names = ', '.join(self.signature_methods.keys())
            raise Error('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names))
        return signature_method
    def _get_verifier(self, request):
        return request.get_parameter('oauth_verifier')
    def _check_signature(self, request, consumer, token):
        timestamp, nonce = request._get_timestamp_nonce()
        self._check_timestamp(timestamp)
        signature_method = self._get_signature_method(request)
        try:
            signature = request.get_parameter('oauth_signature')
        except:
            raise MissingSignature('Missing oauth_signature.')
        # Validate the signature.
        valid = signature_method.check(request, consumer, token, signature)
        if not valid:
            key, base = signature_method.signing_base(request, consumer, token)
            raise Error('Invalid signature. Expected signature base '
                'string: %s' % base)
    def _check_timestamp(self, timestamp):
        """Verify that timestamp is recentish."""
        timestamp = int(timestamp)
        now = int(time.time())
        lapsed = now - timestamp
        if lapsed > self.timestamp_threshold:
            raise Error('Expired timestamp: given %d and now %s has a '
                'greater difference than threshold %d' % (timestamp, now,
                self.timestamp_threshold))
class SignatureMethod(object):
    """A way of signing requests.
    The OAuth protocol lets consumers and service providers pick a way to sign
    requests. This interface shows the methods expected by the other `oauth`
    modules for signing requests. Subclass it and implement its methods to
    provide a new way to sign requests.
    """

    def signing_base(self, request, consumer, token):
        """Calculates the string that needs to be signed.
        Returns a 2-tuple: the starting key for the signing, and the
        message to be signed. The latter may be used in error messages to
        help clients debug their software.
        """
        raise NotImplementedError

    def sign(self, request, consumer, token):
        """Returns the signature for the given request, based on the consumer
        and token also provided.
        Implementations should build the message to sign with
        `signing_base()`; otherwise it may be less useful for debugging.
        """
        raise NotImplementedError

    def check(self, request, consumer, token, signature):
        """Returns whether the given signature is the correct signature for
        the given consumer and token signing the given request."""
        expected = self.sign(request, consumer, token)
        return expected == signature
class SignatureMethod_HMAC_SHA1(SignatureMethod):
    # HMAC-SHA1 signing per OAuth 1.0. (Python 2 only: hmac.new is called
    # with str key/msg.)
    name = 'HMAC-SHA1'
    def signing_base(self, request, consumer, token):
        # normalized_url is computed by Request's `url` setter; a request
        # built without a URL cannot be signed.
        if not hasattr(request, 'normalized_url') or request.normalized_url is None:
            raise ValueError("Base URL for request is not set.")
        # Signature base string: METHOD & URL & normalized-params,
        # each component percent-escaped before joining.
        sig = (
            escape(request.method),
            escape(request.normalized_url),
            escape(request.get_normalized_parameters()),
        )
        # Key is "consumer_secret&[token_secret]".
        key = '%s&' % escape(consumer.secret)
        if token:
            key += escape(token.secret)
        raw = '&'.join(sig)
        return key, raw
    def sign(self, request, consumer, token):
        """Builds the base signature string."""
        key, raw = self.signing_base(request, consumer, token)
        hashed = hmac.new(key, raw, sha)
        # Calculate the digest base 64.
        # [:-1] strips the trailing newline b2a_base64 appends.
        return binascii.b2a_base64(hashed.digest())[:-1]
class SignatureMethod_PLAINTEXT(SignatureMethod):
    # PLAINTEXT signing: the "signature" is just the escaped secrets.
    name = 'PLAINTEXT'
    def signing_base(self, request, consumer, token):
        """Concatenates the consumer key and secret with the token's
        secret."""
        sig = '%s&' % escape(consumer.secret)
        if token:
            sig = sig + escape(token.secret)
        # Key and message are identical for PLAINTEXT.
        return sig, sig
    def sign(self, request, consumer, token):
        key, raw = self.signing_base(request, consumer, token)
        return raw
================================================
FILE: lib/oauth2/_version.py
================================================
# This is the version of this source code.
manual_verstr = "1.5"
auto_build_num = "211"
# Full version string, e.g. "1.5.211".
verstr = manual_verstr + "." + auto_build_num
try:
    # Prefer pyutil's richer Version object when it is installed.
    from pyutil.version_class import Version as pyutil_Version
    __version__ = pyutil_Version(verstr)
except (ImportError, ValueError):
    # Maybe there is no pyutil installed.
    from distutils.version import LooseVersion as distutils_Version
    __version__ = distutils_Version(verstr)
================================================
FILE: lib/oauth2/clients/__init__.py
================================================
================================================
FILE: lib/oauth2/clients/imap.py
================================================
"""
The MIT License
Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import oauth2
import imaplib
class IMAP4_SSL(imaplib.IMAP4_SSL):
    """IMAP wrapper for imaplib.IMAP4_SSL that implements XOAUTH."""

    def authenticate(self, url, consumer, token):
        """Authenticate via the XOAUTH mechanism.

        Validates the oauth2 credentials (when given), then delegates to the
        underlying imaplib AUTHENTICATE with an XOAUTH string builder.
        """
        if consumer is not None and not isinstance(consumer, oauth2.Consumer):
            raise ValueError("Invalid consumer.")
        if token is not None and not isinstance(token, oauth2.Token):
            raise ValueError("Invalid token.")
        # imaplib calls the authobject with the server challenge; the XOAUTH
        # string ignores it and is built from url/consumer/token alone.
        build_response = lambda challenge: oauth2.build_xoauth_string(url, consumer, token)
        imaplib.IMAP4_SSL.authenticate(self, 'XOAUTH', build_response)
================================================
FILE: lib/oauth2/clients/smtp.py
================================================
"""
The MIT License
Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import oauth2
import smtplib
import base64
class SMTP(smtplib.SMTP):
    """SMTP wrapper for smtplib.SMTP that implements XOAUTH."""

    def authenticate(self, url, consumer, token):
        """Issue an AUTH XOAUTH command after validating oauth2 credentials."""
        if consumer is not None and not isinstance(consumer, oauth2.Consumer):
            raise ValueError("Invalid consumer.")
        if token is not None and not isinstance(token, oauth2.Token):
            raise ValueError("Invalid token.")
        # The XOAUTH initial response is sent base64-encoded on the AUTH line.
        encoded = base64.b64encode(oauth2.build_xoauth_string(url, consumer, token))
        self.docmd('AUTH', 'XOAUTH %s' % encoded)
================================================
FILE: lib/thrift/TSCons.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from os import path
from SCons.Builder import Builder
def scons_env(env, add=''):
    """Attach a 'ThriftCpp' builder (thrift --gen cpp) to a SCons environment.

    `add` is spliced verbatim into the command line for extra thrift flags.
    """
    out_dir = path.dirname(path.abspath('$TARGET'))
    command = ' '.join(['thrift --gen cpp -o', out_dir, add, '$SOURCE'])
    env.Append(BUILDERS={'ThriftCpp': Builder(action=command)})
def gen_cpp(env, dir, file):
    """Register the ThriftCpp builder and return the generated C++ targets
    for `dir`/`file`.thrift (the `_types.h` / `_types.cpp` pair)."""
    scons_env(env)
    suffixes = ['_types.h', '_types.cpp']
    # Build a real list: under Python 3, map() returns a one-shot lazy
    # iterator, which SCons may iterate more than once.
    targets = ['gen-cpp/' + file + suffix for suffix in suffixes]
    return env.ThriftCpp(targets, dir + file + '.thrift')
================================================
FILE: lib/thrift/TSerialization.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from protocol import TBinaryProtocol
from transport import TTransport
def serialize(thrift_object, protocol_factory = TBinaryProtocol.TBinaryProtocolFactory()):
    """Encode *thrift_object* into bytes using *protocol_factory*.

    Returns the serialized buffer contents.
    """
    membuf = TTransport.TMemoryBuffer()
    thrift_object.write(protocol_factory.getProtocol(membuf))
    return membuf.getvalue()
def deserialize(base, buf, protocol_factory = TBinaryProtocol.TBinaryProtocolFactory()):
    """Populate the thrift object *base* by decoding *buf*; returns *base*."""
    membuf = TTransport.TMemoryBuffer(buf)
    base.read(protocol_factory.getProtocol(membuf))
    return base
================================================
FILE: lib/thrift/Thrift.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import sys
class TType:
    """Thrift wire-type identifiers shared by all protocol implementations."""
    STOP = 0
    VOID = 1
    BOOL = 2
    BYTE = 3
    I08 = 3       # alias of BYTE
    DOUBLE = 4
    I16 = 6
    I32 = 8
    I64 = 10
    STRING = 11
    UTF7 = 11     # alias of STRING
    STRUCT = 12
    MAP = 13
    SET = 14
    LIST = 15
    UTF8 = 16
    UTF16 = 17

    # Tuple indexed by type id; None marks the unassigned ids 5, 7 and 9.
    _VALUES_TO_NAMES = ( 'STOP',
                         'VOID',
                         'BOOL',
                         'BYTE',
                         'DOUBLE',
                         None,
                         'I16',
                         None,
                         'I32',
                         None,
                         'I64',
                         'STRING',
                         'STRUCT',
                         'MAP',
                         'SET',
                         'LIST',
                         'UTF8',
                         'UTF16' )
class TMessageType:
    """RPC message kinds carried in the thrift message header."""
    CALL = 1
    REPLY = 2
    EXCEPTION = 3
    ONEWAY = 4
class TProcessor:
    """Base class for processor, which works on two streams."""

    def process(self, iprot, oprot):
        """Consume one message from *iprot* and write the response to *oprot*.

        Subclasses override this; the base implementation is a no-op.
        """
        # Fixed: the original stub was declared as ``process(iprot, oprot)``
        # without ``self``, so any ``instance.process(iprot, oprot)`` call
        # raised TypeError (three arguments for a two-argument function).
        pass
class TException(Exception):
    """Base class for all thrift exceptions."""
    # BaseException.message is deprecated in Python v[2.6,3.0)
    # Re-expose it as a property backed by ``_message`` on those versions so
    # reading/assigning ``exc.message`` does not emit a DeprecationWarning.
    if (2,6,0) <= sys.version_info < (3,0):
        def _get_message(self):
            return self._message

        def _set_message(self, message):
            self._message = message
        message = property(_get_message, _set_message)

    def __init__(self, message=None):
        Exception.__init__(self, message)
        # On 2.6/2.7 this assignment goes through the property above.
        self.message = message
class TApplicationException(TException):
    """Application level thrift exceptions (errors raised by RPC machinery)."""

    # Error categories carried in ``type``.
    UNKNOWN = 0
    UNKNOWN_METHOD = 1
    INVALID_MESSAGE_TYPE = 2
    WRONG_METHOD_NAME = 3
    BAD_SEQUENCE_ID = 4
    MISSING_RESULT = 5
    INTERNAL_ERROR = 6
    PROTOCOL_ERROR = 7

    def __init__(self, type=UNKNOWN, message=None):
        TException.__init__(self, message)
        self.type = type

    def __str__(self):
        # An explicit message always wins; otherwise describe the category.
        if self.message:
            return self.message
        default_texts = {
            self.UNKNOWN_METHOD: 'Unknown method',
            self.INVALID_MESSAGE_TYPE: 'Invalid message type',
            self.WRONG_METHOD_NAME: 'Wrong method name',
            self.BAD_SEQUENCE_ID: 'Bad sequence ID',
            self.MISSING_RESULT: 'Missing result',
        }
        return default_texts.get(self.type, 'Default (unknown) TApplicationException')

    def read(self, iprot):
        """Deserialize from *iprot*: field 1 is the message, field 2 the type."""
        iprot.readStructBegin()
        while True:
            fname, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.message = iprot.readString()
            elif fid == 2 and ftype == TType.I32:
                self.type = iprot.readI32()
            else:
                # Unknown field, or a known field with the wrong wire type.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this exception onto *oprot*."""
        oprot.writeStructBegin('TApplicationException')
        if self.message is not None:
            oprot.writeFieldBegin('message', TType.STRING, 1)
            oprot.writeString(self.message)
            oprot.writeFieldEnd()
        if self.type is not None:
            oprot.writeFieldBegin('type', TType.I32, 2)
            oprot.writeI32(self.type)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
================================================
FILE: lib/thrift/__init__.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Submodules re-exported by ``from thrift import *``.
__all__ = ['Thrift', 'TSCons']
================================================
FILE: lib/thrift/protocol/TBase.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from thrift.Thrift import *
from thrift.protocol import TBinaryProtocol
from thrift.transport import TTransport
# The C-accelerated codec is optional; fall back to pure Python when absent.
try:
    from thrift.protocol import fastbinary
except ImportError:
    # Narrowed from a bare ``except:`` so that unrelated errors (e.g.
    # KeyboardInterrupt, SystemExit) are no longer silently swallowed here.
    fastbinary = None
class TBase(object):
    """Common base for generated thrift structs: repr/equality over
    ``__slots__`` plus (de)serialization with an optional C fast path."""

    __slots__ = []

    def __repr__(self):
        rendered = ('%s=%r' % (name, getattr(self, name)) for name in self.__slots__)
        return '%s(%s)' % (self.__class__.__name__, ', '.join(rendered))

    def __eq__(self, other):
        # Equal iff the exact same class and every slot value matches.
        if not isinstance(other, self.__class__):
            return False
        for name in self.__slots__:
            if getattr(self, name) != getattr(other, name):
                return False
        return True

    def __ne__(self, other):
        return not (self == other)

    def read(self, iprot):
        """Populate this struct from *iprot*, using fastbinary when possible."""
        if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None
                and fastbinary is not None):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStruct(self, self.thrift_spec)

    def write(self, oprot):
        """Serialize this struct onto *oprot*, using fastbinary when possible."""
        if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and self.thrift_spec is not None
                and fastbinary is not None):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStruct(self, self.thrift_spec)
class TExceptionBase(Exception):
    # Exception-derived counterpart of TBase for generated exception types.
    # old style class so python2.4 can raise exceptions derived from this
    # This can't inherit from TBase because of that limitation.
    __slots__ = []
    # Borrow TBase's implementations as plain functions; ``im_func`` unwraps
    # the Python 2 unbound methods so they can be re-bound on this class.
    # NOTE(review): ``im_func`` does not exist on Python 3 — this module is
    # Python 2 only as written.
    __repr__ = TBase.__repr__.im_func
    __eq__ = TBase.__eq__.im_func
    __ne__ = TBase.__ne__.im_func
    read = TBase.read.im_func
    write = TBase.write.im_func
================================================
FILE: lib/thrift/protocol/TBinaryProtocol.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from TProtocol import *
from struct import pack, unpack
class TBinaryProtocol(TProtocolBase):
    """Binary implementation of the Thrift protocol driver."""

    # NastyHaxx. Python 2.4+ on 32-bit machines forces hex constants to be
    # positive, converting this into a long. If we hardcode the int value
    # instead it'll stay in 32 bit-land.
    VERSION_MASK = -65536        # == 0xffff0000 as a signed 32-bit int
    VERSION_1 = -2147418112      # == 0x80010000 as a signed 32-bit int
    TYPE_MASK = 0x000000ff

    def __init__(self, trans, strictRead=False, strictWrite=True):
        TProtocolBase.__init__(self, trans)
        self.strictRead = strictRead
        self.strictWrite = strictWrite

    # ---- write side ---------------------------------------------------

    def writeMessageBegin(self, name, type, seqid):
        # Strict framing prepends a version word; legacy framing starts
        # with the method name.
        if self.strictWrite:
            self.writeI32(TBinaryProtocol.VERSION_1 | type)
            self.writeString(name)
        else:
            self.writeString(name)
            self.writeByte(type)
        self.writeI32(seqid)

    def writeMessageEnd(self):
        pass

    def writeStructBegin(self, name):
        pass

    def writeStructEnd(self):
        pass

    def writeFieldBegin(self, name, type, id):
        self.writeByte(type)
        self.writeI16(id)

    def writeFieldEnd(self):
        pass

    def writeFieldStop(self):
        self.writeByte(TType.STOP)

    def writeMapBegin(self, ktype, vtype, size):
        self.writeByte(ktype)
        self.writeByte(vtype)
        self.writeI32(size)

    def writeMapEnd(self):
        pass

    def writeListBegin(self, etype, size):
        self.writeByte(etype)
        self.writeI32(size)

    def writeListEnd(self):
        pass

    def writeSetBegin(self, etype, size):
        self.writeByte(etype)
        self.writeI32(size)

    def writeSetEnd(self):
        pass

    def writeBool(self, bool):
        self.writeByte(1 if bool else 0)

    def writeByte(self, byte):
        self.trans.write(pack("!b", byte))

    def writeI16(self, i16):
        self.trans.write(pack("!h", i16))

    def writeI32(self, i32):
        self.trans.write(pack("!i", i32))

    def writeI64(self, i64):
        self.trans.write(pack("!q", i64))

    def writeDouble(self, dub):
        self.trans.write(pack("!d", dub))

    def writeString(self, str):
        # Length-prefixed: a signed 32-bit byte count, then the raw bytes.
        self.writeI32(len(str))
        self.trans.write(str)

    # ---- read side ----------------------------------------------------

    def readMessageBegin(self):
        first_word = self.readI32()
        if first_word < 0:
            # Negative first word: strict framing with a version header.
            if first_word & TBinaryProtocol.VERSION_MASK != TBinaryProtocol.VERSION_1:
                raise TProtocolException(type=TProtocolException.BAD_VERSION, message='Bad version in readMessageBegin: %d' % (first_word))
            type = first_word & TBinaryProtocol.TYPE_MASK
            name = self.readString()
        else:
            if self.strictRead:
                raise TProtocolException(type=TProtocolException.BAD_VERSION, message='No protocol version header')
            # Legacy framing: the first word is the name length.
            name = self.trans.readAll(first_word)
            type = self.readByte()
        seqid = self.readI32()
        return (name, type, seqid)

    def readMessageEnd(self):
        pass

    def readStructBegin(self):
        pass

    def readStructEnd(self):
        pass

    def readFieldBegin(self):
        type = self.readByte()
        if type == TType.STOP:
            return (None, type, 0)
        return (None, type, self.readI16())

    def readFieldEnd(self):
        pass

    def readMapBegin(self):
        # Evaluated left to right: ktype byte, vtype byte, size word.
        return (self.readByte(), self.readByte(), self.readI32())

    def readMapEnd(self):
        pass

    def readListBegin(self):
        return (self.readByte(), self.readI32())

    def readListEnd(self):
        pass

    def readSetBegin(self):
        return (self.readByte(), self.readI32())

    def readSetEnd(self):
        pass

    def readBool(self):
        return self.readByte() != 0

    def readByte(self):
        return unpack('!b', self.trans.readAll(1))[0]

    def readI16(self):
        return unpack('!h', self.trans.readAll(2))[0]

    def readI32(self):
        return unpack('!i', self.trans.readAll(4))[0]

    def readI64(self):
        return unpack('!q', self.trans.readAll(8))[0]

    def readDouble(self):
        return unpack('!d', self.trans.readAll(8))[0]

    def readString(self):
        # Length-prefixed string: signed 32-bit count, then the bytes.
        size = self.readI32()
        return self.trans.readAll(size)
class TBinaryProtocolFactory:
    """Factory producing TBinaryProtocol instances with fixed strictness."""

    def __init__(self, strictRead=False, strictWrite=True):
        self.strictRead = strictRead
        self.strictWrite = strictWrite

    def getProtocol(self, trans):
        """Return a new TBinaryProtocol bound to *trans*."""
        return TBinaryProtocol(trans, self.strictRead, self.strictWrite)
class TBinaryProtocolAccelerated(TBinaryProtocol):
    """C-Accelerated version of TBinaryProtocol.
    This class does not override any of TBinaryProtocol's methods,
    but the generated code recognizes it directly and will call into
    our C module to do the encoding, bypassing this object entirely.
    We inherit from TBinaryProtocol so that the normal TBinaryProtocol
    encoding can happen if the fastbinary module doesn't work for some
    reason. (TODO(dreiss): Make this happen sanely in more cases.)
    In order to take advantage of the C module, just use
    TBinaryProtocolAccelerated instead of TBinaryProtocol.
    NOTE: This code was contributed by an external developer.
    The internal Thrift team has reviewed and tested it,
    but we cannot guarantee that it is production-ready.
    Please feel free to report bugs and/or success stories
    to the public mailing list.
    """
    # Intentionally empty: the class identity alone triggers the fast path
    # (see TBase.read/write, which compare against this exact class).
    pass
class TBinaryProtocolAcceleratedFactory:
    """Factory producing TBinaryProtocolAccelerated instances."""

    def getProtocol(self, trans):
        """Return a new accelerated binary protocol bound to *trans*."""
        return TBinaryProtocolAccelerated(trans)
================================================
FILE: lib/thrift/protocol/TCompactProtocol.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from TProtocol import *
from struct import pack, unpack
__all__ = ['TCompactProtocol', 'TCompactProtocolFactory']
# States of the TCompactProtocol read/write state machine.  Writes and reads
# each move between field-, value-, container- and bool-level states; the
# assertions throughout the protocol enforce legal transitions.
CLEAR = 0
FIELD_WRITE = 1
VALUE_WRITE = 2
CONTAINER_WRITE = 3
BOOL_WRITE = 4
FIELD_READ = 5
CONTAINER_READ = 6
VALUE_READ = 7
BOOL_READ = 8
def make_helper(v_from, container):
    """Return a decorator that guards a protocol method with a state check.

    The wrapped method may only run while ``self.state`` is *v_from* or
    *container*; any other state trips an AssertionError carrying the
    offending state and the two legal ones.
    """
    def decorate(method):
        def guarded(self, *args, **kwargs):
            assert self.state in (v_from, container), (self.state, v_from, container)
            return method(self, *args, **kwargs)
        return guarded
    return decorate
# Decorators restricting value-level writes/reads to their legal states.
writer = make_helper(VALUE_WRITE, CONTAINER_WRITE)
reader = make_helper(VALUE_READ, CONTAINER_READ)
def makeZigZag(n, bits):
    """ZigZag-encode the signed *bits*-wide integer *n*.

    Small-magnitude values of either sign map to small unsigned results:
    0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...
    """
    doubled = n << 1
    sign_extension = n >> (bits - 1)
    return doubled ^ sign_extension
def fromZigZag(n):
    """Invert makeZigZag: 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, ..."""
    magnitude = n >> 1
    sign_flip = -(n & 1)
    return magnitude ^ sign_flip
def writeVarint(trans, n):
    """Write the non-negative integer *n* to *trans* as a base-128 varint.

    Each output byte carries 7 payload bits, least-significant group first;
    the high bit marks continuation.
    """
    chunks = []
    while n & ~0x7f:
        # More groups follow: emit the low 7 bits with the high bit set.
        chunks.append((n & 0xff) | 0x80)
        n = n >> 7
    chunks.append(n)
    trans.write(''.join(map(chr, chunks)))
def readVarint(trans):
    """Read a base-128 varint from *trans* and return it as an int."""
    result = 0
    shift = 0
    while True:
        byte = ord(trans.readAll(1))
        result |= (byte & 0x7f) << shift
        if not (byte & 0x80):
            # High bit clear: this was the final group.
            return result
        shift += 7
class CompactType:
    """On-wire type codes for the compact protocol (distinct from TType)."""
    STOP = 0x00
    TRUE = 0x01    # bool true — folded directly into the field header
    FALSE = 0x02   # bool false
    BYTE = 0x03
    I16 = 0x04
    I32 = 0x05
    I64 = 0x06
    DOUBLE = 0x07
    BINARY = 0x08
    LIST = 0x09
    SET = 0x0A
    MAP = 0x0B
    STRUCT = 0x0C
# Map each TType id onto its compact-protocol wire code.
CTYPES = {TType.STOP: CompactType.STOP,
          TType.BOOL: CompactType.TRUE, # used for collection
          TType.BYTE: CompactType.BYTE,
          TType.I16: CompactType.I16,
          TType.I32: CompactType.I32,
          TType.I64: CompactType.I64,
          TType.DOUBLE: CompactType.DOUBLE,
          TType.STRING: CompactType.BINARY,
          TType.STRUCT: CompactType.STRUCT,
          TType.LIST: CompactType.LIST,
          TType.SET: CompactType.SET,
          TType.MAP: CompactType.MAP
          }

# Reverse map (wire code -> TType); CompactType.FALSE also decodes to BOOL.
TTYPES = {}
for k, v in CTYPES.items():
    TTYPES[v] = k
TTYPES[CompactType.FALSE] = TType.BOOL
# Clean up the loop variables leaked into module scope.
del k
del v
class TCompactProtocol(TProtocolBase):
    "Compact implementation of the Thrift protocol driver."

    # Wire-format constants: every message starts with PROTOCOL_ID, then one
    # byte combining the version (low 5 bits) and message type (high 3 bits).
    PROTOCOL_ID = 0x82
    VERSION = 1
    VERSION_MASK = 0x1f
    TYPE_MASK = 0xe0
    TYPE_SHIFT_AMOUNT = 5

    def __init__(self, trans):
        TProtocolBase.__init__(self, trans)
        # Current state-machine position (CLEAR / *_WRITE / *_READ).
        self.state = CLEAR
        # Last field id written/read; enables delta-encoded field headers.
        self.__last_fid = 0
        # Deferred bool field id (write side) / decoded bool value (read side):
        # bools are folded into the field header rather than the value stream.
        self.__bool_fid = None
        self.__bool_value = None
        # Saved (state, last_fid) per nested struct; saved state per container.
        self.__structs = []
        self.__containers = []

    def __writeVarint(self, n):
        writeVarint(self.trans, n)

    def writeMessageBegin(self, name, type, seqid):
        assert self.state == CLEAR
        self.__writeUByte(self.PROTOCOL_ID)
        self.__writeUByte(self.VERSION | (type << self.TYPE_SHIFT_AMOUNT))
        self.__writeVarint(seqid)
        self.__writeString(name)
        self.state = VALUE_WRITE

    def writeMessageEnd(self):
        assert self.state == VALUE_WRITE
        self.state = CLEAR

    def writeStructBegin(self, name):
        assert self.state in (CLEAR, CONTAINER_WRITE, VALUE_WRITE), self.state
        self.__structs.append((self.state, self.__last_fid))
        self.state = FIELD_WRITE
        self.__last_fid = 0

    def writeStructEnd(self):
        assert self.state == FIELD_WRITE
        self.state, self.__last_fid = self.__structs.pop()

    def writeFieldStop(self):
        self.__writeByte(0)

    def __writeFieldHeader(self, type, fid):
        # Field ids within 15 of the previous one are delta-encoded into the
        # header's high nibble; otherwise the id follows as a zigzag i16.
        delta = fid - self.__last_fid
        if 0 < delta <= 15:
            self.__writeUByte(delta << 4 | type)
        else:
            self.__writeByte(type)
            self.__writeI16(fid)
        self.__last_fid = fid

    def writeFieldBegin(self, name, type, fid):
        assert self.state == FIELD_WRITE, self.state
        if type == TType.BOOL:
            # Defer: the bool's value becomes part of the field header, so
            # the header is emitted later by writeBool.
            self.state = BOOL_WRITE
            self.__bool_fid = fid
        else:
            self.state = VALUE_WRITE
            self.__writeFieldHeader(CTYPES[type], fid)

    def writeFieldEnd(self):
        assert self.state in (VALUE_WRITE, BOOL_WRITE), self.state
        self.state = FIELD_WRITE

    def __writeUByte(self, byte):
        self.trans.write(pack('!B', byte))

    def __writeByte(self, byte):
        self.trans.write(pack('!b', byte))

    def __writeI16(self, i16):
        self.__writeVarint(makeZigZag(i16, 16))

    def __writeSize(self, i32):
        self.__writeVarint(i32)

    def writeCollectionBegin(self, etype, size):
        assert self.state in (VALUE_WRITE, CONTAINER_WRITE), self.state
        # Sizes up to 14 share one byte with the element type; a 0xf nibble
        # means the real size follows as a varint.
        if size <= 14:
            self.__writeUByte(size << 4 | CTYPES[etype])
        else:
            self.__writeUByte(0xf0 | CTYPES[etype])
            self.__writeSize(size)
        self.__containers.append(self.state)
        self.state = CONTAINER_WRITE
    writeSetBegin = writeCollectionBegin
    writeListBegin = writeCollectionBegin

    def writeMapBegin(self, ktype, vtype, size):
        assert self.state in (VALUE_WRITE, CONTAINER_WRITE), self.state
        # An empty map is a single zero byte; otherwise the varint size is
        # followed by a byte packing key type (high nibble) and value type.
        if size == 0:
            self.__writeByte(0)
        else:
            self.__writeSize(size)
            self.__writeUByte(CTYPES[ktype] << 4 | CTYPES[vtype])
        self.__containers.append(self.state)
        self.state = CONTAINER_WRITE

    def writeCollectionEnd(self):
        assert self.state == CONTAINER_WRITE, self.state
        self.state = self.__containers.pop()
    writeMapEnd = writeCollectionEnd
    writeSetEnd = writeCollectionEnd
    writeListEnd = writeCollectionEnd

    def writeBool(self, bool):
        if self.state == BOOL_WRITE:
            # Struct field: emit the deferred header with TRUE/FALSE as type.
            if bool:
                ctype = CompactType.TRUE
            else:
                ctype = CompactType.FALSE
            self.__writeFieldHeader(ctype, self.__bool_fid)
        elif self.state == CONTAINER_WRITE:
            # Collection element: a full byte per bool.
            if bool:
                self.__writeByte(CompactType.TRUE)
            else:
                self.__writeByte(CompactType.FALSE)
        else:
            raise AssertionError, "Invalid state in compact protocol"

    # State-guarded aliases of the private primitives.
    writeByte = writer(__writeByte)
    writeI16 = writer(__writeI16)

    @writer
    def writeI32(self, i32):
        self.__writeVarint(makeZigZag(i32, 32))

    @writer
    def writeI64(self, i64):
        self.__writeVarint(makeZigZag(i64, 64))

    @writer
    def writeDouble(self, dub):
        self.trans.write(pack('!d', dub))

    def __writeString(self, s):
        # Varint byte count followed by the raw bytes.
        self.__writeSize(len(s))
        self.trans.write(s)
    writeString = writer(__writeString)

    def readFieldBegin(self):
        assert self.state == FIELD_READ, self.state
        type = self.__readUByte()
        if type & 0x0f == TType.STOP:
            return (None, 0, 0)
        # High nibble: field-id delta (0 means an explicit zigzag i16 follows).
        delta = type >> 4
        if delta == 0:
            fid = self.__readI16()
        else:
            fid = self.__last_fid + delta
        self.__last_fid = fid
        type = type & 0x0f
        if type == CompactType.TRUE:
            # Bool value lives in the header; remember it for readBool.
            self.state = BOOL_READ
            self.__bool_value = True
        elif type == CompactType.FALSE:
            self.state = BOOL_READ
            self.__bool_value = False
        else:
            self.state = VALUE_READ
        return (None, self.__getTType(type), fid)

    def readFieldEnd(self):
        assert self.state in (VALUE_READ, BOOL_READ), self.state
        self.state = FIELD_READ

    def __readUByte(self):
        result, = unpack('!B', self.trans.readAll(1))
        return result

    def __readByte(self):
        result, = unpack('!b', self.trans.readAll(1))
        return result

    def __readVarint(self):
        return readVarint(self.trans)

    def __readZigZag(self):
        return fromZigZag(self.__readVarint())

    def __readSize(self):
        result = self.__readVarint()
        if result < 0:
            raise TException("Length < 0")
        return result

    def readMessageBegin(self):
        assert self.state == CLEAR
        proto_id = self.__readUByte()
        if proto_id != self.PROTOCOL_ID:
            raise TProtocolException(TProtocolException.BAD_VERSION,
                'Bad protocol id in the message: %d' % proto_id)
        ver_type = self.__readUByte()
        type = (ver_type & self.TYPE_MASK) >> self.TYPE_SHIFT_AMOUNT
        version = ver_type & self.VERSION_MASK
        if version != self.VERSION:
            raise TProtocolException(TProtocolException.BAD_VERSION,
                'Bad version: %d (expect %d)' % (version, self.VERSION))
        seqid = self.__readVarint()
        name = self.__readString()
        return (name, type, seqid)

    def readMessageEnd(self):
        assert self.state == CLEAR
        assert len(self.__structs) == 0

    def readStructBegin(self):
        assert self.state in (CLEAR, CONTAINER_READ, VALUE_READ), self.state
        self.__structs.append((self.state, self.__last_fid))
        self.state = FIELD_READ
        self.__last_fid = 0

    def readStructEnd(self):
        assert self.state == FIELD_READ
        self.state, self.__last_fid = self.__structs.pop()

    def readCollectionBegin(self):
        assert self.state in (VALUE_READ, CONTAINER_READ), self.state
        size_type = self.__readUByte()
        size = size_type >> 4
        type = self.__getTType(size_type)
        if size == 15:
            # 0xf size nibble: the real size follows as a varint.
            size = self.__readSize()
        self.__containers.append(self.state)
        self.state = CONTAINER_READ
        return type, size
    readSetBegin = readCollectionBegin
    readListBegin = readCollectionBegin

    def readMapBegin(self):
        assert self.state in (VALUE_READ, CONTAINER_READ), self.state
        size = self.__readSize()
        types = 0
        if size > 0:
            types = self.__readUByte()
        vtype = self.__getTType(types)
        ktype = self.__getTType(types >> 4)
        self.__containers.append(self.state)
        self.state = CONTAINER_READ
        return (ktype, vtype, size)

    def readCollectionEnd(self):
        assert self.state == CONTAINER_READ, self.state
        self.state = self.__containers.pop()
    readSetEnd = readCollectionEnd
    readListEnd = readCollectionEnd
    readMapEnd = readCollectionEnd

    def readBool(self):
        if self.state == BOOL_READ:
            # Value was captured from the field header by readFieldBegin.
            return self.__bool_value == CompactType.TRUE
        elif self.state == CONTAINER_READ:
            return self.__readByte() == CompactType.TRUE
        else:
            raise AssertionError, "Invalid state in compact protocol: %d" % self.state

    # State-guarded aliases of the private primitives; all integer types
    # share the zigzag-varint decoder.
    readByte = reader(__readByte)
    __readI16 = __readZigZag
    readI16 = reader(__readZigZag)
    readI32 = reader(__readZigZag)
    readI64 = reader(__readZigZag)

    @reader
    def readDouble(self):
        buff = self.trans.readAll(8)
        val, = unpack('!d', buff)
        return val

    def __readString(self):
        len = self.__readSize()
        return self.trans.readAll(len)
    readString = reader(__readString)

    def __getTType(self, byte):
        # Translate a compact wire code (low nibble) back to a TType id.
        return TTYPES[byte & 0x0f]
class TCompactProtocolFactory:
    """Factory producing TCompactProtocol instances."""

    def __init__(self):
        pass

    def getProtocol(self, trans):
        """Return a new TCompactProtocol bound to *trans*."""
        return TCompactProtocol(trans)
================================================
FILE: lib/thrift/protocol/TProtocol.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from thrift.Thrift import *
class TProtocolException(TException):
    """Exception raised by protocol drivers on malformed or invalid data."""

    # Error categories carried in ``type``.
    UNKNOWN = 0
    INVALID_DATA = 1
    NEGATIVE_SIZE = 2
    SIZE_LIMIT = 3
    BAD_VERSION = 4

    def __init__(self, type=UNKNOWN, message=None):
        TException.__init__(self, message)
        self.type = type
class TProtocolBase:
"""Base class for Thrift protocol driver."""
def __init__(self, trans):
self.trans = trans
def writeMessageBegin(self, name, type, seqid):
pass
def writeMessageEnd(self):
pass
def writeStructBegin(self, name):
pass
def writeStructEnd(self):
pass
def writeFieldBegin(self, name, type, id):
pass
def writeFieldEnd(self):
pass
def writeFieldStop(self):
pass
def writeMapBegin(self, ktype, vtype, size):
pass
def writeMapEnd(self):
pass
def writeListBegin(self, etype, size):
pass
def writeListEnd(self):
pass
def writeSetBegin(self, etype, size):
pass
def writeSetEnd(self):
pass
def writeBool(self, bool):
pass
def writeByte(self, byte):
pass
def writeI16(self, i16):
pass
def writeI32(self, i32):
pass
def writeI64(self, i64):
pass
def writeDouble(self, dub):
pass
def writeString(self, str):
pass
def readMessageBegin(self):
pass
def readMessageEnd(self):
pass
def readStructBegin(self):
pass
def readStructEnd(self):
pass
def readFieldBegin(self):
pass
def readFieldEnd(self):
pass
def readMapBegin(self):
pass
def readMapEnd(self):
pass
def readListBegin(self):
pass
def readListEnd(self):
pass
def readSetBegin(self):
pass
def readSetEnd(self):
pass
def readBool(self):
pass
def readByte(self):
pass
def readI16(self):
pass
def readI32(self):
pass
def readI64(self):
pass
def readDouble(self):
pass
def readString(self):
pass
def skip(self, type):
if type == TType.STOP:
return
elif type == TType.BOOL:
self.readBool()
elif type == TType.BYTE:
self.readByte()
elif type == TType.I16:
self.readI16()
elif type == TType.I32:
self.readI32()
elif type == TType.I64:
self.readI64()
elif type == TType.DOUBLE:
self.readDouble()
elif type == TType.STRING:
self.readString()
elif type == TType.STRUCT:
name = self.readStructBegin()
while True:
(name, type, id) = self.readFieldBegin()
if type == TType.STOP:
break
self.skip(type)
self.readFieldEnd()
self.readStructEnd()
elif type == TType.MAP:
(ktype, vtype, size) = self.readMapBegin()
for i in range(size):
self.skip(ktype)
self.skip(vtype)
self.readMapEnd()
elif type == TType.SET:
(etype, size) = self.readSetBegin()
for i in range(size):
self.skip(etype)
self.readSetEnd()
elif type == TType.LIST:
(etype, size) = self.readListBegin()
for i in range(size):
self.skip(etype)
self.readListEnd()
# tuple of: ( 'reader method' name, is_container boolean, 'writer_method' name )
_TTYPE_HANDLERS = (
(None, None, False), # 0 == TType,STOP
(None, None, False), # 1 == TType.VOID # TODO: handle void?
('readBool', 'writeBool', False), # 2 == TType.BOOL
('readByte', 'writeByte', False), # 3 == TType.BYTE and I08
('readDouble', 'writeDouble', False), # 4 == TType.DOUBLE
(None, None, False), # 5, undefined
('readI16', 'writeI16', False), # 6 == TType.I16
(None, None, False), # 7, undefined
('readI32', 'writeI32', False), # 8 == TType.I32
(None, None, False), # 9, undefined
('readI64', 'writeI64', False), # 10 == TType.I64
('readString', 'writeString', False), # 11 == TType.STRING and UTF7
('readContainerStruct', 'writeContainerStruct', True), # 12 == TType.STRUCT
('readContainerMap', 'writeContainerMap', True), # 13 == TType.MAP
('readContainerSet', 'writeContainerSet', True), # 14 == TType.SET
('readContainerList', 'writeContainerList', True), # 15 == TType.LIST
(None, None, False), # 16 == TType.UTF8 # TODO: handle utf8 types?
(None, None, False)# 17 == TType.UTF16 # TODO: handle utf16 types?
)
def readFieldByTType(self, ttype, spec):
    """Read one field value of thrift type `ttype` via the handler table.

    Container types are passed `spec` so they can decode their elements;
    primitive types are read directly. Raises TProtocolException for a
    ttype with no registered reader.
    """
    try:
        entry = self._TTYPE_HANDLERS[ttype]
    except IndexError:
        entry = (None, None, False)
    read_name, _write_name, takes_spec = entry
    if read_name is None:
        raise TProtocolException(type=TProtocolException.INVALID_DATA,
                                 message='Invalid field type %d' % (ttype))
    read = getattr(self, read_name)
    if takes_spec:
        return read(spec)
    return read()
def readContainerList(self, spec):
    """Deserialize a thrift LIST into a Python list.

    spec is (element_ttype, element_spec); element_spec is None for
    primitive elements.
    """
    elem_ttype, elem_spec = spec[0], spec[1]
    primitive_read = getattr(self, self._TTYPE_HANDLERS[elem_ttype][0])
    (wire_etype, length) = self.readListBegin()
    if elem_spec is None:
        # Elements are primitive values: read each one directly.
        out = [primitive_read() for _ in xrange(length)]
    else:
        # Elements are containers/structs: dispatch on the wire element type
        # (this is an inlined readFieldByTType).
        nested_read = getattr(self, self._TTYPE_HANDLERS[wire_etype][0])
        out = [nested_read(elem_spec) for _ in xrange(length)]
    self.readListEnd()
    return out
def readContainerSet(self, spec):
    """Deserialize a thrift SET into a Python set.

    spec is (element_ttype, element_spec); element_spec is None for
    primitive members.
    """
    elem_ttype, elem_spec = spec[0], spec[1]
    primitive_read = getattr(self, self._TTYPE_HANDLERS[elem_ttype][0])
    (wire_etype, length) = self.readSetBegin()
    if elem_spec is None:
        # Members are primitive values: read each one directly.
        members = set(primitive_read() for _ in xrange(length))
    else:
        # Members are containers/structs: dispatch on the wire element type.
        nested_read = getattr(self, self._TTYPE_HANDLERS[wire_etype][0])
        members = set(nested_read(elem_spec) for _ in xrange(length))
    self.readSetEnd()
    return members
def readContainerStruct(self, spec):
    """Instantiate the struct class named in `spec` and populate it from the wire.

    spec is (struct_class, struct_thrift_spec); the spec tuple itself is
    unused here because the struct's own read() knows its layout.
    """
    (struct_class, _struct_spec) = spec
    instance = struct_class()
    instance.read(self)
    return instance
def readContainerMap(self, spec):
    """Deserialize a thrift MAP into a Python dict.

    spec is (key_ttype, key_spec, val_ttype, val_spec); the sub-specs are
    None for primitive keys/values.
    """
    key_ttype, key_spec = spec[0], spec[1]
    val_ttype, val_spec = spec[2], spec[3]
    (wire_ktype, wire_vtype, length) = self.readMapBegin()
    # TODO: compare types we just decoded with thrift_spec and abort/skip if types disagree
    read_key = getattr(self, self._TTYPE_HANDLERS[key_ttype][0])
    read_val = getattr(self, self._TTYPE_HANDLERS[val_ttype][0])
    mapping = dict()
    for _ in xrange(length):
        if key_spec is None:
            k = read_key()
        else:
            k = self.readFieldByTType(key_ttype, key_spec)
        if val_spec is None:
            v = read_val()
        else:
            v = self.readFieldByTType(val_ttype, val_spec)
        # Raises TypeError when the decoded key is unhashable,
        # e.g. d = dict(); d[[0, 1]] = 2 fails.
        mapping[k] = v
    self.readMapEnd()
    return mapping
def readStruct(self, obj, thrift_spec):
    """Decode struct fields from the wire into attributes of `obj`.

    Fields missing from thrift_spec, or whose wire type disagrees with the
    spec, are skipped rather than raising.
    """
    self.readStructBegin()
    while True:
        fname, ftype, fid = self.readFieldBegin()
        if ftype == TType.STOP:
            break
        try:
            field = thrift_spec[fid]
        except IndexError:
            # Unknown field id: not in our spec at all.
            field = None
        if field is not None and ftype == field[1]:
            value = self.readFieldByTType(ftype, field[3])
            setattr(obj, field[2], value)
        else:
            # Missing or type-mismatched field: consume and discard it.
            self.skip(ftype)
        self.readFieldEnd()
    self.readStructEnd()
def writeContainerStruct(self, val, spec):
    """Serialize a struct value; `spec` is unused because the struct
    serializes itself via its own write()."""
    val.write(self)
def writeContainerList(self, val, spec):
    """Serialize a Python sequence as a thrift LIST.

    spec is (element_ttype, element_spec).
    """
    elem_ttype = spec[0]
    self.writeListBegin(elem_ttype, len(val))
    _read_name, write_name, takes_spec = self._TTYPE_HANDLERS[elem_ttype]
    write_elem = getattr(self, write_name)
    if takes_spec:
        # Container/struct elements need their sub-spec.
        for item in val:
            write_elem(item, spec[1])
    else:
        for item in val:
            write_elem(item)
    self.writeListEnd()
def writeContainerSet(self, val, spec):
    """Serialize a Python set as a thrift SET.

    spec is (element_ttype, element_spec).
    """
    elem_ttype = spec[0]
    self.writeSetBegin(elem_ttype, len(val))
    _read_name, write_name, takes_spec = self._TTYPE_HANDLERS[elem_ttype]
    write_elem = getattr(self, write_name)
    if takes_spec:
        # Container/struct members need their sub-spec.
        for item in val:
            write_elem(item, spec[1])
    else:
        for item in val:
            write_elem(item)
    self.writeSetEnd()
def writeContainerMap(self, val, spec):
    """Serialize a Python dict as a thrift MAP.

    spec is (key_ttype, key_spec, val_ttype, val_spec).
    """
    key_ttype, val_ttype = spec[0], spec[2]
    _r1, key_write_name, key_takes_spec = self._TTYPE_HANDLERS[key_ttype]
    _r2, val_write_name, val_takes_spec = self._TTYPE_HANDLERS[val_ttype]
    write_key = getattr(self, key_write_name)
    write_val = getattr(self, val_write_name)
    self.writeMapBegin(key_ttype, val_ttype, len(val))
    for map_key, map_val in val.iteritems():
        if key_takes_spec:
            write_key(map_key, spec[1])
        else:
            write_key(map_key)
        if val_takes_spec:
            write_val(map_val, spec[3])
        else:
            write_val(map_val)
    self.writeMapEnd()
def writeStruct(self, obj, thrift_spec):
    """Serialize `obj` as a thrift STRUCT according to thrift_spec.

    Fields whose attribute value is None are treated as unset and omitted
    from the output entirely.
    """
    self.writeStructBegin(obj.__class__.__name__)
    for field in thrift_spec:
        if field is None:
            continue
        fid, ftype, fname, fspec = field[0], field[1], field[2], field[3]
        value = getattr(obj, fname)
        if value is None:
            # skip writing out unset fields
            continue
        self.writeFieldBegin(fname, ftype, fid)
        self.writeFieldByTType(ftype, value, fspec)
        self.writeFieldEnd()
    self.writeFieldStop()
    self.writeStructEnd()
def writeFieldByTType(self, ttype, val, spec):
    """Write one field value of thrift type `ttype` via the handler table.

    Container types receive `spec` so they can encode their elements.
    """
    _read_name, write_name, takes_spec = self._TTYPE_HANDLERS[ttype]
    write = getattr(self, write_name)
    if takes_spec:
        write(val, spec)
    else:
        write(val)
class TProtocolFactory:
    """Abstract factory: subclasses build a protocol around a transport."""
    def getProtocol(self, trans):
        # Subclasses return a concrete protocol instance wrapping `trans`.
        pass
================================================
FILE: lib/thrift/protocol/__init__.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Submodules considered public API of the thrift.protocol package.
__all__ = ['TProtocol', 'TBinaryProtocol', 'fastbinary', 'TBase']
================================================
FILE: lib/thrift/protocol/fastbinary.c
================================================
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/* Header names inside angle brackets were stripped by the text extraction;
 * restored per upstream Apache Thrift fastbinary.c. */
#include <Python.h>
#include "cStringIO.h"
#include <stdint.h>
#ifndef _WIN32
# include <stdbool.h>
# include <netinet/in.h>
#else
# include <WinSock2.h>
# pragma comment (lib, "ws2_32.lib")
# define BIG_ENDIAN (4321)
# define LITTLE_ENDIAN (1234)
# define BYTE_ORDER LITTLE_ENDIAN
# if defined(_MSC_VER) && _MSC_VER < 1600
   typedef int _Bool;
#  define bool _Bool
#  define false 0
#  define true 1
# endif
# define inline __inline
#endif
/* Fix endianness issues on Solaris */
#if defined (__SVR4) && defined (__sun)
 #if defined(__i386) && !defined(__i386__)
  #define __i386__
 #endif

 #ifndef BIG_ENDIAN
  #define BIG_ENDIAN (4321)
 #endif

 #ifndef LITTLE_ENDIAN
  #define LITTLE_ENDIAN (1234)
 #endif

 /* I386 is LE, even on Solaris */
 #if !defined(BYTE_ORDER) && defined(__i386__)
  #define BYTE_ORDER LITTLE_ENDIAN
 #endif
#endif
// TODO(dreiss): defval appears to be unused.  Look into removing it.
// TODO(dreiss): Make parse_spec_args recursive, and cache the output
//               permanently in the object.  (Malloc and orphan.)
// TODO(dreiss): Why do we need cStringIO for reading, why not just char*?
//               Can cStringIO let us work with a BufferedTransport?
// TODO(dreiss): Don't ignore the rv from cwrite (maybe).

/* ====== BEGIN UTILITIES ====== */

/* Initial allocation for the cStringIO serialization output buffer. */
#define INIT_OUTBUF_SIZE 128

// Stolen out of TProtocol.h.
// It would be a huge pain to have both get this from one place.
typedef enum TType {
  T_STOP       = 0,
  T_VOID       = 1,
  T_BOOL       = 2,
  T_BYTE       = 3,
  T_I08        = 3,
  T_I16        = 6,
  T_I32        = 8,
  T_U64        = 9,
  T_I64        = 10,
  T_DOUBLE     = 4,
  T_STRING     = 11,
  T_UTF7       = 11,
  T_STRUCT     = 12,
  T_MAP        = 13,
  T_SET        = 14,
  T_LIST       = 15,
  T_UTF8       = 16,
  T_UTF16      = 17
} TType;
/* The <byteswap.h> header name was stripped by the text extraction;
 * restored per upstream Apache Thrift fastbinary.c. */
#ifndef __BYTE_ORDER
# if defined(BYTE_ORDER) && defined(LITTLE_ENDIAN) && defined(BIG_ENDIAN)
#  define __BYTE_ORDER BYTE_ORDER
#  define __LITTLE_ENDIAN LITTLE_ENDIAN
#  define __BIG_ENDIAN BIG_ENDIAN
# else
#  error "Cannot determine endianness"
# endif
#endif

// Same comment as the enum.  Sorry.
#if __BYTE_ORDER == __BIG_ENDIAN
# define ntohll(n) (n)
# define htonll(n) (n)
#elif __BYTE_ORDER == __LITTLE_ENDIAN
# if defined(__GNUC__) && defined(__GLIBC__)
#  include <byteswap.h>
#  define ntohll(n) bswap_64(n)
#  define htonll(n) bswap_64(n)
# else /* GNUC & GLIBC */
#  define ntohll(n) ( (((unsigned long long)ntohl(n)) << 32) + ntohl(n >> 32) )
#  define htonll(n) ( (((unsigned long long)htonl(n)) << 32) + htonl(n >> 32) )
# endif /* GNUC & GLIBC */
#else /* __BYTE_ORDER */
# error "Can't define htonll or ntohll!"
#endif
// Doing a benchmark shows that interning actually makes a difference, amazingly.
#define INTERN_STRING(value) _intern_ ## value

/* True when an int conversion returned -1 AND a Python exception is set
 * (-1 alone is also a legal converted value). */
#define INT_CONV_ERROR_OCCURRED(v) ( ((v) == -1) && PyErr_Occurred() )
#define CHECK_RANGE(v, min, max) ( ((v) <= (max)) && ((v) >= (min)) )

// Py_ssize_t was not defined before Python 2.5
#if (PY_VERSION_HEX < 0x02050000)
typedef int Py_ssize_t;
#endif

/**
 * A cache of the spec_args for a set or list,
 * so we don't have to keep calling PyTuple_GET_ITEM.
 */
typedef struct {
  TType element_type;
  PyObject* typeargs;
} SetListTypeArgs;

/**
 * A cache of the spec_args for a map,
 * so we don't have to keep calling PyTuple_GET_ITEM.
 */
typedef struct {
  TType ktag;
  TType vtag;
  PyObject* ktypeargs;
  PyObject* vtypeargs;
} MapTypeArgs;

/**
 * A cache of the spec_args for a struct,
 * so we don't have to keep calling PyTuple_GET_ITEM.
 */
typedef struct {
  PyObject* klass;
  PyObject* spec;
} StructTypeArgs;

/**
 * A cache of the item spec from a struct specification,
 * so we don't have to keep calling PyTuple_GET_ITEM.
 */
typedef struct {
  int tag;
  TType type;
  PyObject* attrname;
  PyObject* typeargs;
  PyObject* defval;
} StructItemSpec;

/**
 * A cache of the two key attributes of a CReadableTransport,
 * so we don't have to keep calling PyObject_GetAttr.
 */
typedef struct {
  PyObject* stringiobuf;
  PyObject* refill_callable;
} DecodeBuffer;

/** Pointer to interned string to speed up attribute lookup. */
static PyObject* INTERN_STRING(cstringio_buf);
/** Pointer to interned string to speed up attribute lookup. */
static PyObject* INTERN_STRING(cstringio_refill);
/* Return true iff `len` converted cleanly and fits in a non-negative
 * signed 32-bit length field; otherwise a Python exception is set. */
static inline bool
check_ssize_t_32(Py_ssize_t len) {
  // error from getting the int
  if (INT_CONV_ERROR_OCCURRED(len)) {
    return false;
  }
  if (!CHECK_RANGE(len, 0, INT32_MAX)) {
    PyErr_SetString(PyExc_OverflowError, "string size out of range");
    return false;
  }
  return true;
}
/* Convert a Python int `o` to an int32 in [min, max].  Returns true and
 * stores the value in *ret on success; sets OverflowError (or propagates
 * the conversion error) and returns false otherwise. */
static inline bool
parse_pyint(PyObject* o, int32_t* ret, int32_t min, int32_t max) {
  long val = PyInt_AsLong(o);

  if (INT_CONV_ERROR_OCCURRED(val)) {
    return false;
  }
  if (!CHECK_RANGE(val, min, max)) {
    PyErr_SetString(PyExc_OverflowError, "int out of range");
    return false;
  }

  *ret = (int32_t) val;
  return true;
}
/* --- FUNCTIONS TO PARSE STRUCT SPECIFICATIONS --- */

/* Unpack a (element_type, element_typeargs) tuple into `dest`.
 * Returns false with a Python exception set on malformed input. */
static bool
parse_set_list_args(SetListTypeArgs* dest, PyObject* typeargs) {
  if (PyTuple_Size(typeargs) != 2) {
    PyErr_SetString(PyExc_TypeError, "expecting tuple of size 2 for list/set type args");
    return false;
  }

  dest->element_type = PyInt_AsLong(PyTuple_GET_ITEM(typeargs, 0));
  if (INT_CONV_ERROR_OCCURRED(dest->element_type)) {
    return false;
  }

  dest->typeargs = PyTuple_GET_ITEM(typeargs, 1);

  return true;
}
/* Unpack a (ktag, ktypeargs, vtag, vtypeargs) tuple into `dest`.
 * Returns false with a Python exception set on malformed input. */
static bool
parse_map_args(MapTypeArgs* dest, PyObject* typeargs) {
  if (PyTuple_Size(typeargs) != 4) {
    PyErr_SetString(PyExc_TypeError, "expecting 4 arguments for typeargs to map");
    return false;
  }

  dest->ktag = PyInt_AsLong(PyTuple_GET_ITEM(typeargs, 0));
  if (INT_CONV_ERROR_OCCURRED(dest->ktag)) {
    return false;
  }

  dest->vtag = PyInt_AsLong(PyTuple_GET_ITEM(typeargs, 2));
  if (INT_CONV_ERROR_OCCURRED(dest->vtag)) {
    return false;
  }

  dest->ktypeargs = PyTuple_GET_ITEM(typeargs, 1);
  dest->vtypeargs = PyTuple_GET_ITEM(typeargs, 3);

  return true;
}
/* Unpack a (klass, spec) tuple into `dest`.  The references are borrowed
 * from the spec tuple, which is assumed to be immortal. */
static bool
parse_struct_args(StructTypeArgs* dest, PyObject* typeargs) {
  if (PyTuple_Size(typeargs) != 2) {
    PyErr_SetString(PyExc_TypeError, "expecting tuple of size 2 for struct args");
    return false;
  }

  dest->klass = PyTuple_GET_ITEM(typeargs, 0);
  dest->spec = PyTuple_GET_ITEM(typeargs, 1);

  return true;
}
/* Unpack a 5-tuple (tag, type, attrname, typeargs, defval) field spec
 * into `dest`.  NOTE(review): declared int but returns true/false like
 * the other parsers; callers treat it as a boolean. */
static int
parse_struct_item_spec(StructItemSpec* dest, PyObject* spec_tuple) {

  // i'd like to use ParseArgs here, but it seems to be a bottleneck.
  if (PyTuple_Size(spec_tuple) != 5) {
    PyErr_SetString(PyExc_TypeError, "expecting 5 arguments for spec tuple");
    return false;
  }

  dest->tag = PyInt_AsLong(PyTuple_GET_ITEM(spec_tuple, 0));
  if (INT_CONV_ERROR_OCCURRED(dest->tag)) {
    return false;
  }

  dest->type = PyInt_AsLong(PyTuple_GET_ITEM(spec_tuple, 1));
  if (INT_CONV_ERROR_OCCURRED(dest->type)) {
    return false;
  }

  dest->attrname = PyTuple_GET_ITEM(spec_tuple, 2);
  dest->typeargs = PyTuple_GET_ITEM(spec_tuple, 3);
  dest->defval = PyTuple_GET_ITEM(spec_tuple, 4);

  return true;
}
/* ====== END UTILITIES ====== */


/* ====== BEGIN WRITING FUNCTIONS ====== */

/* --- LOW-LEVEL WRITING FUNCTIONS --- */

/* All integer writers emit network (big-endian) byte order via
 * htons/htonl/htonll. */

static void writeByte(PyObject* outbuf, int8_t val) {
  int8_t net = val;
  PycStringIO->cwrite(outbuf, (char*)&net, sizeof(int8_t));
}

static void writeI16(PyObject* outbuf, int16_t val) {
  int16_t net = (int16_t)htons(val);
  PycStringIO->cwrite(outbuf, (char*)&net, sizeof(int16_t));
}

static void writeI32(PyObject* outbuf, int32_t val) {
  int32_t net = (int32_t)htonl(val);
  PycStringIO->cwrite(outbuf, (char*)&net, sizeof(int32_t));
}

static void writeI64(PyObject* outbuf, int64_t val) {
  int64_t net = (int64_t)htonll(val);
  PycStringIO->cwrite(outbuf, (char*)&net, sizeof(int64_t));
}

static void writeDouble(PyObject* outbuf, double dub) {
  // Unfortunately, bitwise_cast doesn't work in C.  Bad C!
  union {
    double f;
    int64_t t;
  } transfer;
  transfer.f = dub;
  // Reinterpret the IEEE-754 bits as an int64 and write those big-endian.
  writeI64(outbuf, transfer.t);
}
/* --- MAIN RECURSIVE OUTPUT FUCNTION -- */

/* Serialize Python `value` as thrift type `type` into cStringIO `output`.
 * `typeargs` carries the spec for container/struct types.  Returns true
 * on success; false with a Python exception set on failure. */
static int
output_val(PyObject* output, PyObject* value, TType type, PyObject* typeargs) {
  /*
   * Refcounting Strategy:
   *
   * We assume that elements of the thrift_spec tuple are not going to be
   * mutated, so we don't ref count those at all. Other than that, we try to
   * keep a reference to all the user-created objects while we work with them.
   * output_val assumes that a reference is already held. The *caller* is
   * responsible for handling references
   */

  switch (type) {

  case T_BOOL: {
    int v = PyObject_IsTrue(value);
    if (v == -1) {
      return false;
    }
    // Booleans are encoded as a single byte, 0 or 1.
    writeByte(output, (int8_t) v);
    break;
  }
  case T_I08: {
    int32_t val;

    if (!parse_pyint(value, &val, INT8_MIN, INT8_MAX)) {
      return false;
    }

    writeByte(output, (int8_t) val);
    break;
  }
  case T_I16: {
    int32_t val;

    if (!parse_pyint(value, &val, INT16_MIN, INT16_MAX)) {
      return false;
    }

    writeI16(output, (int16_t) val);
    break;
  }
  case T_I32: {
    int32_t val;

    if (!parse_pyint(value, &val, INT32_MIN, INT32_MAX)) {
      return false;
    }

    writeI32(output, val);
    break;
  }
  case T_I64: {
    int64_t nval = PyLong_AsLongLong(value);

    if (INT_CONV_ERROR_OCCURRED(nval)) {
      return false;
    }

    if (!CHECK_RANGE(nval, INT64_MIN, INT64_MAX)) {
      PyErr_SetString(PyExc_OverflowError, "int out of range");
      return false;
    }

    writeI64(output, nval);
    break;
  }

  case T_DOUBLE: {
    double nval = PyFloat_AsDouble(value);
    if (nval == -1.0 && PyErr_Occurred()) {
      return false;
    }

    writeDouble(output, nval);
    break;
  }

  case T_STRING: {
    // Strings are encoded as a 32-bit length followed by the raw bytes.
    Py_ssize_t len = PyString_Size(value);

    if (!check_ssize_t_32(len)) {
      return false;
    }

    writeI32(output, (int32_t) len);
    PycStringIO->cwrite(output, PyString_AsString(value), (int32_t) len);
    break;
  }

  case T_LIST:
  case T_SET: {
    // Both encode as: element-type byte, 32-bit count, then each element.
    Py_ssize_t len;
    SetListTypeArgs parsedargs;
    PyObject *item;
    PyObject *iterator;

    if (!parse_set_list_args(&parsedargs, typeargs)) {
      return false;
    }

    len = PyObject_Length(value);

    if (!check_ssize_t_32(len)) {
      return false;
    }

    writeByte(output, parsedargs.element_type);
    writeI32(output, (int32_t) len);

    iterator =  PyObject_GetIter(value);
    if (iterator == NULL) {
      return false;
    }

    while ((item = PyIter_Next(iterator))) {
      if (!output_val(output, item, parsedargs.element_type, parsedargs.typeargs)) {
        Py_DECREF(item);
        Py_DECREF(iterator);
        return false;
      }
      Py_DECREF(item);
    }

    Py_DECREF(iterator);

    // PyIter_Next returns NULL both at exhaustion and on error.
    if (PyErr_Occurred()) {
      return false;
    }

    break;
  }

  case T_MAP: {
    PyObject *k, *v;
    Py_ssize_t pos = 0;
    Py_ssize_t len;

    MapTypeArgs parsedargs;

    len = PyDict_Size(value);
    if (!check_ssize_t_32(len)) {
      return false;
    }

    if (!parse_map_args(&parsedargs, typeargs)) {
      return false;
    }

    // Encodes as: key-type byte, value-type byte, 32-bit count, then pairs.
    writeByte(output, parsedargs.ktag);
    writeByte(output, parsedargs.vtag);
    writeI32(output, len);

    // TODO(bmaurer): should support any mapping, not just dicts
    while (PyDict_Next(value, &pos, &k, &v)) {
      // TODO(dreiss): Think hard about whether these INCREFs actually
      //               turn any unsafe scenarios into safe scenarios.
      Py_INCREF(k);
      Py_INCREF(v);

      if (!output_val(output, k, parsedargs.ktag, parsedargs.ktypeargs)
          || !output_val(output, v, parsedargs.vtag, parsedargs.vtypeargs)) {
        Py_DECREF(k);
        Py_DECREF(v);
        return false;
      }
      Py_DECREF(k);
      Py_DECREF(v);
    }
    break;
  }

  // TODO(dreiss): Consider breaking this out as a function
  //               the way we did for decode_struct.
  case T_STRUCT: {
    StructTypeArgs parsedargs;
    Py_ssize_t nspec;
    Py_ssize_t i;

    if (!parse_struct_args(&parsedargs, typeargs)) {
      return false;
    }

    nspec = PyTuple_Size(parsedargs.spec);

    if (nspec == -1) {
      return false;
    }

    for (i = 0; i < nspec; i++) {
      StructItemSpec parsedspec;
      PyObject* spec_tuple;
      PyObject* instval = NULL;

      spec_tuple = PyTuple_GET_ITEM(parsedargs.spec, i);
      if (spec_tuple == Py_None) {
        continue;
      }

      if (!parse_struct_item_spec (&parsedspec, spec_tuple)) {
        return false;
      }

      instval = PyObject_GetAttr(value, parsedspec.attrname);

      if (!instval) {
        return false;
      }

      // Fields set to None are considered unset and omitted entirely.
      if (instval == Py_None) {
        Py_DECREF(instval);
        continue;
      }

      writeByte(output, (int8_t) parsedspec.type);
      writeI16(output, parsedspec.tag);

      if (!output_val(output, instval, parsedspec.type, parsedspec.typeargs)) {
        Py_DECREF(instval);
        return false;
      }

      Py_DECREF(instval);
    }

    writeByte(output, (int8_t)T_STOP);
    break;
  }

  case T_STOP:
  case T_VOID:
  case T_UTF16:
  case T_UTF8:
  case T_U64:
  default:
    PyErr_SetString(PyExc_TypeError, "Unexpected TType");
    return false;

  }

  return true;
}
/* --- TOP-LEVEL WRAPPER FOR OUTPUT -- */

/* Python entry point: encode_binary(obj, (klass, spec)) -> serialized str.
 * Returns NULL with an exception set on failure. */
static PyObject *
encode_binary(PyObject *self, PyObject *args) {
  PyObject* enc_obj;
  PyObject* type_args;
  PyObject* buf;
  PyObject* ret = NULL;

  if (!PyArg_ParseTuple(args, "OO", &enc_obj, &type_args)) {
    return NULL;
  }

  buf = PycStringIO->NewOutput(INIT_OUTBUF_SIZE);
  if (output_val(buf, enc_obj, T_STRUCT, type_args)) {
    ret = PycStringIO->cgetvalue(buf);
  }

  Py_DECREF(buf);
  return ret;
}
/* ====== END WRITING FUNCTIONS ====== */


/* ====== BEGIN READING FUNCTIONS ====== */

/* --- LOW-LEVEL READING FUNCTIONS --- */

/* Release the two references held by a DecodeBuffer (safe on NULLs). */
static void
free_decodebuf(DecodeBuffer* d) {
  Py_XDECREF(d->stringiobuf);
  Py_XDECREF(d->refill_callable);
}
/* Populate `dest` with the cstringio_buf and cstringio_refill attributes
 * of transport `obj`, validating their types.  On success the DecodeBuffer
 * owns two new references (release with free_decodebuf). */
static bool
decode_buffer_from_obj(DecodeBuffer* dest, PyObject* obj) {
  dest->stringiobuf = PyObject_GetAttr(obj, INTERN_STRING(cstringio_buf));
  if (!dest->stringiobuf) {
    return false;
  }

  if (!PycStringIO_InputCheck(dest->stringiobuf)) {
    free_decodebuf(dest);
    PyErr_SetString(PyExc_TypeError, "expecting stringio input");
    return false;
  }

  dest->refill_callable = PyObject_GetAttr(obj, INTERN_STRING(cstringio_refill));

  if(!dest->refill_callable) {
    free_decodebuf(dest);
    return false;
  }

  if (!PyCallable_Check(dest->refill_callable)) {
    free_decodebuf(dest);
    PyErr_SetString(PyExc_TypeError, "expecting callable");
    return false;
  }

  return true;
}
/* Read exactly `len` bytes from the buffer, pointing *output at them
 * (no copy; the pointer aliases the stringio buffer).  On a short read,
 * call the transport's refill callable once and retry.  Returns false
 * with a Python exception set on failure. */
static bool readBytes(DecodeBuffer* input, char** output, int len) {
  int read;

  // TODO(dreiss): Don't fear the malloc.  Think about taking a copy of
  //               the partial read instead of forcing the transport
  //               to prepend it to its buffer.

  read = PycStringIO->cread(input->stringiobuf, output, len);

  if (read == len) {
    return true;
  } else if (read == -1) {
    return false;
  } else {
    PyObject* newiobuf;

    // using building functions as this is a rare codepath
    newiobuf = PyObject_CallFunction(
        input->refill_callable, "s#i", *output, read, len, NULL);
    if (newiobuf == NULL) {
      return false;
    }

    // must do this *AFTER* the call so that we don't deref the io buffer
    Py_CLEAR(input->stringiobuf);
    input->stringiobuf = newiobuf;

    read = PycStringIO->cread(input->stringiobuf, output, len);

    if (read == len) {
      return true;
    } else if (read == -1) {
      return false;
    } else {
      // TODO(dreiss): This could be a valid code path for big binary blobs.
      PyErr_SetString(PyExc_TypeError,
          "refill claimed to have refilled the buffer, but didn't!!");
      return false;
    }
  }
}
/* Integer readers convert from network (big-endian) byte order.
 * NOTE(review): all of them signal failure by returning -1, which is also
 * a legal decoded value; callers must distinguish via PyErr_Occurred(). */

static int8_t readByte(DecodeBuffer* input) {
  char* buf;
  if (!readBytes(input, &buf, sizeof(int8_t))) {
    return -1;
  }

  return *(int8_t*) buf;
}

static int16_t readI16(DecodeBuffer* input) {
  char* buf;
  if (!readBytes(input, &buf, sizeof(int16_t))) {
    return -1;
  }

  return (int16_t) ntohs(*(int16_t*) buf);
}

static int32_t readI32(DecodeBuffer* input) {
  char* buf;
  if (!readBytes(input, &buf, sizeof(int32_t))) {
    return -1;
  }
  return (int32_t) ntohl(*(int32_t*) buf);
}


static int64_t readI64(DecodeBuffer* input) {
  char* buf;
  if (!readBytes(input, &buf, sizeof(int64_t))) {
    return -1;
  }

  return (int64_t) ntohll(*(int64_t*) buf);
}

static double readDouble(DecodeBuffer* input) {
  union {
    int64_t f;
    double t;
  } transfer;

  transfer.f = readI64(input);
  // -1 here may be a read error or a legitimate all-ones bit pattern;
  // callers check PyErr_Occurred() to tell them apart.
  if (transfer.f == -1) {
    return -1;
  }
  return transfer.t;
}
/* Read one type byte and verify it equals `expected`.  Returns false with
 * a TypeError set on mismatch or read failure. */
static bool
checkTypeByte(DecodeBuffer* input, TType expected) {
  TType got = readByte(input);
  if (INT_CONV_ERROR_OCCURRED(got)) {
    return false;
  }

  if (expected != got) {
    PyErr_SetString(PyExc_TypeError, "got wrong ttype while reading field");
    return false;
  }
  return true;
}
/* Consume and discard one value of thrift type `type` from the input,
 * recursing into containers and structs.  Returns false with a Python
 * exception set on failure. */
static bool
skip(DecodeBuffer* input, TType type) {
#define SKIPBYTES(n) \
  do { \
    if (!readBytes(input, &dummy_buf, (n))) { \
      return false; \
    } \
  } while(0)

  char* dummy_buf;

  switch (type) {

  case T_BOOL:
  case T_I08: SKIPBYTES(1); break;
  case T_I16: SKIPBYTES(2); break;
  case T_I32: SKIPBYTES(4); break;
  case T_I64:
  case T_DOUBLE: SKIPBYTES(8); break;

  case T_STRING: {
    // TODO(dreiss): Find out if these check_ssize_t32s are really necessary.
    int len = readI32(input);
    if (!check_ssize_t_32(len)) {
      return false;
    }
    SKIPBYTES(len);
    break;
  }

  case T_LIST:
  case T_SET: {
    // element-type byte, count, then `count` elements.
    TType etype;
    int len, i;

    etype = readByte(input);
    if (etype == -1) {
      return false;
    }

    len = readI32(input);
    if (!check_ssize_t_32(len)) {
      return false;
    }

    for (i = 0; i < len; i++) {
      if (!skip(input, etype)) {
        return false;
      }
    }
    break;
  }

  case T_MAP: {
    // key-type byte, value-type byte, count, then `count` pairs.
    TType ktype, vtype;
    int len, i;

    ktype = readByte(input);
    if (ktype == -1) {
      return false;
    }

    vtype = readByte(input);
    if (vtype == -1) {
      return false;
    }

    len = readI32(input);
    if (!check_ssize_t_32(len)) {
      return false;
    }

    for (i = 0; i < len; i++) {
      if (!(skip(input, ktype) && skip(input, vtype))) {
        return false;
      }
    }
    break;
  }

  case T_STRUCT: {
    // Sequence of (type byte, 16-bit tag, value) terminated by T_STOP.
    while (true) {
      TType type;

      type = readByte(input);
      if (type == -1) {
        return false;
      }

      if (type == T_STOP)
        break;

      SKIPBYTES(2); // tag
      if (!skip(input, type)) {
        return false;
      }
    }
    break;
  }

  case T_STOP:
  case T_VOID:
  case T_UTF16:
  case T_UTF8:
  case T_U64:
  default:
    PyErr_SetString(PyExc_TypeError, "Unexpected TType");
    return false;

  }

  return true;

#undef SKIPBYTES
}
/* --- HELPER FUNCTION FOR DECODE_VAL --- */

// Forward declaration: decode_struct and decode_val are mutually recursive.
static PyObject*
decode_val(DecodeBuffer* input, TType type, PyObject* typeargs);

/* Decode struct fields from `input` into attributes of `output`, guided
 * by the thrift spec tuple `spec_seq`.  Fields with no spec entry, or
 * whose wire type disagrees with the spec, are skipped.  Returns false
 * with a Python exception set on failure. */
static bool
decode_struct(DecodeBuffer* input, PyObject* output, PyObject* spec_seq) {
  int spec_seq_len = PyTuple_Size(spec_seq);
  if (spec_seq_len == -1) {
    return false;
  }

  while (true) {
    TType type;
    int16_t tag;
    PyObject* item_spec;
    PyObject* fieldval = NULL;
    StructItemSpec parsedspec;

    type = readByte(input);
    if (type == -1) {
      return false;
    }
    if (type == T_STOP) {
      break;
    }
    tag = readI16(input);
    if (INT_CONV_ERROR_OCCURRED(tag)) {
      return false;
    }
    // The spec tuple is indexed directly by field tag.
    if (tag >= 0 && tag < spec_seq_len) {
      item_spec = PyTuple_GET_ITEM(spec_seq, tag);
    } else {
      item_spec = Py_None;
    }

    if (item_spec == Py_None) {
      if (!skip(input, type)) {
        return false;
      } else {
        continue;
      }
    }

    if (!parse_struct_item_spec(&parsedspec, item_spec)) {
      return false;
    }
    if (parsedspec.type != type) {
      if (!skip(input, type)) {
        PyErr_SetString(PyExc_TypeError, "struct field had wrong type while reading and can't be skipped");
        return false;
      } else {
        continue;
      }
    }

    fieldval = decode_val(input, parsedspec.type, parsedspec.typeargs);
    if (fieldval == NULL) {
      return false;
    }

    if (PyObject_SetAttr(output, parsedspec.attrname, fieldval) == -1) {
      Py_DECREF(fieldval);
      return false;
    }
    Py_DECREF(fieldval);
  }
  return true;
}
/* --- MAIN RECURSIVE INPUT FUCNTION --- */


// Returns a new reference.
/* Decode one value of thrift type `type` from `input` into a new Python
 * object; `typeargs` carries the spec for container/struct types.
 * Returns NULL with a Python exception set on failure.
 *
 * Fix: the T_DOUBLE and T_MAP error paths previously `return false;`
 * (an int) from this PyObject*-returning function; they now return NULL
 * explicitly.  Runtime behavior is unchanged (false converts to NULL). */
static PyObject*
decode_val(DecodeBuffer* input, TType type, PyObject* typeargs) {
  switch (type) {

  case T_BOOL: {
    int8_t v = readByte(input);
    if (INT_CONV_ERROR_OCCURRED(v)) {
      return NULL;
    }

    switch (v) {
    case 0: Py_RETURN_FALSE;
    case 1: Py_RETURN_TRUE;
    // Don't laugh.  This is a potentially serious issue.
    default: PyErr_SetString(PyExc_TypeError, "boolean out of range"); return NULL;
    }
    break;
  }
  case T_I08: {
    int8_t v = readByte(input);
    if (INT_CONV_ERROR_OCCURRED(v)) {
      return NULL;
    }

    return PyInt_FromLong(v);
  }
  case T_I16: {
    int16_t v = readI16(input);
    if (INT_CONV_ERROR_OCCURRED(v)) {
      return NULL;
    }
    return PyInt_FromLong(v);
  }
  case T_I32: {
    int32_t v = readI32(input);
    if (INT_CONV_ERROR_OCCURRED(v)) {
      return NULL;
    }
    return PyInt_FromLong(v);
  }

  case T_I64: {
    int64_t v = readI64(input);
    if (INT_CONV_ERROR_OCCURRED(v)) {
      return NULL;
    }
    // TODO(dreiss): Find out if we can take this fastpath always when
    //               sizeof(long) == sizeof(long long).
    if (CHECK_RANGE(v, LONG_MIN, LONG_MAX)) {
      return PyInt_FromLong((long) v);
    }

    return PyLong_FromLongLong(v);
  }

  case T_DOUBLE: {
    double v = readDouble(input);
    if (v == -1.0 && PyErr_Occurred()) {
      return NULL;  // was `return false;` -- wrong type for PyObject*
    }
    return PyFloat_FromDouble(v);
  }

  case T_STRING: {
    Py_ssize_t len = readI32(input);
    char* buf;
    // NOTE(review): `len` is not validated here (cf. skip()'s
    // check_ssize_t_32); a negative length reaches readBytes -- confirm
    // against upstream before changing.
    if (!readBytes(input, &buf, len)) {
      return NULL;
    }

    return PyString_FromStringAndSize(buf, len);
  }

  case T_LIST:
  case T_SET: {
    SetListTypeArgs parsedargs;
    int32_t len;
    PyObject* ret = NULL;
    int i;

    if (!parse_set_list_args(&parsedargs, typeargs)) {
      return NULL;
    }

    if (!checkTypeByte(input, parsedargs.element_type)) {
      return NULL;
    }

    len = readI32(input);
    if (!check_ssize_t_32(len)) {
      return NULL;
    }

    ret = PyList_New(len);
    if (!ret) {
      return NULL;
    }

    for (i = 0; i < len; i++) {
      PyObject* item = decode_val(input, parsedargs.element_type, parsedargs.typeargs);
      if (!item) {
        Py_DECREF(ret);
        return NULL;
      }
      PyList_SET_ITEM(ret, i, item);
    }

    // TODO(dreiss): Consider biting the bullet and making two separate cases
    //               for list and set, avoiding this post facto conversion.
    if (type == T_SET) {
      PyObject* setret;
#if (PY_VERSION_HEX < 0x02050000)
      // hack needed for older versions
      setret = PyObject_CallFunctionObjArgs((PyObject*)&PySet_Type, ret, NULL);
#else
      // official version
      setret = PySet_New(ret);
#endif
      Py_DECREF(ret);
      return setret;
    }
    return ret;
  }

  case T_MAP: {
    int32_t len;
    int i;
    MapTypeArgs parsedargs;
    PyObject* ret = NULL;

    if (!parse_map_args(&parsedargs, typeargs)) {
      return NULL;
    }

    if (!checkTypeByte(input, parsedargs.ktag)) {
      return NULL;
    }
    if (!checkTypeByte(input, parsedargs.vtag)) {
      return NULL;
    }

    len = readI32(input);
    if (!check_ssize_t_32(len)) {
      return NULL;  // was `return false;` -- wrong type for PyObject*
    }

    ret = PyDict_New();
    if (!ret) {
      goto error;
    }

    for (i = 0; i < len; i++) {
      PyObject* k = NULL;
      PyObject* v = NULL;
      k = decode_val(input, parsedargs.ktag, parsedargs.ktypeargs);
      if (k == NULL) {
        goto loop_error;
      }
      v = decode_val(input, parsedargs.vtag, parsedargs.vtypeargs);
      if (v == NULL) {
        goto loop_error;
      }
      if (PyDict_SetItem(ret, k, v) == -1) {
        goto loop_error;
      }

      Py_DECREF(k);
      Py_DECREF(v);
      continue;

      // Yuck!  Destructors, anyone?
      loop_error:
      Py_XDECREF(k);
      Py_XDECREF(v);
      goto error;
    }

    return ret;

    error:
    Py_XDECREF(ret);
    return NULL;
  }

  case T_STRUCT: {
    StructTypeArgs parsedargs;
    PyObject* ret;
    if (!parse_struct_args(&parsedargs, typeargs)) {
      return NULL;
    }

    ret = PyObject_CallObject(parsedargs.klass, NULL);
    if (!ret) {
      return NULL;
    }

    if (!decode_struct(input, ret, parsedargs.spec)) {
      Py_DECREF(ret);
      return NULL;
    }

    return ret;
  }

  case T_STOP:
  case T_VOID:
  case T_UTF16:
  case T_UTF8:
  case T_U64:
  default:
    PyErr_SetString(PyExc_TypeError, "Unexpected TType");
    return NULL;
  }
}
/* --- TOP-LEVEL WRAPPER FOR INPUT -- */

/* Python entry point: decode_binary(obj, transport, (klass, spec)).
 * Populates `obj` in place from the transport; returns None, or NULL
 * with an exception set on failure. */
static PyObject*
decode_binary(PyObject *self, PyObject *args) {
  PyObject* output_obj = NULL;
  PyObject* transport = NULL;
  PyObject* typeargs = NULL;
  StructTypeArgs parsedargs;
  DecodeBuffer input = {0, 0};

  if (!PyArg_ParseTuple(args, "OOO", &output_obj, &transport, &typeargs)) {
    return NULL;
  }

  if (!parse_struct_args(&parsedargs, typeargs)) {
    return NULL;
  }

  if (!decode_buffer_from_obj(&input, transport)) {
    return NULL;
  }

  if (!decode_struct(&input, output_obj, parsedargs.spec)) {
    free_decodebuf(&input);
    return NULL;
  }

  free_decodebuf(&input);

  Py_RETURN_NONE;
}
/* ====== END READING FUNCTIONS ====== */


/* -- PYTHON MODULE SETUP STUFF --- */

static PyMethodDef ThriftFastBinaryMethods[] = {

  {"encode_binary",  encode_binary, METH_VARARGS, ""},
  {"decode_binary",  decode_binary, METH_VARARGS, ""},

  {NULL, NULL, 0, NULL}        /* Sentinel */
};

/* Python 2 module init: intern attribute-name strings, import cStringIO's
 * C API, and register the module's methods. */
PyMODINIT_FUNC
initfastbinary(void) {
#define INIT_INTERN_STRING(value) \
  do { \
    INTERN_STRING(value) = PyString_InternFromString(#value); \
    if(!INTERN_STRING(value)) return; \
  } while(0)

  INIT_INTERN_STRING(cstringio_buf);
  INIT_INTERN_STRING(cstringio_refill);
#undef INIT_INTERN_STRING

  PycString_IMPORT;
  if (PycStringIO == NULL) return;

  (void) Py_InitModule("thrift.protocol.fastbinary", ThriftFastBinaryMethods);
}
================================================
FILE: lib/thrift/server/THttpServer.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import BaseHTTPServer
from thrift.server import TServer
from thrift.transport import TTransport
class ResponseException(Exception):
    """Raised by a handler to take over the HTTP response.

    THttpServer normally replies with a 200.  A handler that needs a
    different response (for example to simulate a misconfigured or
    overloaded web server during testing) raises ResponseException;
    the callable given to the constructor is then invoked with the
    RequestHandler as its only argument.
    """

    def __init__(self, handler):
        self.handler = handler
class THttpServer(TServer.TServer):
    """A simple HTTP-based Thrift server

    This class is not very performant, but it is useful (for example) for
    acting as a mock version of an Apache-based PHP Thrift endpoint."""

    def __init__(self, processor, server_address,
                 inputProtocolFactory, outputProtocolFactory = None,
                 server_class = BaseHTTPServer.HTTPServer):
        """Set up protocol factories and HTTP server.

        See BaseHTTPServer for server_address.
        See TServer for protocol factories.
        """
        if outputProtocolFactory is None:
            # Default to symmetric protocols for input and output.
            outputProtocolFactory = inputProtocolFactory

        TServer.TServer.__init__(self, processor, None, None, None,
                                 inputProtocolFactory, outputProtocolFactory)

        # Captured by the handler closure below, since the handler class
        # cannot take constructor arguments of its own.
        thttpserver = self

        # NOTE(review): "RequestHander" is misspelled, but the name is local
        # to this constructor so nothing external depends on it.
        class RequestHander(BaseHTTPServer.BaseHTTPRequestHandler):
            def do_POST(self):
                # Don't care about the request path.
                itrans = TTransport.TFileObjectTransport(self.rfile)
                otrans = TTransport.TFileObjectTransport(self.wfile)
                # Buffer exactly Content-Length request bytes, and collect
                # the reply in memory so it can be written after headers.
                itrans = TTransport.TBufferedTransport(itrans, int(self.headers['Content-Length']))
                otrans = TTransport.TMemoryBuffer()
                iprot = thttpserver.inputProtocolFactory.getProtocol(itrans)
                oprot = thttpserver.outputProtocolFactory.getProtocol(otrans)
                try:
                    thttpserver.processor.process(iprot, oprot)
                except ResponseException, exn:
                    # The handler elected to write its own HTTP response.
                    exn.handler(self)
                else:
                    self.send_response(200)
                    self.send_header("content-type", "application/x-thrift")
                    self.end_headers()
                    self.wfile.write(otrans.getvalue())

        self.httpd = server_class(server_address, RequestHander)

    def serve(self):
        # Blocks forever handling HTTP requests.
        self.httpd.serve_forever()
================================================
FILE: lib/thrift/server/TNonblockingServer.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""Implementation of non-blocking server.
The main idea of the server is reciving and sending requests
only from main thread.
It also makes thread pool server in tasks terms, not connections.
"""
import threading
import socket
import Queue
import select
import struct
import logging
from thrift.transport import TTransport
from thrift.protocol.TBinaryProtocol import TBinaryProtocolFactory
# Only the server class is part of this module's public API.
__all__ = ['TNonblockingServer']
class Worker(threading.Thread):
"""Worker is a small helper to process incoming connection."""
def __init__(self, queue):
threading.Thread.__init__(self)
self.queue = queue
def run(self):
"""Process queries from task queue, stop if processor is None."""
while True:
try:
processor, iprot, oprot, otrans, callback = self.queue.get()
if processor is None:
break
processor.process(iprot, oprot)
callback(True, otrans.getvalue())
except Exception:
logging.exception("Exception while processing request")
callback(False, '')
WAIT_LEN = 0
WAIT_MESSAGE = 1
WAIT_PROCESS = 2
SEND_ANSWER = 3
CLOSED = 4
def locked(func):
"Decorator which locks self.lock."
def nested(self, *args, **kwargs):
self.lock.acquire()
try:
return func(self, *args, **kwargs)
finally:
self.lock.release()
return nested
def socket_exception(func):
"Decorator close object on socket.error."
def read(self, *args, **kwargs):
try:
return func(self, *args, **kwargs)
except socket.error:
self.close()
return read
class Connection:
"""Basic class is represented connection.
It can be in state:
WAIT_LEN --- connection is reading request len.
WAIT_MESSAGE --- connection is reading request.
WAIT_PROCESS --- connection has just read whole request and
waits for call ready routine.
SEND_ANSWER --- connection is sending answer string (including length
of answer).
CLOSED --- socket was closed and connection should be deleted.
"""
def __init__(self, new_socket, wake_up):
self.socket = new_socket
self.socket.setblocking(False)
self.status = WAIT_LEN
self.len = 0
self.message = ''
self.lock = threading.Lock()
self.wake_up = wake_up
def _read_len(self):
"""Reads length of request.
It's really paranoic routine and it may be replaced by
self.socket.recv(4)."""
read = self.socket.recv(4 - len(self.message))
if len(read) == 0:
# if we read 0 bytes and self.message is empty, it means client close
# connection
if len(self.message) != 0:
logging.error("can't read frame size from socket")
self.close()
return
self.message += read
if len(self.message) == 4:
self.len, = struct.unpack('!i', self.message)
if self.len < 0:
logging.error("negative frame size, it seems client"\
" doesn't use FramedTransport")
self.close()
elif self.len == 0:
logging.error("empty frame, it's really strange")
self.close()
else:
self.message = ''
self.status = WAIT_MESSAGE
@socket_exception
def read(self):
"""Reads data from stream and switch state."""
assert self.status in (WAIT_LEN, WAIT_MESSAGE)
if self.status == WAIT_LEN:
self._read_len()
# go back to the main loop here for simplicity instead of
# falling through, even though there is a good chance that
# the message is already available
elif self.status == WAIT_MESSAGE:
read = self.socket.recv(self.len - len(self.message))
if len(read) == 0:
logging.error("can't read frame from socket (get %d of %d bytes)" %
(len(self.message), self.len))
self.close()
return
self.message += read
if len(self.message) == self.len:
self.status = WAIT_PROCESS
@socket_exception
def write(self):
"""Writes data from socket and switch state."""
assert self.status == SEND_ANSWER
sent = self.socket.send(self.message)
if sent == len(self.message):
self.status = WAIT_LEN
self.message = ''
self.len = 0
else:
self.message = self.message[sent:]
@locked
def ready(self, all_ok, message):
"""Callback function for switching state and waking up main thread.
This function is the only function witch can be called asynchronous.
The ready can switch Connection to three states:
WAIT_LEN if request was oneway.
SEND_ANSWER if request was processed in normal way.
CLOSED if request throws unexpected exception.
The one wakes up main thread.
"""
assert self.status == WAIT_PROCESS
if not all_ok:
self.close()
self.wake_up()
return
self.len = ''
if len(message) == 0:
# it was a oneway request, do not write answer
self.message = ''
self.status = WAIT_LEN
else:
self.message = struct.pack('!i', len(message)) + message
self.status = SEND_ANSWER
self.wake_up()
@locked
def is_writeable(self):
"Returns True if connection should be added to write list of select."
return self.status == SEND_ANSWER
# it's not necessary, but...
@locked
def is_readable(self):
"Returns True if connection should be added to read list of select."
return self.status in (WAIT_LEN, WAIT_MESSAGE)
@locked
def is_closed(self):
"Returns True if connection is closed."
return self.status == CLOSED
def fileno(self):
"Returns the file descriptor of the associated socket."
return self.socket.fileno()
def close(self):
"Closes connection"
self.status = CLOSED
self.socket.close()
class TNonblockingServer:
"""Non-blocking server."""
def __init__(self, processor, lsocket, inputProtocolFactory=None,
outputProtocolFactory=None, threads=10):
self.processor = processor
self.socket = lsocket
self.in_protocol = inputProtocolFactory or TBinaryProtocolFactory()
self.out_protocol = outputProtocolFactory or self.in_protocol
self.threads = int(threads)
self.clients = {}
self.tasks = Queue.Queue()
self._read, self._write = socket.socketpair()
self.prepared = False
def setNumThreads(self, num):
"""Set the number of worker threads that should be created."""
# implement ThreadPool interface
assert not self.prepared, "You can't change number of threads for working server"
self.threads = num
def prepare(self):
"""Prepares server for serve requests."""
self.socket.listen()
for _ in xrange(self.threads):
thread = Worker(self.tasks)
thread.setDaemon(True)
thread.start()
self.prepared = True
def wake_up(self):
"""Wake up main thread.
The server usualy waits in select call in we should terminate one.
The simplest way is using socketpair.
Select always wait to read from the first socket of socketpair.
In this case, we can just write anything to the second socket from
socketpair."""
self._write.send('1')
def _select(self):
"""Does select on open connections."""
readable = [self.socket.handle.fileno(), self._read.fileno()]
writable = []
for i, connection in self.clients.items():
if connection.is_readable():
readable.append(connection.fileno())
if connection.is_writeable():
writable.append(connection.fileno())
if connection.is_closed():
del self.clients[i]
return select.select(readable, writable, readable)
def handle(self):
"""Handle requests.
WARNING! You must call prepare BEFORE calling handle.
"""
assert self.prepared, "You have to call prepare before handle"
rset, wset, xset = self._select()
for readable in rset:
if readable == self._read.fileno():
# don't care i just need to clean readable flag
self._read.recv(1024)
elif readable == self.socket.handle.fileno():
client = self.socket.accept().handle
self.clients[client.fileno()] = Connection(client, self.wake_up)
else:
connection = self.clients[readable]
connection.read()
if connection.status == WAIT_PROCESS:
itransport = TTransport.TMemoryBuffer(connection.message)
otransport = TTransport.TMemoryBuffer()
iprot = self.in_protocol.getProtocol(itransport)
oprot = self.out_protocol.getProtocol(otransport)
self.tasks.put([self.processor, iprot, oprot,
otransport, connection.ready])
for writeable in wset:
self.clients[writeable].write()
for oob in xset:
self.clients[oob].close()
del self.clients[oob]
def close(self):
"""Closes the server."""
for _ in xrange(self.threads):
self.tasks.put([None, None, None, None, None])
self.socket.close()
self.prepared = False
def serve(self):
"""Serve forever."""
self.prepare()
while True:
self.handle()
================================================
FILE: lib/thrift/server/TProcessPoolServer.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import logging
from multiprocessing import Process, Value, Condition, reduction
from TServer import TServer
from thrift.transport.TTransport import TTransportException
class TProcessPoolServer(TServer):
"""
Server with a fixed size pool of worker subprocesses which service requests.
Note that if you need shared state between the handlers - it's up to you!
Written by Dvir Volk, doat.com
"""
def __init__(self, * args):
TServer.__init__(self, *args)
self.numWorkers = 10
self.workers = []
self.isRunning = Value('b', False)
self.stopCondition = Condition()
self.postForkCallback = None
def setPostForkCallback(self, callback):
if not callable(callback):
raise TypeError("This is not a callback!")
self.postForkCallback = callback
def setNumWorkers(self, num):
"""Set the number of worker threads that should be created"""
self.numWorkers = num
def workerProcess(self):
"""Loop around getting clients from the shared queue and process them."""
if self.postForkCallback:
self.postForkCallback()
while self.isRunning.value == True:
try:
client = self.serverTransport.accept()
self.serveClient(client)
except (KeyboardInterrupt, SystemExit):
return 0
except Exception, x:
logging.exception(x)
def serveClient(self, client):
"""Process input/output from a client for as long as possible"""
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
try:
while True:
self.processor.process(iprot, oprot)
except TTransportException, tx:
pass
except Exception, x:
logging.exception(x)
itrans.close()
otrans.close()
def serve(self):
"""Start a fixed number of worker threads and put client into a queue"""
#this is a shared state that can tell the workers to exit when set as false
self.isRunning.value = True
#first bind and listen to the port
self.serverTransport.listen()
#fork the children
for i in range(self.numWorkers):
try:
w = Process(target=self.workerProcess)
w.daemon = True
w.start()
self.workers.append(w)
except Exception, x:
logging.exception(x)
#wait until the condition is set by stop()
while True:
self.stopCondition.acquire()
try:
self.stopCondition.wait()
break
except (SystemExit, KeyboardInterrupt):
break
except Exception, x:
logging.exception(x)
self.isRunning.value = False
def stop(self):
self.isRunning.value = False
self.stopCondition.acquire()
self.stopCondition.notify()
self.stopCondition.release()
================================================
FILE: lib/thrift/server/TServer.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import logging
import sys
import os
import traceback
import threading
import Queue
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
class TServer:
"""Base interface for a server, which must have a serve method."""
""" 3 constructors for all servers:
1) (processor, serverTransport)
2) (processor, serverTransport, transportFactory, protocolFactory)
3) (processor, serverTransport,
inputTransportFactory, outputTransportFactory,
inputProtocolFactory, outputProtocolFactory)"""
def __init__(self, *args):
if (len(args) == 2):
self.__initArgs__(args[0], args[1],
TTransport.TTransportFactoryBase(),
TTransport.TTransportFactoryBase(),
TBinaryProtocol.TBinaryProtocolFactory(),
TBinaryProtocol.TBinaryProtocolFactory())
elif (len(args) == 4):
self.__initArgs__(args[0], args[1], args[2], args[2], args[3], args[3])
elif (len(args) == 6):
self.__initArgs__(args[0], args[1], args[2], args[3], args[4], args[5])
def __initArgs__(self, processor, serverTransport,
inputTransportFactory, outputTransportFactory,
inputProtocolFactory, outputProtocolFactory):
self.processor = processor
self.serverTransport = serverTransport
self.inputTransportFactory = inputTransportFactory
self.outputTransportFactory = outputTransportFactory
self.inputProtocolFactory = inputProtocolFactory
self.outputProtocolFactory = outputProtocolFactory
def serve(self):
pass
class TSimpleServer(TServer):
"""Simple single-threaded server that just pumps around one transport."""
def __init__(self, *args):
TServer.__init__(self, *args)
def serve(self):
self.serverTransport.listen()
while True:
client = self.serverTransport.accept()
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
try:
while True:
self.processor.process(iprot, oprot)
except TTransport.TTransportException, tx:
pass
except Exception, x:
logging.exception(x)
itrans.close()
otrans.close()
class TThreadedServer(TServer):
"""Threaded server that spawns a new thread per each connection."""
def __init__(self, *args, **kwargs):
TServer.__init__(self, *args)
self.daemon = kwargs.get("daemon", False)
def serve(self):
self.serverTransport.listen()
while True:
try:
client = self.serverTransport.accept()
t = threading.Thread(target = self.handle, args=(client,))
t.setDaemon(self.daemon)
t.start()
except KeyboardInterrupt:
raise
except Exception, x:
logging.exception(x)
def handle(self, client):
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
try:
while True:
self.processor.process(iprot, oprot)
except TTransport.TTransportException, tx:
pass
except Exception, x:
logging.exception(x)
itrans.close()
otrans.close()
class TThreadPoolServer(TServer):
"""Server with a fixed size pool of threads which service requests."""
def __init__(self, *args, **kwargs):
TServer.__init__(self, *args)
self.clients = Queue.Queue()
self.threads = 10
self.daemon = kwargs.get("daemon", False)
def setNumThreads(self, num):
"""Set the number of worker threads that should be created"""
self.threads = num
def serveThread(self):
"""Loop around getting clients from the shared queue and process them."""
while True:
try:
client = self.clients.get()
self.serveClient(client)
except Exception, x:
logging.exception(x)
def serveClient(self, client):
"""Process input/output from a client for as long as possible"""
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
try:
while True:
self.processor.process(iprot, oprot)
except TTransport.TTransportException, tx:
pass
except Exception, x:
logging.exception(x)
itrans.close()
otrans.close()
def serve(self):
"""Start a fixed number of worker threads and put client into a queue"""
for i in range(self.threads):
try:
t = threading.Thread(target = self.serveThread)
t.setDaemon(self.daemon)
t.start()
except Exception, x:
logging.exception(x)
# Pump the socket for clients
self.serverTransport.listen()
while True:
try:
client = self.serverTransport.accept()
self.clients.put(client)
except Exception, x:
logging.exception(x)
class TForkingServer(TServer):
"""A Thrift server that forks a new process for each request"""
"""
This is more scalable than the threaded server as it does not cause
GIL contention.
Note that this has different semantics from the threading server.
Specifically, updates to shared variables will no longer be shared.
It will also not work on windows.
This code is heavily inspired by SocketServer.ForkingMixIn in the
Python stdlib.
"""
def __init__(self, *args):
TServer.__init__(self, *args)
self.children = []
def serve(self):
def try_close(file):
try:
file.close()
except IOError, e:
logging.warning(e, exc_info=True)
self.serverTransport.listen()
while True:
client = self.serverTransport.accept()
try:
pid = os.fork()
if pid: # parent
# add before collect, otherwise you race w/ waitpid
self.children.append(pid)
self.collect_children()
# Parent must close socket or the connection may not get
# closed promptly
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
try_close(itrans)
try_close(otrans)
else:
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
ecode = 0
try:
try:
while True:
self.processor.process(iprot, oprot)
except TTransport.TTransportException, tx:
pass
except Exception, e:
logging.exception(e)
ecode = 1
finally:
try_close(itrans)
try_close(otrans)
os._exit(ecode)
except TTransport.TTransportException, tx:
pass
except Exception, x:
logging.exception(x)
def collect_children(self):
while self.children:
try:
pid, status = os.waitpid(0, os.WNOHANG)
except os.error:
pid = None
if pid:
self.children.remove(pid)
else:
break
================================================
FILE: lib/thrift/server/__init__.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Submodules exported by `from thrift.server import *`.
__all__ = ['TServer', 'TNonblockingServer']
================================================
FILE: lib/thrift/transport/THttpClient.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from TTransport import TTransportBase
from cStringIO import StringIO
import urlparse
import httplib
import warnings
import socket
class THttpClient(TTransportBase):
"""Http implementation of TTransport base."""
def __init__(
self,
uri_or_host,
port=None,
path=None,
proxy_host=None,
proxy_port=None
):
"""THttpClient supports two different types constructor parameters.
THttpClient(host, port, path) - deprecated
THttpClient(uri)
Only the second supports https."""
"""THttpClient supports proxy
THttpClient(host, port, path, proxy_host, proxy_port) - deprecated
ThttpClient(uri, None, None, proxy_host, proxy_port)"""
if port is not None:
warnings.warn(
"Please use the THttpClient('http://host:port/path') syntax",
DeprecationWarning,
stacklevel=2)
self.host = uri_or_host
self.port = port
assert path
self.path = path
self.scheme = 'http'
else:
parsed = urlparse.urlparse(uri_or_host)
self.scheme = parsed.scheme
assert self.scheme in ('http', 'https')
if self.scheme == 'http':
self.port = parsed.port or httplib.HTTP_PORT
elif self.scheme == 'https':
self.port = parsed.port or httplib.HTTPS_PORT
self.host = parsed.hostname
self.path = parsed.path
if parsed.query:
self.path += '?%s' % parsed.query
if proxy_host is not None and proxy_port is not None:
self.endpoint_host = proxy_host
self.endpoint_port = proxy_port
self.path = urlparse.urlunparse((
self.scheme,
"%s:%i" % (self.host, self.port),
self.path,
None,
None,
None
))
else:
self.endpoint_host = self.host
self.endpoint_port = self.port
self.__wbuf = StringIO()
self.__http = None
self.__timeout = None
self.__headers = {}
def open(self):
protocol = httplib.HTTP if self.scheme == 'http' else httplib.HTTPS
self.__http = protocol(self.endpoint_host, self.endpoint_port)
def close(self):
self.__http.close()
self.__http = None
def isOpen(self):
return self.__http is not None
def setTimeout(self, ms):
if not hasattr(socket, 'getdefaulttimeout'):
raise NotImplementedError
if ms is None:
self.__timeout = None
else:
self.__timeout = ms / 1000.0
def read(self, sz):
return self.__http.file.read(sz)
def write(self, buf):
self.__wbuf.write(buf)
def __withTimeout(f):
def _f(*args, **kwargs):
orig_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(args[0].__timeout)
result = f(*args, **kwargs)
socket.setdefaulttimeout(orig_timeout)
return result
return _f
def addHeaders(self, **kwargs):
self.__headers.update(kwargs)
def flush(self):
if self.isOpen():
self.close()
self.open()
# Pull data out of buffer
data = self.__wbuf.getvalue()
self.__wbuf = StringIO()
# HTTP request
self.__http.putrequest('POST', self.path)
# Write headers
self.__http.putheader('Host', self.host)
self.__http.putheader('Content-Type', 'application/x-thrift')
self.__http.putheader('Content-Length', str(len(data)))
for key, value in self.__headers.iteritems():
self.__http.putheader(key, value)
self.__http.endheaders()
# Write payload
self.__http.send(data)
# Get reply to flush the request
self.code, self.message, self.headers = self.__http.getreply()
# Decorate if we know how to timeout
if hasattr(socket, 'getdefaulttimeout'):
flush = __withTimeout(flush)
================================================
FILE: lib/thrift/transport/TSSLSocket.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os
import socket
import ssl
from thrift.transport import TSocket
from thrift.transport.TTransport import TTransportException
class TSSLSocket(TSocket.TSocket):
"""
SSL implementation of client-side TSocket
This class creates outbound sockets wrapped using the
python standard ssl module for encrypted connections.
The protocol used is set using the class variable
SSL_VERSION, which must be one of ssl.PROTOCOL_* and
defaults to ssl.PROTOCOL_TLSv1 for greatest security.
"""
SSL_VERSION = ssl.PROTOCOL_TLSv1
def __init__(self, host='localhost', port=9090, validate=True, ca_certs=None, unix_socket=None):
"""
@param validate: Set to False to disable SSL certificate validation entirely.
@type validate: bool
@param ca_certs: Filename to the Certificate Authority pem file, possibly a
file downloaded from: http://curl.haxx.se/ca/cacert.pem This is passed to
the ssl_wrap function as the 'ca_certs' parameter.
@type ca_certs: str
Raises an IOError exception if validate is True and the ca_certs file is
None, not present or unreadable.
"""
self.validate = validate
self.is_valid = False
self.peercert = None
if not validate:
self.cert_reqs = ssl.CERT_NONE
else:
self.cert_reqs = ssl.CERT_REQUIRED
self.ca_certs = ca_certs
if validate:
if ca_certs is None or not os.access(ca_certs, os.R_OK):
raise IOError('Certificate Authority ca_certs file "%s" is not readable, cannot validate SSL certificates.' % (ca_certs))
TSocket.TSocket.__init__(self, host, port, unix_socket)
def open(self):
try:
res0 = self._resolveAddr()
for res in res0:
sock_family, sock_type= res[0:2]
ip_port = res[4]
plain_sock = socket.socket(sock_family, sock_type)
self.handle = ssl.wrap_socket(plain_sock, ssl_version=self.SSL_VERSION,
do_handshake_on_connect=True, ca_certs=self.ca_certs, cert_reqs=self.cert_reqs)
self.handle.settimeout(self._timeout)
try:
self.handle.connect(ip_port)
except socket.error, e:
if res is not res0[-1]:
continue
else:
raise e
break
except socket.error, e:
if self._unix_socket:
message = 'Could not connect to secure socket %s' % self._unix_socket
else:
message = 'Could not connect to %s:%d' % (self.host, self.port)
raise TTransportException(type=TTransportException.NOT_OPEN, message=message)
if self.validate:
self._validate_cert()
def _validate_cert(self):
"""internal method to validate the peer's SSL certificate, and to check the
commonName of the certificate to ensure it matches the hostname we
used to make this connection. Does not support subjectAltName records
in certificates.
raises TTransportException if the certificate fails validation."""
cert = self.handle.getpeercert()
self.peercert = cert
if 'subject' not in cert:
raise TTransportException(type=TTransportException.NOT_OPEN,
message='No SSL certificate found from %s:%s' % (self.host, self.port))
fields = cert['subject']
for field in fields:
# ensure structure we get back is what we expect
if not isinstance(field, tuple):
continue
cert_pair = field[0]
if len(cert_pair) < 2:
continue
cert_key, cert_value = cert_pair[0:2]
if cert_key != 'commonName':
continue
certhost = cert_value
if certhost == self.host:
# success, cert commonName matches desired hostname
self.is_valid = True
return
else:
raise TTransportException(type=TTransportException.UNKNOWN,
message='Host name we connected to "%s" doesn\'t match certificate provided commonName "%s"' % (self.host, certhost))
raise TTransportException(type=TTransportException.UNKNOWN,
message='Could not validate SSL certificate from host "%s". Cert=%s' % (self.host, cert))
class TSSLServerSocket(TSocket.TServerSocket):
"""
SSL implementation of TServerSocket
This uses the ssl module's wrap_socket() method to provide SSL
negotiated encryption.
"""
SSL_VERSION = ssl.PROTOCOL_TLSv1
def __init__(self, host=None, port=9090, certfile='cert.pem', unix_socket=None):
"""Initialize a TSSLServerSocket
@param certfile: The filename of the server certificate file, defaults to cert.pem
@type certfile: str
@param host: The hostname or IP to bind the listen socket to, i.e. 'localhost' for only allowing
local network connections. Pass None to bind to all interfaces.
@type host: str
@param port: The port to listen on for inbound connections.
@type port: int
"""
self.setCertfile(certfile)
TSocket.TServerSocket.__init__(self, host, port)
def setCertfile(self, certfile):
"""Set or change the server certificate file used to wrap new connections.
@param certfile: The filename of the server certificate, i.e. '/etc/certs/server.pem'
@type certfile: str
Raises an IOError exception if the certfile is not present or unreadable.
"""
if not os.access(certfile, os.R_OK):
raise IOError('No such certfile found: %s' % (certfile))
self.certfile = certfile
def accept(self):
plain_client, addr = self.handle.accept()
try:
client = ssl.wrap_socket(plain_client, certfile=self.certfile,
server_side=True, ssl_version=self.SSL_VERSION)
except ssl.SSLError, ssl_exc:
# failed handshake/ssl wrap, close socket to client
plain_client.close()
# raise ssl_exc
# We can't raise the exception, because it kills most TServer derived serve()
# methods.
# Instead, return None, and let the TServer instance deal with it in
# other exception handling. (but TSimpleServer dies anyway)
return None
result = TSocket.TSocket()
result.setHandle(client)
return result
================================================
FILE: lib/thrift/transport/TSocket.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from TTransport import *
import os
import errno
import socket
import sys
class TSocketBase(TTransportBase):
    """Shared address-resolution and close() logic for TSocket/TServerSocket."""

    def _resolveAddr(self):
        """Return a getaddrinfo-style list of candidate addresses.

        A configured unix-domain socket short-circuits DNS resolution and
        yields a single synthetic entry.
        """
        if self._unix_socket is not None:
            return [(socket.AF_UNIX, socket.SOCK_STREAM, None, None,
                     self._unix_socket)]
        return socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC,
                                  socket.SOCK_STREAM, 0,
                                  socket.AI_PASSIVE | socket.AI_ADDRCONFIG)

    def close(self):
        """Close the underlying socket handle, if any, and forget it."""
        if self.handle:
            self.handle.close()
            self.handle = None
class TSocket(TSocketBase):
    """Socket implementation of TTransport base."""

    def __init__(self, host='localhost', port=9090, unix_socket=None):
        """Initialize a TSocket
        @param host(str) The host to connect to.
        @param port(int) The (TCP) port to connect to.
        @param unix_socket(str) The filename of a unix socket to connect to.
        (host and port will be ignored.)
        """
        self.host = host
        self.port = port
        self.handle = None
        self._unix_socket = unix_socket
        self._timeout = None

    def setHandle(self, h):
        # Adopt an already-connected socket object (used by TServerSocket.accept()).
        self.handle = h

    def isOpen(self):
        return self.handle is not None

    def setTimeout(self, ms):
        # Timeout is given in milliseconds; None disables it entirely.
        if ms is None:
            self._timeout = None
        else:
            self._timeout = ms/1000.0
        # Apply immediately if a socket already exists.
        if self.handle is not None:
            self.handle.settimeout(self._timeout)

    def open(self):
        """Connect to the first reachable resolved address.

        Raises TTransportException(NOT_OPEN) if every candidate fails.
        """
        try:
            res0 = self._resolveAddr()
            for res in res0:
                self.handle = socket.socket(res[0], res[1])
                self.handle.settimeout(self._timeout)
                try:
                    self.handle.connect(res[4])
                except socket.error, e:
                    # Only the failure of the last candidate is fatal.
                    if res is not res0[-1]:
                        continue
                    else:
                        raise e
                break
        except socket.error, e:
            if self._unix_socket:
                message = 'Could not connect to socket %s' % self._unix_socket
            else:
                message = 'Could not connect to %s:%d' % (self.host, self.port)
            raise TTransportException(type=TTransportException.NOT_OPEN, message=message)

    def read(self, sz):
        """Read up to sz bytes; raise END_OF_FILE when the peer has closed."""
        try:
            buff = self.handle.recv(sz)
        except socket.error, e:
            if (e.args[0] == errno.ECONNRESET and
                (sys.platform == 'darwin' or sys.platform.startswith('freebsd'))):
                # freebsd and Mach don't follow POSIX semantic of recv
                # and fail with ECONNRESET if peer performed shutdown.
                # See corresponding comment and code in TSocket::read()
                # in lib/cpp/src/transport/TSocket.cpp.
                self.close()
                # Trigger the check to raise the END_OF_FILE exception below.
                buff = ''
            else:
                raise
        if len(buff) == 0:
            raise TTransportException(type=TTransportException.END_OF_FILE, message='TSocket read 0 bytes')
        return buff

    def write(self, buff):
        """Send the whole buffer; send() may deliver it in several pieces."""
        if not self.handle:
            raise TTransportException(type=TTransportException.NOT_OPEN, message='Transport not open')
        sent = 0
        have = len(buff)
        while sent < have:
            plus = self.handle.send(buff)
            if plus == 0:
                raise TTransportException(type=TTransportException.END_OF_FILE, message='TSocket sent 0 bytes')
            sent += plus
            buff = buff[plus:]

    def flush(self):
        # Unbuffered transport: nothing to flush.
        pass
class TServerSocket(TSocketBase, TServerTransportBase):
    """Socket implementation of TServerTransport base."""

    def __init__(self, host=None, port=9090, unix_socket=None):
        self.host = host
        self.port = port
        self._unix_socket = unix_socket
        self.handle = None

    def listen(self):
        """Bind and start listening on the first suitable resolved address."""
        res0 = self._resolveAddr()
        # Prefer an IPv6 address if one resolves; otherwise take the last result.
        for res in res0:
            if res[0] is socket.AF_INET6 or res is res0[-1]:
                break
        # We need remove the old unix socket if the file exists and
        # nobody is listening on it.
        if self._unix_socket:
            tmp = socket.socket(res[0], res[1])
            try:
                tmp.connect(res[4])
            except socket.error, err:
                eno, message = err.args
                if eno == errno.ECONNREFUSED:
                    # Stale socket file: nothing is listening, safe to unlink.
                    os.unlink(res[4])
        self.handle = socket.socket(res[0], res[1])
        self.handle.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if hasattr(self.handle, 'settimeout'):
            self.handle.settimeout(None)
        self.handle.bind(res[4])
        self.handle.listen(128)

    def accept(self):
        """Block for one connection and return it wrapped in a TSocket."""
        client, addr = self.handle.accept()
        result = TSocket()
        result.setHandle(client)
        return result
================================================
FILE: lib/thrift/transport/TTransport.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from cStringIO import StringIO
from struct import pack,unpack
from thrift.Thrift import TException
class TTransportException(TException):
    """Transport-level error raised by TTransport implementations."""

    UNKNOWN = 0
    NOT_OPEN = 1
    ALREADY_OPEN = 2
    TIMED_OUT = 3
    END_OF_FILE = 4

    def __init__(self, type=UNKNOWN, message=None):
        """Create an exception carrying an error *type* code and a message."""
        self.type = type
        TException.__init__(self, message)
class TTransportBase:
    """Base class for Thrift transport layer.

    Subclasses implement isOpen/open/close/read/write/flush; readAll is
    provided here in terms of read().
    """

    def isOpen(self):
        """Return whether the transport is open (subclass responsibility)."""
        pass

    def open(self):
        """Open the transport (subclass responsibility)."""
        pass

    def close(self):
        """Close the transport (subclass responsibility)."""
        pass

    def read(self, sz):
        """Read up to sz bytes (subclass responsibility)."""
        pass

    def readAll(self, sz):
        """Read exactly sz bytes by looping over read().

        Raises EOFError if read() returns no data before sz bytes have
        been accumulated.
        """
        # Accumulate chunks in a list and join once at the end: the previous
        # repeated string `+=` was quadratic in the number of chunks.
        chunks = []
        have = 0
        while have < sz:
            chunk = self.read(sz - have)
            if len(chunk) == 0:
                raise EOFError()
            chunks.append(chunk)
            have += len(chunk)
        return ''.join(chunks)

    def write(self, buf):
        """Write buf (subclass responsibility)."""
        pass

    def flush(self):
        """Flush any buffered writes (subclass responsibility)."""
        pass
# This class should be thought of as an interface.
class CReadableTransport:
    """Interface for transports readable by the C (fastbinary) extension."""
    # NOTE: originally a classic class, so property *setters* do not work.

    @property
    def cstringio_buf(self):
        """The cStringIO buffer holding the chunk currently being read."""
        pass

    def cstringio_refill(self, partialread, reqlen):
        """Refill cstringio_buf and return the buffer now in use.

        The return value must be a new (not borrowed) reference; something
        like self._buf is fine.  ``partialread`` is data the C code already
        consumed and must be placed at the front of the refilled buffer.
        Implementations must raise EOFError if reqlen bytes cannot be read.
        """
        pass
class TServerTransportBase:
    """Base class for Thrift server transports (listen/accept/close)."""

    def listen(self):
        """Start listening for connections (subclass responsibility)."""
        pass

    def accept(self):
        """Accept one connection and return a client transport (subclass responsibility)."""
        pass

    def close(self):
        """Stop listening and release resources (subclass responsibility)."""
        pass
class TTransportFactoryBase:
    """Identity transport factory: hands back the transport unchanged."""

    def getTransport(self, trans):
        """Return *trans* as-is (no wrapping)."""
        return trans
class TBufferedTransportFactory:
    """Factory producing TBufferedTransport wrappers."""

    def getTransport(self, trans):
        """Wrap *trans* in a new TBufferedTransport and return it."""
        return TBufferedTransport(trans)
class TBufferedTransport(TTransportBase,CReadableTransport):
    """Buffering wrapper around another transport.

    Reads are served from a fixed-size internal read buffer that is
    refilled from the wrapped transport on demand; writes accumulate in
    memory until flush() pushes them downstream in a single call.
    """
    DEFAULT_BUFFER = 4096

    def __init__(self, trans, rbuf_size = DEFAULT_BUFFER):
        self.__trans = trans
        self.__wbuf = StringIO()
        self.__rbuf = StringIO("")
        self.__rbuf_size = rbuf_size

    def isOpen(self):
        return self.__trans.isOpen()

    def open(self):
        return self.__trans.open()

    def close(self):
        return self.__trans.close()

    def read(self, sz):
        data = self.__rbuf.read(sz)
        if data:
            return data
        # Buffer exhausted: refill with at least rbuf_size bytes if available.
        self.__rbuf = StringIO(self.__trans.read(max(sz, self.__rbuf_size)))
        return self.__rbuf.read(sz)

    def write(self, buf):
        self.__wbuf.write(buf)

    def flush(self):
        pending = self.__wbuf.getvalue()
        # Swap in a fresh buffer *before* writing so our state stays sane
        # even if the underlying write/flush raises.
        self.__wbuf = StringIO()
        self.__trans.write(pending)
        self.__trans.flush()

    # -- CReadableTransport interface ------------------------------------
    @property
    def cstringio_buf(self):
        return self.__rbuf

    def cstringio_refill(self, partialread, reqlen):
        data = partialread
        if reqlen < self.__rbuf_size:
            # Grab as much as a full buffer's worth in one read.
            data += self.__trans.read(self.__rbuf_size)
        # Guarantee at least reqlen bytes end up in the buffer.
        if len(data) < reqlen:
            data += self.__trans.readAll(reqlen - len(data))
        self.__rbuf = StringIO(data)
        return self.__rbuf
class TMemoryBuffer(TTransportBase, CReadableTransport):
    """In-memory transport backed by a cStringIO buffer.

    NOTE: unlike the C++ version, you cannot write and then immediately
    read back.  Pass a string to the constructor for a read transport;
    construct with no value for a write transport.
    """

    def __init__(self, value=None):
        """value -- optional initial contents.  If given, this transport is
        for reading; otherwise it is for writing."""
        self._buffer = StringIO() if value is None else StringIO(value)

    def isOpen(self):
        return not self._buffer.closed

    def open(self):
        pass

    def close(self):
        self._buffer.close()

    def read(self, sz):
        return self._buffer.read(sz)

    def write(self, buf):
        self._buffer.write(buf)

    def flush(self):
        pass

    def getvalue(self):
        """Return everything written so far (or the initial contents)."""
        return self._buffer.getvalue()

    # -- CReadableTransport interface ------------------------------------
    @property
    def cstringio_buf(self):
        return self._buffer

    def cstringio_refill(self, partialread, reqlen):
        # A memory buffer holds exactly one chunk; it can never be refilled.
        raise EOFError()
class TFramedTransportFactory:
    """Factory producing TFramedTransport wrappers."""

    def getTransport(self, trans):
        """Wrap *trans* in a new TFramedTransport and return it."""
        return TFramedTransport(trans)
class TFramedTransport(TTransportBase, CReadableTransport):
    """Wraps a transport, length-prefixing ("framing") each flushed write
    and reading whole frames on the receive side."""

    def __init__(self, trans,):
        self.__trans = trans
        self.__rbuf = StringIO()
        self.__wbuf = StringIO()

    def isOpen(self):
        return self.__trans.isOpen()

    def open(self):
        return self.__trans.open()

    def close(self):
        return self.__trans.close()

    def read(self, sz):
        data = self.__rbuf.read(sz)
        if data:
            return data
        # Current frame exhausted: pull in the next one.
        self.readFrame()
        return self.__rbuf.read(sz)

    def readFrame(self):
        """Read one length-prefixed frame into the read buffer."""
        header = self.__trans.readAll(4)
        frame_len, = unpack('!i', header)
        self.__rbuf = StringIO(self.__trans.readAll(frame_len))

    def write(self, buf):
        self.__wbuf.write(buf)

    def flush(self):
        payload = self.__wbuf.getvalue()
        # Reset wbuf before writing to preserve state on underlying failure.
        self.__wbuf = StringIO()
        # N.B.: one concatenated write is WAY cheaper than two separate
        # socket writes; Python socket writes are expensive, while string
        # concatenation is well optimized.
        self.__trans.write(pack("!i", len(payload)) + payload)
        self.__trans.flush()

    # -- CReadableTransport interface ------------------------------------
    @property
    def cstringio_buf(self):
        return self.__rbuf

    def cstringio_refill(self, prefix, reqlen):
        # __rbuf is already empty when fastbinary asks for a refill, so we
        # can keep pulling whole frames until the request is satisfied.
        while len(prefix) < reqlen:
            self.readFrame()
            prefix += self.__rbuf.getvalue()
        self.__rbuf = StringIO(prefix)
        return self.__rbuf
class TFileObjectTransport(TTransportBase):
    """Adapter exposing any file-like object as a Thrift transport."""

    def __init__(self, fileobj):
        self.fileobj = fileobj

    def isOpen(self):
        # A wrapped file object is treated as always open.
        return True

    def close(self):
        self.fileobj.close()

    def read(self, sz):
        return self.fileobj.read(sz)

    def write(self, buf):
        self.fileobj.write(buf)

    def flush(self):
        self.fileobj.flush()
================================================
FILE: lib/thrift/transport/TTwisted.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from zope.interface import implements, Interface, Attribute
from twisted.internet.protocol import Protocol, ServerFactory, ClientFactory, \
connectionDone
from twisted.internet import defer
from twisted.protocols import basic
from twisted.python import log
from twisted.web import server, resource, http
from thrift.transport import TTransport
from cStringIO import StringIO
class TMessageSenderTransport(TTransport.TTransportBase):
    """Transport that buffers writes and hands each flushed message to
    sendMessage(), which subclasses must implement."""

    def __init__(self):
        self.__wbuf = StringIO()

    def write(self, buf):
        self.__wbuf.write(buf)

    def flush(self):
        # Capture and reset the buffer, then deliver the whole message.
        message = self.__wbuf.getvalue()
        self.__wbuf = StringIO()
        self.sendMessage(message)

    def sendMessage(self, message):
        """Deliver one complete message (subclass responsibility)."""
        raise NotImplementedError
class TCallbackTransport(TMessageSenderTransport):
    """Message-sender transport that forwards each message to a callback."""

    def __init__(self, func):
        TMessageSenderTransport.__init__(self)
        self.func = func

    def sendMessage(self, message):
        # Delegate delivery to the injected callable.
        self.func(message)
class ThriftClientProtocol(basic.Int32StringReceiver):
    """Twisted protocol running a Thrift client over Int32-framed messages."""
    MAX_LENGTH = 2 ** 31 - 1

    def __init__(self, client_class, iprot_factory, oprot_factory=None):
        self._client_class = client_class
        self._iprot_factory = iprot_factory
        # Output protocol defaults to the input protocol factory.
        if oprot_factory is None:
            self._oprot_factory = iprot_factory
        else:
            self._oprot_factory = oprot_factory
        # Cache of fname -> bound recv_* method, filled lazily in stringReceived.
        self.recv_map = {}
        # Fires with the client instance once the connection is made.
        self.started = defer.Deferred()

    def dispatch(self, msg):
        # Send one framed message to the peer.
        self.sendString(msg)

    def connectionMade(self):
        tmo = TCallbackTransport(self.dispatch)
        self.client = self._client_class(tmo, self._oprot_factory)
        self.started.callback(self.client)

    def connectionLost(self, reason=connectionDone):
        # Fail every outstanding request so callers' Deferreds don't hang.
        # NOTE(review): assumes connectionMade ran (self.client exists) and
        # that the client exposes a _reqs dict of seqid -> Deferred — confirm
        # against the generated client class.
        for k,v in self.client._reqs.iteritems():
            tex = TTransport.TTransportException(
                type=TTransport.TTransportException.END_OF_FILE,
                message='Connection closed')
            v.errback(tex)

    def stringReceived(self, frame):
        # Decode one framed response and route it to the matching recv_ method.
        tr = TTransport.TMemoryBuffer(frame)
        iprot = self._iprot_factory.getProtocol(tr)
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        try:
            method = self.recv_map[fname]
        except KeyError:
            # First time this method name is seen: resolve and cache it.
            method = getattr(self.client, 'recv_' + fname)
            self.recv_map[fname] = method
        method(iprot, mtype, rseqid)
class ThriftServerProtocol(basic.Int32StringReceiver):
    """Twisted protocol serving a Thrift processor over Int32-framed messages."""
    MAX_LENGTH = 2 ** 31 - 1

    def dispatch(self, msg):
        # Send one framed reply to the peer.
        self.sendString(msg)

    def processError(self, error):
        # Processing failed: drop the connection.
        self.transport.loseConnection()

    def processOk(self, _, tmo):
        reply = tmo.getvalue()
        # One-way calls produce no reply; only dispatch non-empty output.
        if len(reply) > 0:
            self.dispatch(reply)

    def stringReceived(self, frame):
        itrans = TTransport.TMemoryBuffer(frame)
        otrans = TTransport.TMemoryBuffer()
        iprot = self.factory.iprot_factory.getProtocol(itrans)
        oprot = self.factory.oprot_factory.getProtocol(otrans)
        d = self.factory.processor.process(iprot, oprot)
        d.addCallbacks(self.processOk, self.processError,
                       callbackArgs=(otrans,))
class IThriftServerFactory(Interface):
    # zope.interface contract describing what ThriftServerProtocol expects
    # of its factory.
    processor = Attribute("Thrift processor")
    iprot_factory = Attribute("Input protocol factory")
    oprot_factory = Attribute("Output protocol factory")
class IThriftClientFactory(Interface):
    # zope.interface contract describing what ThriftClientProtocol expects
    # of its factory.
    client_class = Attribute("Thrift client class")
    iprot_factory = Attribute("Input protocol factory")
    oprot_factory = Attribute("Output protocol factory")
class ThriftServerFactory(ServerFactory):
    """Twisted ServerFactory wiring a Thrift processor to ThriftServerProtocol."""
    implements(IThriftServerFactory)
    protocol = ThriftServerProtocol

    def __init__(self, processor, iprot_factory, oprot_factory=None):
        self.processor = processor
        self.iprot_factory = iprot_factory
        # Reuse the input factory for output unless one is supplied.
        self.oprot_factory = (iprot_factory if oprot_factory is None
                              else oprot_factory)
class ThriftClientFactory(ClientFactory):
    """Twisted ClientFactory producing ThriftClientProtocol instances."""
    implements(IThriftClientFactory)
    protocol = ThriftClientProtocol

    def __init__(self, client_class, iprot_factory, oprot_factory=None):
        self.client_class = client_class
        self.iprot_factory = iprot_factory
        # Reuse the input factory for output unless one is supplied.
        self.oprot_factory = (iprot_factory if oprot_factory is None
                              else oprot_factory)

    def buildProtocol(self, addr):
        proto = self.protocol(self.client_class, self.iprot_factory,
                              self.oprot_factory)
        proto.factory = self
        return proto
class ThriftResource(resource.Resource):
    """twisted.web resource serving a Thrift processor over HTTP POST."""
    allowedMethods = ('POST',)

    def __init__(self, processor, inputProtocolFactory,
                 outputProtocolFactory=None):
        resource.Resource.__init__(self)
        self.inputProtocolFactory = inputProtocolFactory
        # Reuse the input factory for output unless one is supplied.
        self.outputProtocolFactory = (inputProtocolFactory
                                      if outputProtocolFactory is None
                                      else outputProtocolFactory)
        self.processor = processor

    def getChild(self, path, request):
        # Serve every sub-path with this same resource.
        return self

    def _cbProcess(self, _, request, tmo):
        # Processing finished: send the serialized reply back over HTTP.
        request.setResponseCode(http.OK)
        request.setHeader("content-type", "application/x-thrift")
        request.write(tmo.getvalue())
        request.finish()

    def render_POST(self, request):
        request.content.seek(0, 0)
        itrans = TTransport.TMemoryBuffer(request.content.read())
        otrans = TTransport.TMemoryBuffer()
        iprot = self.inputProtocolFactory.getProtocol(itrans)
        oprot = self.outputProtocolFactory.getProtocol(otrans)
        d = self.processor.process(iprot, oprot)
        d.addCallback(self._cbProcess, request, otrans)
        return server.NOT_DONE_YET
================================================
FILE: lib/thrift/transport/TZlibTransport.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
'''
TZlibTransport provides a compressed transport and transport factory
class, using the python standard library zlib module to implement
data compression.
'''
from __future__ import division
import zlib
from cStringIO import StringIO
from TTransport import TTransportBase, CReadableTransport
class TZlibTransportFactory(object):
    '''
    Factory that builds zlib-compressed transports.

    The factory remembers the single most recent transport it wrapped and
    returns the same TZlibTransport for it again, so a TServer gets one
    shared object for both its input and output transports (single-threaded
    use only, since only one entry is cached).  Sharing one wrapper keeps
    the getCompSavings()/getCompRatio() statistics coherent and avoids
    allocating a second set of read/write buffers.
    '''
    # One-slot cache: last transport seen and the wrapper built for it.
    _last_trans = None
    _last_z = None

    def getTransport(self, trans, compresslevel=9):
        '''Return a TZlibTransport wrapping *trans*.

        @param compresslevel: The zlib compression level, ranging
        from 0 (no compression) to 9 (best compression). Defaults to 9.
        @type compresslevel: int
        '''
        if trans == self._last_trans:
            return self._last_z
        wrapped = TZlibTransport(trans, compresslevel)
        self._last_trans = trans
        self._last_z = wrapped
        return wrapped
class TZlibTransport(TTransportBase, CReadableTransport):
    '''
    Class that wraps a transport with zlib, compressing writes
    and decompresses reads, using the python standard
    library zlib module.
    '''
    # Read buffer size for the python fastbinary C extension,
    # the TBinaryProtocolAccelerated class.
    DEFAULT_BUFFSIZE = 4096

    def __init__(self, trans, compresslevel=9):
        '''
        Create a new TZlibTransport, wrapping C{trans}, another
        TTransport derived object.
        @param trans: A thrift transport object, i.e. a TSocket() object.
        @type trans: TTransport
        @param compresslevel: The zlib compression level, ranging
        from 0 (no compression) to 9 (best compression). Default is 9.
        @type compresslevel: int
        '''
        self.__trans = trans
        self.compresslevel = compresslevel
        self.__rbuf = StringIO()
        self.__wbuf = StringIO()
        self._init_zlib()
        self._init_stats()

    def _reinit_buffers(self):
        '''
        Internal method to initialize/reset the internal StringIO objects
        for read and write buffers.
        '''
        self.__rbuf = StringIO()
        self.__wbuf = StringIO()

    def _init_stats(self):
        '''
        Internal method to reset the internal statistics counters
        for compression ratios and bandwidth savings.
        '''
        # bytes_in/out count compressed wire bytes; *_comp count the
        # corresponding uncompressed payload bytes.
        self.bytes_in = 0
        self.bytes_out = 0
        self.bytes_in_comp = 0
        self.bytes_out_comp = 0

    def _init_zlib(self):
        '''
        Internal method for setting up the zlib compression and
        decompression objects.
        '''
        self._zcomp_read = zlib.decompressobj()
        self._zcomp_write = zlib.compressobj(self.compresslevel)

    def getCompRatio(self):
        '''
        Get the current measured compression ratios (in,out) from
        this transport.
        Returns a tuple of:
        (inbound_compression_ratio, outbound_compression_ratio)
        The compression ratios are computed as:
        compressed / uncompressed
        E.g., data that compresses by 10x will have a ratio of: 0.10
        and data that compresses to half of ts original size will
        have a ratio of 0.5
        None is returned if no bytes have yet been processed in
        a particular direction.
        '''
        r_percent, w_percent = (None, None)
        if self.bytes_in > 0:
            r_percent = self.bytes_in_comp / self.bytes_in
        if self.bytes_out > 0:
            w_percent = self.bytes_out_comp / self.bytes_out
        return (r_percent, w_percent)

    def getCompSavings(self):
        '''
        Get the current count of saved bytes due to data
        compression.
        Returns a tuple of:
        (inbound_saved_bytes, outbound_saved_bytes)
        Note: if compression is actually expanding your
        data (only likely with very tiny thrift objects), then
        the values returned will be negative.
        '''
        r_saved = self.bytes_in - self.bytes_in_comp
        w_saved = self.bytes_out - self.bytes_out_comp
        return (r_saved, w_saved)

    def isOpen(self):
        '''Return the underlying transport's open status'''
        return self.__trans.isOpen()

    def open(self):
        """Open the underlying transport"""
        self._init_stats()
        return self.__trans.open()

    def listen(self):
        '''Invoke the underlying transport's listen() method'''
        self.__trans.listen()

    def accept(self):
        '''Accept connections on the underlying transport'''
        return self.__trans.accept()

    def close(self):
        '''Close the underlying transport,'''
        # Reset buffers and codec state so a reopened transport starts clean.
        self._reinit_buffers()
        self._init_zlib()
        return self.__trans.close()

    def read(self, sz):
        '''
        Read up to sz bytes from the decompressed bytes buffer, and
        read from the underlying transport if the decompression
        buffer is empty.
        '''
        ret = self.__rbuf.read(sz)
        if len(ret) > 0:
            return ret
        # keep reading from transport until something comes back
        while True:
            if self.readComp(sz):
                break
        ret = self.__rbuf.read(sz)
        return ret

    def readComp(self, sz):
        '''
        Read compressed data from the underlying transport, then
        decompress it and append it to the internal StringIO read buffer
        '''
        zbuf = self.__trans.read(sz)
        # Prepend any compressed bytes zlib held back from the last call.
        zbuf = self._zcomp_read.unconsumed_tail + zbuf
        buf = self._zcomp_read.decompress(zbuf)
        self.bytes_in += len(zbuf)
        self.bytes_in_comp += len(buf)
        # Keep any still-unread decompressed data ahead of the new bytes.
        old = self.__rbuf.read()
        self.__rbuf = StringIO(old + buf)
        if len(old) + len(buf) == 0:
            return False
        return True

    def write(self, buf):
        '''
        Write some bytes, putting them into the internal write
        buffer for eventual compression.
        '''
        self.__wbuf.write(buf)

    def flush(self):
        '''
        Flush any queued up data in the write buffer and ensure the
        compression buffer is flushed out to the underlying transport
        '''
        wout = self.__wbuf.getvalue()
        if len(wout) > 0:
            zbuf = self._zcomp_write.compress(wout)
            self.bytes_out += len(wout)
            self.bytes_out_comp += len(zbuf)
        else:
            zbuf = ''
        # Z_SYNC_FLUSH makes everything written so far decodable by the peer
        # without ending the zlib stream.
        ztail = self._zcomp_write.flush(zlib.Z_SYNC_FLUSH)
        self.bytes_out_comp += len(ztail)
        if (len(zbuf) + len(ztail)) > 0:
            self.__wbuf = StringIO()
            self.__trans.write(zbuf + ztail)
        self.__trans.flush()

    @property
    def cstringio_buf(self):
        '''Implement the CReadableTransport interface'''
        return self.__rbuf

    def cstringio_refill(self, partialread, reqlen):
        '''Implement the CReadableTransport interface for refill'''
        retstring = partialread
        if reqlen < self.DEFAULT_BUFFSIZE:
            retstring += self.read(self.DEFAULT_BUFFSIZE)
        while len(retstring) < reqlen:
            retstring += self.read(reqlen - len(retstring))
        self.__rbuf = StringIO(retstring)
        return self.__rbuf
================================================
FILE: lib/thrift/transport/__init__.py
================================================
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Submodules re-exported by `from thrift.transport import *`.
# (TSSLSocket and TTwisted are intentionally not listed: they require the
# optional ssl/twisted dependencies.)
__all__ = ['TTransport', 'TSocket', 'THttpClient','TZlibTransport']
================================================
FILE: sublime_evernote.py
================================================
#coding:utf-8
import sys
sys.path.insert(0,"lib")
import evernote.edam.userstore.UserStore as UserStore
import evernote.edam.notestore.NoteStore as NoteStore
import evernote.edam.type.ttypes as Types
import evernote.edam.error.ttypes as Errors
from evernote.api.client import EvernoteClient
import sublime,sublime_plugin
import markdown2
import webbrowser
# OAuth consumer credentials registered with Evernote for this plugin.
consumer_key = 'oparrish-4096'
consumer_secret ='c112c6417738f06a'
# Production Evernote host (matches sandbox=False in get_evernote_client).
evernoteHost = "www.evernote.com"
# OAuth callback target; the user copies the verifier shown at this URL
# back into Sublime's input panel to complete authentication.
callbackUrl = "http://127.0.0.1/sublimeevernote/callback"
# Persistent plugin settings; stores the OAuth access token between sessions.
settings = sublime.load_settings("SublimeEvernote.sublime-settings")
def get_evernote_client(token=None):
    """Build an EvernoteClient against the production host.

    With a (truthy) token, return a token-authenticated client; otherwise
    return an OAuth client using the module's consumer credentials.
    """
    if not token:
        return EvernoteClient(
            consumer_key=consumer_key,
            consumer_secret=consumer_secret,
            service_host=evernoteHost,
            sandbox=False
        )
    return EvernoteClient(token=token,service_host=evernoteHost, sandbox=False)
class SendToEvernoteCommand(sublime_plugin.TextCommand):
    """Sublime Text command that converts the current view (or selection)
    from Markdown to HTML and uploads it as an Evernote note."""

    def __init__(self,view):
        self.view = view
        self.window = sublime.active_window()

    def to_markdown_html(self):
        """Render the selection (or whole buffer, if nothing is selected)
        through markdown2 and return the resulting HTML (with .metadata)."""
        encoding = self.view.encoding()
        if encoding == 'Undefined':
            encoding = 'utf-8'
        elif encoding == 'Western (Windows 1252)':
            encoding = 'windows-1252'
        sels = self.view.sel()
        contents = ''
        if sels:
            for sel in sels: contents += self.view.substr(sel) + '\n\n'
        # Empty/whitespace-only selection: fall back to the entire buffer.
        if not contents.strip():
            region = sublime.Region(0L, self.view.size())
            contents = self.view.substr(region)
        markdown_html = markdown2.markdown(contents, extras=['footnotes', 'fenced-code-blocks', 'cuddled-lists', 'code-friendly', 'metadata'])
        return markdown_html

    def connect(self,callback,**kwargs):
        """Run the OAuth flow: open the authorize URL in a browser, then ask
        the user for the verifier and, once received, save the access token
        and invoke *callback* with the original kwargs."""
        sublime.status_message("authenticate..., please wait...")
        client = get_evernote_client()
        request_token = client.get_request_token(callbackUrl)
        def on_verifier(verifier):
            # Exchange request token + verifier for a long-lived access token.
            access_token = client.get_access_token(request_token['oauth_token'],request_token['oauth_token_secret'],verifier)
            settings.set('access_token',access_token)
            sublime.save_settings('SublimeEvernote.sublime-settings')
            sublime.status_message("authenticate ok")
            callback(**kwargs)
        webbrowser.open(client.get_authorize_url(request_token))
        self.window.show_input_panel("type Verifier (required):",'',on_verifier,None,None)

    def send_note(self,**kwargs):
        """Create a note from the current view; prompts for title/tags via
        input panels unless supplied by kwargs or markdown metadata."""
        access_token = settings.get('access_token')
        client,noteStore = None,None
        if access_token :
            client = get_evernote_client(token=access_token)
        else:
            # Not authenticated yet: run OAuth, then retry this call.
            return self.connect(self.send_note,**kwargs)
        try:
            noteStore = client.get_note_store()
        except Exception as e:
            if sublime.ok_cancel_dialog('error %s! retry?'%e):
                self.connect(self.send_note,**kwargs)
        markdown_html = self.to_markdown_html()
        def sendnote(title,tags):
            # Build and upload the Types.Note.
            # NOTE(review): the content literals below look truncated — ENML
            # requires an XML declaration and <en-note> wrapper; verify the
            # strings against the upstream plugin.
            note = Types.Note()
            note.title = title.encode('utf-8')
            note.content = ''
            note.content += ''
            note.content += '%s'%markdown_html.encode('utf-8')
            note.content += ' '
            note.tagNames = tags and tags.split(",") or []
            try:
                sublime.status_message("please wait...")
                cnote = noteStore.createNote(access_token, note)
                sublime.status_message("send success guid:%s"%cnote.guid)
                sublime.message_dialog("success")
            except Errors.EDAMUserException as e:
                args = dict(title=title,tags=tags)
                # errorCode 9 is AUTH_EXPIRED: re-authenticate and retry.
                if e.errorCode == 9:
                    self.connect(self.send_note,**args)
                else:
                    if sublime.ok_cancel_dialog('error %s! retry?'%e):
                        self.connect(self.send_note,**args)
            except Exception as e:
                sublime.error_message('error %s'%e)
        def on_title(title):
            def on_tags(tags):
                sendnote(title,tags)
            # Tags from markdown metadata take precedence over prompting.
            if not 'tags' in markdown_html.metadata:
                self.window.show_input_panel("Tags (Optional)::","",on_tags,None,None)
            else:
                sendnote(title, markdown_html.metadata['tags'])
        # Resolve the title: kwargs, then metadata, then an input panel.
        if not(kwargs.get("title") or 'title' in markdown_html.metadata):
            self.window.show_input_panel("Title (required)::","",on_title,None,None)
        elif not kwargs.get("tags"):
            on_title(markdown_html.metadata['title'])
        else:
            sendnote(kwargs.get("title"),kwargs.get("tags"))

    def run(self, edit):
        # Entry point invoked by Sublime; authenticate first if needed.
        if not settings.get("access_token"):
            self.connect(self.send_note)
        else:
            self.send_note()