Python v3.7 support; updated tweepy; refs #17

Chris Caron 2018-09-05 22:55:16 -04:00
parent dbab994f37
commit 5968278da3
10 changed files with 146 additions and 92 deletions
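Note on the Python 3.7 work below: async became a reserved keyword in 3.7, so every streaming keyword argument named async is renamed to is_async, and Travis/tox gain py37 environments. A minimal usage sketch under that assumption; the listener class and credentials here are placeholders, not part of this commit:

import tweepy

class MyListener(tweepy.StreamListener):
    def on_status(self, status):
        print(status.text)

auth = tweepy.OAuthHandler('CONSUMER_KEY', 'CONSUMER_SECRET')
auth.set_access_token('ACCESS_TOKEN', 'ACCESS_SECRET')
stream = tweepy.Stream(auth=auth, listener=MyListener())
# async=True is a SyntaxError on Python 3.7; the renamed keyword is used instead
stream.filter(track=['python'], is_async=True)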

View File

@@ -11,6 +11,10 @@ matrix:
     env: TOXENV=py35
   - python: "3.6"
     env: TOXENV=py36
+  - python: "3.7"
+    dist: xenial
+    sudo: required
+    env: TOXENV=py37
   - python: "pypy2.7-5.8.0"
     env: TOXENV=pypy
   - python: "pypy3.5-5.8.0"

View File

@@ -5,7 +5,7 @@
 """
 Tweepy Twitter API library
 """
-__version__ = '3.5.0'
+__version__ = '3.6.0'
 __author__ = 'Joshua Roesslein'
 __license__ = 'MIT'

View File

@@ -94,34 +94,35 @@ class API(object):
         )

     def statuses_lookup(self, id_, include_entities=None,
-                        trim_user=None, map_=None):
+                        trim_user=None, map_=None, tweet_mode=None):
         return self._statuses_lookup(list_to_csv(id_), include_entities,
-                                     trim_user, map_)
+                                     trim_user, map_, tweet_mode)

     @property
     def _statuses_lookup(self):
         """ :reference: https://dev.twitter.com/rest/reference/get/statuses/lookup
-            :allowed_param:'id', 'include_entities', 'trim_user', 'map'
+            :allowed_param:'id', 'include_entities', 'trim_user', 'map', 'tweet_mode'
         """
         return bind_api(
             api=self,
             path='/statuses/lookup.json',
             payload_type='status', payload_list=True,
-            allowed_param=['id', 'include_entities', 'trim_user', 'map'],
+            allowed_param=['id', 'include_entities', 'trim_user', 'map', 'tweet_mode'],
             require_auth=True
         )

     @property
     def user_timeline(self):
         """ :reference: https://dev.twitter.com/rest/reference/get/statuses/user_timeline
-            :allowed_param:'id', 'user_id', 'screen_name', 'since_id'
+            :allowed_param:'id', 'user_id', 'screen_name', 'since_id', 'max_id', 'count', 'include_rts', 'trim_user', 'exclude_replies'
         """
         return bind_api(
             api=self,
             path='/statuses/user_timeline.json',
             payload_type='status', payload_list=True,
             allowed_param=['id', 'user_id', 'screen_name', 'since_id',
-                           'max_id', 'count', 'include_rts']
+                           'max_id', 'count', 'include_rts', 'trim_user',
+                           'exclude_replies']
         )

     @property
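For context, a hedged sketch of the new tweet_mode parameter; the api object and the ID list are placeholders. With tweet_mode='extended' Twitter returns the untruncated text as full_text:

# assumes an authenticated tweepy.API instance named api
ids = [1234567890, 9876543210]  # placeholder status IDs
statuses = api.statuses_lookup(ids, tweet_mode='extended')
for status in statuses:
    # extended tweets carry full_text; classic mode carries text
    print(getattr(status, 'full_text', getattr(status, 'text', '')))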
@@ -177,7 +178,7 @@ class API(object):
     def update_status(self, *args, **kwargs):
         """ :reference: https://dev.twitter.com/rest/reference/post/statuses/update
-            :allowed_param:'status', 'in_reply_to_status_id', 'lat', 'long', 'source', 'place_id', 'display_coordinates', 'media_ids'
+            :allowed_param:'status', 'in_reply_to_status_id', 'in_reply_to_status_id_str', 'auto_populate_reply_metadata', 'lat', 'long', 'source', 'place_id', 'display_coordinates', 'media_ids'
         """
         post_data = {}
         media_ids = kwargs.pop("media_ids", None)
@@ -189,7 +190,7 @@ class API(object):
             path='/statuses/update.json',
             method='POST',
             payload_type='status',
-            allowed_param=['status', 'in_reply_to_status_id', 'lat', 'long', 'source', 'place_id', 'display_coordinates'],
+            allowed_param=['status', 'in_reply_to_status_id', 'in_reply_to_status_id_str', 'auto_populate_reply_metadata', 'lat', 'long', 'source', 'place_id', 'display_coordinates'],
             require_auth=True
         )(post_data=post_data, *args, **kwargs)
@@ -198,7 +199,7 @@ class API(object):
             :allowed_param:
         """
         f = kwargs.pop('file', None)
-        headers, post_data = API._pack_image(filename, 3072, form_field='media', f=f)
+        headers, post_data = API._pack_image(filename, 4883, form_field='media', f=f)
         kwargs.update({'headers': headers, 'post_data': post_data})

         return bind_api(
@@ -213,7 +214,7 @@ class API(object):
     def update_with_media(self, filename, *args, **kwargs):
         """ :reference: https://dev.twitter.com/rest/reference/post/statuses/update_with_media
-            :allowed_param:'status', 'possibly_sensitive', 'in_reply_to_status_id', 'lat', 'long', 'place_id', 'display_coordinates'
+            :allowed_param:'status', 'possibly_sensitive', 'in_reply_to_status_id', 'in_reply_to_status_id_str', 'auto_populate_reply_metadata', 'lat', 'long', 'place_id', 'display_coordinates'
         """
         f = kwargs.pop('file', None)
         headers, post_data = API._pack_image(filename, 3072, form_field='media[]', f=f)
@@ -225,8 +226,8 @@ class API(object):
             method='POST',
             payload_type='status',
             allowed_param=[
-                'status', 'possibly_sensitive', 'in_reply_to_status_id', 'lat', 'long',
-                'place_id', 'display_coordinates'
+                'status', 'possibly_sensitive', 'in_reply_to_status_id', 'in_reply_to_status_id_str',
+                'auto_populate_reply_metadata', 'lat', 'long', 'place_id', 'display_coordinates'
             ],
             require_auth=True
         )(*args, **kwargs)
@@ -259,6 +260,20 @@ class API(object):
             require_auth=True
         )

+    @property
+    def unretweet(self):
+        """ :reference: https://dev.twitter.com/rest/reference/post/statuses/unretweet/%3Aid
+            :allowed_param:'id'
+        """
+        return bind_api(
+            api=self,
+            path='/statuses/unretweet/{id}.json',
+            method='POST',
+            payload_type='status',
+            allowed_param=['id'],
+            require_auth=True
+        )
+
     @property
     def retweets(self):
         """ :reference: https://dev.twitter.com/rest/reference/get/statuses/retweets/%3Aid
@@ -331,6 +346,7 @@ class API(object):
             path='/users/lookup.json',
             payload_type='user', payload_list=True,
             method='POST',
+            allowed_param=['user_id', 'screen_name', 'include_entities']
         )

     def me(self):
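A brief usage sketch for the unretweet endpoint added a couple of hunks above (the status ID is a placeholder and an authenticated api object is assumed):

status_id = 1234567890    # placeholder ID of a tweet retweeted earlier
api.retweet(status_id)    # existing endpoint
api.unretweet(status_id)  # new endpoint: undoes the retweet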
@@ -487,7 +503,7 @@ class API(object):
     @property
     def show_friendship(self):
         """ :reference: https://dev.twitter.com/rest/reference/get/friendships/show
-            :allowed_param:'source_id', 'source_screen_name'
+            :allowed_param:'source_id', 'source_screen_name', 'target_id', 'target_screen_name'
         """
         return bind_api(
             api=self,
@@ -661,24 +677,6 @@ class API(object):
             require_auth=True
         )

-    @property
-    def update_profile_colors(self):
-        """ :reference: https://dev.twitter.com/docs/api/1.1/post/account/update_profile_colors
-            :allowed_param:'profile_background_color', 'profile_text_color',
-             'profile_link_color', 'profile_sidebar_fill_color',
-             'profile_sidebar_border_color'],
-        """
-        return bind_api(
-            api=self,
-            path='/account/update_profile_colors.json',
-            method='POST',
-            payload_type='user',
-            allowed_param=['profile_background_color', 'profile_text_color',
-                           'profile_link_color', 'profile_sidebar_fill_color',
-                           'profile_sidebar_border_color'],
-            require_auth=True
-        )
-
     def update_profile_image(self, filename, file_=None):
         """ :reference: https://dev.twitter.com/rest/reference/post/account/update_profile_image
             :allowed_param:'include_entities', 'skip_status'
@@ -725,14 +723,14 @@ class API(object):
     @property
     def update_profile(self):
         """ :reference: https://dev.twitter.com/rest/reference/post/account/update_profile
-            :allowed_param:'name', 'url', 'location', 'description'
+            :allowed_param:'name', 'url', 'location', 'description', 'profile_link_color'
         """
         return bind_api(
             api=self,
             path='/account/update_profile.json',
             method='POST',
             payload_type='user',
-            allowed_param=['name', 'url', 'location', 'description'],
+            allowed_param=['name', 'url', 'location', 'description', 'profile_link_color'],
             require_auth=True
         )
@@ -804,6 +802,46 @@ class API(object):
             require_auth=True
         )

+    @property
+    def mutes_ids(self):
+        """ :reference: https://dev.twitter.com/rest/reference/get/mutes/users/ids """
+        return bind_api(
+            api=self,
+            path='/mutes/users/ids.json',
+            payload_type='json',
+            require_auth=True
+        )
+
+    @property
+    def create_mute(self):
+        """ :reference: https://dev.twitter.com/rest/reference/post/mutes/users/create
+            :allowed_param:'id', 'user_id', 'screen_name'
+        """
+        return bind_api(
+            api=self,
+            path='/mutes/users/create.json',
+            method='POST',
+            payload_type='user',
+            allowed_param=['id', 'user_id', 'screen_name'],
+            require_auth=True
+        )
+
+    @property
+    def destroy_mute(self):
+        """ :reference: https://dev.twitter.com/rest/reference/post/mutes/users/destroy
+            :allowed_param:'id', 'user_id', 'screen_name'
+        """
+        return bind_api(
+            api=self,
+            path='/mutes/users/destroy.json',
+            method='POST',
+            payload_type='user',
+            allowed_param=['id', 'user_id', 'screen_name'],
+            require_auth=True
+        )
+
     @property
     def blocks(self):
         """ :reference: https://dev.twitter.com/rest/reference/get/blocks/list
@@ -1208,7 +1246,7 @@ class API(object):
         """ :reference: https://dev.twitter.com/rest/reference/get/search/tweets
             :allowed_param:'q', 'lang', 'locale', 'since_id', 'geocode',
             'max_id', 'since', 'until', 'result_type', 'count',
-             'include_entities', 'from', 'to', 'source']
+             'include_entities', 'from', 'to', 'source'
         """
         return bind_api(
             api=self,
@@ -1326,7 +1364,7 @@ class API(object):
             filename = filename.encode("utf-8")

         BOUNDARY = b'Tw3ePy'
-        body = list()
+        body = []
         body.append(b'--' + BOUNDARY)
         body.append('Content-Disposition: form-data; name="{0}";'
                     ' filename="{1}"'.format(form_field, filename)
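Rounding off this file, a hedged sketch of the three mute endpoints added above (the screen name is a placeholder, an authenticated api object is assumed):

api.create_mute(screen_name='some_noisy_account')    # mute without unfollowing
ids_payload = api.mutes_ids()                         # raw JSON payload of muted user IDs
api.destroy_mute(screen_name='some_noisy_account')    # unmute again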

View File

@@ -7,7 +7,7 @@ from __future__ import print_function
 import time
 import re

-from six.moves.urllib.parse import quote
+from six.moves.urllib.parse import quote, urlencode

 import requests

 import logging
@@ -15,6 +15,8 @@ import logging
 from .error import TweepError, RateLimitError, is_rate_limit_error_message
 from .utils import convert_to_utf8_str
 from .models import Model
+import six
+import sys

 re_path_template = re.compile('{\w+}')
@@ -104,7 +106,7 @@ def bind_api(**config):
                 self.session.params[k] = convert_to_utf8_str(arg)

-            log.info("PARAMS: %r", self.session.params)
+            log.debug("PARAMS: %r", self.session.params)

         def build_path(self):
             for variable in re_path_template.findall(self.path):
@@ -132,7 +134,7 @@ def bind_api(**config):
             # Query the cache if one is available
             # and this request uses a GET method.
             if self.use_cache and self.api.cache and self.method == 'GET':
-                cache_result = self.api.cache.get(url)
+                cache_result = self.api.cache.get('%s?%s' % (url, urlencode(self.session.params)))
                 # if cache result found and not expired, return it
                 if cache_result:
                     # must restore api reference
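The cache-key change above matters because the bare URL is identical for, say, two searches with different q values; appending the urlencoded query string keeps them from colliding. A rough sketch of the keying scheme with placeholder values:

from six.moves.urllib.parse import urlencode

url = '/1.1/search/tweets.json'                 # same path for every search
params = {'q': 'tweepy', 'count': 10}           # differs per request
cache_key = '%s?%s' % (url, urlencode(params))  # e.g. '/1.1/search/tweets.json?q=tweepy&count=10'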
@@ -158,7 +160,7 @@ def bind_api(**config):
                                 sleep_time = self._reset_time - int(time.time())
                                 if sleep_time > 0:
                                     if self.wait_on_rate_limit_notify:
-                                        print("Rate limit reached. Sleeping for:", sleep_time)
+                                        log.warning("Rate limit reached. Sleeping for: %d" % sleep_time)
                                     time.sleep(sleep_time + 5)  # sleep for few extra sec

                # if self.wait_on_rate_limit and self._reset_time is not None and \
@@ -166,10 +168,11 @@ def bind_api(**config):
                #     sleep_time = self._reset_time - int(time.time())
                #     if sleep_time > 0:
                #         if self.wait_on_rate_limit_notify:
-                #             print("Rate limit reached. Sleeping for: " + str(sleep_time))
+                #             log.warning("Rate limit reached. Sleeping for: %d" % sleep_time)
                #         time.sleep(sleep_time + 5)  # sleep for few extra sec

                # Apply authentication
+                auth = None
                if self.api.auth:
                    auth = self.api.auth.apply_auth()
@@ -186,8 +189,10 @@ def bind_api(**config):
                                                auth=auth,
                                                proxies=self.api.proxy)
                except Exception as e:
-                    raise TweepError('Failed to send request: %s' % e)
+                    six.reraise(TweepError, TweepError('Failed to send request: %s' % e), sys.exc_info()[2])

                rem_calls = resp.headers.get('x-rate-limit-remaining')
                if rem_calls is not None:
                    self._remaining_calls = int(rem_calls)
                elif isinstance(self._remaining_calls, int):
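six.reraise is used above so the wrapped TweepError keeps the traceback of the original requests failure on both Python 2 and 3, instead of pointing at the re-raise site. The same pattern in isolation (a minimal sketch, not tweepy code):

import sys
import six

class WrappedError(Exception):
    pass

def do_request():
    raise ValueError('connection reset')  # stand-in for a requests failure

try:
    do_request()
except Exception as e:
    # re-raise as WrappedError but keep the traceback of the original failure
    six.reraise(WrappedError, WrappedError('request failed: %s' % e), sys.exc_info()[2])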
@@ -233,7 +238,7 @@ def bind_api(**config):
            # Store result into cache if one is available.
            if self.use_cache and self.api.cache and self.method == 'GET' and result:
-                self.api.cache.store(url, result)
+                self.api.cache.store('%s?%s' % (url, urlencode(self.session.params)), result)

            return result

View File

@@ -6,20 +6,16 @@ from __future__ import print_function

 import time
 import datetime
+import hashlib
 import threading
 import os
+import logging

 try:
     import cPickle as pickle
 except ImportError:
     import pickle

-try:
-    import hashlib
-except ImportError:
-    # python 2.4
-    import md5 as hashlib
-
 try:
     import fcntl
 except ImportError:
@@ -27,6 +23,7 @@ except ImportError:
     # TODO: use win32file
     pass

+log = logging.getLogger('tweepy.cache')

 class Cache(object):
     """Cache interface"""
@@ -157,7 +154,7 @@ class FileCache(Cache):
             self._lock_file = self._lock_file_win32
             self._unlock_file = self._unlock_file_win32
         else:
-            print('Warning! FileCache locking not supported on this system!')
+            log.warning('FileCache locking not supported on this system!')
             self._lock_file = self._lock_file_dummy
             self._unlock_file = self._unlock_file_dummy

View File

@@ -93,6 +93,8 @@ class Status(Model):
                 setattr(status, 'source_url', None)
             elif k == 'retweeted_status':
                 setattr(status, k, Status.parse(api, v))
+            elif k == 'quoted_status':
+                setattr(status, k, Status.parse(api, v))
             elif k == 'place':
                 if v is not None:
                     setattr(status, k, Place.parse(api, v))
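With the parser change above, quote tweets get a nested Status object just as retweets do. A hedged access pattern, assuming a status object already fetched from the API:

# not every status is a quote tweet, so guard with hasattr
if hasattr(status, 'quoted_status'):
    print('quoting:', status.quoted_status.text)
elif hasattr(status, 'retweeted_status'):
    print('retweet of:', status.retweeted_status.text)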

View File

@@ -54,11 +54,11 @@ class JSONParser(Parser):
             raise TweepError('Failed to parse JSON payload: %s' % e)

         needs_cursors = 'cursor' in method.session.params
-        if needs_cursors and isinstance(json, dict):
-            if 'previous_cursor' in json:
-                if 'next_cursor' in json:
-                    cursors = json['previous_cursor'], json['next_cursor']
-                    return json, cursors
+        if needs_cursors and isinstance(json, dict) \
+                and 'previous_cursor' in json \
+                and 'next_cursor' in json:
+            cursors = json['previous_cursor'], json['next_cursor']
+            return json, cursors
         else:
             return json
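The rewritten condition only returns the cursor pair when both previous_cursor and next_cursor are present, which is what Cursor-based paging relies on. A hedged usage sketch; the screen name is a placeholder and an authenticated api object is assumed:

import tweepy

for page in tweepy.Cursor(api.followers_ids, screen_name='twitter').pages():
    print(len(page), 'follower IDs on this page')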

View File

@@ -9,6 +9,7 @@ from __future__ import absolute_import, print_function
 import logging
 import re
 import requests
+import sys

 from requests.exceptions import Timeout
 from threading import Thread
 from time import sleep
@@ -161,6 +162,7 @@ class ReadBuffer(object):
                 return self._pop(length)
             read_len = max(self._chunk_size, length - len(self._buffer))
             self._buffer += self._stream.read(read_len)
+        return six.b('')

     def read_line(self, sep=six.b('\n')):
         """Read the data stream until a given separator is found (default \n)
@@ -177,6 +179,7 @@ class ReadBuffer(object):
             else:
                 start = len(self._buffer)
             self._buffer += self._stream.read(self._chunk_size)
+        return six.b('')

     def _pop(self, length):
         r = self._buffer[:length]
@@ -217,6 +220,9 @@ class Stream(object):
         self.body = None
         self.retry_time = self.retry_time_start
         self.snooze_time = self.snooze_time_step

+        # Example: proxies = {'http': 'http://localhost:1080', 'https': 'http://localhost:1080'}
+        self.proxies = options.get("proxies")
+
     def new_session(self):
         self.session = requests.Session()
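A minimal sketch of the new proxies option; the proxy address is a placeholder and auth/MyListener are the placeholders from the sketch near the top of this page:

proxies = {'http': 'http://localhost:1080', 'https': 'http://localhost:1080'}
stream = tweepy.Stream(auth=auth, listener=MyListener(), proxies=proxies)
stream.sample(is_async=True, stall_warnings=True)  # both keywords come from this commit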
@@ -230,7 +236,7 @@ class Stream(object):
         # Connect and process the stream
         error_counter = 0
         resp = None
-        exception = None
+        exc_info = None
         while self.running:
             if self.retry_count is not None:
                 if error_counter > self.retry_count:
@@ -244,7 +250,8 @@ class Stream(object):
                                             timeout=self.timeout,
                                             stream=True,
                                             auth=auth,
-                                            verify=self.verify)
+                                            verify=self.verify,
+                                            proxies=self.proxies)
                 if resp.status_code != 200:
                     if self.listener.on_error(resp.status_code) is False:
                         break
@@ -267,7 +274,7 @@ class Stream(object):
                 # If it's not time out treat it like any other exception
                 if isinstance(exc, ssl.SSLError):
                     if not (exc.args and 'timed out' in str(exc.args[0])):
-                        exception = exc
+                        exc_info = sys.exc_info()
                         break
                 if self.listener.on_timeout() is False:
                     break
@@ -277,7 +284,7 @@ class Stream(object):
                 self.snooze_time = min(self.snooze_time + self.snooze_time_step,
                                        self.snooze_time_cap)
             except Exception as exc:
-                exception = exc
+                exc_info = sys.exc_info()
                 # any other exception is fatal, so kill loop
                 break
@@ -288,10 +295,10 @@ class Stream(object):
             self.new_session()

-        if exception:
+        if exc_info:
             # call a handler first so that the exception can be logged.
-            self.listener.on_exception(exception)
-            raise exception
+            self.listener.on_exception(exc_info[1])
+            six.reraise(*exc_info)

     def _data(self, data):
         if self.listener.on_data(data) is False:
@@ -310,17 +317,18 @@ class Stream(object):
         while self.running and not resp.raw.closed:
             length = 0
             while not resp.raw.closed:
-                line = buf.read_line().strip()
-                if not line:
+                line = buf.read_line()
+                stripped_line = line.strip() if line else line  # line is sometimes None so we need to check here
+                if not stripped_line:
                     self.listener.keep_alive()  # keep-alive new lines are expected
-                elif line.isdigit():
-                    length = int(line)
+                elif stripped_line.isdigit():
+                    length = int(stripped_line)
                     break
                 else:
                     raise TweepError('Expecting length, unexpected value found')

             next_status_obj = buf.read_len(length)
-            if self.running:
+            if self.running and next_status_obj:
                 self._data(next_status_obj)

         # # Note: keep-alive newlines might be inserted before each length value.
@@ -352,9 +360,9 @@ class Stream(object):
         if resp.raw.closed:
             self.on_closed(resp)

-    def _start(self, async):
+    def _start(self, is_async):
         self.running = True
-        if async:
+        if is_async:
             self._thread = Thread(target=self._run)
             self._thread.start()
         else:
else: else:
@ -370,7 +378,7 @@ class Stream(object):
replies=None, replies=None,
track=None, track=None,
locations=None, locations=None,
async=False, is_async=False,
encoding='utf8'): encoding='utf8'):
self.session.params = {'delimited': 'length'} self.session.params = {'delimited': 'length'}
if self.running: if self.running:
@@ -391,34 +399,36 @@ class Stream(object):
         if track:
             self.session.params['track'] = u','.join(track).encode(encoding)

-        self._start(async)
+        self._start(is_async)

-    def firehose(self, count=None, async=False):
+    def firehose(self, count=None, is_async=False):
         self.session.params = {'delimited': 'length'}
         if self.running:
             raise TweepError('Stream object already connected!')
         self.url = '/%s/statuses/firehose.json' % STREAM_VERSION
         if count:
             self.url += '&count=%s' % count
-        self._start(async)
+        self._start(is_async)

-    def retweet(self, async=False):
+    def retweet(self, is_async=False):
         self.session.params = {'delimited': 'length'}
         if self.running:
             raise TweepError('Stream object already connected!')
         self.url = '/%s/statuses/retweet.json' % STREAM_VERSION
-        self._start(async)
+        self._start(is_async)

-    def sample(self, async=False, languages=None):
+    def sample(self, is_async=False, languages=None, stall_warnings=False):
         self.session.params = {'delimited': 'length'}
         if self.running:
             raise TweepError('Stream object already connected!')
         self.url = '/%s/statuses/sample.json' % STREAM_VERSION
         if languages:
             self.session.params['language'] = ','.join(map(str, languages))
-        self._start(async)
+        if stall_warnings:
+            self.session.params['stall_warnings'] = 'true'
+        self._start(is_async)

-    def filter(self, follow=None, track=None, async=False, locations=None,
+    def filter(self, follow=None, track=None, is_async=False, locations=None,
                stall_warnings=False, languages=None, encoding='utf8', filter_level=None):
         self.body = {}
         self.session.headers['Content-type'] = "application/x-www-form-urlencoded"
@@ -439,13 +449,13 @@ class Stream(object):
         if languages:
             self.body['language'] = u','.join(map(str, languages))
         if filter_level:
-            self.body['filter_level'] = unicode(filter_level, encoding)
+            self.body['filter_level'] = filter_level.encode(encoding)
         self.session.params = {'delimited': 'length'}
         self.host = 'stream.twitter.com'
-        self._start(async)
+        self._start(is_async)

     def sitestream(self, follow, stall_warnings=False,
-                   with_='user', replies=False, async=False):
+                   with_='user', replies=False, is_async=False):
         self.body = {}
         if self.running:
             raise TweepError('Stream object already connected!')
@@ -458,7 +468,7 @@ class Stream(object):
             self.body['with'] = with_
         if replies:
             self.body['replies'] = replies
-        self._start(async)
+        self._start(is_async)

     def disconnect(self):
         if self.running is False:

View File

@@ -7,7 +7,6 @@ from __future__ import print_function
 from datetime import datetime

 import six
-from six.moves.urllib.parse import quote

 from email.utils import parsedate
@@ -41,14 +40,7 @@ def import_simplejson():
     try:
         import simplejson as json
     except ImportError:
-        try:
-            import json  # Python 2.6+
-        except ImportError:
-            try:
-                # Google App Engine
-                from django.utils import simplejson as json
-            except ImportError:
-                raise ImportError("Can't load a json library")
+        import json

     return json

View File

@@ -1,5 +1,5 @@
 [tox]
-envlist = py27,py34,py35,py36,pypy,pypy3,coverage-report
+envlist = py27,py34,py35,py36,py37,pypy,pypy3,coverage-report

 [testenv]
@@ -37,6 +37,12 @@ deps=
     -r{toxinidir}/dev-requirements.txt
 commands = coverage run --parallel -m pytest {posargs}

+[testenv:py37]
+deps=
+    -r{toxinidir}/requirements.txt
+    -r{toxinidir}/dev-requirements.txt
+commands = coverage run --parallel -m pytest {posargs}
+
 [testenv:pypy]
 deps=
     -r{toxinidir}/requirements.txt