本文整理汇总了Python中pywikibot.comms.http.fetch函数的典型用法代码示例。如果您正苦于以下问题:Python fetch函数的具体用法?Python fetch怎么用?Python fetch使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了fetch函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: test_fetch
def test_fetch(self):
    """Test that using the data parameter and body parameter produce same results."""
    payload = {'fish&chips': 'delicious'}
    url = self.get_httpbin_url('/post')
    via_data = http.fetch(uri=url, method='POST', data=payload)
    via_body = http.fetch(uri=url, method='POST', body=payload)
    self.assertDictEqual(json.loads(via_data.content),
                         json.loads(via_body.content))
开发者ID:magul,项目名称:pywikibot-core,代码行数:9,代码来源:http_tests.py
示例2: test_follow_redirects
def test_follow_redirects(self):
    """Test follow 301 redirects correctly."""
    # The following will redirect from ' ' -> '_', and maybe to https://
    main_page = http.fetch(uri='http://en.wikipedia.org/wiki/Main%20Page')
    self.assertEqual(main_page.status, 200)
    self.assertIsNotNone(main_page.data.history)
    self.assertIn('//en.wikipedia.org/wiki/Main_Page', main_page.data.url)
    gandi = http.fetch(uri='http://www.gandi.eu')
    self.assertEqual(gandi.status, 200)
    self.assertEqual(gandi.data.url, 'http://www.gandi.net')
开发者ID:AbdealiJK,项目名称:pywikibot-core,代码行数:13,代码来源:http_tests.py
示例3: getWebCitationURL
def getWebCitationURL(url, timestamp=None):
    """Return archived URL by Web Citation.

    See http://www.webcitation.org/doc/WebCiteBestPracticesGuide.pdf
    for more details

    @param url: url to search an archived version for
    @param timestamp: requested archive date. The version closest to that
        moment is returned. Format: YYYYMMDDhhmmss or part thereof.
    @return: the archived URL, or None if no archived version was found
    """
    uri = u'http://www.webcitation.org/query?'
    query = {'returnxml': 'true',
             'url': url}
    if timestamp is not None:
        query['date'] = timestamp
    uri = uri + urlencode(query)
    xmltext = http.fetch(uri).content
    if "success" in xmltext:
        data = ET.fromstring(xmltext)
        element = data.find('.//webcite_url')
        # Fix: the "success" substring check does not guarantee the
        # <webcite_url> element exists; find() returning None used to
        # crash with AttributeError on '.text'.
        if element is None:
            return None
        return element.text
    else:
        return None
开发者ID:Darkdadaah,项目名称:pywikibot-core,代码行数:26,代码来源:weblib.py
示例4: langs
def langs(self):
    """Build interwikimap."""
    url = self.api + "?action=query&meta=siteinfo&siprop=interwikimap&sifilteriw=local&format=json"
    iw = json.loads(fetch(url).content)
    if "error" in iw:
        raise RuntimeError("%s - %s" % (iw["error"]["code"], iw["error"]["info"]))
    # Keep only interwiki entries that point to another language.
    return [wiki for wiki in iw["query"]["interwikimap"] if "language" in wiki]
开发者ID:h4ck3rm1k3,项目名称:pywikibot-core,代码行数:7,代码来源:site_detect.py
示例5: _ocr_callback
def _ocr_callback(self, cmd_uri, parser_func=None):
    """OCR callback function.

    @param cmd_uri: URI of the OCR service to query.
    @param parser_func: optional callable applied to the returned text;
        defaults to the identity function.
    @return: tuple (error, text [error description in case of error]).
    @raise ValueError: if cmd_uri is missing or empty.
    @raise TypeError: if parser_func is not callable.
    """
    def _identity(x):
        # Default no-op parser. Renamed from 'id' to avoid shadowing
        # the builtin id().
        return x

    if not cmd_uri:
        raise ValueError('Parameter cmd_uri is mandatory.')

    if parser_func is None:
        parser_func = _identity

    if not callable(parser_func):
        raise TypeError('Keyword parser_func must be callable.')

    # wrong link fail with Exceptions
    try:
        response = http.fetch(cmd_uri, charset='utf-8')
    except Exception as e:
        pywikibot.error('Querying %s: %s' % (cmd_uri, e))
        return (True, e)

    data = json.loads(response.content)

    assert 'error' in data, 'Error from phe-tools: %s' % data
    assert data['error'] in [0, 1], 'Error from phe-tools: %s' % data

    error = bool(data['error'])
    if error:
        pywikibot.error('Querying %s: %s' % (cmd_uri, data['text']))
        return (error, data['text'])
    else:
        return (error, parser_func(data['text']))
开发者ID:magul,项目名称:pywikibot-core,代码行数:35,代码来源:proofreadpage.py
示例6: getInternetArchiveURL
def getInternetArchiveURL(url, timestamp=None):
    """Return archived URL by Internet Archive.

    See [[:mw:Archived Pages]] and https://archive.org/help/wayback_api.php
    for more details.

    @param url: url to search an archived version for
    @param timestamp: requested archive date. The version closest to that
        moment is returned. Format: YYYYMMDDhhmmss or part thereof.
    @return: the archived snapshot URL, or None if none is available
    """
    import json
    uri = u'https://archive.org/wayback/available?'
    query = {'url': url}
    if timestamp is not None:
        query['timestamp'] = timestamp
    uri = uri + urlencode(query)
    jsontext = http.fetch(uri).content
    if "closest" in jsontext:
        data = json.loads(jsontext)
        # Fix: the raw substring check does not guarantee the nested
        # structure; chained [] indexing used to raise KeyError on an
        # unexpected payload. Walk the structure defensively instead.
        closest = data.get('archived_snapshots', {}).get('closest')
        if closest:
            return closest.get('url')
        return None
    else:
        return None
开发者ID:TridevGuha,项目名称:pywikibot-core,代码行数:26,代码来源:weblib.py
示例7: test_https_cert_error
def test_https_cert_error(self):
    """Test if http.fetch respects disable_ssl_certificate_validation."""
    # By default an expired certificate must raise FatalServerError.
    self.assertRaisesRegex(pywikibot.FatalServerError, self.CERT_VERIFY_FAILED_RE,
                           http.fetch,
                           uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
    http.session.close()  # clear the connection
    # With validation disabled, the fetch should succeed but emit an
    # InsecureRequestWarning, which is captured here for the final check.
    with warnings.catch_warnings(record=True) as warning_log:
        response = http.fetch(
            uri='https://testssl-expire-r2i2.disig.sk/index.en.html',
            disable_ssl_certificate_validation=True)
    r = response.content
    self.assertIsInstance(r, unicode)
    self.assertTrue(re.search(r'<title>.*</title>', r))
    http.session.close()  # clear the connection
    # Verify that it now fails again
    self.assertRaisesRegex(pywikibot.FatalServerError, self.CERT_VERIFY_FAILED_RE,
                           http.fetch,
                           uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
    http.session.close()  # clear the connection
    # Verify that the warning occurred
    self.assertIn('InsecureRequestWarning',
                  [w.category.__name__ for w in warning_log])
开发者ID:magul,项目名称:pywikibot-core,代码行数:25,代码来源:http_tests.py
示例8: test_no_params
def test_no_params(self):
    """Test fetch method with no parameters."""
    response = http.fetch(uri=self.get_httpbin_url('/get'), params={})
    self.assertEqual(response.status, 200)
    body = json.loads(response.content)
    self.assertDictEqual(body['args'], {})
开发者ID:magul,项目名称:pywikibot-core,代码行数:7,代码来源:http_tests.py
示例9: getOpenStreetMap
def getOpenStreetMap(latitude, longitude):
    """
    Get the result from https://nominatim.openstreetmap.org/reverse .

    @param latitude: latitude of the point to reverse-geocode
    @param longitude: longitude of the point to reverse-geocode
    @rtype: list of tuples
    """
    result = []
    gotInfo = False
    parameters = urlencode({'lat': latitude, 'lon': longitude, 'accept-language': 'en'})
    # Retry indefinitely on transient network problems, backing off 30s.
    while not gotInfo:
        try:
            page = fetch('https://nominatim.openstreetmap.org/reverse?format=xml&%s' % parameters)
            et = xml.etree.ElementTree.fromstring(page.content)
            gotInfo = True
        except IOError:
            pywikibot.output(u'Got an IOError, let\'s try again')
            time.sleep(30)
        except socket.timeout:
            pywikibot.output(u'Got a timeout, let\'s try again')
            time.sleep(30)
    validParts = [u'hamlet', u'village', u'city', u'county', u'country']
    invalidParts = [u'path', u'road', u'suburb', u'state', u'country_code']
    addressparts = et.find('addressparts')
    # Fix: guard against a response without an <addressparts> element,
    # which used to crash with AttributeError.
    if addressparts is None:
        return result
    # Fix: Element.getchildren() was removed in Python 3.9; iterating
    # the element itself yields the same direct children.
    for addresspart in addressparts:
        if addresspart.tag in validParts:
            result.append(addresspart.text)
        elif addresspart.tag in invalidParts:
            pywikibot.output(u'Dropping %s, %s' % (addresspart.tag, addresspart.text))
        else:
            pywikibot.warning('%s, %s is not in addressparts lists'
                              % (addresspart.tag, addresspart.text))
    return result
开发者ID:metakgp,项目名称:batman,代码行数:33,代码来源:imagerecat.py
示例10: getDataFromHost
def getDataFromHost(self, queryStr):
    """
    Go and fetch a query from the host's API.

    @param queryStr: query string to resolve against the host
    @return: decoded JSON payload
    @rtype: dict
    @raise pywikibot.ServerError: if no data is received or the data is
        not valid JSON
    """
    url = self.getUrl(queryStr)
    try:
        resp = http.fetch(url)
    # Fix: narrowed from a bare 'except:' so that SystemExit and
    # KeyboardInterrupt are not intercepted here.
    except Exception:
        pywikibot.warning(u"Failed to retrieve %s" % url)
        raise
    data = resp.content
    if not data:
        pywikibot.warning('No data received for %s' % url)
        raise pywikibot.ServerError('No data received for %s' % url)
    try:
        data = json.loads(data)
    except ValueError:
        pywikibot.warning(
            'Data received for %s but no JSON could be decoded: %r'
            % (url, data))
        raise pywikibot.ServerError(
            'Data received for %s but no JSON could be decoded: %r'
            % (url, data))
    return data
开发者ID:AbdealiJK,项目名称:pywikibot-core,代码行数:30,代码来源:wikidataquery.py
示例11: github_svn_rev2hash
def github_svn_rev2hash(tag, rev):
    """Convert a Subversion revision to a Git hash using Github.

    @param tag: name of the Subversion repo on Github
    @param rev: Subversion revision identifier
    @return: the git hash and the commit date
    @rtype: tuple of (str, time.struct_time)
    """
    # Fix: the docstring previously claimed only the git hash is
    # returned, but the function returns a (hash, date) tuple.
    from io import StringIO
    import xml.dom.minidom
    from pywikibot.comms import http

    uri = 'https://github.com/wikimedia/%s/!svn/vcc/default' % tag
    request = http.fetch(uri=uri, method='PROPFIND',
                         body="<?xml version='1.0' encoding='utf-8'?>"
                              "<propfind xmlns=\"DAV:\"><allprop/></propfind>",
                         headers={'label': str(rev),
                                  'user-agent': 'SVN/1.7.5 {pwb}'})
    data = request.content

    dom = xml.dom.minidom.parse(StringIO(data))
    hsh = dom.getElementsByTagName("C:git-commit")[0].firstChild.nodeValue
    date = dom.getElementsByTagName("S:date")[0].firstChild.nodeValue
    date = time.strptime(date[:19], '%Y-%m-%dT%H:%M:%S')
    return hsh, date
开发者ID:emijrp,项目名称:pywikibot-core,代码行数:25,代码来源:version.py
示例12: test_follow_redirects
def test_follow_redirects(self):
    """Test follow 301 redirects after an exception works correctly."""
    # to be effective, this exception should be raised in httplib2
    self.assertRaises(Exception, http.fetch, uri='invalid://url')
    # The following will redirect from ' ' -> '_', and maybe to https://
    response = http.fetch(uri='http://en.wikipedia.org/wiki/Main%20Page')
    self.assertEqual(response.status, 200)
    self.assertIn('//en.wikipedia.org/wiki/Main_Page',
                  response.response_headers['content-location'])
    response = http.fetch(uri='http://www.gandi.eu')
    self.assertEqual(response.status, 200)
    self.assertEqual(response.response_headers['content-location'],
                     'http://www.gandi.net')
开发者ID:rubin16,项目名称:pywikibot-core,代码行数:17,代码来源:http_tests.py
示例13: test_fetch
def test_fetch(self):
    """Test http.fetch using http://www.wikipedia.org/."""
    response = http.fetch('http://www.wikipedia.org/')
    self.assertIsInstance(response, threadedhttp.HttpRequest)
    self.assertEqual(response.status, 200)
    # content is the decoded text body; raw is the undecoded bytes.
    self.assertIn('<html lang="mul"', response.content)
    self.assertIsInstance(response.content, unicode)
    self.assertIsInstance(response.raw, bytes)
开发者ID:AbdealiJK,项目名称:pywikibot-core,代码行数:8,代码来源:http_tests.py
示例14: get_image_from_image_page
def get_image_from_image_page(imagePage):
    """Get the image object to work based on an imagePage object.

    @param imagePage: page object exposing fileUrl() for the image file
    @return: PIL Image built from the downloaded file contents
    """
    # Fix: removed the dead 'imageBuffer = None' initialization and the
    # pointless 'raw[:]' defensive copy — BytesIO does not mutate its
    # argument.
    imageURL = imagePage.fileUrl()
    fetched = http.fetch(imageURL)
    imageBuffer = io.BytesIO(fetched.raw)
    image = Image.open(imageBuffer)
    return image
开发者ID:Darkdadaah,项目名称:pywikibot-core,代码行数:8,代码来源:match_images.py
示例15: downloadPhoto
def downloadPhoto(photoUrl=''):
    """
    Download the photo and store it in a io.BytesIO object.

    TODO: Add exception handling
    """
    return io.BytesIO(fetch(photoUrl).raw)
开发者ID:Kat233hryn,项目名称:pywikibot-core,代码行数:9,代码来源:flickrripper.py
示例16: test_tools_path
def test_tools_path(self):
    """Test tools path."""
    # 'tool' is supplied by the enclosing test generation machinery.
    if '?' in tool:
        self.skipTest('"{0}" is a regex!'.format(tool))
    url = 'http://tools.wmflabs.org/%s?user=%s' % (tool, 'xqt')
    response = fetch(url)
    self.assertIn(response.status, (200, 207),
                  'Http response status {0} for "{1}"'.format(
                      response.data.status_code, tool))
示例17: test_follow_redirects
def test_follow_redirects(self):
    """Test follow 301 redirects after an exception works correctly."""
    # It doesnt matter what exception is raised here, provided it
    # occurs within the httplib2 request method.
    self.assertRaises(KeyError, http.fetch, uri='invalid://url')
    # The following will redirect from ' ' -> '_', and maybe to https://
    redirected = http.fetch(uri='http://en.wikipedia.org/wiki/Main%20Page')
    self.assertEqual(redirected.status, 200)
    location = redirected.response_headers['content-location']
    self.assertIn('//en.wikipedia.org/wiki/Main_Page', location)
    redirected = http.fetch(uri='http://www.gandi.eu')
    self.assertEqual(redirected.status, 200)
    self.assertEqual(redirected.response_headers['content-location'],
                     'http://www.gandi.net')
开发者ID:skamithi,项目名称:pywikibot-core,代码行数:18,代码来源:http_tests.py
示例18: downloadPhoto
def downloadPhoto(self):
    """
    Download the photo and store it in a io.BytesIO object.

    TODO: Add exception handling
    """
    # Lazily fetch once; afterwards serve the cached buffer.
    if self.contents:
        return self.contents
    self.contents = io.BytesIO(fetch(self.URL).raw)
    return self.contents
开发者ID:PersianWikipedia,项目名称:pywikibot-core,代码行数:10,代码来源:data_ingestion.py
示例19: getFlinfoDescription
def getFlinfoDescription(photo_id=0):
    """
    Get the description from http://wikipedia.ramselehof.de/flinfo.php.

    TODO: Add exception handling, try a couple of times
    """
    query = urlencode({'id': photo_id, 'raw': 'on'})
    response = fetch('http://wikipedia.ramselehof.de/flinfo.php?%s' % query)
    return response.content
开发者ID:Kat233hryn,项目名称:pywikibot-core,代码行数:10,代码来源:flickrripper.py
示例20: setUpClass
def setUpClass(cls):
    """
    Set up the test class.

    Prevent tests running if the host is down.
    """
    super(CheckHostnameMixin, cls).setUpClass()

    if not hasattr(cls, 'sites'):
        return

    for key, data in cls.sites.items():
        if 'hostname' not in data:
            raise Exception('%s: hostname not defined for %s'
                            % (cls.__name__, key))
        hostname = data['hostname']

        # Reuse the cached probe result: an Exception or False means a
        # prior failure (skip again); anything else means reachable.
        if hostname in cls._checked_hostnames:
            if isinstance(cls._checked_hostnames[hostname], Exception):
                raise unittest.SkipTest(
                    '%s: hostname %s failed (cached): %s'
                    % (cls.__name__, hostname,
                       cls._checked_hostnames[hostname]))
            elif cls._checked_hostnames[hostname] is False:
                raise unittest.SkipTest('%s: hostname %s failed (cached)'
                                        % (cls.__name__, hostname))
            else:
                continue

        e = None
        try:
            if '://' not in hostname:
                hostname = 'http://' + hostname
            # default_error_handling=False: inspect the result manually
            # instead of letting fetch raise on HTTP errors.
            r = http.fetch(uri=hostname,
                           default_error_handling=False)
            if r.exception:
                e = r.exception
            else:
                # Accept OK and the common redirect statuses.
                if r.status not in [200, 301, 302, 303, 307, 308]:
                    raise ServerError('HTTP status: %d' % r.status)
                r.content  # default decode may raise exception
        except Exception as e2:
            pywikibot.error('%s: accessing %s caused exception:'
                            % (cls.__name__, hostname))
            pywikibot.exception(e2, tb=True)
            e = e2
            pass

        # Cache the outcome so sibling test classes probing the same
        # host skip the network round-trip.
        if e:
            cls._checked_hostnames[hostname] = e
            raise unittest.SkipTest(
                '%s: hostname %s failed: %s'
                % (cls.__name__, hostname, e))

        cls._checked_hostnames[hostname] = True
开发者ID:emijrp,项目名称:pywikibot-core,代码行数:55,代码来源:aspects.py
注:本文中的pywikibot.comms.http.fetch函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论