Python http.request Function Code Examples


This article collects typical usage examples of the pywikibot.comms.http.request function in Python. If you are wondering what request does, how to call it, or what it looks like in real code, the curated examples below should help.



The following presents 16 code examples of the request function, sorted by popularity by default.
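
As a quick orientation before the individual examples, here is a minimal usage sketch. It assumes a pywikibot-core version contemporary with the snippets below; as Example 1 illustrates, newer releases deprecate calling http.request without a site argument in favour of http.fetch().

from pywikibot.comms import http

# Plain GET against an explicit URL: passing site=None together with `uri`
# bypasses per-site configuration; the response body is returned as text.
content = http.request(site=None, uri='https://www.wikipedia.org/')
print(content[:200])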

Example 1: test_https

 def test_https(self):
     """Test http.request using https://www.wikiquote.org/."""
     r = http.request(site=None, uri='https://www.wikiquote.org/')
     self.assertIsInstance(r, unicode)
     self.assertIn('<html lang="mul"', r)
     self.assertOneDeprecationParts(
         'Invoking http.request without argument site', 'http.fetch()')
Developer: AbdealiJK, Project: pywikibot-core, Lines: 7, Source: http_tests.py


Example 2: getInternetArchiveURL

def getInternetArchiveURL(url, timestamp=None):
    """Return archived URL by Internet Archive.

    Parameters:
        url - url to search an archived version for
        timestamp - requested archive date. The version closest to that moment
                    is returned. Format: YYYYMMDDhhmmss or part thereof.

    See [[:mw:Archived Pages]] and https://archive.org/help/wayback_api.php
    for more details.
    """
    import json
    uri = u'https://archive.org/wayback/available?'

    query = {'url': url}

    if timestamp is not None:
        query['timestamp'] = timestamp

    uri = uri + urllib.urlencode(query)
    jsontext = http.request(uri=uri, site=None)
    if "closest" in jsontext:
        data = json.loads(jsontext)
        return data['archived_snapshots']['closest']['url']
    else:
        return None
Developer: APerson241, Project: pywikibot-core, Lines: 26, Source: weblib.py


Example 3: getWebCitationURL

def getWebCitationURL(url, timestamp=None):
    """Return archived URL by Web Citation.

    Parameters:
        url - url to search an archived version for
        timestamp - requested archive date. The version closest to that moment
                    is returned. Format: YYYYMMDDhhmmss or part thereof.

    See http://www.webcitation.org/doc/WebCiteBestPracticesGuide.pdf
    for more details
    """
    import xml.etree.ElementTree as ET
    uri = u'http://www.webcitation.org/query?'

    query = {'returnxml': 'true',
             'url': url}

    if not timestamp is None:
        query['date'] = timestamp

    uri = uri + urllib.urlencode(query)
    xmltext = http.request(uri=uri, site=None)
    if "success" in xmltext:
        data = ET.fromstring(xmltext)
        return data.find('.//webcite_url').text
    else:
        return None
Developer: APerson241, Project: pywikibot-core, Lines: 27, Source: weblib.py


Example 4: test_https_ignore_cert_error

 def test_https_ignore_cert_error(self):
     """Test http.request ignoring invalid vikidia SSL certificate."""
     # As the connection is cached, the above test will cause
     # subsequent requests to go to the existing, broken, connection.
     # So, this uses a different host, which hopefully hasn't been
     # connected previously by other tests.
     r = http.request(site=None,
                      uri='https://en.vikidia.org/wiki/Main_Page',
                      disable_ssl_certificate_validation=True)
     self.assertIsInstance(r, unicode)
     self.assertIn('<title>Vikidia</title>', r)
Developer: skamithi, Project: pywikibot-core, Lines: 11, Source: http_tests.py


Example 5: getversion_onlinerepo

def getversion_onlinerepo(repo=None):
    """Retrieve current framework revision number from online repository.

    @param repo: (optional) Online repository location
    @type repo: URL or string
    """
    from pywikibot.comms import http

    url = repo or 'https://git.wikimedia.org/feed/pywikibot/core'
    hsh = None
    buf = http.request(site=None, uri=url)
    buf = buf.split('\r\n')
    try:
        hsh = buf[13].split('/')[5][:-1]
    except Exception as e:
        raise ParseError(repr(e) + ' while parsing ' + repr(buf))
    return hsh
Developer: anrao91, Project: pywikibot-core, Lines: 17, Source: version.py


Example 6: getInternetArchiveURL

def getInternetArchiveURL(site, url, timestamp=None):
    """Return archived URL by Internet Archive."""
    # See [[:mw:Archived Pages]] and http://archive.org/help/wayback_api.php
    import json
    query = u'http://archive.org/wayback/available?'
    query += u'url='
    query += url
    if not timestamp is None:
        query += u'&timestamp='
        query += timestamp
    if pywikibot.verbose:
        pywikibot.output(u"Requesting query from Internet Archive: %s" % query)
    jsontext = http.request(uri=query, site=site, retry=False, no_hostname=True)
    if "closest" in jsontext:
        data = json.loads(jsontext)
        return data['archived_snapshots']['closest']['url']
    else:
        return None
Developer: Rodehi, Project: GFROS, Lines: 18, Source: weblib.py


Example 7: getDataFromHost

    def getDataFromHost(self, queryStr):
        """
        Go and fetch a query from the host's API.
        """
        url = self.getUrl(queryStr)

        try:
            resp = http.request(None, url)
        except:
            pywikibot.warning(u"Failed to retrieve %s" % url)
            raise

        try:
            data = json.loads(resp)
        except ValueError:
            pywikibot.warning(u"Data received from host but no JSON could be decoded")
            raise pywikibot.ServerError

        return data
Developer: anrao91, Project: pywikibot-core, Lines: 19, Source: wikidataquery.py


Example 8: getWebCitationURL

def getWebCitationURL(site, url, timestamp=None):
    """Return archived URL by Web Citation."""
    # See http://www.webcitation.org/doc/WebCiteBestPracticesGuide.pdf
    from BeautifulSoup import BeautifulStoneSoup
    query = u'http://www.webcitation.org/query?'
    query += u'returnxml=true'
    query += u'&url='
    query += url
    if not timestamp is None:
        query += u'&date='
        query += timestamp
    if pywikibot.verbose:
        pywikibot.output(u"Requesting query from Web Citation: %s" % query)
    xmltext = http.request(uri=query, site=site, retry=False, no_hostname=True)
    if "success" in xmltext:
        data = BeautifulStoneSoup(xmltext)
        return data.find('webcite_url').string
    else:
        return None
Developer: Rodehi, Project: GFROS, Lines: 19, Source: weblib.py


Example 9: github_svn_rev2hash

def github_svn_rev2hash(tag, rev):
    """Convert a Subversion revision to a Git hash using Github.

    @param tag: name of the Subversion repo on Github
    @param rev: Subversion revision identifier
    @return: the git hash
    @rtype: str
    """
    from io import StringIO
    import xml.dom.minidom
    from pywikibot.comms import http

    uri = 'https://github.com/wikimedia/%s/!svn/vcc/default' % tag
    data = http.request(site=None, uri=uri, method='PROPFIND',
                        body="<?xml version='1.0' encoding='utf-8'?>"
                        "<propfind xmlns=\"DAV:\"><allprop/></propfind>",
                        headers={'label': str(rev), 'user-agent': 'SVN/1.7.5 {pwb}'})

    dom = xml.dom.minidom.parse(StringIO(data))
    hsh = dom.getElementsByTagName("C:git-commit")[0].firstChild.nodeValue
    return hsh
Developer: rubin16, Project: pywikibot-core, Lines: 21, Source: version.py


Example 10: submit

    def submit(self):
        """Submit a query and parse the response.

        @return:  The data retrieved from api.php (a dict)

        """
        paramstring = self.http_params()
        while True:
            action = self.params.get("action", "")
            simulate = self._simulate(action)
            if simulate:
                return simulate
            self.site.throttle(write=self.write)
            uri = self.site.scriptpath() + "/api.php"
            ssl = False
            if self.site.family.name in config.available_ssl_project:
                if action == "login" and config.use_SSL_onlogin:
                    ssl = True
                elif config.use_SSL_always:
                    ssl = True
            try:
                if self.mime:
                    # construct a MIME message containing all API key/values
                    container = MIMEMultipart(_subtype='form-data')
                    for key in self.params:
                        # key "file" requires special treatment in a multipart
                        # message
                        if key == "file":
                            local_filename = self.params[key]
                            filetype = mimetypes.guess_type(local_filename)[0] \
                                       or 'application/octet-stream'
                            file_content = file(local_filename, "rb").read()
                            submsg = MIMENonMultipart(*filetype.split("/"))
                            submsg.add_header("Content-disposition",
                                              "form-data", name=key,
                                              filename=local_filename)
                            submsg.set_payload(file_content)
                        else:
                            try:
                                self.params[key].encode("ascii")
                                keytype = ("text", "plain")
                            except UnicodeError:
                                keytype = ("application", "octet-stream")
                            submsg = MIMENonMultipart(*keytype)
                            submsg.add_header("Content-disposition", "form-data",
                                              name=key)
                            submsg.set_payload(self.params[key])
                        container.attach(submsg)
                    # strip the headers to get the HTTP message body
                    body = container.as_string()
                    marker = "\n\n" # separates headers from body
                    eoh = body.find(marker)
                    body = body[ eoh + len(marker): ]
                    # retrieve the headers from the MIME object
                    mimehead = dict(container.items())
                    rawdata = http.request(self.site, uri, ssl, method="POST",
                                           headers=mimehead, body=body)
                else:
                    rawdata = http.request(self.site, uri, ssl, method="POST",
                                headers={'Content-Type':
                                         'application/x-www-form-urlencoded'},
                                body=paramstring)
##                import traceback
##                traceback.print_stack()
##                print rawdata
            except Server504Error:
                pywikibot.log(u"Caught HTTP 504 error; retrying")
                self.wait()
                continue
            #TODO: what other exceptions can occur here?
            except Exception, e:
                # for any other error on the http request, wait and retry
                pywikibot.error(traceback.format_exc())
                pywikibot.log(u"%s, %s" % (uri, paramstring))
                self.wait()
                continue
            if not isinstance(rawdata, unicode):
                rawdata = rawdata.decode(self.site.encoding())
            pywikibot.debug(u"API response received:\n" + rawdata, _logger)
            if rawdata.startswith(u"unknown_action"):
                raise APIError(rawdata[:14], rawdata[16:])
            try:
                result = json.loads(rawdata)
            except ValueError:
                # if the result isn't valid JSON, there must be a server
                # problem.  Wait a few seconds and try again
                pywikibot.warning(
"Non-JSON response received from server %s; the server may be down."
                                 % self.site)
                pywikibot.debug(rawdata, _logger)
                # there might also be an overflow, so try a smaller limit
                for param in self.params:
                    if param.endswith("limit"):
                        value = self.params[param]
                        try:
                            self.params[param] = str(int(value) // 2)
                            pywikibot.output(u"Set %s = %s"
                                             % (param, self.params[param]))
                        except:
                            pass
#......... part of the code omitted here .........
Developer: shizhao, Project: pywikibot-core, Lines: 101, Source: api.py


Example 11: test_https

 def test_https(self):
     """Test http.request using https://www.wikiquote.org/."""
     r = http.request(site=None, uri='https://www.wikiquote.org/')
     self.assertIsInstance(r, unicode)
     self.assertIn('<html lang="mul"', r)
Developer: skamithi, Project: pywikibot-core, Lines: 5, Source: http_tests.py


Example 12: submit

    def submit(self):
        """Submit a query and parse the response.

        @return: a dict containing data retrieved from api.php

        """
        while True:
            paramstring = self.http_params()
            action = self.params.get("action", "")
            simulate = self._simulate(action)
            if simulate:
                return simulate
            if self.throttle:
                self.site.throttle(write=self.write)
            else:
                pywikibot.log("Action '{0}' is submitted not throttled.".format(action))
            uri = self.site.scriptpath() + "/api.php"
            try:
                if self.mime:
                    # construct a MIME message containing all API key/values
                    container = MIMEMultipart(_subtype='form-data')
                    for key in self.params:
                        # key "file" requires special treatment in a multipart
                        # message
                        if key == "file":
                            local_filename = self.params[key]
                            filetype = mimetypes.guess_type(local_filename)[0] \
                                or 'application/octet-stream'
                            file_content = file(local_filename, "rb").read()
                            submsg = Request._generate_MIME_part(
                                key, file_content, filetype.split('/'),
                                {'filename': local_filename})
                        else:
                            submsg = Request._generate_MIME_part(
                                key, self.params[key], None, None)
                        container.attach(submsg)
                    for key, value in self.mime_params.items():
                        container.attach(Request._generate_MIME_part(key, *value))
                    # strip the headers to get the HTTP message body
                    body = container.as_string()
                    marker = "\n\n"  # separates headers from body
                    eoh = body.find(marker)
                    body = body[eoh + len(marker):]
                    # retrieve the headers from the MIME object
                    headers = dict(list(container.items()))
                else:
                    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
                    body = paramstring

                rawdata = http.request(
                    self.site, uri, method="POST",
                    headers=headers, body=body)

#                import traceback
#                traceback.print_stack()
#                print rawdata
            except Server504Error:
                pywikibot.log(u"Caught HTTP 504 error; retrying")
                self.wait()
                continue
            except FatalServerError:
                # This error is not going to be fixed by just waiting
                pywikibot.error(traceback.format_exc())
                raise
            # TODO: what other exceptions can occur here?
            except Exception:
                # for any other error on the http request, wait and retry
                pywikibot.error(traceback.format_exc())
                pywikibot.log(u"%s, %s" % (uri, paramstring))
                self.wait()
                continue
            if not isinstance(rawdata, unicode):
                rawdata = rawdata.decode(self.site.encoding())
            pywikibot.debug(u"API response received:\n" + rawdata, _logger)
            if rawdata.startswith(u"unknown_action"):
                raise APIError(rawdata[:14], rawdata[16:])
            try:
                result = json.loads(rawdata)
            except ValueError:
                # if the result isn't valid JSON, there must be a server
                # problem.  Wait a few seconds and try again
                pywikibot.warning(
                    "Non-JSON response received from server %s; the server may be down."
                    % self.site)
                pywikibot.debug(rawdata, _logger)
                # there might also be an overflow, so try a smaller limit
                for param in self.params:
                    if param.endswith("limit"):
                        value = self.params[param]
                        try:
                            self.params[param] = str(int(value) // 2)
                            pywikibot.output(u"Set %s = %s"
                                             % (param, self.params[param]))
                        except:
                            pass
                self.wait()
                continue
            if not result:
                result = {}
            if not isinstance(result, dict):
#......... part of the code omitted here .........
Developer: anrao91, Project: pywikibot-core, Lines: 101, Source: api.py


Example 13: subTemplate

    def subTemplate(self, content, param):
        """Substitute the template tags in content according to param.

           @param content: Content with tags to substitute.
           @type  content: string
           @param param: Param with data how to substitute tags.
           @type  param: dict

           Returns a tuple containing the new content with tags
           substituted and a list of those tags.
        """

        substed_tags = []  # DRTRIGON-73
        metadata     = {'mw-signature': u'~~~~',
                        'mw-timestamp': u'~~~~~',}  # DRTRIGON-132

        # 0.2.) check for 'simple' mode and get additional params
        if param['simple']:
            p = self.site.getExpandedString(param['simple'])
            param.update(pywikibot.extract_templates_and_params(p)[0][1])

        # 0.5.) check cron/date
        if param['cron']:
            # [min] [hour] [day of month] [month] [day of week]
            # (date supported only, thus [min] and [hour] dropped)
            if not (param['cron'][0] == '@'):
                param['cron'] = '* * ' + param['cron']
            entry = crontab.CronTab(param['cron'])
            # find the delay from midnight (does not return 0.0 - but next)
            delay = entry.next(datetime.datetime.now().replace(hour=0,
                                                               minute=0,
                                                               second=0,
                                                               microsecond=0)- \
                               datetime.timedelta(microseconds=1))

            pywikibot.output(u'CRON delay for execution: %.3f (<= %i)'
                             % (delay, self._bot_config['CRONMaxDelay']))

            if not (delay <= self._bot_config['CRONMaxDelay']):
                return (content, substed_tags, metadata)

        # 1.) getUrl or wiki text
        # (security: check url not to point to a local file on the server,
        #  e.g. 'file://' - same as used in xsalt.py)
        secure = False
        for item in [u'http://', u'https://',
                     u'mail://', u'local://', u'wiki://']:
            secure = secure or (param['url'][:len(item)] == item)
        param['zip'] = ast.literal_eval(param['zip'])
        if not secure:
            return (content, substed_tags, metadata)
        if   (param['url'][:7] == u'wiki://'):
            url = param['url'][7:].strip('[]')              # enable wiki-links
            if ast.literal_eval(param['expandtemplates']):  # DRTRIGON-93 (only with 'wiki://')
                external_buffer = pywikibot.Page(self.site,
                                                 url).get(expandtemplates=True)
            else:
                external_buffer = self.load( pywikibot.Page(self.site, url) )
        elif (param['url'][:7] == u'mail://'):              # DRTRIGON-101
            url = param['url'].replace(u'{{@}}', u'@')     # e.g. nlwiki
            mbox = SubsterMailbox(
              pywikibot.config.datafilepath(self._bot_config['data_path'],
                                            self._bot_config['mbox_file'], ''))
            external_buffer = mbox.find_data(url)
            mbox.close()
        elif (param['url'][:8] == u'local://'):             # DRTRIGON-131
            if (param['url'][8:] == u'cache/state_bots'):
                # filename hard-coded
                d = shelve.open(pywikibot.config.datafilepath('cache',
                                                              'state_bots'))
                external_buffer = pprint.pformat(
                    ast.literal_eval(pprint.pformat(d)))
                d.close()
            else:
                external_buffer = u'n/a'
        else:
            # consider using 'expires', 'last-modified', 'etag' in order to
            # make the updating data requests more efficient! use those stored
            # on page, if the user placed them, else use the conventional mode.
            # http://www.diveintopython.net/http_web_services/etags.html
            f_url, external_buffer = http.request(self.site, param['url'],
                                                  no_hostname = True,
                                                  back_response = True)
            headers = f_url.headers # same like 'f_url.info()'
            #if param['zip']:
            if ('text/' not in headers['content-type']):
                pywikibot.output(u'Source is of non-text content-type, '
                                 u'using raw data instead.')
                external_buffer = f_url.read()
            del f_url               # free some memory (no need to keep copy)

            for h in ['content-length', 'date', 'last-modified', 'expires']:
                if h in headers:
                    metadata['url-%s' % h] = headers[h]

        # some intermediate processing (unzip, xlsx2csv, ...)
        if param['zip']:    # 'application/zip', ...
            fileno          = 0 if (param['zip'] == True) else (param['zip']-1)
            external_buffer = self.unzip(external_buffer, fileno)
        if param['xlsx']:   # 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
#......... part of the code omitted here .........
Developer: stanta, Project: ipc-parser-1, Lines: 101, Source: subster.py


Example 14: test_get

 def test_get(self):
     r = http.request(site=None, uri='http://www.wikipedia.org/')
     self.assertIsInstance(r, str if sys.version_info[0] >= 3 else unicode)
     self.assertIn('<html lang="mul"', r)
Developer: anrao91, Project: pywikibot-core, Lines: 4, Source: http_tests.py


Example 15: test_http

 def test_http(self):
     """Test http request function."""
     r = http.request(site=None, uri='http://www.wikipedia.org/')
     self.assertIsInstance(r, unicode)
     self.assertIn('<html lang="mul"', r)
Developer: Exal117, Project: pywikibot-core, Lines: 5, Source: http_tests.py


Example 16: test_get

 def test_get(self):
     r = http.request(site=None, uri='http://www.wikipedia.org/')
     self.assertIsInstance(r, str)
     self.assertTrue('<html lang="mul"' in r)
Developer: legoktm, Project: pywikibot-core, Lines: 4, Source: http_tests.py



Note: The pywikibot.comms.http.request examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright remains with the original authors. Please consult each project's License before distributing or using the code, and do not reproduce this article without permission.

