Python urllib2.install_opener Function Code Examples


This article collects typical usage examples of Python's urllib2.install_opener function. If you are wondering what install_opener is for, how to call it, or what real-world code that uses it looks like, the hand-picked examples below should help.



The sections below present 20 code examples of the install_opener function, sorted by popularity by default.
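Before the individual examples, here is a minimal sketch of the pattern that almost every snippet below follows: build an OpenerDirector with extra handlers via urllib2.build_opener, register it globally with urllib2.install_opener, and from then on plain urllib2.urlopen calls go through those handlers. This is an illustration only (Python 2; the example.com URL, the cookie handler, and the User-Agent header are placeholder choices, not taken from any of the projects cited below).

import cookielib
import urllib2

# Build an opener that keeps cookies and sends a custom User-Agent.
cookie_jar = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))
opener.addheaders = [('User-agent', 'Mozilla/5.0')]

# install_opener() makes this opener the module-wide default, so every
# later urllib2.urlopen() call goes through its handlers.
urllib2.install_opener(opener)
html = urllib2.urlopen('http://example.com/', timeout=10).read()
print html[:200]

# Passing None clears the global opener; the next urlopen() falls back to
# urllib2's stock handlers (the reset used in Example 3 below).
urllib2.install_opener(None)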

Example 1: findMovieReviewers

def findMovieReviewers(movie_id, subUrl) :	
	print movie_id
	print subUrl
	reload(sys)
	sys.setdefaultencoding('utf-8')
	
	cj = cookielib.LWPCookieJar() 
	try: 
		cj.revert('douban.cookie') 
	except: 
		try :
			dou=douban() 
			username='[email protected]' 
			password='123654' 
			domain='http://www.douban.com/' 
			origURL='http://www.douban.com/login' 
			dou.setinfo(username,password,domain,origURL) 
			dou.signin()  
		except : 
			return
	opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj)) 
	urllib2.install_opener(opener) 
	collectPage = urllib2.urlopen("http://movie.douban.com/subject/" + movie_id + "/reviews" + subUrl, timeout=20).read().encode('utf-8')
	soup = BeautifulSoup(collectPage, 'html.parser')

	#init db connection
	conn = MySQLdb.connect(host='localhost',user='root',passwd='root')
	curs = conn.cursor()
	conn.select_db('pydb')

	reviewsOfThisPage = soup.findAll("a", { "class" : "review-hd-avatar" })

	countReviews = len(reviewsOfThisPage)
	print countReviews

	for review in reviewsOfThisPage :
		reviewSoup = BeautifulSoup(str(review), 'html.parser')
		userId = reviewSoup.a["href"].split("/")[4]
		try :
			#insert data into db rowbyrow
			curs.execute('INSERT INTO users (user_id) VALUES (%s)', userId)
			print "rows affected " + str(curs.rowcount)
		except :
			print "error inserting, probably duplicate for userid : " + userId
			None

	try :
		foundSubUrl = soup.find("a", { "class" : "next" })['href']
	except :
		foundSubUrl = ""

	print foundSubUrl

	conn.commit()
	curs.close()
	conn.close()	

	if "" != foundSubUrl  and countReviews > 0 :
		time.sleep( 2 )
		findMovieReviewers(movie_id, foundSubUrl)
Developer: zhugino | Project: dbpy | Lines: 60 | Source: loadUsersByFilms.py


Example 2: query

def query(searchstr, outformat, allresults=False):
    """Return a list of bibtex items."""
    logging.debug("Query: %s" % searchstr)
    searchstr = "/scholar?q=" + urllib2.quote(searchstr)
    url = GOOGLE_SCHOLAR_URL + searchstr
    header = HEADERS
    header["Cookie"] = header["Cookie"] + ":CF=%d" % outformat
    for proxy_addr in proxy_list:
        try:
            proxy = urllib2.ProxyHandler({"http": proxy_addr})
            opener = urllib2.build_opener(proxy)
            urllib2.install_opener(opener)
            request = urllib2.Request(url, headers=header)
            response = urllib2.urlopen(request, timeout=5)
            print "Success HTTP-Agent:" + proxy_addr
            break
        except urllib2.URLError, e:
            if hasattr(e, "code"):
                print str(e.code) + e.msg + proxy_addr
                if e.code == 403 or e.code == 503:
                    proxy_list.remove(proxy_addr)
            elif e.reason.message == "timed out":
                print "Timed Out" + proxy_addr
                proxy_list.remove(proxy_addr)
            continue
Developer: fc500110 | Project: iamrobot | Lines: 25 | Source: gscholar.py


Example 3: save

    def save(self):
        # TODO: new IP address should be added in a side-by-side manner
        # or the interface wouldn't appear once IP was changed.
        retval = super(GlobalConfigurationForm, self).save()

        whattoreload = "hostname"
        if self.instance._orig_gc_ipv4gateway != self.cleaned_data.get('gc_ipv4gateway'):
            whattoreload = "networkgeneral"
        if self.instance._orig_gc_ipv6gateway != self.cleaned_data.get('gc_ipv6gateway'):
            whattoreload = "networkgeneral"
        notifier().reload(whattoreload)

        http_proxy = self.cleaned_data.get('gc_httpproxy')
        if http_proxy:
            os.environ['http_proxy'] = http_proxy
            os.environ['https_proxy'] = http_proxy
        elif not http_proxy:
            if 'http_proxy' in os.environ:
                del os.environ['http_proxy']
            if 'https_proxy' in os.environ:
                del os.environ['https_proxy']

        # Reset global opener so ProxyHandler can be recalculated
        urllib2.install_opener(None)

        return retval
Developer: bharathvu | Project: freenas | Lines: 26 | Source: forms.py


Example 4: openurl

    def openurl(self,url):
        """
        打开网页
        """
        cookie_support= urllib2.HTTPCookieProcessor(cookielib.CookieJar())
        self.opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
        urllib2.install_opener(self.opener)
        user_agents = [
                    'Mozilla/5.0 (Windows; U; Windows NT 5.1; it; rv:1.8.1.11) Gecko/20071127 Firefox/2.0.0.11',
                    'Opera/9.25 (Windows NT 5.1; U; en)',
                    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)',
                    'Mozilla/5.0 (compatible; Konqueror/3.5; Linux) KHTML/3.5.5 (like Gecko) (Kubuntu)',
                    'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.12) Gecko/20070731 Ubuntu/dapper-security Firefox/1.5.0.12',
                    'Lynx/2.8.5rel.1 libwww-FM/2.14 SSL-MM/1.4.1 GNUTLS/1.2.9',
                    "Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.7 (KHTML, like Gecko) Ubuntu/11.04 Chromium/16.0.912.77 Chrome/16.0.912.77 Safari/535.7",
                    "Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:10.0) Gecko/20100101 Firefox/10.0 ",

                    ]

        agent = random.choice(user_agents)
        self.opener.addheaders = [("User-agent",agent),
                                  ("Accept","*/*"),
                                  ('Referer', 'http://www.google.com')
        ]
        try:
            res = self.opener.open(url)
            return res.read()
        except:
            return None
Developer: ustbliubo2014 | Project: FaceRecognition | Lines: 29 | Source: Analog_Internet_access.py


Example 5: __init__

    def __init__(self, username, realm_id, config, debug=False):
        self._version = QAPI_VERSION
        self._cookiejar = CookieJar()
        self._username = username
        self._realm_id = realm_id
        self._profile = '@'.join((username, realm_id))
        self._realm = REALMS[self._realm_id]
        self._proxy = None
        self._templates = None
        self._debug = debug
        self._config = None #. User configuration file for scripted mode
        self._connected = False
        self._username = 'nobody'
        self._cFM = None
        try:
            from ConfigFileManager import ConfigFileManager, InternalConfigError
            try:
                self._config = ConfigFileManager(config)
                self._qapi_ini = self._config.option('qapi', 'ini')
                self._cFM = ConfigFileManager(self._qapi_ini)
            except InternalConfigError as e:
                raise Exception("Sorry, %s" % e)
        except ImportError as e:
            raise Exception("Sorry, %s" % e)

        urllib2.install_opener(self._opener())
Developer: nima | Project: insecurity | Lines: 26 | Source: qapi.py


Example 6: getResponseJSONData

	def getResponseJSONData(self, url, secureToken, jsonString, additionalOptions=None):
		"Method sends a JSON encoded string via REST"
		
		if "proxy" in globals(): # set proxy if necessary
			proxy_handler = urllib2.ProxyHandler(self.config.proxy)
			opener = urllib2.build_opener(proxy_handler)
			urllib2.install_opener(opener)
		
		req = urllib2.Request(url, jsonString)
			
		# define header fields	
		req.add_header('Authorization', self.config.SDK_AUTH+",oauth_token=\""+secureToken+"\"")
		req.add_header('User-Agent', self.config.SDK_VERSION)
		req.add_header('Accept', 'application/json')
		req.add_header('Content-Type', 'application/json')
		#req.add_header('Content-Length', len(json))
		
		# establish call
		try:
			response = urllib2.urlopen(req)
			response = json.loads(response.read())
			
			return response
		
		except urllib2.HTTPError as e: # catch other status codes than '0000' and raise a new TelekomException containing 'statusCode' and 'statusMessage'
			
			raise TelekomException(json.loads(e.read()))
Developer: tschubotz | Project: box_fetch | Lines: 27 | Source: TelekomJSONService.py


Example 7: _login

    def _login(self):
        """
        Authenticates a user in a bugzilla tracker
        """
        if not (self.backend_user and self.backend_password):
            printdbg("No account data provided. Not logged in bugzilla")
            return

        import cookielib

        cookie_j = cookielib.CookieJar()
        cookie_h = urllib2.HTTPCookieProcessor(cookie_j)

        url = self._get_login_url(self.url)
        values = {'Bugzilla_login': self.backend_user,
                  'Bugzilla_password': self.backend_password}

        opener = urllib2.build_opener(cookie_h)
        urllib2.install_opener(opener)
        data = urllib.urlencode(values)
        request = urllib2.Request(url, data)
        urllib2.urlopen(request)
        for i, c in enumerate(cookie_j):
            self.cookies[c.name] = c.value

        printout("Logged in bugzilla as %s" % self.backend_user)
        printdbg("Bugzilla session cookies: %s" % self.cookies)
Developer: MetricsGrimoire | Project: Bicho | Lines: 27 | Source: bg.py


Example 8: _opener

    def _opener(self):

        build = [urllib2.HTTPHandler()]

        if self.request.redirect:
            build.append(urllib2.HTTPRedirectHandler())

        if self.request.proxy_host and self.request.proxy_port:
            build.append(urllib2.ProxyHandler(
                {self.request.proxy_protocol: self.request.proxy_host + ':' + str(self.request.proxy_port)}))

            if self.request.proxy_username:
                proxy_auth_handler = urllib2.ProxyBasicAuthHandler()
                proxy_auth_handler.add_password('realm', 'uri', self.request.proxy_username,
                                                self.request.proxy_password)
                build.append(proxy_auth_handler)

        if self.request.cookies:
            self.request.cookies = os.path.join(self._dirname, self.request.cookies)
            self.cookies = cookielib.MozillaCookieJar()
            if os.path.isfile(self.request.cookies):
                self.cookies.load(self.request.cookies)
            build.append(urllib2.HTTPCookieProcessor(self.cookies))

        urllib2.install_opener(urllib2.build_opener(*build))
Developer: Inter95 | Project: tutvguia | Lines: 25 | Source: net.py


Example 9: __init__

 def __init__(self, login, password, hostname, port=8091):
     self.passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
     self.passman.add_password(None, "http://%s:%d/" % (hostname, int(port)), login, password)
     self.hostname = hostname
     self.port = port
     self.opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(self.passman))
     urllib2.install_opener(self.opener)
Developer: itnihao | Project: python-zabbix | Lines: 7 | Source: couchbase.py


Example 10: fetch_data_from_url

def fetch_data_from_url(url):
  """Downloads and returns data from a url"""
  request = urllib2.Request(url)
  opener = urllib2.build_opener()
  urllib2.install_opener(opener)
  data = opener.open(request).read()
  return data
Developer: dbyler | Project: WhereNext | Lines: 7 | Source: plumbing.py


Example 11: _connect

    def _connect(self, request):
        """ Connect to the secured database by opening the request.

        Required:
        urllib2.Request     request     The URL Request.

        Return:
        str                 serialized_response     response data

        """
        # create a password manager
        password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()

        # Add the username and password.
        # If we knew the realm, we could use it instead of None.
        password_mgr.add_password(
                None,
                self.base_url(),
                self._username,
                self._password)

        handler = urllib2.HTTPBasicAuthHandler(password_mgr)

        # create "opener" (OpenerDirector instance)
        opener = urllib2.build_opener(handler)

        # Install the opener.
        # Now all calls to urllib2.urlopen use our opener.
        urllib2.install_opener(opener)

        serialized_response = urllib2.urlopen(request).read()

        return serialized_response
Developer: scrbrd | Project: scoreboard | Lines: 33 | Source: db.py


Example 12: login_website

def login_website():
    '''csdn'''
    cook_jar=cookielib.CookieJar()
    cookie_support=urllib2.HTTPCookieProcessor(cook_jar)
    opener=urllib2.build_opener(cookie_support,urllib2.HTTPHandler)
    urllib2.install_opener(opener)
    print 'logging in'
    
    login_url='http://passport.csdn.net/ajax/accounthandler.ashx?t=log&u=dylinshi&p=123456a&remember=0&f=http%3A%2F%2Fblog.csdn.net%2F&rand=0.363029723724382'
    user_agents = [
            'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6',
            'Opera/9.25 (Windows NT 5.1; U; en)',
            'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)',
            'Mozilla/5.0 (compatible; Konqueror/3.5; Linux) KHTML/3.5.5 (like Gecko) (Kubuntu)',
            'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.12) Gecko/20070731 Ubuntu/dapper-security Firefox/1.5.0.12',
            'Lynx/2.8.5rel.1 libwww-FM/2.14 SSL-MM/1.4.1 GNUTLS/1.2.9'
            ]
    headers={
            'User-Agent':user_agents[0],
            'Referer':settings.S_start_urls[0]
            }
    req=urllib2.Request(url=login_url,headers=headers)
    res = urllib2.urlopen(req)
    
    print 'code is :'+str(res.code)
    if res.code<=200:
        print 'login %s success'%settings.S_target_website
    else:
        print 'login %s fail'%settings.S_target_website
        print cook_jar._cookies
    return res
Developer: lvshuchengyin | Project: mycode | Lines: 31 | Source: try_login.py


Example 13: get_current_sequence

    def get_current_sequence(self):
        """get the current sequence from the paylist"""
        url = self.get_sequence_url()
        header = self.get_header()

        req = urllib2.Request(url, None, header)
        opener = urllib2.build_opener()
        opener.add_handler(urllib2.HTTPCookieProcessor(self.get_cookie()))
        try:
            opener.add_handler(self.get_proxy())
        except:
            log.warning('can not add proxy')

        urllib2.install_opener(opener)

        try:
            response = urllib2.urlopen(req, timeout=10)
            stream = response.read()
        except:
            return 0

        try:
            for line in stream.split('\n'):
                if line.startswith('#EXT-X-MEDIA-SEQUENCE'):
                    return line.split(':')[1]
        except:
            return 0
Developer: DerCoop | Project: wwwc | Lines: 27 | Source: sessionhandler.py


Example 14: run

 def run(self):
     global proxyLists
     global proxyCheckedLists
     while proxyLists:
         proxyLock.acquire()  # acquire the lock
         proxyList = proxyLists.pop()  # pop one proxy entry (ip, port)
         proxyLock.release()

         cookie = urllib2.HTTPCookieProcessor()  # enable cookie handling
         proxyHandle = urllib2.ProxyHandler({"http" : r"http://%s:%s" % (proxyList[0], proxyList[1])})
         opener = urllib2.build_opener(cookie, proxyHandle)
         opener.addheaders = [("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.102 Safari/537.36")]
         urllib2.install_opener(opener)
         t1 = time.time()
         try:
             req = urllib2.urlopen(self.test_url, timeout=self.timeout)
             result = req.read()
             pos = result.find(self.test_str)
             timeused = time.time() - t1
             proxyList.append(timeused)
             if pos > 1:
                 proxyLock.acquire()
                 proxyCheckedLists.append(proxyList)
                 proxyLock.release() 
         except Exception,e:
             continue
Developer: xezw211 | Project: python | Lines: 26 | Source: pachong.py


Example 15: fx_opener

def fx_opener(request):
    request.addfinalizer(
        functools.partial(setattr, urllib2, '_opener', urllib2._opener)
    )
    opener = urllib2.build_opener(TestHTTPHandler)
    urllib2.install_opener(opener)
    return opener
Developer: earthreader | Project: libearth | Lines: 7 | Source: conftest.py


Example 16: getResponseMixedData

	def getResponseMixedData(self, url, secureToken, dic, additionalOptions=None):
		"Method sets up a REST call with mixed body data such as multipart/form-data."
		
		# check whether proxy is given
		if "proxy" in globals():
			proxy_handler = urllib2.ProxyHandler(self.config.proxy)
			opener = urllib2.build_opener(proxy_handler)
			urllib2.install_opener(opener)
				
		multipart = urllib2.build_opener(MultipartPostHandler.MultipartPostHandler)
		urllib2.install_opener(multipart)
		
		req = urllib2.Request(url, dic.parameters())

		req.add_header('Authorization', self.config.SDK_AUTH+",oauth_token=\""+secureToken+"\"")
		req.add_header('User-Agent', self.config.SDK_VERSION)
		req.add_header('Accept', 'application/json')
		
		# sets additional header fields
		if additionalOptions != None:
			for key in additionalOptions:
				req.add_header(key, additionalOptions[key])
		
		try:
			response = urllib2.urlopen(req)
			
			response = json.loads(response.read())	
			
			return response
		
		except urllib2.HTTPError as e:
			
			raise TelekomException(json.loads(e.read()))
Developer: tschubotz | Project: box_fetch | Lines: 33 | Source: TelekomJSONService.py


Example 17: __init__

 def __init__(self, url, close=True, proxy=None, post=None, mobile=False, referer=None, cookie=None, output='', timeout='10'):
     if not proxy is None:
         proxy_handler = urllib2.ProxyHandler({'http':'%s' % (proxy)})
         opener = urllib2.build_opener(proxy_handler, urllib2.HTTPHandler)
         opener = urllib2.install_opener(opener)
     if output == 'cookie' or not close == True:
         import cookielib
         cookie_handler = urllib2.HTTPCookieProcessor(cookielib.LWPCookieJar())
         opener = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
         opener = urllib2.install_opener(opener)
     if not post is None:
         request = urllib2.Request(url, post)
     else:
         request = urllib2.Request(url,None)
     if mobile == True:
         request.add_header('User-Agent', 'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_0 like Mac OS X; en-us) AppleWebKit/532.9 (KHTML, like Gecko) Version/4.0.5 Mobile/8A293 Safari/6531.22.7')
     else:
         request.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0')
     if not referer is None:
         request.add_header('Referer', referer)
     if not cookie is None:
         request.add_header('cookie', cookie)
     response = urllib2.urlopen(request, timeout=int(timeout))
     if output == 'cookie':
         result = str(response.headers.get('Set-Cookie'))
     elif output == 'geturl':
         result = response.geturl()
     else:
         result = response.read()
     if close == True:
         response.close()
     self.result = result
Developer: 1c0n | Project: lambda-xbmc-addons | Lines: 32 | Source: default.py


Example 18: Weibo

def Weibo(USERID, PASSWD):
    client = APIClient(app_key=APP_KEY, app_secret=APP_SECRET, redirect_uri=CALLBACK_URL)
    referer_url = client.get_authorize_url()
    #print "referer url is : %s" % referer_url

    cookies = urllib2.HTTPCookieProcessor()
    opener = urllib2.build_opener(cookies)
    urllib2.install_opener(opener)
 
    postdata = {"client_id": APP_KEY,
                "redirect_uri": CALLBACK_URL,
                "userId": USERID,
                "passwd": PASSWD,
                "isLoginSina": "0",
                "action": "submit",
                "response_type": "code",
             }
 
    headers = {"User-Agent": "Mozilla/5.0 (Windows NT 6.1; rv:11.0) Gecko/20100101 Firefox/11.0",
               "Host": "api.weibo.com",
               "Referer": referer_url
             }
 
    req  = urllib2.Request(url = AUTH_URL,
                           data = urllib.urlencode(postdata),
                           headers = headers
                    )
    try:
        resp = urllib2.urlopen(req)
        #print "callback url is : %s" % resp.geturl()
        code = resp.geturl()[-32:]
    except APIError, e:
        print e
Developer: cloudaice | Project: weibo-terminal | Lines: 33 | Source: lib.py


Example 19: urlcontent

    def urlcontent(self, url, para=None, header={}):
        """
        获取地址的源代码
        url 要获取的网址
        header 头部设置
            """
        print "start get url:%s" % url
        if self.auto_sleep:
            sleep_time = random.random()*2
            time.sleep(sleep_time)

        # set the proxy (http and https only)
        if self.proxy:
            opener = urllib2.build_opener(urllib2.ProxyHandler({'http': self.proxy, 'https' : self.proxy}) )
            urllib2.install_opener(opener)
        # build the POST parameters
        params = None
        if para:
            params = urllib.urlencode(para)
        # create the request
        request = urllib2.Request(url, params, header)
        try:
            # send the request
            response = urllib2.urlopen(request)
            content = response.read()
            # a charset was configured
            if self.charset:
                content = content.encode(self.charset)
            return content
        except:
            print 'get url content failed:', url
            return None
Developer: selecterskypython | Project: 24home.com | Lines: 32 | Source: http.py


Example 20: check_proxy

 def check_proxy(self, specific={}):
     """ Checks if proxy settings are set on the OS
     Returns:
     -- 1 when direct connection works fine
     -- 2 when direct connection fails and any proxy is set in the OS
     -- 3 and settings when direct connection fails but a proxy is set
     see: https://docs.python.org/2/library/urllib.html#urllib.getproxies
     """
     os_proxies = getproxies()
     if len(os_proxies) == 0 and self.check_internet_connection:
         logging.info("No proxy needed nor set. Direct connection works.")
         return 1
     elif len(os_proxies) == 0 and not self.check_internet_connection:
         logging.error("Proxy not set in the OS. Needs to be specified")
         return 2
     else:
         #
         env['http_proxy'] = os_proxies.get("http")
         env['https_proxy'] = os_proxies.get("https")
         #
         proxy = ProxyHandler({
                              'http': os_proxies.get("http"),
                              'https': os_proxies.get("https")
                              })
         opener = build_opener(proxy)
         install_opener(opener)
         urlopen('http://www.google.com')
         return 3, os_proxies
Developer: etiennebr | Project: DicoGIS | Lines: 28 | Source: checknorris.py



Note: The urllib2.install_opener examples in this article were compiled by 纯净天空 from source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers, and copyright remains with the original authors. Before redistributing or reusing the code, please consult the license of the corresponding project; do not reproduce without permission.

