Web crawler notes: using the urllib library with an IP proxy, a user-agent pool, and the combination of the two.

Using IP proxy

ProxyHandler() registers the proxy IP; the dict key must match the request target's scheme (http or https)
build_opener() builds an opener that routes requests through the proxy
install_opener() installs the opener globally, so urlopen() automatically uses the proxy IP

#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Route all urllib requests through a single fixed proxy IP.

ProxyHandler registers the proxy; the dict key must match the scheme of
the target URL.  install_opener() makes the opener global so every plain
urlopen() call afterwards goes through the proxy automatically.
"""
import urllib.request

ip = "180.115.8.212:39109"
# Register the proxy for both schemes so http:// and https:// targets are
# equally covered (registering only "https" leaves http URLs unproxied).
proxy = urllib.request.ProxyHandler({"http": ip, "https": ip})
opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
urllib.request.install_opener(opener)  # global: urlopen() now uses the proxy

# request — timeout keeps the script from hanging forever on a dead proxy
url = "https://www.baidu.com/"
data = urllib.request.urlopen(url, timeout=10).read().decode("utf-8")
print(data)

Construction of ip proxy pool

This approach suits proxy IPs with long lifetimes and good stability: one proxy is chosen at random from a static list on each call.

Python Resource Sharing Skirt: 855408893 has installation packages, learn video materials, update technology every day. Here is the gathering place of Python learners, zero foundation, advanced, welcome to click Python resource sharing

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib
from urllib import request
import random   #Introducing Random Module Files
def dai_li_ip():
    """Pick a random proxy from a static pool and install it globally.

    Suitable for long-lived, stable proxy IPs.  After this call, every
    urllib.request.urlopen() automatically routes through the chosen proxy.

    Returns:
        str: the "host:port" proxy that was installed (also printed, to
        preserve the original script's console output).
    """
    ip_pool = [
        '110.73.8.103:8123',
        '115.46.151.100:8123',
        '42.233.187.147:19'
        ]
    chosen = random.choice(ip_pool)
    print(chosen)
    # Register the proxy for both schemes so it applies to http:// and
    # https:// targets alike (registering only "https" leaves http bare).
    proxy = urllib.request.ProxyHandler({"http": chosen, "https": chosen})
    opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
    urllib.request.install_opener(opener)  # global effect on urlopen()
    return chosen

# Install a random proxy from the pool, then fetch the page through it.
dai_li_ip()
target = "https://www.baidu.com/"
response = urllib.request.urlopen(target)
data = response.read().decode("utf-8")
print(data)

Construction of ip proxy pool

Here the IP is obtained dynamically by calling a third-party interface before every request, which suits proxies with short lifetimes.

We use http://http.zhimaruanjian.com...

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib
from urllib import request
import json
def dai_li_ip():
    """Fetch a fresh proxy from the zhima API and install it globally.

    Suitable for short-lived proxies: every call asks the third-party
    interface for one new "ip:port" pair and registers it as the global
    proxy for subsequent urlopen() calls.

    Returns:
        str: the "host:port" proxy that was installed.

    Raises:
        urllib.error.URLError: if the API endpoint cannot be reached.
        KeyError, IndexError: if the API response shape is unexpected.
    """
    url = "http://http-webapi.zhimaruanjian.com/getip?num=1&type=2&pro=&city=0&yys=0&port=11&time=1&ts=0&ys=0&cs=0&lb=1&sb=0&pb=4&mr=1"
    # timeout keeps a dead API endpoint from hanging the caller forever
    raw = urllib.request.urlopen(url, timeout=10).read().decode("utf-8")
    payload = json.loads(raw)  # JSON text -> Python dict

    entry = payload['data'][0]
    print(entry)
    zh_ip = "{}:{}".format(entry['ip'], entry['port'])
    print(zh_ip)

    # Register for both schemes so the proxy applies regardless of the
    # target protocol (the original covered only "https").
    proxy = urllib.request.ProxyHandler({"http": zh_ip, "https": zh_ip})
    opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
    urllib.request.install_opener(opener)  # global effect on urlopen()
    return zh_ip

# Obtain and install a fresh proxy IP, then fetch the page through it.
dai_li_ip()
target = "https://www.baidu.com/"
page = urllib.request.urlopen(target)
data = page.read().decode("utf-8")
print(data)

Combining a user-agent pool with an IP proxy

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib
from urllib import request
import json
import random
def yh_dl():    # Build the user-agent pool and install one UA globally
    """Install a random User-Agent header globally.

    Picks one UA string at random from a fixed pool, builds an opener
    carrying it as the User-Agent header, and installs that opener
    globally so urlopen() sends the header automatically.

    Returns:
        str: the User-Agent value that was installed.
    """
    yhdl = [
        'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
        'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0',
        'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)',
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
        'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
        'Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11',
        'Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11',
        'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)',
        'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)',
        'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',
        'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)',
        'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)',
        'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)',
        'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',
        'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5',
        # BUG FIX: the iPod entry previously began with a literal
        # "User-Agent:" prefix, which would have been sent as part of the
        # header *value*, producing a malformed User-Agent.
        'Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5',
        'Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5',
        'Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1',
        'Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10',
        'Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13',
        'Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+',
        'Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)',
        'UCWEB7.0.2.37/28/999',
        'NOKIA5700/ UCWEB7.0.2.37/28/999',
        'Openwave/ UCWEB7.0.2.37/28/999',
        'Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999'
        ]
    thisua = random.choice(yhdl)                    # random UA from the pool
    headers = ("User-Agent", thisua)                # header name/value pair
    opener = urllib.request.build_opener()          # opener carrying the header
    opener.addheaders = [headers]
    urllib.request.install_opener(opener)           # global: urlopen() sends it
    return thisua

def dai_li_ip():    # Create the IP proxy pool (one fresh proxy per call)
    """Fetch a fresh proxy from the zhima API and install it globally.

    Every call asks the third-party interface for one new "ip:port" pair
    and registers it as the global proxy for subsequent urlopen() calls.

    Returns:
        str: the "host:port" proxy that was installed.

    Raises:
        urllib.error.URLError: if the API endpoint cannot be reached.
        KeyError, IndexError: if the API response shape is unexpected.
    """
    url = "http://http-webapi.zhimaruanjian.com/getip?num=1&type=2&pro=&city=0&yys=0&port=11&time=1&ts=0&ys=0&cs=0&lb=1&sb=0&pb=4&mr=1"
    # timeout keeps a dead API endpoint from hanging the caller forever
    raw = urllib.request.urlopen(url, timeout=10).read().decode("utf-8")
    payload = json.loads(raw)  # JSON text -> Python dict

    entry = payload['data'][0]
    print(entry)
    zh_ip = "{}:{}".format(entry['ip'], entry['port'])
    print(zh_ip)

    # Register for both schemes so the proxy applies regardless of the
    # target protocol (the original covered only "https").
    proxy = urllib.request.ProxyHandler({"http": zh_ip, "https": zh_ip})
    opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
    urllib.request.install_opener(opener)  # global effect on urlopen()
    return zh_ip

# request: install a fresh proxy IP and a random User-Agent, then search.
dai_li_ip()  # install proxy IP globally
yh_dl()      # install User-Agent globally

gjci = 'Dress'
# Percent-encode the keyword: URLs cannot carry raw non-ASCII characters.
# (The original chained assignment "zh_gjci = gjc = ..." left an unused
# duplicate name behind.)
zh_gjci = urllib.request.quote(gjci)
url = "https://s.taobao.com/search?q=%s&s=0" % (zh_gjci)
# print(url)
data = urllib.request.urlopen(url).read().decode("utf-8")
print(data)

Packaging the combined user-agent + IP-proxy crawler as a reusable module

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib
from urllib import request
import json
import random
import re
import urllib.error
def hq_html(hq_url):
    """Fetch *hq_url* and return its HTML source, or None on failure.

    Encapsulated crawler helper: before the request it installs a random
    User-Agent (user-agent pool) and a freshly fetched proxy IP, both as
    global urllib openers, so the plain urlopen() call below uses them.

    Args:
        hq_url (str): full URL of the page to crawl ("http://..." or
            "https://...").

    Returns:
        str or None: decoded page source, or None when the request failed.
    """
    def yh_dl():    # install a random User-Agent header globally
        yhdl = [
            'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
            'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0',
            'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
            'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
            'Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11',
            'Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11',
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)',
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)',
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)',
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)',
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)',
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',
            'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5',
            # BUG FIX: this entry previously began with a literal
            # "User-Agent:" prefix, which would be sent inside the header
            # value and produce a malformed User-Agent.
            'Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5',
            'Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5',
            'Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1',
            'Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10',
            'Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13',
            'Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+',
            'Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)',
            'UCWEB7.0.2.37/28/999',
            'NOKIA5700/ UCWEB7.0.2.37/28/999',
            'Openwave/ UCWEB7.0.2.37/28/999',
            'Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999'
            ]
        thisua = random.choice(yhdl)                    # random UA from pool
        opener = urllib.request.build_opener()
        opener.addheaders = [("User-Agent", thisua)]
        urllib.request.install_opener(opener)           # global for urlopen()

    def dai_li_ip(hq_url):    # fetch a proxy from the API and install it
        url = "http://http-webapi.zhimaruanjian.com/getip?num=1&type=2&pro=&city=0&yys=0&port=11&time=1&ts=0&ys=0&cs=0&lb=1&sb=0&pb=4&mr=1"
        # (The original guarded this with "if url:", which is always true
        # for a non-empty literal, so the guard is dropped.)
        data = urllib.request.urlopen(url, timeout=10).read().decode("utf-8")
        data2 = json.loads(data)  # JSON text -> Python dict
        zh_ip = "{}:{}".format(data2['data'][0]['ip'], data2['data'][0]['port'])
        # Extract the scheme of the target URL so the ProxyHandler key
        # matches it ("http" or "https").  Raw string fixes the invalid
        # escape sequence the original non-raw pattern produced.
        pat = r"(\w*):\w*"
        rst2 = re.compile(pat).findall(hq_url)[0]
        proxy = urllib.request.ProxyHandler({rst2: zh_ip})
        opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
        urllib.request.install_opener(opener)  # global for urlopen()

    # request
    try:
        dai_li_ip(hq_url)  # install proxy IP
        yh_dl()            # install User-Agent

        data = urllib.request.urlopen(hq_url).read().decode("utf-8")
        return data
    except urllib.error.URLError as e:
        # Best-effort helper: report what went wrong instead of swallowing
        # the error silently (the original had both prints commented out),
        # then return None explicitly so callers can test the result.
        if hasattr(e, "code"):      # HTTP status code, if any
            print("hq_html error code:", e.code)
        if hasattr(e, "reason"):    # human-readable failure reason
            print("hq_html error reason:", e.reason)
        return None

# a = hq_html('http://www.baid.com/')
# print(a)

Module usage

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib.request
import fzhpach
gjc = 'Advertising Recording'
gjc = urllib.request.quote(gjc)  # percent-encode the keyword; URLs cannot contain raw Chinese characters
url = 'https://www.baidu.com/s?wd=%s&pn=0' %(gjc)
a = fzhpach.hq_html(url)
print(a)
What can I learn from my learning process?
python learning resource qun, 855 408 893
 There are good learning video tutorials, development tools and e-books in the group.
Share with you python enterprise talent demand and how to learn python from zero basis, and learn what content.

Tags: Windows Python Mac OS X

Posted on Mon, 07 Oct 2019 23:24:02 -0700 by barrow