Python开发 · Python学习 · Python3自学 · 爬虫实战

Python 实现刷网页流量工具,并添加代理,报错处理功能

2017-04-20  本文已影响390人  凌霄_

个人博客:凌霄的博客

准备

开始

import random
import smtplib
import urllib.error
import urllib.request
from email.mime.text import MIMEText
# Simple GET helper.
def get(url):
    """Fetch *url* (a str or a urllib Request) and return the HTTP status code.

    Using the response as a context manager guarantees the underlying
    connection is closed; the original left the response object open,
    leaking a socket per call in the infinite polling loop below.
    """
    with urllib.request.urlopen(url) as resp:
        return resp.code
if __name__ == '__main__':
    # Basic settings: target URL plus a browser-like User-Agent header.
    url = "http://lx.nextdev.cn"
    user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.63 Safari/537.36"
    headers = {'User-Agent': user_agent}
    req = urllib.request.Request(url, headers=headers)
    # Visit counter.
    i = 1
    while True:
        # Pass the prepared Request object so the User-Agent header is
        # actually sent — the original fetched the bare URL and `req`
        # was built but never used.
        code = get(req)
        # Show the visit number: `i` was counted but never printed before,
        # matching the later revisions that print the attempt count.
        print('第'+str(i)+'次访问:'+str(code))
        i = i + 1

简单粗暴,刷的只是 pv,ip 没变,容易被搜索引擎发现,下面我们来改进一下

增加代理功能

# Proxy-aware GET helper. NOTE(review): the article's scrape lost the
# `def` line — these statements referenced `proxies` and `random` at
# module top level (NameError on import), while the main block below
# calls `get(url, proxies)`. Reconstructed as that two-argument function.
def get(url, proxies):
    """Fetch *url* through a randomly chosen HTTP proxy from *proxies*.

    Installs a global opener using one random proxy per call, then
    returns the HTTP status code of the response.
    """
    random_proxy = random.choice(proxies)
    proxy_support = urllib.request.ProxyHandler({"http": random_proxy})
    opener = urllib.request.build_opener(proxy_support)
    urllib.request.install_opener(opener)
    # Close the response deterministically via the context manager.
    with urllib.request.urlopen(url) as resp:
        return resp.code
if __name__ == '__main__':
    url = "http://lx.nextdev.cn"
    #添加代理列表,可以自行去百度获取
    # Proxy pool (harvest fresh ones yourself; these are examples).
    proxies = ["124.88.67.22:80","124.88.67.82:80","124.88.67.81:80","124.88.67.31:80","124.88.67.19:80","58.23.16.240:80"]
    user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.63 Safari/537.36"
    headers = {'User-Agent': user_agent}
    req = urllib.request.Request(url, headers=headers)
    visit_no = 1
    while True:
        # Each call picks a fresh random proxy before requesting the page.
        code = get(url, proxies)
        print('第'+str(visit_no)+'次代理访问:'+str(code))
        visit_no += 1

这样差不多了,不过有个 bug ,如果页面打不开了或者代理失效了,程序就自动结束了,接下来我们添加异常处理功能

异常处理

def mail(txt):
    """Send an alert email (via QQ SMTP over SSL) containing *txt*.

    Callers pass non-string values such as ``e.code`` (an int) and
    ``err.reason``; ``MIMEText`` requires a str payload, so *txt* is
    coerced with ``str()`` — the original raised ``TypeError`` here.
    """
    _user = "你的账号"   # sender account
    _pwd = "你的密码"    # sender password / authorization token
    _to = "收件账号"     # recipient address
    msg = MIMEText(str(txt), 'plain', 'utf-8')
    # Subject line.
    msg["Subject"] = "代理失效!"
    msg["From"] = _user
    msg["To"] = _to

    try:
        # QQ mail requires SSL on port 465.
        s = smtplib.SMTP_SSL("smtp.qq.com", 465)
        s.login(_user, _pwd)
        s.sendmail(_user, _to, msg.as_string())
        s.quit()
        print("Success!")

    except smtplib.SMTPException as e:
        # Typo fixed: was "Falied".
        print("Failed,%s" % e)
if __name__ == '__main__':
    url = "http://lx.nextdev.cn"
    proxies = ["124.88.67.22:80","124.88.67.82:80","124.88.67.81:80","124.88.67.31:80","124.88.67.19:80","58.23.16.240:80"]
    user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.63 Safari/537.36"
    headers = {'User-Agent': user_agent}
    req = urllib.request.Request(url, headers=headers)
    attempt = 1
    while True:
        try:
            status = get(url, proxies)
            print('第'+str(attempt)+'次代理访问:'+str(status))
            attempt += 1
        except urllib.error.HTTPError as http_err:
            # Server responded with an error status: report and email it.
            print(http_err.code)
            mail(http_err.code)
        except urllib.error.URLError as conn_err:
            # Could not reach the server at all (dead proxy, DNS failure, ...).
            print(conn_err.reason)
            mail(conn_err.reason)

完成!

结语

代码只有短短的 50 行,程序还可以改进:
例如:代理列表自动获取,添加界面

欢迎评论

上一篇下一篇

猜你喜欢

热点阅读