I wrote a small crawler; why does it always throw errors as soon as I use proxy IPs?

Posted: 2022-12-17 09:47:32
import urllib.request
import re
import os
import random
import threading


def url_open(url):
    # This commented-out proxy block is the part that never runs correctly;
    # the proxy IPs were taken from a free proxy list online.
    #ips = ['117.136.234.12:80', '218.189.26.20:8080',
    #       '202.194.101.150:80', '180.166.112.47:8888']
    #proxy = urllib.request.ProxyHandler({'http': random.choice(ips)})  # e.g. {'http': '124.202.174.66:8118'}
    #opener = urllib.request.build_opener(proxy)
    #opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36')]
    #urllib.request.install_opener(opener)
    req = urllib.request.Request(url)
    req.add_header('User-Agent', 'Mozilla/5.0')
    urlobject = urllib.request.urlopen(req)
    response = urlobject.read()
    return response
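
# A hedged sketch, not from the original code: a dead free proxy makes
# urlopen() raise URLError (a subclass of OSError) or time out, so one way
# to keep the proxy path usable is to try each candidate with a timeout
# and fall back to a direct request. url_open_via_proxy and its defaults
# are illustrative, not something the post itself defines.
def url_open_via_proxy(url, proxies=None, timeout=10):
    candidates = list(proxies or [])
    random.shuffle(candidates)
    for ip in candidates:
        # Build a per-call opener instead of install_opener(), so one bad
        # proxy cannot poison every later urlopen() call globally.
        opener = urllib.request.build_opener(
            urllib.request.ProxyHandler({'http': ip}))
        opener.addheaders = [('User-Agent', 'Mozilla/5.0')]
        try:
            return opener.open(url, timeout=timeout).read()
        except OSError as e:
            print('proxy', ip, 'failed:', e)
    # Every candidate failed: fall back to a direct connection.
    return url_open(url)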


def find_page(html):
    # Pull a page marker like [1333] out of the front-page HTML.
    s2 = r'\[\d{4}\]'
    m = re.search(s2, html)
    page = m.group()
    return page


def find_page_link(html):
    # Collect every mw600 image URL on the page; the dots are escaped so
    # that '.' matches a literal dot instead of any character.
    s = r'http://ww[0-9]\.sinaimg\.cn/mw600/\w+\.jpg'
    m = re.findall(s, html)
    return m


def save_page(jpg):
    for file in jpg:
        data = url_open(file)
        # Save under the original file name, e.g. E:\作业\j_d\xxx.jpg
        name = "E:\\作业\\j_d\\" + file.split('/')[-1]
        with open(name, 'wb') as f:
            f.write(data)


def down_jpg(dir_name='E:\\作业\\j_d', page=10, pages=10):
    #os.mkdir(dir_name)
    os.chdir(dir_name)
    # Commented-out alternative: fetch the front page and let find_page()
    # pick out the newest page number instead of passing one in.
    #red = url_open('http://jandan.net/ooxx')
    #red = red.decode('utf-8')
    #page = find_page(red)
    #page = int(page[1:-1])
    for i in range(pages):
        page += 1
        url = 'http://jandan.net/ooxx/page-' + str(page) + '#comments'
        print(url)
        data = url_open(url)
        data = data.decode('utf-8')
        page_list = find_page_link(data)
        save_page(page_list)


if __name__ == '__main__':
    # Two threads crawl disjoint page ranges into the same folder.
    p = threading.Thread(target=down_jpg, args=('E:\\作业\\j_d', 1555, 10))
    c = threading.Thread(target=down_jpg, args=('E:\\作业\\j_d', 1024, 10))
    p.start()
    c.start()
    p.join()
    c.join()
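
For what it's worth, the errors almost certainly come from the proxies rather than from the code: addresses scraped from free proxy lists usually die within hours, and urlopen() through a dead one raises urllib.error.URLError, times out, or gets the connection reset. A quick way to confirm is to probe each address first and keep only the ones that respond. A minimal sketch, assuming the same ips list as in the commented-out block (alive_proxies is an illustrative name):

import urllib.request

def alive_proxies(ips, test_url='http://jandan.net/ooxx', timeout=5):
    # Return only the proxies that can actually fetch test_url.
    good = []
    for ip in ips:
        opener = urllib.request.build_opener(
            urllib.request.ProxyHandler({'http': ip}))
        opener.addheaders = [('User-Agent', 'Mozilla/5.0')]
        try:
            opener.open(test_url, timeout=timeout)
            good.append(ip)
        except OSError as e:  # URLError is a subclass of OSError
            print(ip, 'looks dead:', e)
    return good

If this filters out all four addresses in the post, that by itself explains the constant errors; the fix is fresher proxies, not a code change.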