1 #!usr/bin/env python 2 #coding:utf-8 3 4 import urllib2 5 import cookielib 6 from bs4 import BeautifulSoup 7 8 #cookie = cookielib.CookieJar() 9 #hander = urllib2.HTTPCookieProcessor(cookie) 10 #opener = urllib2.build_opener(hander) 11 12 user_agent = {‘User-Agent‘:‘Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.110 Safari/537.36‘} 13 cookie = {‘_T_WM=622b9aac959576e1ec052536bf788ca6; SUB=_2A256NuabDeRxGeVI41EY8CzMwj-IHXVZ2IrTrDV6PUJbstBeLWvckW1LHesb2LTzebke-kTx8Edhhrigsk-45Q..; SUBP=0033WrSXqPxfM725Ws9jqgMF55529P9D9W5E1DxUxIRIQkQSlkE2rx2s5JpX5o2p5NHD95Q0Son01K5Eeh.0; SUHB=0pHBSdB4EWaOgH; SSOLoginState=1462933195; gsid_CTandWM=4ubeCpOz59AQGOfsYRXsvfslM1v‘} 14 15 header = { 16 ‘User-Agent‘:user_agent, 17 ‘Cookie‘ :cookie 18 } 19 url = ‘http://weibo.cn/u/5305630013‘ 20 request = urllib2.Request(url,headers=header) 21 response = urllib2.urlopen(request) 22 soup = BeautifulSoup(response,‘lxml‘,from_encoding=‘utf-8‘) 23 print soup.prettify() 24 25 titles = soup.find_all(‘span‘,class_=‘ctt‘) 26 for title in titles: 27 print title.get_text()
本来是想用cookielib模块获取cookie的，但是只获取到了第一个分号前的内容，就直接在F12里面找到cookie，试了下效果，发现就是cookie的问题，就先把代码写出来！
要注意的地方:
1、首先就是cookie了,最好能够自动获取,我看我今天可不可以完成这个任务;
2、然后就是user-agent这个了，有时候不加也可以，但以防万一，就都添加上吧；
3、就是那个headers了,要传送给url的数据都要放在headers里面,一开始不知道就直接把cookie和user-agent就是放到request里面,报错。
最后是写给自己的,要慢慢的习惯面向对象编程了
时间: 2024-10-19 03:41:54