Python爬虫天气预报

正文

无聊不知道写点啥

运行结果

代码

import requests
from fake_useragent import UserAgent
from bs4 import BeautifulSoup
def city_parse(url):
    """Fetch a weather.com.cn text-forecast page and print each city's lowest temperature.

    Parameters:
        url: one of the regional textFC pages, e.g.
             http://www.weather.com.cn/textFC/hb.shtml

    Side effects: performs one HTTP GET and prints one line per city.
    Raises requests.RequestException on network/HTTP failure.
    """
    # Random User-Agent to reduce the chance of the request being blocked.
    headers = {"User-Agent": UserAgent().random}
    # timeout so a dead server cannot hang the program forever;
    # raise_for_status so an HTTP error is not silently parsed as a page.
    resp = requests.get(url=url, headers=headers, timeout=10)
    resp.raise_for_status()
    html = resp.content.decode("utf-8")
    # NOTE: html5lib is deliberate — some of these pages (notably gat.shtml)
    # contain malformed table markup that stricter parsers mis-handle.
    soup = BeautifulSoup(html, "html5lib")
    conMidtab = soup.find("div", attrs={"class": "conMidtab"})
    if conMidtab is None:
        # Layout changed or the request was blocked; avoid AttributeError.
        print("未找到天气数据")
        return
    for table in conMidtab.find_all("table"):
        # The first two rows of each table are headers, skip them.
        for index, tr in enumerate(table.find_all("tr")[2:]):
            tds = tr.find_all("td")
            # In the first data row the first cell is the province name,
            # so the city name sits in the second cell instead.
            city_td = tds[1] if index == 0 else tds[0]
            temp_td = tds[-2]  # second-to-last cell holds the minimum temperature
            city = list(city_td.stripped_strings)[0]
            temp = list(temp_td.stripped_strings)[0]
            print(city + "-最低温度:" + temp)
def chioce_area():
    """Show the region menu, read the user's choice, and print that region's weather.

    Choice 9 exits the program; an unknown or non-numeric choice simply
    returns so the caller's loop re-prompts instead of crashing.

    NOTE: the (misspelled) function name is kept unchanged because it is
    the public entry point used by the main loop.
    """
    # Menu number -> URL path segment on weather.com.cn/textFC/.
    regions = {
        1: "hb",   # 华北
        2: "db",   # 东北
        3: "hd",   # 华东
        4: "hz",   # 华中
        5: "hn",   # 华南
        6: "xb",   # 西北
        7: "xn",   # 西南
        8: "gat",  # 港澳台
    }
    print("请输入序号选择地区\n1.华北\n2.东北\n3.华东\n4.华中\n5.华南\n6.西北\n7.西南\n8.港澳台\n9.退出\n")
    try:
        num = int(input("请输入序号选择地区"))
    except ValueError:
        # Non-numeric input used to raise and kill the program; now we
        # report it and let the main loop prompt again.
        print("输入无效，请输入数字")
        return
    if num == 9:
        exit()
    if num in regions:
        url = "http://www.weather.com.cn/textFC/%s.shtml" % regions[num]
        city_parse(url)
if __name__ == "__main__":
    # Keep prompting until the user picks option 9, which calls exit().
    # (The original `flag` variable was never modified, so this loop is
    # equivalent to its `while flag == 0`.)
    while True:
        chioce_area()

相关文章

功能概要:(目前已实现功能)公共展示部分:1.网站首页展示...
大体上把Python中的数据类型分为如下几类: Number(数字) ...
开发之前第一步,就是构造整个的项目结构。这就好比作一幅画...
源码编译方式安装Apache首先下载Apache源码压缩包,地址为ht...
前面说完了此项目的创建及数据模型设计的过程。如果未看过,...
python中常用的写爬虫的库有urllib2、requests,对于大多数比...