
【Python Crawler】Scraping city data

2017-08-08  半杯故事
import requests,json,csv
from lxml import etree


# Exploratory requests (kept for reference): used to inspect the HTML and JSON
# responses before the functions below were written.
# url = '/match/team_players.htm?divisionId=874902863023837184&teamId=892361870411960321'
# res = requests.get(url).text
# print(res)
# select = etree.HTML(res)
# name = select.xpath('//span[@class="player-name-value player-short-words"]/text()')
# print(name)

# url = "/match/team_players_json.htm?divisionId=874902863023837184&teamId=892361870411960321&page=2"
# res = requests.post(url).text
# data = json.loads(res)['data']
# count = data['count']
# print(count)


def post_team_name(url):
    try:
        res = requests.post(url.format(1)).text
        data = json.loads(res)['data']
        count = data['count']
        # print(count % 10)
        # If the last page is not full, add 1 to round the integer division up;
        # one more is added because range() stops before its upper bound.
        pages = count // 10 + 2 if count % 10 > 0 else count // 10 + 1
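        # Worked example: count = 23 -> 23 // 10 + 2 = 4, so range(1, 4) requests pages 1-3;
        # count = 20 -> 20 // 10 + 1 = 3, so range(1, 3) requests pages 1-2.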
        # print(pages)
        for page in range(1,pages):
            page_res = requests.post(url.format(page)).text
            page_data = json.loads(page_res)['data']
            # Avoid shadowing the built-in name `list`: each item is one team record.
            for item in page_data['list']:
                teamName = item['teamName']
                teamId = item['teamId']
                teamPic = item['teamPic']
                # print(teamName, teamId, teamPic)

                # The site serves the player data both as JSON and as rendered HTML,
                # so it can be parsed either from JSON or with XPath.
                # Parse the JSON response:
                # post_players_information(teamId, teamName)

                # Parse the HTML with XPath:
                post_xpath_players_information(teamId, teamName)
    except Exception as e:
        print("post_team_name函数解析错误 错误为:",e)

# Fetch player data by parsing the JSON response
def post_players_information(teamId,teamName):
    try:
        play_url = "/match/team_players_json.htm?divisionId=874902863023837184&teamId={}".format(
            teamId)
        res = requests.post(play_url).text
        play_data = json.loads(res)['data']
        count = play_data['count']
        # print(count)
        pages = count // 10 + 2 if count % 10 > 0 else count // 10 + 1
        # print(pages)
        for page in range(1,pages):
            url = "/match/team_players_json.htm?divisionId=874902863023837184&teamId={}&page={}".format(
                teamId,page)
            res = requests.post(url).text
            play_data = json.loads(res)['data']
            for item in play_data['list']:
                print(teamName, teamId, count, item['nickname'], item['clothNo'], item['area'], item['age'], item['height'], item['weight'])
                writer.writerow((teamName, teamId, count, item['nickname'], item['clothNo'], item['area'], item['age'], item['height'], item['weight']))
    except Exception as e:
        print("post_players_information函数解析错误 错误为:",e)

# Fetch player data by parsing the HTML with XPath
def post_xpath_players_information(teamId,teamName):

    try:
        play_url = "/match/team_players_json.htm?divisionId=874902863023837184&teamId={}".format(
            teamId)
        res = requests.post(play_url).text
        play_data = json.loads(res)['data']
        count = play_data['count']
        # print(count)
        pages = count // 10 + 2 if count % 10 > 0 else count // 10 + 1
        # print(pages)
        for page in range(1, pages):
            url = "/match/team_players.htm?divisionId=874902863023837184&teamId={}&page={}".format(teamId,page)
            res = requests.post(url).text
            select = etree.HTML(res)
            data = select.xpath('//div[@class="player-detail"]')
            # 名字
            names = select.xpath('//span[@class="player-name-value player-short-words"]/text()')
            # 球衣号
            nums = select.xpath('//span[@class="player-num-value"]/text()')
            # 地区
            addresss = select.xpath('//span[@class="player-address-value player-short-words"]/text()')
            # 年龄
            ages = select.xpath('//span[@class="player-age-value"]/text()')
            # 身高
            heights = select.xpath('//span[@class="player-height-value"]/text()')
            # 体重
            weights = select.xpath('//span[@class="player-weight-value"]/text()')
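            # The six lists above are assumed to be parallel (one entry per player on the
            # page), so a single index i picks out the fields of the same player.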
            for i in range(0,len(nums)):
                name = names[i]
                num = nums[i]
                address = addresss[i]
                age = ages[i]
                height = heights[i]
                weight = weights[i]
                print(teamName, teamId, count, name, num, address, age, height, weight)

                writer.writerow((teamName, teamId, count, name, num, address, age, height, weight))
    except Exception as e:
        print("post_xpath_players_information函数解析错误 错误为:", e)



# Team fixture (match-up) information
def post_team_game(url):
    try:
        res = requests.post(url).text
        select = etree.HTML(res)

        game_times = select.xpath('//div[@class="event-time"]/span[1]/text()')
        game_sorts = select.xpath('//div[@class="event-time"]/span[2]/text()')
        team_one_names = select.xpath('//div[@class="team-one"]/span[@class="team-name"]/text()')
        team_one_scores = select.xpath('//div[@class="team-one"]/span[@class="team-score"]/text()')
        team_two_names = select.xpath('//div[@class="team-two"]/span[@class="team-name"]/text()')
        team_two_scores = select.xpath('//div[@class="team-two"]/span[@class="team-score"]/text()')
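        # success_events.htm appears to list finished games only, so every row should have
        # both team names and final scores, keeping the six lists the same length.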

        for i in range(0,len(game_times)):
            game_time = game_times[i]
            game_sort = game_sorts[i]
            team_one_name = team_one_names[i]
            team_one_score = team_one_scores[i]
            team_two_name = team_two_names[i]
            team_two_score = team_two_scores[i]
            print('{} {}, {} {} pts vs {} {} pts'.format(game_time, game_sort, team_one_name, team_one_score, team_two_name, team_two_score))

    except Exception as e:
        print("post_team_game函数解析错误 错误为:", e)



if __name__ == "__main__":
    f = open('chengshichuanqi.csv', 'w+', encoding='utf-8')
    writer = csv.writer(f)
    writer.writerow(('战队名称', '战队id', '队员数', '队员名', '球衣号', '队员地区','队员年龄','队员身高','队员体重'))
    base_url = '/match/join_teams_json.htm?divisionId=874902863023837184&page={}'
    game_url = '/match/success_events.htm?divisionId=874902863023837184'
    post_team_game(game_url)
    post_team_name(base_url)
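The page-count arithmetic (count // 10 + 2 if count % 10 > 0 else count // 10 + 1) is repeated in three of the functions above. A small helper based on math.ceil gives the same range() bound and is easier to check; this is only a sketch, assuming the API keeps returning 10 records per page:

import math

def page_range(count, page_size=10):
    # Number of pages needed to hold `count` records.
    pages = math.ceil(count / page_size)
    # range() excludes its upper bound, so add 1 to cover the last page.
    return range(1, pages + 1)

print(list(page_range(23)))   # [1, 2, 3] -- same as 23 // 10 + 2 = 4 -> range(1, 4)
print(list(page_range(20)))   # [1, 2]    -- same as 20 // 10 + 1 = 3 -> range(1, 3)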