使用python爬取网页,运行报错

# file_name:Crawler.py
import requests
from bs4 import BeautifulSoup


def download_content(url, timeout=30):
    """Download a web page and return its HTML body as text.

    Args:
        url: Address of the page to fetch.
        timeout: Seconds to wait for the server before giving up;
            without it ``requests.get`` can block indefinitely.

    Returns:
        The decoded response body as a string.

    Raises:
        requests.exceptions.RequestException: on any network failure
            or a non-2xx HTTP status.

    NOTE(review): the traceback posted with this script —
    ``socket.gaierror: [Errno 11002] getaddrinfo failed`` — is a DNS
    resolution failure: the machine running the script could not
    resolve the host name (no network, proxy/firewall, or the site is
    gone). That error originates outside this function's logic.
    """
    # Some sites reject clients without a browser-like User-Agent.
    headers = {"User-Agent": "Mozilla/5.0"}
    response = requests.get(url, headers=headers, timeout=timeout)
    # Fail loudly on 4xx/5xx instead of silently saving an error page
    # and confusing the parsing step later.
    response.raise_for_status()
    return response.text


# 第二个函数,将字符串内容保存到文件中
# 第一个参数为所要保存的文件名,第二个参数为要保存的字符串内容的变量
def save_to_file(filename, content):
    """Write ``content`` to ``filename`` as UTF-8 text.

    Overwrites the file if it already exists, so the same cache file
    can be reused across runs.
    """
    with open(filename, "w", encoding="utf-8") as out_file:
        out_file.write(content)

def create_doc_from_filename(filename):
    """Read a saved HTML file and parse it into a BeautifulSoup tree.

    Args:
        filename: Path of the HTML file to analyse.

    Returns:
        A ``BeautifulSoup`` document built with the ``lxml`` parser
        (the lxml package must be installed).
    """
    with open(filename, "r", encoding="utf-8") as html_file:
        raw_html = html_file.read()
    # Parse outside the `with` block; the file handle is already
    # closed by the time BeautifulSoup runs.
    return BeautifulSoup(raw_html, "lxml")

def parse(soup):
    """Extract and print (title, href) pairs from the guide page.

    Each entry on the page is a ``<div class="post-info">`` whose
    second ``<a>`` tag carries the article title and URL.

    Fix over the original: entries with fewer than two ``<a>`` tags
    are skipped instead of raising ``IndexError``, and the extracted
    pairs are also returned so callers can use them programmatically.

    Args:
        soup: A BeautifulSoup document (or any object exposing the
            same ``find_all`` interface).

    Returns:
        A list of ``(title, href)`` tuples; each pair is also printed
        to stdout, preserving the original script's output.
    """
    results = []
    for post in soup.find_all("div", class_="post-info"):
        links = post.find_all("a")
        if len(links) < 2:
            # Malformed entry: the expected title link is missing.
            continue
        title_link = links[1]
        title = title_link.text.strip()
        href = title_link["href"]
        print(title)
        print(href)
        results.append((title, href))
    return results


def main(url="https://zkaoy.com/sions/exam", filename="tips1.html"):
    """Download the exam-guide page, cache it locally, and print links.

    Args:
        url: Page to crawl; defaults to the tutorial's guide page.
        filename: Local file used to cache the downloaded HTML.

    NOTE(review): the traceback the poster reported shows a DNS
    failure (``[Errno 11002] getaddrinfo failed``) while resolving
    ``zkaoy.com`` — check network/proxy/DNS on the machine, and
    whether the site is still reachable in a browser, before
    suspecting this code.
    """
    page_html = download_content(url)          # fetch the page
    save_to_file(filename, page_html)          # cache it to disk
    soup = create_doc_from_filename(filename)  # re-read and parse
    parse(soup)                                # print title / href pairs

# Standard entry-point guard: run main() only when this file is
# executed directly, not when it is imported as a module.
if __name__ == '__main__':
    main()

代码在网上copy的,运行不知道在哪里有问题,请高手指点下
报错信息好长一段,如下
D:\py\anaconda\envs\zhangyi_tracker\python.exe C:\Users\Administrator\PycharmProjects\pythonProject1\venv\爬取学习\1.py
Traceback (most recent call last):
File “D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\urllib3\connection.py”, line 174, in _new_conn
conn = connection.create_connection(
File “D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\urllib3\util\connection.py”, line 72, in create_connection
for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
File “D:\py\anaconda\envs\zhangyi_tracker\lib\socket.py”, line 954, in getaddrinfo
for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
socket.gaierror: [Errno 11002] getaddrinfo failed

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
File “D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\urllib3\connectionpool.py”, line 703, in urlopen
httplib_response = self._make_request(
File “D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\urllib3\connectionpool.py”, line 386, in _make_request
self._validate_conn(conn)
File “D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\urllib3\connectionpool.py”, line 1042, in _validate_conn
conn.connect()
File “D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\urllib3\connection.py”, line 363, in connect
self.sock = conn = self._new_conn()
File “D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\urllib3\connection.py”, line 186, in _new_conn
raise NewConnectionError(
urllib3.exceptions.NewConnectionError: <urllib3.connection.HTTPSConnection object at 0x000001DE73DF8C70>: Failed to establish a new connection: [Errno 11002] getaddrinfo failed

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
File “D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\requests\adapters.py”, line 489, in send
resp = conn.urlopen(
File “D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\urllib3\connectionpool.py”, line 787, in urlopen
retries = retries.increment(
File “D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\urllib3\util\retry.py”, line 592, in increment
raise MaxRetryError(_pool, url, error or ResponseError(cause))
urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host=’zkaoy.com’, port=443): Max retries exceeded with url: /sions/exam (Caused by NewConnectionError(‘<urllib3.connection.HTTPSConnection object at 0x000001DE73DF8C70>: Failed to establish a new connection: [Errno 11002] getaddrinfo failed’))

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
File “C:\Users\Administrator\PycharmProjects\pythonProject1\venv\爬取学习\1.py”, line 47, in
main()
File “C:\Users\Administrator\PycharmProjects\pythonProject1\venv\爬取学习\1.py”, line 41, in main
result = download_content(url)
File “C:\Users\Administrator\PycharmProjects\pythonProject1\venv\爬取学习\1.py”, line 12, in download_content
response = requests.get(url).text
File “D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\requests\api.py”, line 73, in get
return request("get", url, params=params, **kwargs)
File "D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\requests\api.py", line 59, in request
return session.request(method=method, url=url, **kwargs)
File “D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\requests\sessions.py”, line 587, in request
resp = self.send(prep, **send_kwargs)
File "D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\requests\sessions.py", line 701, in send
r = adapter.send(request, **kwargs)
File “D:\py\anaconda\envs\zhangyi_tracker\lib\site-packages\requests\adapters.py”, line 565, in send
raise ConnectionError(e, request=request)
requests.exceptions.ConnectionError: HTTPSConnectionPool(host=’zkaoy.com’, port=443): Max retries exceeded with url: /sions/exam (Caused by NewConnectionError(‘<urllib3.connection.HTTPSConnection object at 0x000001DE73DF8C70>: Failed to establish a new connection: [Errno 11002] getaddrinfo failed’))

讨论数量: 1

讨论应以学习和精进为目的。请勿发布不友善或者负能量的内容,与人为善,比聪明更重要!