For loop only returns the last item in the list

Time: 2017-10-26 15:24:03

Tags: python list beautifulsoup list-comprehension

url="someurl"
outputfile='./file.zip'

link=urllib.urlopen(url)
soup= bs4.BeautifulSoup(link,'lxml')

links=[]
for data in soup.find_all('div', class_='master_content-outer-container'):
    for a in data.find_all('a'):
        links.append(a.get('href'))

output = open(outputfile, "wb")
for i in links:
    request=urllib.urlopen(i)
    read=request.read()
    output.write(read)
output.close()


zip_ref= zipfile.ZipFile(outputfile,'r')
zip_ref.extractall('./data/')
zip_ref.close()

I have the URLs stored in a list and feed each one to urllib. Every URL ends with a .zip extension. When I run this code, only the last file in the list gets downloaded. There are about 400 links to download. Am I missing something?

1 answer:

Answer 0 (score: 2):

You are writing every file's contents into the same output file, so that won't work.
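This is because ./file.zip ends up containing every downloaded archive concatenated back to back, and zipfile locates an archive by the end-of-central-directory record at the end of the file, so only the archive written last is visible. A minimal sketch of that effect, using two hypothetical in-memory archives:

import io
import zipfile

# Build two tiny zip archives in memory (hypothetical contents).
buf_a = io.BytesIO()
zip_a = zipfile.ZipFile(buf_a, 'w')
zip_a.writestr('a.txt', 'first archive')
zip_a.close()

buf_b = io.BytesIO()
zip_b = zipfile.ZipFile(buf_b, 'w')
zip_b.writestr('b.txt', 'second archive')
zip_b.close()

# Concatenate them the way the original loop effectively does.
combined = io.BytesIO(buf_a.getvalue() + buf_b.getvalue())
print(zipfile.ZipFile(combined).namelist())  # ['b.txt'] -- only the last archive is seen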

Try this:

import os
import urllib
import zipfile

import bs4

url = "someurl"

link = urllib.urlopen(url)
soup = bs4.BeautifulSoup(link, 'lxml')

links = []
for data in soup.find_all('div', class_='master_content-outer-container'):
    for a in data.find_all('a'):
        links.append(a.get('href'))

for i in links:
    request = urllib.urlopen(i)
    read = request.read()

    # Write each archive to its own file, named after the last part of the URL
    file_name = os.path.basename(i)
    output = open(file_name, "wb")
    output.write(read)
    output.close()

    # Extract the archive that was just downloaded
    zip_ref = zipfile.ZipFile(file_name, 'r')
    zip_ref.extractall('./data/')
    zip_ref.close()

Option 2

import os
import urllib
import zipfile

import bs4

url = "someurl"

link = urllib.urlopen(url)
soup = bs4.BeautifulSoup(link, 'lxml')

def download_and_extract(link):
    request = urllib.urlopen(link)
    read = request.read()

    # One output file per link, named after the last part of the URL
    file_name = os.path.basename(link)
    output = open(file_name, "wb")
    output.write(read)
    output.close()

    zip_ref = zipfile.ZipFile(file_name, 'r')
    zip_ref.extractall('./data/')
    zip_ref.close()

for data in soup.find_all('div', class_='master_content-outer-container'):
    for a in data.find_all('a'):
        download_and_extract(a.get('href'))
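
As a side note, urllib.urlopen only exists in Python 2. On Python 3 the same per-link download-and-extract step would go through urllib.request; a minimal sketch under that assumption, reusing the hypothetical link URLs collected above:

import os
import zipfile
import urllib.request

def download_and_extract(link):
    # Fetch the archive and write it to its own file
    file_name = os.path.basename(link)
    with urllib.request.urlopen(link) as response, open(file_name, "wb") as output:
        output.write(response.read())

    # Extract it into ./data/
    with zipfile.ZipFile(file_name, 'r') as zip_ref:
        zip_ref.extractall('./data/')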