博客园批量下载文章
如何实现批量下载文章的功能,类似于博客园文章的这种功能?勾选几个文章之后,统一下载成压缩包?
代码如下:
import os
import shutil
import zipfile

import requests

# List of article URLs to download.
# NOTE: replace with real article links; the original snippet contained a
# literal `...` placeholder inside the list, which would crash requests.get().
urls = [
    'https://www.cnblogs.com/xxx/p/xx.html',
]

# Create a temporary working directory for the downloaded pages.
temp_dir = 'temp'
os.makedirs(temp_dir, exist_ok=True)

# Download each article and save it under the temp directory.
# BUG FIX: original read `for url inurls:` (missing space) — a SyntaxError.
for url in urls:
    resp = requests.get(url, timeout=30)  # timeout so a dead host can't hang the script
    resp.raise_for_status()               # fail loudly instead of zipping an HTTP error page
    # Derive a filename from the last URL segment; fall back for URLs ending in '/'.
    filename = url.rstrip('/').split('/')[-1] or 'index.html'
    with open(os.path.join(temp_dir, filename), 'wb') as f:
        f.write(resp.content)

# Zip everything under the temp directory, storing paths relative to it
# so the archive doesn't embed the 'temp/' prefix.
zip_file = 'articles.zip'
with zipfile.ZipFile(zip_file, 'w', zipfile.ZIP_DEFLATED) as z:
    for root, dirs, files in os.walk(temp_dir):
        for name in files:
            path = os.path.join(root, name)
            z.write(path, os.path.relpath(path, temp_dir))

# Remove the temp directory and its contents.
# BUG FIX: os.rmdir() only removes *empty* directories and would raise
# OSError here; shutil.rmtree() removes the whole tree.
shutil.rmtree(temp_dir)






