1. A simple CSV example
import csv

header = ['姓名', '年龄', '性别']
data = [
    ['张三', 25, '男'],
    ['李四', 35, '女']
]

# newline='' prevents blank rows from appearing between records
with open('数据写入.csv', 'w', encoding='gbk', newline='') as f:
    # Step 1: create the writer object
    w = csv.writer(f)
    # Step 2: write the header row
    w.writerow(header)
    # Step 3: write multiple rows of data
    w.writerows(data)

# PermissionError: [Errno 13] Permission denied: '数据写入.csv'
# means the file has not been closed (typically it is still open in Excel)
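To verify the output, the file can be read back with csv.reader. This is a minimal sketch, assuming the GBK-encoded '数据写入.csv' produced by the script above exists in the working directory:

import csv

# Read the file written above back in to check its contents
with open('数据写入.csv', 'r', encoding='gbk', newline='') as f:
    r = csv.reader(f)
    for row in r:
        # each row comes back as a list of strings, e.g. ['张三', '25', '男']
        print(row)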

2. Hands-on: Tencent job postings (for study and reference only)

import requests, jsonpath, time, csv


class Start(object):
    # __init__ runs when the class is instantiated
    def __init__(self):
        self.headers = {
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36'
        }
        self.sum_list = []
        self.h = ['招聘职位', '国家', '地区', '类别', '任务', '最新日期', '工作需求']

    def get_html(self, index):
        print(f'当前页数{index}============')
        url = f'https://careers.tencent.com/tencentcareer/api/post/Query?timestamp={int(time.time() * 1000)}&countryId=&cityId=&bgIds=&productId=&categoryId=40001001&parentCategoryId=&attrId=&keyword=&pageIndex={index}&pageSize=10&language=zh-cn&area='
        response = requests.get(url, headers=self.headers)
        # Extract the data: parse the response body as JSON (dicts and lists)
        json_data = response.json()
        # Parse the fields with jsonpath (the expression must not contain spaces)
        RecruitPostName = jsonpath.jsonpath(json_data, '$..RecruitPostName')
        CountryName = jsonpath.jsonpath(json_data, '$..CountryName')
        LocationName = jsonpath.jsonpath(json_data, '$..LocationName')
        CategoryName = jsonpath.jsonpath(json_data, '$..CategoryName')
        Responsibility = jsonpath.jsonpath(json_data, '$..Responsibility')
        LastUpdateTime = jsonpath.jsonpath(json_data, '$..LastUpdateTime')
        RequireWorkYearsName = jsonpath.jsonpath(json_data, '$..RequireWorkYearsName')
        # Combine the parallel lists into rows
        for i in zip(RecruitPostName, CountryName, LocationName, CategoryName,
                     Responsibility, LastUpdateTime, RequireWorkYearsName):
            lis = [
                i[0],
                i[1],
                i[2],
                i[3],
                i[4].replace('\r', '').replace('\n', ''),
                i[5],
                i[6]
            ]
            self.sum_list.append(lis)

    def save_csv(self):
        # print(self.sum_list)
        with open('腾讯招聘.csv', 'w', encoding='gbk', newline='') as f:
            # Step 1: create the writer object
            w = csv.writer(f)
            # Step 2: write the header row
            w.writerow(self.h)
            # Step 3: write multiple rows of data
            w.writerows(self.sum_list)


if __name__ == '__main__':
    s = Start()  # instantiate the class
    for index in range(1, 11):
        s.get_html(index)
    s.save_csv()
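The key idea above is that jsonpath's '$..FieldName' expression searches the whole JSON tree recursively and returns a flat list of every match, so the seven queries yield parallel lists that zip() can stitch into rows. Below is a minimal sketch with a made-up response structure: the field names mirror the real API, but the nesting and the data are purely illustrative.

import jsonpath

# Toy structure imitating the shape of the careers API response (data is made up)
fake_json = {
    'Data': {
        'Posts': [
            {'RecruitPostName': '后台开发工程师', 'CountryName': '中国'},
            {'RecruitPostName': '测试开发工程师', 'CountryName': '中国'},
        ]
    }
}

# '$..RecruitPostName' matches the field at any depth and returns all hits as a list
names = jsonpath.jsonpath(fake_json, '$..RecruitPostName')
countries = jsonpath.jsonpath(fake_json, '$..CountryName')
print(names)      # ['后台开发工程师', '测试开发工程师']
print(countries)  # ['中国', '中国']

# zip() pairs the parallel lists element by element, just like in get_html()
for row in zip(names, countries):
    print(list(row))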

3. Dangdang book information (code uploaded to the resources section)
