Split a large SQL dump: shop_backup.sql is too big to import in one pass (>5 GB)
# 1. Extract the table structure from the dump and import it first
sed -n '/^-- Table structure/,/^-- Dumping data/p' shop_backup.sql > structure.sql
mysql -u root -p shop < structure.sql
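A quick sanity check that the schema landed, assuming the dump was produced by mysqldump so each table has exactly one CREATE TABLE statement; the two counts should match (-N suppresses the column-header line):

grep -c '^CREATE TABLE' structure.sql
mysql -N -u root -p -e 'SHOW TABLES' shop | wc -l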
# 2. Split the data: write the INSERT statements into chunks of 100,000 lines each
# close each finished chunk so awk does not run out of open file descriptors;
# sprintf avoids the ambiguous "str" ++i string concatenation
awk '/^INSERT INTO/ { if (++count % 100000 == 1) { if (file) close(file); file = sprintf("data_part_%d.sql", ++i) } print > file }' shop_backup.sql
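An alternative sketch using GNU split, assuming each INSERT sits on a single line (mysqldump's default output); it writes data_part_00.sql, data_part_01.sql, and so on:

grep '^INSERT INTO' shop_backup.sql > inserts.sql
split -l 100000 -d --additional-suffix=.sql inserts.sql data_part_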
# 3. Check the split before importing (confirm file count and sizes)
ls data_part_*.sql | wc -l     # number of chunk files
du -sh data_part_*.sql         # size of each chunk
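Another check worth doing before the import: the chunk files together should contain exactly as many lines as there are INSERT statements in the original dump (again assuming one INSERT per line); the two numbers should be equal:

grep -c '^INSERT INTO' shop_backup.sql
cat data_part_*.sql | wc -l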
# 4. Import file by file with progress output; --force continues past SQL errors.
# Tip: put the credentials in ~/.my.cnf to avoid a password prompt for every file.
for file in data_part_*.sql; do
  echo "Importing: $file ..."
  mysql -u root -p --connect-timeout=3600 --force shop < "$file" && \
    echo "OK: $file" >> import.log || \
    echo "FAILED: $file" >> error.log
done
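If the import is slow, one possible speed-up is to relax per-session checks on the import connection; this is a sketch assuming InnoDB tables and a dump that is already internally consistent (--init-command runs its statement right after connecting), used as a drop-in replacement for the mysql line inside the loop:

mysql -u root -p --force \
  --init-command="SET SESSION foreign_key_checks=0, unique_checks=0" \
  shop < "$file"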
# 5. Check the results
 echo "===== 导入完成 ====="
 echo "成功文件数: $(grep -c "成功" import.log 2>/dev/null || echo 0)"
 echo "失败文件数: $(grep -c "失败" error.log 2>/dev/null || echo 0)"
