想象一下你要搬家,如果一次性把所有家具塞进一辆卡车,万一中途爆胎或者堵车,整个搬家过程就彻底卡住了。但如果你把家具分成几批运输,哪怕其中一辆车出了问题,其他车辆还能继续工作,甚至可以用备用车辆重新运送失败的那部分——这就是分段上传的核心逻辑。
在真实项目中,我遇到过客户需要上传平均15GB的设计视频文件,而他们的办公网络平均速度只有200KB/s。如果采用传统单次上传方式,任何网络波动都会导致整个文件上传失败。而采用AWS S3的分段上传后,即使某个分片上传失败,也只需要重传该分片(通常5-10MB),而不是整个15GB文件。
分段上传技术将大文件分割成多个较小部分(建议5-15MB/片),每个分片独立上传。这带来三个关键优势:
首先安装AWS SDK:
npm install aws-sdk
初始化阶段需要特别注意凭证安全。我见过不少项目把AK/SK硬编码在前端,这是极其危险的做法。正确做法是通过后端接口动态获取临时凭证:
// Fetch temporary STS credentials from the backend (never hard-code AK/SK in the client).
// @returns {Promise<Object>} parsed credential payload ({ accessKeyId, secretAccessKey, sessionToken })
// @throws {Error} when the token endpoint responds with a non-2xx status
async function getSTS() {
  const res = await fetch('/api/sts-token')
  // fetch() only rejects on network failure; HTTP-level errors must be checked explicitly,
  // otherwise a 500 page would be parsed as credentials.
  if (!res.ok) {
    throw new Error(`Failed to fetch STS token: HTTP ${res.status}`)
  }
  return await res.json()
}
// Pull short-lived credentials from the backend, then build the S3 client with them.
const stsCredentials = await getSTS()
const s3 = new AWS.S3({
  credentials: new AWS.Credentials({
    accessKeyId: stsCredentials.accessKeyId,
    secretAccessKey: stsCredentials.secretAccessKey,
    sessionToken: stsCredentials.sessionToken
  }),
  region: 'us-east-1'
})
创建分段上传任务时,建议添加唯一标识防止冲突。我在实际项目中遇到过用户重复上传同名文件导致数据混乱的情况:
// Start a multipart upload for `file`.
// @param {File} file - browser File object to upload
// @param {string} [bucket='your-bucket'] - target bucket (parameterized so one helper serves all buckets)
// @returns {Promise<{uploadId: string, key: string}>}
async function createUpload(file, bucket = 'your-bucket') {
  const params = {
    Bucket: bucket,
    Key: `${Date.now()}-${file.name}`, // timestamp prefix prevents same-named files from colliding
    ContentType: file.type
  }
  const { UploadId } = await s3.createMultipartUpload(params).promise()
  return { uploadId: UploadId, key: params.Key }
}
文件切片需要考虑两个关键因素:
这是我优化过的分片上传实现:
// Drives a multipart upload: slices the file, uploads parts with bounded
// concurrency, then finalizes the upload with completeMultipartUpload.
class UploadManager {
  // @param {File} file - file to upload
  // @param {string} uploadId - id returned by createMultipartUpload
  // @param {string} key - object key in the bucket
  // @param {string} [bucket='your-bucket'] - target bucket
  constructor(file, uploadId, key, bucket = 'your-bucket') {
    this.file = file
    this.uploadId = uploadId
    this.key = key
    this.bucket = bucket
    this.partSize = 10 * 1024 * 1024 // 10MB per part
    this.concurrentLimit = 3 // max parts in flight at once
    this.pendingQueue = [] // promises of parts currently in flight
    this.uploadedParts = [] // { PartNumber, ETag } for completed parts
  }
  // Upload every part, then finalize. Returns the completeMultipartUpload result.
  async start() {
    const partCount = Math.ceil(this.file.size / this.partSize)
    for (let partNumber = 1; partNumber <= partCount; partNumber++) {
      // BUG FIX: the queue must shrink when a part settles. Racing a queue
      // that keeps settled promises stops limiting concurrency after the
      // first part finishes, because a settled promise wins every race
      // instantly — all remaining parts would then be launched at once.
      const tracked = this.uploadPart(partNumber).finally(() => {
        const idx = this.pendingQueue.indexOf(tracked)
        if (idx !== -1) this.pendingQueue.splice(idx, 1)
      })
      this.pendingQueue.push(tracked)
      if (this.pendingQueue.length >= this.concurrentLimit) {
        await Promise.race(this.pendingQueue) // wait for a free slot
      }
    }
    await Promise.all(this.pendingQueue)
    return this.completeUpload()
  }
  // Upload one part; records its ETag for the final completion call.
  async uploadPart(partNumber) {
    const start = (partNumber - 1) * this.partSize
    const end = Math.min(start + this.partSize, this.file.size)
    const chunk = this.file.slice(start, end)
    try {
      const { ETag } = await s3.uploadPart({
        Bucket: this.bucket,
        Key: this.key,
        PartNumber: partNumber,
        UploadId: this.uploadId,
        Body: chunk
      }).promise()
      this.uploadedParts.push({
        PartNumber: partNumber,
        ETag: ETag
      })
    } catch (err) {
      console.error(`Part ${partNumber} upload failed:`, err)
      throw err
    }
  }
  // BUG FIX: start() called this.completeUpload() but the class never defined
  // it, so every upload ended in a TypeError. S3 requires the part list to be
  // sorted by PartNumber; parts may finish out of order under concurrency.
  async completeUpload() {
    const sorted = [...this.uploadedParts].sort((a, b) => a.PartNumber - b.PartNumber)
    return s3.completeMultipartUpload({
      Bucket: this.bucket,
      Key: this.key,
      UploadId: this.uploadId,
      MultipartUpload: { Parts: sorted }
    }).promise()
  }
}
所有分片上传完成后,必须按PartNumber顺序提交给completeMultipartUpload。这里有个容易踩的坑:ETag值需要保留原始引号:
// Finalize a multipart upload. S3 requires the parts sorted by PartNumber and
// ETag values that include their surrounding double quotes.
// @param {string} uploadId - multipart upload id
// @param {string} key - object key
// @param {Array<{PartNumber: number, ETag: string}>} parts - uploaded parts (any order)
async function completeUpload(uploadId, key, parts) {
  // Copy before sorting so the caller's array is not reordered in place.
  const sortedParts = [...parts].sort((a, b) => a.PartNumber - b.PartNumber)
  await s3.completeMultipartUpload({
    Bucket: 'your-bucket',
    Key: key,
    UploadId: uploadId,
    MultipartUpload: {
      Parts: sortedParts.map(part => ({
        PartNumber: part.PartNumber,
        // BUG FIX: the SDK already returns ETags wrapped in quotes; wrapping
        // unconditionally produced ""…"" and S3 rejected the part. Only add
        // quotes when they are genuinely missing.
        ETag: part.ETag.startsWith('"') ? part.ETag : `"${part.ETag}"`
      }))
    }
  }).promise()
}
实现断点续传需要四个关键步骤:
// Decide how to proceed for a given object key:
//   'completed' — the object already exists in the bucket
//   'new'       — no unfinished multipart upload to resume
//   'resumable' — an upload is in progress; returns its id and finished parts
async function resumeUpload(file, key) {
  // 1. Already fully uploaded?
  try {
    await s3.headObject({ Bucket: 'your-bucket', Key: key }).promise()
    return { status: 'completed' }
  } catch (err) {
    // headObject reports a missing object as code 'NotFound' (HTTP 404);
    // anything else (auth failure, network) is a real error and must propagate.
    if (err.code !== 'NotFound' && err.statusCode !== 404) throw err
  }
  // 2. Look for an unfinished multipart upload for this EXACT key.
  //    BUG FIX: taking Uploads[0] after a Prefix query could pick an upload
  //    for a different key that merely starts with `key`; Uploads may also be
  //    undefined when nothing matches.
  const { Uploads } = await s3.listMultipartUploads({
    Bucket: 'your-bucket',
    Prefix: key
  }).promise()
  const pending = (Uploads || []).find(upload => upload.Key === key)
  if (!pending) return { status: 'new' }
  // 3. Collect the parts that were already uploaded so they can be skipped.
  const { Parts } = await s3.listParts({
    Bucket: 'your-bucket',
    Key: key,
    UploadId: pending.UploadId
  }).promise()
  return {
    status: 'resumable',
    uploadId: pending.UploadId,
    uploadedParts: Parts || []
  }
}
在弱网环境下,我建议采用指数退避重试策略。这个方案在移动端上传场景特别有效:
// Upload one part, retrying transient failures with exponential backoff
// (the delay doubles after each failed attempt). Particularly effective on
// flaky mobile networks.
// @param {Object} partParams - params for s3.uploadPart
// @param {number} [retries=3] - total attempts before giving up
// @param {number} [baseDelay=1000] - initial backoff in ms (generalized from the hard-coded 1s)
// @returns {Promise<Object>} the uploadPart result ({ ETag, ... })
// @throws the last error once all attempts are exhausted
async function uploadWithRetry(partParams, retries = 3, baseDelay = 1000) {
  let delay = baseDelay
  for (let attempt = 1; attempt <= retries; attempt++) {
    try {
      return await s3.uploadPart(partParams).promise()
    } catch (err) {
      if (attempt === retries) throw err // out of attempts — surface the error
      await new Promise(resolve => setTimeout(resolve, delay))
      delay *= 2 // exponential backoff
    }
  }
}
通过localStorage保存上传状态,这是我实际项目中验证过的方案:
// Persist enough upload state in localStorage to resume after a page reload:
// the upload id, a fingerprint of the file (name/size/type/mtime) and the
// parts that already finished.
function saveUploadState(key, state) {
  const { name, size, type, lastModified } = state.file
  const snapshot = {
    uploadId: state.uploadId,
    file: { name, size, type, lastModified },
    uploadedParts: state.uploadedParts
  }
  localStorage.setItem(`upload-${key}`, JSON.stringify(snapshot))
}
// Restore a previously saved upload state for `key`, but only when the file
// currently selected in the file input matches the one the state was saved
// for (same name, size and mtime). Returns null when there is nothing to
// resume or the file changed.
async function recoverUpload(key) {
  const raw = localStorage.getItem(`upload-${key}`)
  if (!raw) return null
  let state
  try {
    state = JSON.parse(raw)
  } catch {
    return null // corrupted entry — treat as no saved state
  }
  const fileInput = document.getElementById('file-input')
  // BUG FIX: when no file is selected, files[0] is undefined and reading
  // .name threw a TypeError — guard before comparing the fingerprint.
  const file = fileInput && fileInput.files ? fileInput.files[0] : undefined
  if (!file ||
      file.name !== state.file.name ||
      file.size !== state.file.size ||
      file.lastModified !== state.file.lastModified) {
    return null
  }
  return state
}
固定分片大小不是最优解。根据我的测试数据:
实现动态分片调整:
// Pick a part size suited to the connection quality.
// BUG FIX: `networkType` was declared but never used; it now acts as an
// explicit override (e.g. from an app-level speed test), falling back to the
// Network Information API when omitted.
// @param {string} [networkType] - override: '4g' | '3g' | '2g' | 'slow-2g' | etc.
// @returns {number} recommended part size in bytes
function getOptimalPartSize(networkType) {
  const wifiSpeeds = [15 * 1024 * 1024, 20 * 1024 * 1024]
  const cellularSpeeds = [5 * 1024 * 1024, 10 * 1024 * 1024]
  const slowSpeeds = [1 * 1024 * 1024, 2 * 1024 * 1024]
  // navigator.connection is not available in every browser; guard both the
  // API and its effectiveType field before reading them.
  let effectiveType = networkType
  if (effectiveType === undefined &&
      typeof navigator !== 'undefined' && navigator.connection) {
    effectiveType = navigator.connection.effectiveType
  }
  if (typeof effectiveType === 'string') {
    if (effectiveType.includes('4g')) return cellularSpeeds[1]
    if (effectiveType.includes('3g')) return cellularSpeeds[0]
    if (effectiveType.includes('2g')) return slowSpeeds[0] // also matches 'slow-2g'
  }
  return wifiSpeeds[1] // default: assume a fast (wifi-class) network
}
浏览器并发限制是个需要特别注意的问题。Chrome对同一域名最多允许6个TCP连接。我的优化方案是:
// Bounds the number of async tasks running at once. Browsers cap parallel
// connections per origin (Chrome: 6), so uploads should self-throttle below
// that limit.
class ConcurrentController {
  constructor(maxConcurrent = 3) {
    this.queue = [] // tasks waiting for a free slot
    this.activeCount = 0 // tasks currently running
    this.maxConcurrent = maxConcurrent
  }
  // Schedule `task` (an async function); resolves/rejects with its outcome
  // once it has run within the concurrency budget.
  async enqueue(task) {
    return new Promise((resolve, reject) => {
      this.queue.push({ task, resolve, reject })
      this.dequeue()
    })
  }
  // Launch the next queued task if a slot is free; re-check after each task
  // settles so the queue keeps draining.
  async dequeue() {
    if (this.activeCount >= this.maxConcurrent) return
    const next = this.queue.shift()
    if (!next) return
    this.activeCount++
    try {
      next.resolve(await next.task())
    } catch (err) {
      next.reject(err)
    } finally {
      this.activeCount--
      this.dequeue()
    }
  }
}
给用户展示实时上传速度能显著提升体验:
// Tracks upload speed as a moving average over the most recent samples.
class SpeedCalculator {
  constructor() {
    this.startTime = null // upload start timestamp (ms)
    this.lastTime = null // timestamp of the previous sample (ms)
    this.lastLoaded = 0 // cumulative bytes at the previous sample
    this.speeds = [] // recent instantaneous speeds (bytes/s)
  }
  // Reset all counters at the start of an upload.
  start() {
    this.startTime = Date.now()
    this.lastTime = this.startTime
    this.lastLoaded = 0
    this.speeds = []
  }
  // Record a progress sample. `loaded` is the cumulative byte count so far.
  // Returns the current moving-average speed in bytes/s (0 if no time passed).
  update(loaded) {
    const now = Date.now()
    // BUG FIX: instantaneous speed = bytes since the LAST sample divided by
    // time since the LAST sample. The old code divided the incremental bytes
    // by the TOTAL elapsed time, so the displayed speed shrank toward zero
    // as the upload progressed.
    const interval = (now - this.lastTime) / 1000 // seconds since last sample
    const delta = loaded - this.lastLoaded
    if (interval > 0) {
      this.speeds.push(delta / interval) // bytes/s
      // Keep only the 5 most recent samples for the moving average.
      if (this.speeds.length > 5) {
        this.speeds.shift()
      }
      this.lastLoaded = loaded
      this.lastTime = now
      return this.getAverageSpeed()
    }
    return 0
  }
  // Average of the retained samples; 0 when no samples have been taken yet.
  getAverageSpeed() {
    if (!this.speeds.length) return 0
    const sum = this.speeds.reduce((a, b) => a + b, 0)
    return sum / this.speeds.length
  }
}