Vue large-file chunked upload (resumable upload, concurrent upload, instant upload)

When handling large files, neither the client nor the server should read, send, or receive the whole file in one go — doing so easily causes memory problems. Large files are therefore uploaded in chunks. For upload efficiency, the best throughput is achieved by uploading several chunks concurrently.

This article is based on a file upload implemented with Spring Boot + Vue, and focuses on the steps and code for the Vue (client) side.

Upload steps:
I break the upload process into the following stages:

1. Read the file and compute its MD5 hash.
2. Ask the server whether the file has already been uploaded; if it is complete, the server returns the file address directly (instant upload).
3. If it is not complete, determine whether this is a resumed (breakpoint) upload and which chunks already exist.
4. Decide between concurrent and sequential chunk upload.
5. Upload the chunks; each successfully uploaded chunk is added to the uploaded list.
6. Check whether all chunks have been uploaded.

The code:

File Upload:

import md5 from 'js-md5' // MD5 hashing
import UpApi from '@/api/common.js'
import { concurrentExecution } from '@/utils/jnxh'
  
/**
 * File upload in parts
 * @params file {File} file
 * @params pieceSize {Number} Piece size default 3MB
 * @params concurrent {Number} Number of concurrencies Default 2
 * @params process {Function} progress callback function
 * @params success {Function} success callback function
 * @params error {Function} failure callback function
 */
/**
 * Chunked large-file upload supporting instant upload (server already has
 * the file), resumable upload (skip chunks the server already holds), and
 * sequential or concurrent chunk transfer.
 *
 * @param {Object}   options
 * @param {File}     options.file           file to upload
 * @param {Number}   [options.pieceSize=3]  chunk size in MB
 * @param {Number}   [options.concurrent=3] number of parallel chunk uploads
 * @param {Function} [options.success]      called with the server response once all chunks are up
 * @param {Function} [options.process]      progress callback, receives an integer 0-100
 * @param {Function} [options.error]        failure callback
 */
export const uploadByPieces = ({
  file,
  pieceSize = 3,
  concurrent = 3,
  success,
  process,
  error
}) => {
  // Reject a missing (or empty) file up front. The original checked
  // `file.length`, which is always undefined on a File object.
  if (!file || !file.size) {
    return error && error('The file cannot be empty')
  }
  let fileMD5 = '' // MD5 of the whole file, used as the upload identifier
  const chunkSize = pieceSize * 1024 * 1024 // chunk size in bytes
  const chunkCount = Math.ceil(file.size / chunkSize) // total number of chunks
  const chunkList = [] // chunk indices fed to the concurrent executor
  let uploaded = [] // 1-based chunk numbers already on the server, kept as strings
  let fileType = '' // file extension

  /**
   * Report progress for a 0-based chunk index as an integer percentage.
   */
  const reportProgress = (currentChunk) => {
    process && process(parseInt((currentChunk / chunkCount) * 100))
  }

  /**
   * Read the file, compute its MD5, then ask the server whether the file
   * (or part of it) is already uploaded, and start the appropriate mode.
   */
  const readFileMD5 = () => {
    fileType = file.name.substring(file.name.lastIndexOf('.') + 1)
    const reader = new FileReader()
    // NOTE(review): readAsBinaryString is deprecated; kept so js-md5 receives
    // the same binary string as the original code. Consider readAsArrayBuffer
    // (js-md5 accepts ArrayBuffer as well).
    reader.readAsBinaryString(file)
    reader.addEventListener('load', e => {
      fileMD5 = md5(e.target.result)
      const form = new FormData()
      form.append('filename', file.name)
      form.append('identifier', fileMD5)
      form.append('objectType', fileType)
      form.append('chunkNumber', 1)
      UpApi.uploadChunk(form).then(res => {
        if (res.skipUpload) {
          // Instant upload: the server already has the complete file.
          success && success(res)
        } else {
          // Resumable upload: remember which chunk numbers already exist.
          // Normalize to strings so later indexOf checks are consistent.
          if (res.uploaded && res.uploaded.length != 0) {
            uploaded = [].concat(res.uploaded).map(String)
          }
          // A single chunk (or a concurrency of 1) degenerates to sequential upload.
          if (concurrent == 1 || chunkCount == 1) {
            sequentialUpload(0)
          } else {
            concurrentUpload()
          }
        }
      }).catch((e) => {
        // Surface the pre-check failure instead of only logging it.
        error && error(e)
      })
    })
  }

  /**
   * Slice out the chunk at 0-based index `currentChunk`.
   * @returns {{start: Number, end: Number, chunk: Blob}}
   */
  const getChunkInfo = (file, currentChunk, chunkSize) => {
    const start = currentChunk * chunkSize
    const end = Math.min(file.size, start + chunkSize)
    return { start, end, chunk: file.slice(start, end) }
  }

  /**
   * Shared request config. NOTE(review): FormData normally requires the
   * browser-generated multipart Content-Type with a boundary; the
   * 'application/json' header is kept from the original code — confirm the
   * HTTP layer overrides it for FormData bodies.
   */
  const requestConfig = () => ({
    headers: {
      'Content-Type': 'application/json',
      'Accept': '*/*'
    }
  })

  /**
   * Recursive chunk upload: send one chunk, then continue with the next
   * until every chunk is done. Chunks already on the server are skipped.
   */
  const uploadChunk = (chunkInfo) => {
    reportProgress(chunkInfo.currentChunk)
    const chunkNumber = chunkInfo.currentChunk + 1 // server chunk numbers are 1-based
    if (uploaded.indexOf(chunkNumber + '') > -1) {
      // Already uploaded — skip straight to the next chunk. (Bug fix: the
      // original recursed with an undefined `index` variable here and never
      // bound-checked against chunkCount.)
      if (chunkNumber < chunkInfo.chunkCount) {
        const { chunk } = getChunkInfo(file, chunkNumber, chunkSize)
        uploadChunk({ chunk, currentChunk: chunkNumber, chunkCount })
      }
      return
    }
    UpApi.uploadChunk(createUploadData(chunkInfo), requestConfig()).then(res => {
      if (res.code == 200) {
        uploaded.push(chunkNumber + '')
        if (uploaded.length == chunkInfo.chunkCount) {
          // Every chunk is on the server.
          success && success(res)
          process && process(100)
        } else {
          const { chunk } = getChunkInfo(file, chunkNumber, chunkSize)
          uploadChunk({ chunk, currentChunk: chunkNumber, chunkCount })
        }
      } else {
        error && error(res.msg)
      }
    }).catch((e) => {
      error && error(e)
    })
  }

  /**
   * Sequential upload: one chunk at a time, in order.
   */
  const sequentialUpload = (currentChunk) => {
    if (currentChunk >= chunkCount) return // guard against running past the end
    const { chunk } = getChunkInfo(file, currentChunk, chunkSize)
    const chunkInfo = { chunk, currentChunk, chunkCount }
    reportProgress(currentChunk)
    const chunkNumber = currentChunk + 1
    if (uploaded.indexOf(chunkNumber + '') > -1) {
      // Already on the server — move on to the next chunk.
      sequentialUpload(currentChunk + 1)
      return
    }
    UpApi.uploadChunk(createUploadData(chunkInfo), requestConfig()).then(res => {
      if (res.code == 200) {
        uploaded.push(chunkNumber + '')
        if (uploaded.length == chunkCount) {
          success && success(res)
          process && process(100)
        } else {
          sequentialUpload(currentChunk + 1)
        }
      } else {
        error && error(res.msg)
      }
    }).catch((e) => {
      error && error(e)
    })
  }

  /**
   * Concurrent upload: up to `concurrent` chunks in flight at once,
   * driven by the concurrentExecution helper.
   */
  const concurrentUpload = () => {
    for (let i = 0; i < chunkCount; i++) {
      chunkList.push(i)
    }
    concurrentExecution(chunkList, concurrent, (curItem) => {
      return new Promise((resolve, reject) => {
        const { chunk } = getChunkInfo(file, curItem, chunkSize)
        const chunkInfo = { chunk, currentChunk: curItem, chunkCount }
        reportProgress(curItem)
        const chunkNumber = curItem + 1
        if (uploaded.indexOf(chunkNumber + '') > -1) {
          // Already uploaded — resolve so the executor moves on.
          resolve()
          return
        }
        UpApi.uploadChunk(createUploadData(chunkInfo), requestConfig()).then(res => {
          if (res.code == 200) {
            uploaded.push(chunkNumber + '')
            if (uploaded.length == chunkCount) {
              success && success(res)
              process && process(100)
            }
            resolve()
          } else {
            reject(res)
          }
        }).catch((e) => {
          // Bug fix: the original rejected with the out-of-scope `res`.
          reject(e)
          error && error(e)
        })
      })
    }).catch((e) => {
      // Surface executor failures instead of leaving the rejection unhandled.
      error && error(e)
    })
  }

  /**
   * Build the multipart FormData payload for one chunk. (The original
   * duplicated this construction inline inside uploadChunk.)
   */
  const createUploadData = (chunkInfo) => {
    const fetchForm = new FormData()
    fetchForm.append('identifier', fileMD5)
    fetchForm.append('chunkNumber', chunkInfo.currentChunk + 1)
    fetchForm.append('chunkSize', chunkSize)
    fetchForm.append('currentChunkSize', chunkInfo.chunk.size)
    // Wrap the Blob in a File so the server sees the original filename.
    fetchForm.append('file', new File([chunkInfo.chunk], file.name))
    fetchForm.append('filename', file.name)
    fetchForm.append('relativePath', file.name)
    fetchForm.append('totalChunks', chunkInfo.chunkCount)
    fetchForm.append('totalSize', file.size)
    fetchForm.append('objectType', fileType)
    return fetchForm
  }

  readFileMD5() // kick everything off
}

Concurrency control:

/**
 * Concurrent execution
 * @params list {Array} - the array to iterate over
 * @params limit {Number} - the number of concurrency controls, preferably less than 3
 * @params asyncHandle {Function} - the processing function for each item of `list`, the parameter is the current processing item, and a Promise must be returned to determine whether to continue the iteration
 * @return {Promise} - Returns a Promise value to confirm whether all data iteration is completed
 */
/**
 * Run `asyncHandle` over every item of `list`, with at most `limit`
 * handlers in flight at any moment.
 *
 * @param {Array}    list        items to process
 * @param {Number}   limit       maximum number of concurrent handlers
 * @param {Function} asyncHandle handler per item; must return a Promise
 * @return {Promise} resolves once every item has been processed,
 *                   rejects on the first handler rejection
 */
export function concurrentExecution(list, limit, asyncHandle) {
  // Work on a copy so the caller's array is not consumed.
  const queue = [].concat(list)
  // A worker repeatedly shifts the next item off the shared queue until the
  // queue is drained. Checking emptiness BEFORE shifting (unlike the
  // original) also prevents asyncHandle(undefined) on an empty queue.
  const worker = () => {
    if (queue.length === 0) {
      return Promise.resolve('finish')
    }
    return asyncHandle(queue.shift()).then(() => worker())
  }
  // Clamp: never more workers than items, and at least one when there is
  // work (the original silently resolved without doing anything if limit <= 0).
  const workers = Math.min(Math.max(1, limit), queue.length)
  const running = []
  for (let i = 0; i < workers; i++) {
    running.push(worker())
  }
  // Resolves when every worker has drained its share of the queue.
  return Promise.all(running)
}

This concludes this article on Vue large-file chunked upload (resumable upload, concurrent upload, instant upload).

Reference article: http://blog.ncmem.com/wordpress/2023/11/07/vue-Large file segmented upload, breakpoint resume, concurrent upload, and second upload/
Welcome to join the group to discuss