Batch Upload with jQuery and FormData

The project needs a batch-upload feature for its question bank. A FormData object is used to upload the file, which the backend reads and inserts in batch. Two extra parameters, teacherId and courseId, also have to be sent, so the file and both parameters are appended to the FormData object and posted to the backend.

The function executed by the jQuery submit button is as follows:

<script type="text/javascript">
    // Batch upload of the question bank
    function fileSubmit() {
        var questionFile = new FormData();
        // Get the file object; "questionFile" is the id of the file input
        var fileObj = document.getElementById("questionFile").files[0];
        questionFile.append("file", fileObj);
        var teacherId = localStorage.getItem("teacherId");
        questionFile.append("teacherId", teacherId);
        var courseId = localStorage.getItem("courseId");
        questionFile.append("courseId", courseId);
        $.ajax({
            async: false,
            type: "post",
            url: "/questions/batchUpload",
            data: questionFile,
            processData: false, // must be false so jQuery leaves the FormData untouched
            contentType: false, // must be false so the browser sets the multipart Content-Type itself
            success: function (data) {
                layer.msg("Upload succeeded");
                example.ajax.reload();
            }
        });
    }
</script>

Two points are worth noting:

  • processData is set to false in the jQuery ajax call, so jQuery does not try to transform the FormData into a query string.
  • contentType is set to false, because the data is already a FormData object and the browser must add the multipart Content-Type (including the boundary) itself.

The Controller method is as follows:

@ApiOperation(value = "Batch upload of the question bank")
@RequestMapping(value = "/batchUpload", method = RequestMethod.POST)
public void batchUploadQuestions(HttpServletRequest request) throws Exception {
    Collection<Part> files = request.getParts();
    questionsService.batchUploadQuestions(files);
}
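
For comparison, the same endpoint could also rely on Spring MVC's multipart binding instead of reading raw Parts, so that teacherId and courseId arrive as plain request parameters rather than extra file parts. This is only a sketch of an alternative, not the project's code: MultipartFile and @RequestParam are standard Spring MVC, but the service overload it calls is hypothetical and would have to be added.

@ApiOperation(value = "Batch upload of the question bank (multipart binding variant)")
@RequestMapping(value = "/batchUpload", method = RequestMethod.POST)
public void batchUploadQuestions(@RequestParam("file") MultipartFile file,
                                 @RequestParam("teacherId") String teacherId,
                                 @RequestParam("courseId") String courseId) throws Exception {
    // Spring parses the multipart request, so the text fields no longer have to be
    // recovered from Part input streams in the service layer.
    questionsService.batchUploadQuestions(file, teacherId, courseId); // hypothetical overload
}

The jQuery call shown above would stay the same either way, since FormData already sends file, teacherId, and courseId as named parts.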

The Service method is as follows:

// Batch upload of the question bank
@Override
public void batchUploadQuestions(Collection<Part> files) throws Exception {
    Iterator<Part> it = files.iterator();
    Part file = it.next();
    Workbook workbook = null;
    if (file.getSubmittedFileName().endsWith("xlsx")) {
        workbook = new XSSFWorkbook(file.getInputStream());
    } else if (file.getSubmittedFileName().endsWith("xls")) {
        workbook = new HSSFWorkbook(file.getInputStream());
    }
    Cell cell = null;
    List<Questions> questionsList = new ArrayList<>();
    // The workbook may contain several sheets; by convention only the first one is used here
    Sheet sheet = workbook.getSheetAt(0);
    // Rows 0 and 1 are header rows, so the data starts at row index 2
    for (int rowIndex = 2; rowIndex <= sheet.getLastRowNum(); rowIndex++) {
        Row row = sheet.getRow(rowIndex);
        if (row == null) {
            continue;
        }
        Questions questions = new Questions();
        List<String> strList = new ArrayList<>();
        // Column 0 holds a sequence number that is not stored, so reading starts at column 1
        for (int i = 1; i < row.getLastCellNum(); i++) {
            cell = row.getCell(i);
            String value = "";
            switch (cell.getCellTypeEnum()) {
                case _NONE:
                    break;
                case STRING:
                    value = cell.getStringCellValue();
                    break;
                case NUMERIC:
                    Pattern points_ptrn = Pattern.compile("0.0+_*[^\\s]+");
                    if (DateUtil.isCellDateFormatted(cell)) { // date cell
                        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
                        value = sdf.format(DateUtil.getJavaDate(cell.getNumericCellValue()));
                    } else if ("@".equals(cell.getCellStyle().getDataFormatString())
                            || "General".equals(cell.getCellStyle().getDataFormatString())
                            || "0_".equals(cell.getCellStyle().getDataFormatString())) {
                        // text, general, or integer format
                        DecimalFormat df = new DecimalFormat("0");
                        value = df.format(cell.getNumericCellValue());
                    } else if (points_ptrn.matcher(cell.getCellStyle().getDataFormatString()).matches()) {
                        // decimal format matched by the regex: keep the numeric value as-is
                        value = String.valueOf(cell.getNumericCellValue());
                    }
                    break;
                default:
                    value = cell.toString();
            }
            // Columns 2 and 3 are mandatory; if either is empty the whole row is skipped
            if ((i == 2 || i == 3) && value.equals("")) {
                strList.clear();
                break;
            }
            strList.add(value);
        }
        if (strList.size() == 9) {
            // Map the nine cells to the entity fields
            questions.setChapter(strList.get(0));
            questions.setSection(strList.get(1));
            questions.setType(strList.get(2));
            questions.setQuestion(strList.get(3));
            questions.setAnswerA(strList.get(4));
            questions.setAnswerB(strList.get(5));
            questions.setAnswerC(strList.get(6));
            questions.setAnswerD(strList.get(7));
            questions.setAnswerTrue(strList.get(8));
            questionsList.add(questions);
        }
    }

    // The teacherId appended on the front end arrives as another Part and is read as text
    Part file1 = it.next();
    InputStream inputStream = file1.getInputStream();
    BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
    String line = null;
    String teacherId = "";
    while ((line = bufferedReader.readLine()) != null) {
        teacherId = line;
    }

    // The courseId Part is read the same way
    Part file2 = it.next();
    InputStream inputStream1 = file2.getInputStream();
    BufferedReader bufferedReader1 = new BufferedReader(new InputStreamReader(inputStream1));
    String line1 = null;
    String courseId = "";
    while ((line1 = bufferedReader1.readLine()) != null) {
        courseId = line1;
    }
    batchSaveQuestionList(teacherId, courseId, questionsList);
}
 
 
// Build the INSERT statement by string concatenation and hand it to the DAO layer
public void batchSaveQuestionList(String teacherId, String courseId, List<Questions> questionsList) {
    if (questionsList.isEmpty()) {
        return; // nothing to insert; avoids sending an incomplete statement to the DAO
    }
    String sql = "replace into questions(questionId,courseId,teacherId,chapter,section,type,question,answerA,answerB,answerC,answerD,answerTrue) values";
    for (int i = 0; i < questionsList.size(); i++) {
        String questionId = String.valueOf(System.currentTimeMillis()) + i;
        if (i == 0) {
            sql += "('" + questionId + "','" + courseId + "','" + teacherId + "','" + questionsList.get(i).getChapter() + "','" + questionsList.get(i).getSection()
                    + "','" + questionsList.get(i).getType() + "','" + questionsList.get(i).getQuestion() + "','" + questionsList.get(i).getAnswerA()
                    + "','" + questionsList.get(i).getAnswerB() + "','" + questionsList.get(i).getAnswerC() + "','" + questionsList.get(i).getAnswerD()
                    + "','" + questionsList.get(i).getAnswerTrue() + "')";
        } else {
            sql += ",('" + questionId + "','" + courseId + "','" + teacherId + "','" + questionsList.get(i).getChapter() + "','" + questionsList.get(i).getSection()
                    + "','" + questionsList.get(i).getType() + "','" + questionsList.get(i).getQuestion() + "','" + questionsList.get(i).getAnswerA()
                    + "','" + questionsList.get(i).getAnswerB() + "','" + questionsList.get(i).getAnswerC() + "','" + questionsList.get(i).getAnswerD()
                    + "','" + questionsList.get(i).getAnswerTrue() + "')";
        }
    }
    questionsDao.batchSaveQuestionList(sql);
}
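
Since teacherId and courseId travel as ordinary text Parts, the two read loops above can be collapsed into one small helper. This is only a sketch assuming Java 9+ (for InputStream.readAllBytes); the helper name partAsString is invented here and is not part of the project.

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import javax.servlet.http.Part;

// Hypothetical helper: read the whole body of a text Part into a String
private static String partAsString(Part part) throws IOException {
    try (InputStream in = part.getInputStream()) {
        return new String(in.readAllBytes(), StandardCharsets.UTF_8).trim();
    }
}

// Usage inside batchUploadQuestions:
// String teacherId = partAsString(it.next());
// String courseId  = partAsString(it.next());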

The insert statement in the DAO layer:

@Insert("${sql}")
void batchSaveQuestionList(@Param("sql") String sql);
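
One caveat with @Insert("${sql}"): ${} splices the concatenated string into the statement verbatim, so nothing is escaped and a single quote inside a spreadsheet cell will break the SQL. A parameterized variant could use MyBatis's <script>/<foreach> support instead. The mapper below is a sketch based on the entity fields used above, not the project's actual DAO; it assumes Questions also exposes a questionId property that the service sets before calling the mapper.

@Insert({"<script>",
        "replace into questions(questionId, courseId, teacherId, chapter, section, type,",
        "  question, answerA, answerB, answerC, answerD, answerTrue) values",
        "<foreach collection='list' item='q' separator=','>",
        "  (#{q.questionId}, #{courseId}, #{teacherId}, #{q.chapter}, #{q.section}, #{q.type},",
        "   #{q.question}, #{q.answerA}, #{q.answerB}, #{q.answerC}, #{q.answerD}, #{q.answerTrue})",
        "</foreach>",
        "</script>"})
void batchSaveQuestionList(@Param("teacherId") String teacherId,
                           @Param("courseId") String courseId,
                           @Param("list") List<Questions> questionsList);

With this form the values are bound as prepared-statement parameters, so the string concatenation in the service layer is no longer needed.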

With that, batch upload is complete. Note that the uploaded items are handled here as the Part type.

That's all for this article; hopefully it helps with your studies.
