Back-end development: the advantages of PreparedStatement and how to implement efficient batch inserts
Posted cls1277
Most of the key points are in the comments in the code.
PreparedStatement improves on Statement in three ways:
1. It can read and write BLOB data in the database.
2. It supports efficient batch inserts.
3. It prevents SQL injection, because the SQL is precompiled (see the short sketch right after this list).
There are a few small details in this part; they are spelled out in the code comments below.
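The first two points are demonstrated by the BlobInsert class below; the third one is not, so here is a minimal sketch of the idea. The users table, its name and password columns, and the InjectionDemo class are assumptions made only for illustration and are not part of the original project.

package com.cls1277.insertblob;

import com.cls1277.utils.JDBCutils;
import java.sql.*;

public class InjectionDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical table and columns: users(name, password)
        Connection conn = JDBCutils.getConnection();

        String name = "anyone";
        String pwd  = "1' or '1' = '1";   // malicious "password" input

        // Statement: the input is concatenated into the SQL text, so the WHERE clause becomes
        //   (name = 'anyone' and password = '1') or '1' = '1'
        // which is always true -> SQL injection succeeds.
        Statement st = conn.createStatement();
        ResultSet rs1 = st.executeQuery(
                "select * from users where name = '" + name + "' and password = '" + pwd + "'");
        System.out.println("Statement matched a row: " + rs1.next());

        // PreparedStatement: the SQL is precompiled with ? placeholders and the input is bound
        // purely as data, so it cannot change the structure of the statement.
        PreparedStatement ps = conn.prepareStatement(
                "select * from users where name = ? and password = ?");
        ps.setString(1, name);
        ps.setString(2, pwd);
        ResultSet rs2 = ps.executeQuery();
        System.out.println("PreparedStatement matched a row: " + rs2.next());

        rs1.close();
        rs2.close();
        st.close();
        ps.close();
        conn.close();
    }
}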
package com.cls1277.insertblob;

import com.cls1277.preparedstatement.customer.Customer;
import com.cls1277.utils.JDBCutils;
import org.junit.Test;

import java.io.*;
import java.sql.*;
public class BlobInsert {

    @Test
    public void testInsert() {
        Connection conn = null;
        PreparedStatement ps = null;
        try {
            conn = JDBCutils.getConnection();
            String sql = "insert into customers(name, email, birth, photo) values(?,?,?,?)";
            ps = conn.prepareStatement(sql);
            ps.setObject(1, "曹老师");
            ps.setObject(2, "cls1277@163.com");
            ps.setObject(3, "2002-06-10");
            // set the Blob column from a file input stream
            FileInputStream fis = new FileInputStream(new File("E:/cls1277/Free/image.jpg"));
            ps.setBlob(4, fis);
            ps.execute();
            fis.close();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            JDBCutils.closeResource(conn, ps);
        }
    }
    @Test
    public void testQueryBlob() {
        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        InputStream is = null;
        FileOutputStream fos = null;
        try {
            conn = JDBCutils.getConnection();
            String sql = "select id,name,email,birth,photo from customers where id = ?";
            ps = conn.prepareStatement(sql);
            ps.setObject(1, 21);
            rs = ps.executeQuery();
            if (rs.next()) {
                // Option 1: read the columns by index
                int id = rs.getInt(1);
                String name = rs.getString(2);
                String email = rs.getString(3);
                Date birth = rs.getDate(4);
                // Option 2 (slightly better): read the columns by name
                // int id1 = rs.getInt("id");
                // String name1 = rs.getString("name");
                // String email1 = rs.getString("email");
                // Date birth1 = rs.getDate("birth");
                Customer customer = new Customer(id, name, email, birth);
                System.out.println(customer);
                // save the photo column to a local file
                Blob photo = rs.getBlob("photo");
                // A large image may fit the Blob column type yet still exceed max_allowed_packet
                // on the server; the default is 4M in older versions and 64M in MySQL 8.0.
                is = photo.getBinaryStream();
                fos = new FileOutputStream(new File("save.jpg"));
                byte[] buffer = new byte[1024];
                int len;
                while ((len = is.read(buffer)) != -1) {
                    fos.write(buffer, 0, len);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            JDBCutils.closeResource(conn, ps, rs);
            try {
                if (is != null)
                    is.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            try {
                if (fos != null)
                    fos.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
    // Batch insert with PreparedStatement: the SQL is precompiled once,
    // but each row is still sent to the server individually
    @Test
    public void testMultiInsert() {
        Connection conn = null;
        PreparedStatement ps = null;
        try {
            conn = JDBCutils.getConnection();
            String sql = "insert into customers(name) values(?)";
            ps = conn.prepareStatement(sql);
            for (int i = 0; i < 20000; i++) {
                ps.setObject(1, "name_" + i);
                ps.execute();
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            JDBCutils.closeResource(conn, ps);
        }
    }
    // Optimized batch insert with PreparedStatement
    @Test
    public void testMultiInsert2() {
        Connection conn = null;
        PreparedStatement ps = null;
        try {
            conn = JDBCutils.getConnection();
            String sql = "insert into customers(name) values(?)";
            ps = conn.prepareStatement(sql);
            for (int i = 0; i < 20000; i++) {
                ps.setObject(1, "name_" + i);
                // 1. accumulate the statement in the batch
                ps.addBatch();
                if ((i + 1) % 500 == 0) {
                    // 2. once 500 rows have accumulated, execute them in one go
                    ps.executeBatch();
                    // 3. after executing, clear the batch
                    ps.clearBatch();
                }
            }
            // 4. flush the remainder in case the total is not a multiple of the batch size
            ps.executeBatch();
            ps.clearBatch();
            // If batching has no effect, append ?rewriteBatchedStatements=true to the url in the configuration file.
            // If it still does not work, switch to a newer MySQL driver.
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            JDBCutils.closeResource(conn, ps);
        }
    }
    // Further optimized batch insert: the most efficient version
    @Test
    public void testMultiInsert3() {
        Connection conn = null;
        PreparedStatement ps = null;
        try {
            conn = JDBCutils.getConnection();
            // Optimization: turn off auto-commit so each batch is not committed on its own
            conn.setAutoCommit(false);
            String sql = "insert into customers(name) values(?)";
            ps = conn.prepareStatement(sql);
            for (int i = 0; i < 20000; i++) {
                ps.setObject(1, "name_" + i);
                // 1. accumulate the statement in the batch
                ps.addBatch();
                if ((i + 1) % 500 == 0) {
                    ps.executeBatch();
                    ps.clearBatch();
                }
            }
            ps.executeBatch();
            ps.clearBatch();
            // commit everything at the end in a single transaction
            conn.commit();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            JDBCutils.closeResource(conn, ps);
        }
    }
}
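All of the tests above rely on a JDBCutils helper that the post does not show. The sketch below is one possible minimal implementation, assuming a jdbc.properties file on the classpath and a plain DriverManager connection; the property names (driverClass, url, user, password), the file name, and the closeResource overloads are assumptions rather than the original author's code. The url line also shows where rewriteBatchedStatements=true goes when the MySQL driver does not rewrite batches by default.

package com.cls1277.utils;

import java.io.InputStream;
import java.sql.*;
import java.util.Properties;

// Minimal sketch of the JDBCutils helper used above (assumed implementation)
public class JDBCutils {

    public static Connection getConnection() throws Exception {
        // jdbc.properties is assumed to sit on the classpath, for example:
        //   driverClass=com.mysql.cj.jdbc.Driver
        //   url=jdbc:mysql://localhost:3306/test?rewriteBatchedStatements=true
        //   user=root
        //   password=123456
        InputStream is = JDBCutils.class.getClassLoader().getResourceAsStream("jdbc.properties");
        Properties props = new Properties();
        props.load(is);

        Class.forName(props.getProperty("driverClass"));
        return DriverManager.getConnection(
                props.getProperty("url"),
                props.getProperty("user"),
                props.getProperty("password"));
    }

    public static void closeResource(Connection conn, Statement ps) {
        closeResource(conn, ps, null);
    }

    public static void closeResource(Connection conn, Statement ps, ResultSet rs) {
        try { if (rs != null) rs.close(); } catch (SQLException e) { e.printStackTrace(); }
        try { if (ps != null) ps.close(); } catch (SQLException e) { e.printStackTrace(); }
        try { if (conn != null) conn.close(); } catch (SQLException e) { e.printStackTrace(); }
    }
}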