SQL Server Batch Insert: A Worked Example

Date: 2021-09-26 02:43:23

In SQL Server, a single row is inserted with an INSERT statement. When a large number of rows has to be loaded, however, looping over INSERT is not only inefficient, it can also cause performance problems for the server as a whole. This article walks through two batch-insert mechanisms that SQL Server supports for loading data efficiently: bulk copy (SqlBulkCopy) and table-valued parameters (TVPs).
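The C# snippets later in the article are standalone methods taken from a console program. The scaffold below is my own minimal sketch of how they can be hosted, not part of the original: it assumes the classic System.Data.SqlClient provider and the server=./sa/123456 connection string used throughout the article, and the method bodies from sections 1 to 3 are pasted into this class.

using System;
using System.Data;            // DataTable, DataRow, SqlDbType, CommandType
using System.Data.SqlClient;  // SqlConnection, SqlCommand, SqlBulkCopy (classic provider)
using System.Diagnostics;     // Stopwatch

namespace BulkInsertDemo
{
    class Program
    {
        static void Main(string[] args)
        {
            // Run one test at a time. Each test inserts ids 0..999, so the
            // table must be emptied (truncate table bulktesttable) before
            // switching to another test, or the primary key will reject the rows.
            SimpleInsert();   // section 1: row-by-row INSERT
            //BulkInsert();   // section 2: SqlBulkCopy
            //TvpsInsert();   // section 3: table-valued parameters
        }

        // SimpleInsert, BulkToDB, GetTableSchema, BulkInsert,
        // TableValuedToDB and TvpsInsert from the sections below go here.
    }
}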

Create the database:

--create database
create database bulktestdb;
go
use bulktestdb;
go
--create table
create table bulktesttable(
id int primary key,
username nvarchar(32),
pwd varchar(16))
go

1. The traditional INSERT approach

First, the traditional approach: inserting rows one at a time with INSERT. The cost keeps growing and each batch is slower than the last.

//Insert rows one at a time with plain INSERT statements [slow]
#region [ SimpleInsert ]
static void SimpleInsert()
{
    Console.WriteLine("Inserting rows one at a time with plain INSERT statements");
    Stopwatch sw = new Stopwatch();
    SqlConnection sqlConn = new SqlConnection("server=.;database=bulktestdb;user=sa;password=123456;");
    SqlCommand sqlCmd = new SqlCommand();
    sqlCmd.CommandText = "insert into bulktesttable(id,username,pwd) values(@p0,@p1,@p2)";
    sqlCmd.Parameters.Add("@p0", SqlDbType.Int);
    sqlCmd.Parameters.Add("@p1", SqlDbType.NVarChar);
    sqlCmd.Parameters.Add("@p2", SqlDbType.VarChar);
    sqlCmd.CommandType = CommandType.Text;
    sqlCmd.Connection = sqlConn;
    sqlConn.Open();
    try
    {
        //Insert 1000 rows in total: 10 batches of 100 rows each.
        for (int multiply = 0; multiply < 10; multiply++)
        {
            for (int count = multiply * 100; count < (multiply + 1) * 100; count++)
            {
                sqlCmd.Parameters["@p0"].Value = count;
                sqlCmd.Parameters["@p1"].Value = string.Format("user-{0}", count * multiply);
                sqlCmd.Parameters["@p2"].Value = string.Format("pwd-{0}", count * multiply);
                sw.Start();
                sqlCmd.ExecuteNonQuery();
                sw.Stop();
            }
            //After each batch of 100 rows, print the cumulative insert time.
            Console.WriteLine(string.Format("Elapsed time is {0} milliseconds", sw.ElapsedMilliseconds));
        }
        Console.ReadKey();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
}
#endregion

Inserting 1000 rows as 10 batches of 100, each batch takes longer than the one before.
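Much of that cost comes from each ExecuteNonQuery being a separate round trip committed in its own implicit transaction. As a small aside that is not part of the original article, wrapping a batch of statements in one explicit SqlTransaction already trims some of that overhead, though it still cannot compete with the bulk methods below; a minimal sketch for one batch of 100 rows:

//Sketch only: same row-by-row INSERT, but one explicit transaction per batch
//instead of one implicit transaction per statement.
using (SqlConnection conn = new SqlConnection("server=.;database=bulktestdb;user=sa;password=123456;"))
{
    conn.Open();
    SqlTransaction tx = conn.BeginTransaction();
    SqlCommand cmd = new SqlCommand(
        "insert into bulktesttable(id,username,pwd) values(@p0,@p1,@p2)", conn, tx);
    cmd.Parameters.Add("@p0", SqlDbType.Int);
    cmd.Parameters.Add("@p1", SqlDbType.NVarChar, 32);
    cmd.Parameters.Add("@p2", SqlDbType.VarChar, 16);
    for (int count = 0; count < 100; count++)
    {
        cmd.Parameters["@p0"].Value = count;
        cmd.Parameters["@p1"].Value = "user-" + count;
        cmd.Parameters["@p2"].Value = "pwd-" + count;
        cmd.ExecuteNonQuery();
    }
    tx.Commit();   //a single commit for the whole batch
}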


2. The faster approach: bulk insert with SqlBulkCopy

Using bulk insert [faster]:

//Insert with SqlBulkCopy [faster]
#region [ BulkInsert ]
static void BulkToDB(DataTable dt)
{
    SqlConnection sqlConn = new SqlConnection("server=.;database=bulktestdb;user=sa;password=123456;");
    SqlBulkCopy bulkCopy = new SqlBulkCopy(sqlConn);
    bulkCopy.DestinationTableName = "bulktesttable";
    bulkCopy.BatchSize = dt.Rows.Count;
    try
    {
        sqlConn.Open();
        if (dt != null && dt.Rows.Count != 0)
        {
            bulkCopy.WriteToServer(dt);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
    finally
    {
        sqlConn.Close();
        if (bulkCopy != null)
        {
            bulkCopy.Close();
        }
    }
}
//Builds an empty DataTable whose columns match bulktesttable.
static DataTable GetTableSchema()
{
    DataTable dt = new DataTable();
    dt.Columns.AddRange(new DataColumn[] {
        new DataColumn("id", typeof(int)),
        new DataColumn("username", typeof(string)),
        new DataColumn("pwd", typeof(string))
    });
    return dt;
}
static void BulkInsert()
{
    Console.WriteLine("Inserting with SqlBulkCopy");
    Stopwatch sw = new Stopwatch();
    for (int multiply = 0; multiply < 10; multiply++)
    {
        //Build a 100-row DataTable, then push it to the server in a single bulk copy.
        DataTable dt = GetTableSchema();
        for (int count = multiply * 100; count < (multiply + 1) * 100; count++)
        {
            DataRow r = dt.NewRow();
            r[0] = count;
            r[1] = string.Format("user-{0}", count * multiply);
            r[2] = string.Format("pwd-{0}", count * multiply);
            dt.Rows.Add(r);
        }
        sw.Start();
        BulkToDB(dt);
        sw.Stop();
        Console.WriteLine(string.Format("Elapsed time is {0} milliseconds", sw.ElapsedMilliseconds));
    }
}
#endregion

Inserting the same 1000 rows as 10 batches of 100, this is already much faster.
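One detail worth knowing about SqlBulkCopy: when no column mappings are given, as in the code above, source and destination columns are matched by ordinal position, so the DataTable built by GetTableSchema must keep the same column order as bulktesttable. Adding explicit name-to-name mappings, plus using-blocks for disposal, makes the intent clearer; the helper below is a sketch of my own (not in the original), reusing the same names:

//Sketch: same bulk copy as BulkToDB, with explicit column mappings and
//deterministic disposal via using blocks.
static void BulkToDBWithMappings(DataTable dt)   //hypothetical variant, not in the original
{
    using (SqlConnection sqlConn = new SqlConnection("server=.;database=bulktestdb;user=sa;password=123456;"))
    using (SqlBulkCopy bulkCopy = new SqlBulkCopy(sqlConn))
    {
        bulkCopy.DestinationTableName = "bulktesttable";
        bulkCopy.BatchSize = dt.Rows.Count;
        //Map source DataTable columns to destination columns by name,
        //so the copy no longer depends on column order.
        bulkCopy.ColumnMappings.Add("id", "id");
        bulkCopy.ColumnMappings.Add("username", "username");
        bulkCopy.ColumnMappings.Add("pwd", "pwd");
        sqlConn.Open();
        if (dt != null && dt.Rows.Count != 0)
        {
            bulkCopy.WriteToServer(dt);
        }
    }
}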


3. Inserting data with table-valued parameters (TVPs)

Open SQL Server and run the following script:

--create table type
create type bulkudt as table
  (id int,
   username nvarchar(32),
   pwd varchar(16))


Once the script succeeds, the database contains a new user-defined table type named bulkudt (in SSMS it appears under Programmability > Types > User-Defined Table Types).

Inserting data with TVPs:

//Insert with table-valued parameters (TVPs) [fastest]
#region [ TvpsInsert ]
static void TableValuedToDB(DataTable dt)
{
    SqlConnection sqlConn = new SqlConnection("server=.;database=bulktestdb;user=sa;password=123456;");
    const string tsqlStatement =
          "insert into bulktesttable (id,username,pwd)" +
          " select nc.id, nc.username, nc.pwd" +
          " from @newbulktesttvp as nc";
    SqlCommand cmd = new SqlCommand(tsqlStatement, sqlConn);
    //Pass the whole DataTable as a single structured (table-valued) parameter.
    SqlParameter catParam = cmd.Parameters.AddWithValue("@newbulktesttvp", dt);
    catParam.SqlDbType = SqlDbType.Structured;
    catParam.TypeName = "dbo.bulkudt";
    try
    {
        sqlConn.Open();
        if (dt != null && dt.Rows.Count != 0)
        {
            cmd.ExecuteNonQuery();
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("error > " + ex.Message);
    }
    finally
    {
        sqlConn.Close();
    }
}
static void TvpsInsert()
{
    Console.WriteLine("Inserting with table-valued parameters (TVPs)");
    Stopwatch sw = new Stopwatch();
    for (int multiply = 0; multiply < 10; multiply++)
    {
        DataTable dt = GetTableSchema();
        for (int count = multiply * 100; count < (multiply + 1) * 100; count++)
        {
            DataRow r = dt.NewRow();
            r[0] = count;
            r[1] = string.Format("user-{0}", count * multiply);
            r[2] = string.Format("pwd-{0}", count * multiply);
            dt.Rows.Add(r);
        }
        sw.Start();
        TableValuedToDB(dt);
        sw.Stop();
        Console.WriteLine(string.Format("Elapsed time is {0} milliseconds", sw.ElapsedMilliseconds));
    }
    Console.ReadLine();
}
#endregion


Again 1000 rows are inserted as 10 batches of 100. In later tests, increasing the number of rows per batch shows the efficiency advantage of TVP inserts even more clearly.
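For completeness, and not covered by the original article, a TVP does not have to be fed from a DataTable: ADO.NET also accepts an IEnumerable<SqlDataRecord>, which streams rows to the server without first materializing them all in memory. The helper name below is my own; the sketch assumes the same dbo.bulkudt type and requires the Microsoft.SqlServer.Server and System.Collections.Generic namespaces in addition to the ones listed earlier.

//Sketch: stream rows into the TVP with SqlDataRecord instead of a DataTable.
static IEnumerable<SqlDataRecord> BuildRows(int start, int count)   //hypothetical helper
{
    SqlMetaData[] meta = new SqlMetaData[]
    {
        new SqlMetaData("id", SqlDbType.Int),
        new SqlMetaData("username", SqlDbType.NVarChar, 32),
        new SqlMetaData("pwd", SqlDbType.VarChar, 16)
    };
    for (int i = start; i < start + count; i++)
    {
        SqlDataRecord rec = new SqlDataRecord(meta);
        rec.SetInt32(0, i);
        rec.SetString(1, "user-" + i);
        rec.SetString(2, "pwd-" + i);
        yield return rec;   //rows are produced lazily, one at a time
    }
}

//Usage: identical to TableValuedToDB, only the parameter value changes.
//SqlParameter p = cmd.Parameters.AddWithValue("@newbulktesttvp", BuildRows(0, 1000));
//p.SqlDbType = SqlDbType.Structured;
//p.TypeName = "dbo.bulkudt";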

That concludes this worked example of batch inserting data in SQL Server. For more on the topic, search the earlier articles on 服务器之家 or continue with the related articles below.

Original article: https://blog.csdn.net/heyangyi_19940703/article/details/51981731