Technical: Microsoft – SQL Server – v2014 (In Memory Table) – Benchmark Bulk Inserts

 

 

Create Tables

 

Create traditional table


use [DBLabInMemory]
go

set noexec off
go


/*
	drop table [dbo].[personTraditionalClustered]
*/
if object_id('[dbo].[personTraditionalClustered]') is not null
begin
	set noexec on
end
go



create table [dbo].[personTraditionalClustered]
(

    [firstname] [varchar](40)
        COLLATE Latin1_General_100_BIN2 NOT NULL
   , [lastname] [varchar](40)
        COLLATE Latin1_General_100_BIN2 NOT NULL
   , [emailAddress] [varchar](40)
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [phoneNumber] [varchar](40) 
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [addressStreet] [varchar](100) 
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [addressCity] [varchar](40) 
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [addressState] [varchar](40) 
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [addressPostalCode]   [varchar](40) 
        COLLATE Latin1_General_100_BIN2 NOT NULL

  , [dateofBirth] datetime null


  , [uniqueID]	uniqueIdentifier not null
	 constraint defaultPersonTraditionalUniqueID 
			default newid()	


, constraint PK_PersonTraditional
	primary key ([uniqueID])



, index idx_Firstname nonclustered
	([firstname])

, index idx_Lastname nonclustered
	([lastname])

, index idx_AddressCity nonclustered
	([addressCity])


, index idx_AddressState nonclustered
	([addressState])

, index idx_AddressPostalCode nonclustered
	([addressPostalCode])

, index idx_EmailAddress nonclustered
	([emailAddress])

, index idx_DateofBirth nonclustered
	([dateofBirth])
	

)
go

set noexec off
go
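As an optional sanity check, the table and the indexes just created can be confirmed from the catalog views; a minimal sketch:

select
      tbl.name       as tableName
    , idx.name       as indexName
    , idx.type_desc  as indexType
from  sys.indexes idx
        inner join sys.tables tbl
            on idx.object_id = tbl.object_id
where tbl.name = 'personTraditionalClustered'
order by idx.index_id
go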



 

 

 

Create in-memory table


use [DBLabInMemory]
go

set noexec off
go


/*
	drop table [dbo].[personInMemory]
*/
if object_id('[dbo].[personInMemory]') is not null
begin
	set noexec on
end
go



create table [dbo].[personInMemory]
(

    [firstname] [varchar](40)
        COLLATE Latin1_General_100_BIN2 NOT NULL
   , [lastname] [varchar](40)
        COLLATE Latin1_General_100_BIN2 NOT NULL
   , [emailAddress] [varchar](40)
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [phoneNumber] [varchar](40) 
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [addressStreet] [varchar](100) 
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [addressCity] [varchar](40) 
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [addressState] [varchar](40) 
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [addressPostalCode]   [varchar](40) 
        COLLATE Latin1_General_100_BIN2 NOT NULL

  , [dateofBirth] datetime null


  , [uniqueID]	uniqueIdentifier not null
	 constraint defaultPersonInMemoryUniqueID 
			default newid()	


, constraint PK_PersonInMemory
	primary key nonclustered hash ([uniqueID])
          WITH ( BUCKET_COUNT = 256)

, index idx_Firstname nonclustered
	([firstname])

, index idx_Lastname nonclustered
	([lastname])

, index idx_AddressCity nonclustered
	([addressCity])


, index idx_AddressState nonclustered
	([addressState])

, index idx_AddressPostalCode nonclustered
	([addressPostalCode])

, index idx_EmailAddress nonclustered
	([emailAddress])

, index idx_DateofBirth nonclustered
	([dateofBirth])
	

)
WITH
    ( 
         MEMORY_OPTIMIZED = ON 
       , DURABILITY = SCHEMA_AND_DATA 
    )
go

set noexec off
go
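As a quick sanity check (a sketch, most informative once the bulk load further down has completed), the hash primary key and its bucket usage can be inspected through the XTP DMVs:

select
      tbl.name                   as tableName
    , idx.name                   as indexName
    , stat.total_bucket_count
    , stat.empty_bucket_count
    , stat.avg_chain_length
    , stat.max_chain_length
from  sys.dm_db_xtp_hash_index_stats stat
        inner join sys.indexes idx
            on  stat.object_id = idx.object_id
            and stat.index_id  = idx.index_id
        inner join sys.tables tbl
            on idx.object_id = tbl.object_id
where tbl.name = 'personInMemory'
go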



 

 


Create in-memory / schema-only durable table



set noexec off
go

use [DBLabInMemory]
go


/*
	drop table [dbo].[personInMemoryDurableSchemaOnly]
*/


if object_id('[dbo].[personInMemoryDurableSchemaOnly]') is not null
begin
	set noexec on
end
go



create table [dbo].[personInMemoryDurableSchemaOnly]
(

    [firstname] [varchar](40)
        COLLATE Latin1_General_100_BIN2 NOT NULL
   , [lastname] [varchar](40)
        COLLATE Latin1_General_100_BIN2 NOT NULL
   , [emailAddress] [varchar](40)
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [phoneNumber] [varchar](40) 
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [addressStreet] [varchar](100) 
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [addressCity] [varchar](40) 
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [addressState] [varchar](40) 
        COLLATE Latin1_General_100_BIN2 NOT NULL
  , [addressPostalCode]   [varchar](40) 
        COLLATE Latin1_General_100_BIN2 NOT NULL

  , [dateofBirth] datetime null


  , [uniqueID]	uniqueIdentifier not null
	 constraint defaultPersonInMemorySchemaOnlyUniqueID 
			default newid()	


, constraint PK_PersonInMemorySchemaOnly
	primary key nonclustered hash ([uniqueID])
          WITH ( BUCKET_COUNT = 256)

, index idx_Firstname nonclustered
	([firstname])

, index idx_Lastname nonclustered
	([lastname])

, index idx_AddressCity nonclustered
	([addressCity])


, index idx_AddressState nonclustered
	([addressState])

, index idx_AddressPostalCode nonclustered
	([addressPostalCode])

, index idx_EmailAddress nonclustered
	([emailAddress])

, index idx_DateofBirth nonclustered
	([dateofBirth])
	

)
WITH
    ( 
         MEMORY_OPTIMIZED = ON 
       , DURABILITY = SCHEMA_ONLY
    )
go

set noexec off
go
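To confirm that the three tables carry the intended storage and durability settings, a quick catalog query (again, a sketch outside the benchmark itself):

select
      name
    , is_memory_optimized
    , durability_desc
from  sys.tables
where name in
      (
          'personTraditionalClustered'
        , 'personInMemory'
        , 'personInMemoryDurableSchemaOnly'
      )
go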


 

 

Bulk Copy data into traditional table



rem remove existing data from traditional table
timer /q /nologo
	sqlcmd -S (local) -d DBLabInMemory -E -Q "truncate table [dbo].[personTraditionalClustered]"
timer /s /n /nologo

rem populate data into traditional table
timer /q /nologo
	bcp [dbo].[personTraditionalClustered] in e:\tmp\datagenSimple.txt -f fileTemplate_v4.xml  -S (local) -d DBLabInMemory  -e errfile.log -b 50000 -T -F 2
timer /s /n /nologo
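For reference, roughly the same load can be issued from T-SQL via BULK INSERT; this is a sketch that assumes the XML format file sits alongside the data file in e:\tmp (the bcp invocation above uses a relative path for it):

bulk insert [dbo].[personTraditionalClustered]
from 'e:\tmp\datagenSimple.txt'
with
(
      formatfile = 'e:\tmp\fileTemplate_v4.xml'   -- assumed location of the XML format file
    , firstrow   = 2                              -- matches bcp -F 2 (skip the header row)
    , batchsize  = 50000                          -- matches bcp -b 50000
)
go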


 

Output:

 



bcp [dbo].[personTraditionalClustered] in e:\tmp\datagenSimple.txt -f fileTemplate_v4.xml -S (local) -d DBLabInMemory -e errfile.log -b 50000 -T -F 2

Starting copy...
50000 rows sent to SQL Server. Total sent: 50000
50000 rows sent to SQL Server. Total sent: 100000
50000 rows sent to SQL Server. Total sent: 150000
50000 rows sent to SQL Server. Total sent: 200000
50000 rows sent to SQL Server. Total sent: 250000
50000 rows sent to SQL Server. Total sent: 300000
50000 rows sent to SQL Server. Total sent: 350000
50000 rows sent to SQL Server. Total sent: 400000
50000 rows sent to SQL Server. Total sent: 450000
50000 rows sent to SQL Server. Total sent: 500000
50000 rows sent to SQL Server. Total sent: 550000
50000 rows sent to SQL Server. Total sent: 600000
50000 rows sent to SQL Server. Total sent: 650000
50000 rows sent to SQL Server. Total sent: 700000
50000 rows sent to SQL Server. Total sent: 750000
50000 rows sent to SQL Server. Total sent: 800000
50000 rows sent to SQL Server. Total sent: 850000
50000 rows sent to SQL Server. Total sent: 900000
50000 rows sent to SQL Server. Total sent: 950000
50000 rows sent to SQL Server. Total sent: 1000000
50000 rows sent to SQL Server. Total sent: 1050000
50000 rows sent to SQL Server. Total sent: 1100000
50000 rows sent to SQL Server. Total sent: 1150000

1160001 rows copied.
Network packet size (bytes): 4096
Clock Time (ms.) Total : 124177 Average : (9341.51 rows per sec.)

Total Time Taken = 0:02:05.08

 

 

 

Bulk Copy data into in-memory table


rem remove existing data from in-memory table
timer /q /nologo
	sqlcmd -S (local) -d DBLabInMemory -E -Q "truncate table [dbo].[personInMemory]"
timer /s /n /nologo

rem populate data into in-memory table
timer /q /nologo
	bcp [dbo].[personInMemory] in e:\tmp\datagenSimple.txt -f fileTemplate_v4.xml  -S (local) -d DBLabInMemory  -e errfile.log -b 50000 -T -F 2
timer /s /n /nologo


 

Output:



bcp [dbo].[personInMemory] in e:\tmp\datagenSimple.txt -f fileTemplate_v4.xml -S (local) -d DBLabInMemory -e errfile.log -b 50000 -T -F 2

Starting copy...
50000 rows sent to SQL Server. Total sent: 50000
50000 rows sent to SQL Server. Total sent: 100000
50000 rows sent to SQL Server. Total sent: 150000
50000 rows sent to SQL Server. Total sent: 200000
50000 rows sent to SQL Server. Total sent: 250000
50000 rows sent to SQL Server. Total sent: 300000
50000 rows sent to SQL Server. Total sent: 350000
50000 rows sent to SQL Server. Total sent: 400000
50000 rows sent to SQL Server. Total sent: 450000
50000 rows sent to SQL Server. Total sent: 500000
50000 rows sent to SQL Server. Total sent: 550000
50000 rows sent to SQL Server. Total sent: 600000
50000 rows sent to SQL Server. Total sent: 650000
50000 rows sent to SQL Server. Total sent: 700000
50000 rows sent to SQL Server. Total sent: 750000
50000 rows sent to SQL Server. Total sent: 800000
50000 rows sent to SQL Server. Total sent: 850000
50000 rows sent to SQL Server. Total sent: 900000
50000 rows sent to SQL Server. Total sent: 950000
50000 rows sent to SQL Server. Total sent: 1000000
50000 rows sent to SQL Server. Total sent: 1050000
50000 rows sent to SQL Server. Total sent: 1100000
50000 rows sent to SQL Server. Total sent: 1150000

1160001 rows copied.
Network packet size (bytes): 4096
Clock Time (ms.) Total : 822032 Average : (1411.14 rows per sec.)

Total Time taken = 0:13:42.81
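Once the load completes, the memory footprint of the table and its indexes can be read from the XTP memory DMV (a sketch, not part of the timed run):

select
      object_name(stat.object_id)       as tableName
    , stat.memory_used_by_table_kb
    , stat.memory_used_by_indexes_kb
from  sys.dm_db_xtp_table_memory_stats stat
where object_name(stat.object_id) = 'personInMemory'
go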

WaitStats:

[Screenshot: BCP-InMemory – wait statistics captured during the in-memory bulk load]

A lot of the waits are due to SQLCLR.
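The wait statistics above came from a monitoring screenshot; a minimal T-SQL approximation (a sketch, not the exact query used) is to look at the top accumulated waits in sys.dm_os_wait_stats around the load:

select top (10)
      wait_type
    , waiting_tasks_count
    , wait_time_ms
    , signal_wait_time_ms
from  sys.dm_os_wait_stats
where wait_time_ms > 0
order by wait_time_ms desc
go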

 

Bulk Copy data into in-memory (durable schema only) table


rem remove existing data from in-memory (durable schema only) table
timer /q /nologo
	sqlcmd -S (local) -d DBLabInMemory -E -Q "truncate table [dbo].[personInMemoryDurableSchemaOnly]"
timer /s /n /nologo

rem populate data into in-memory (durable schema only) table
timer /q /nologo
	bcp [dbo].[personInMemoryDurableSchemaOnly] in e:\tmp\datagenSimple.txt -f fileTemplate_v4.xml  -S (local) -d DBLabInMemory  -e errfile.log -b 50000 -T -F 2
timer /s /n /nologo


 

Output:



bcp [dbo].[personInMemoryDurableSchemaOnly] in e:\tmp\datagenSimple.txt -f fileTemplate_v4.xml -S (local) -d DBLabInMemory -e errfile.log -b 50000 -T -F 2

Starting copy...
50000 rows sent to SQL Server. Total sent: 50000
50000 rows sent to SQL Server. Total sent: 100000
50000 rows sent to SQL Server. Total sent: 150000
50000 rows sent to SQL Server. Total sent: 200000
50000 rows sent to SQL Server. Total sent: 250000
50000 rows sent to SQL Server. Total sent: 300000
50000 rows sent to SQL Server. Total sent: 350000
50000 rows sent to SQL Server. Total sent: 400000
50000 rows sent to SQL Server. Total sent: 450000
50000 rows sent to SQL Server. Total sent: 500000
50000 rows sent to SQL Server. Total sent: 550000
50000 rows sent to SQL Server. Total sent: 600000
50000 rows sent to SQL Server. Total sent: 650000
50000 rows sent to SQL Server. Total sent: 700000
50000 rows sent to SQL Server. Total sent: 750000
50000 rows sent to SQL Server. Total sent: 800000
50000 rows sent to SQL Server. Total sent: 850000
50000 rows sent to SQL Server. Total sent: 900000
50000 rows sent to SQL Server. Total sent: 950000
50000 rows sent to SQL Server. Total sent: 1000000
50000 rows sent to SQL Server. Total sent: 1050000
50000 rows sent to SQL Server. Total sent: 1100000
50000 rows sent to SQL Server. Total sent: 1150000

1160001 rows copied.
Network packet size (bytes): 4096
Clock Time (ms.) Total : 667700 Average : (1737.31 rows per sec.)

Total Time taken = 0:11:08.51

 

WaitStats:

[Screenshot: ResourceWaitsSchemaDurableOnly – wait statistics captured during the schema-only bulk load]

Again, a lot of the waits are due to SQLCLR, and there is no data-file counter activity.
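The "no data-file counters" observation can be cross-checked against the In-Memory OLTP checkpoint-file DMV (a sketch); with DURABILITY = SCHEMA_ONLY, the load should not drive data/delta file activity:

select
      file_type_desc
    , state_desc
    , count(*) as fileCount
from  sys.dm_db_xtp_checkpoint_files
group by
      file_type_desc
    , state_desc
order by
      file_type_desc
    , state_desc
go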

 

 

3rd Party Tools

The Timer utility from http://www.gammadyne.com/cmdline.htm#timer was used to time the command-line (sqlcmd and bcp) activities.

 

Repository

Github

The files are posted to GitHub in the same location as the earlier post; specifically, https://github.com/DanielAdeniji/MSSSQLBCPFormatFileXML

Summary

It appears that bulk inserts into in-memory tables (both SCHEMA_AND_DATA and SCHEMA_ONLY durability) are appreciably slower than bulk inserts into the traditional disk-based table. For the same 1,160,001-row file, the traditional table loaded in 124,177 ms (about 9,341 rows per second), the fully durable in-memory table in 822,032 ms (about 1,411 rows per second), and the schema-only durable in-memory table in 667,700 ms (about 1,737 rows per second).

 
