I want to use the Node.js mssql package to bulk insert data from the JSON below:
[
  {
    "name": "Tom",
    "registerDate": "2021-10-10 00:00:00",
    "gender": 0,
    "consumeRecord": [
      {
        "date": "2021-10-11 00:00:00",
        "price": 102.5
      },
      {
        "date": "2021-10-12 00:00:00",
        "price": 200
      }
    ]
  },
  {
    "name": "Mary",
    "registerDate": "2021-06-10 00:00:00",
    "gender": 1,
    "consumeRecord": [
      {
        "date": "2021-07-11 00:00:00",
        "price": 702.5
      },
      {
        "date": "2021-12-12 00:00:00",
        "price": 98.2
      }
    ]
  }
]
I am trying to bulk insert the member records, each of which has multiple consume records.
Is there any way to bulk insert a one-to-many relationship like the code below? It seems I first need to insert into the Member table and get back the generated id (primary key), and only then can I insert the related rows into the ConsumeRecord table.
const sql = require('mssql')

// member table
const memberTable = new sql.Table('Member')
memberTable.columns.add('name', sql.VarChar(50), { nullable: false })
memberTable.columns.add('registerDate', sql.VarChar(50), { nullable: false })
memberTable.columns.add('gender', sql.VarChar(50), { nullable: false })

// consume record table
const consumeTable = new sql.Table('ConsumeRecord')
consumeTable.columns.add('MemberId', sql.Int, { nullable: false })
consumeTable.columns.add('Date', sql.VarChar(50), { nullable: false })
consumeTable.columns.add('price', sql.Money, { nullable: false })

// build the rows
jsonList.forEach(data => {
  memberTable.rows.add(data.name, data.registerDate, data.gender)
  data.consumeRecord.forEach(record => {
    consumeTable.rows.add(data.memberId, record.date, record.price) // <---- should be the Member table id, but it is not known yet
  })
})

// bulk insert the member table
const memberRequest = new sql.Request()
memberRequest.bulk(memberTable, (err, result) => {
})

// bulk insert the consume record table
const consumeRequest = new sql.Request()
consumeRequest.bulk(consumeTable, (err, result) => {
})
Expected Record: Member Table
id (auto increment) | name | registerDate | gender |
---|---|---|---|
1 | Tom | 2021-10-10 00:00:00 | 0 |
2 | Mary | 2021-06-10 00:00:00 | 1 |
Consume Record Table
id | MemberId | Date | price |
---|---|---|---|
1 | 1 | 2021-10-11 00:00:00 | 102.5 |
2 | 1 | 2021-10-12 00:00:00 | 200 |
3 | 2 | 2021-07-11 00:00:00 | 702.5 |
4 | 2 | 2021-12-12 00:00:00 | 98.2 |
The best way to do this is to upload the whole thing to SQL Server in one batch, and make sure it inserts the correct foreign keys.

You have two options.

Option 1: use an OUTPUT clause to select the inserted Member IDs back to the client, then insert the ConsumeRecord rows with those IDs (a Node.js sketch of this approach follows the two options below).

Option 2 is a bit easier: do the whole thing in SQL, capturing the inserted IDs with an OUTPUT clause into a table variable.
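If you prefer Option 1, here is a minimal Node.js sketch of what it could look like (importMembers, the pool argument, and the column types are my assumptions, based on the table definitions shown further down). It uses the same MERGE ... OUTPUT trick as Option 2, because a plain INSERT cannot OUTPUT the JSON array index; the generated ids are read back to the client and then used to build the rows for request.bulk:

const sql = require('mssql')

// Inserts the members and returns each generated Member.Id together with
// the JSON array index it came from. A plain INSERT cannot OUTPUT the
// source's [key] column, hence the MERGE that never matches.
const insertMembersSql = `
DECLARE @ids TABLE (jsonIndex nvarchar(5) COLLATE Latin1_General_BIN2 NOT NULL, memberId int NOT NULL);
MERGE Member m
USING (
    SELECT j1.[key], j2.*
    FROM OPENJSON(@json) j1
    CROSS APPLY OPENJSON(j1.value)
    WITH (name varchar(50), registerDate datetime, gender tinyint) j2
) s
ON 1 = 0 -- never match, so every source row is inserted
WHEN NOT MATCHED THEN
    INSERT (name, registerDate, gender)
    VALUES (s.name, s.registerDate, s.gender)
OUTPUT s.[key], inserted.Id INTO @ids(jsonIndex, memberId);
SELECT jsonIndex, memberId FROM @ids;
`

async function importMembers(pool, jsonList) {
  // 1) insert the members and read the generated ids back to the client
  const result = await pool.request()
    .input('json', sql.NVarChar(sql.MAX), JSON.stringify(jsonList))
    .query(insertMembersSql)

  // map JSON array index -> generated Member.Id
  const idByIndex = new Map(result.recordset.map(r => [Number(r.jsonIndex), r.memberId]))

  // 2) bulk insert the consume records with the correct foreign keys
  const consumeTable = new sql.Table('ConsumeRecord')
  consumeTable.columns.add('MemberId', sql.Int, { nullable: false })
  consumeTable.columns.add('Date', sql.DateTime, { nullable: false })
  consumeTable.columns.add('price', sql.Decimal(9, 2), { nullable: false })

  jsonList.forEach((member, index) => {
    member.consumeRecord.forEach(record => {
      consumeTable.rows.add(idByIndex.get(index), new Date(record.date), record.price)
    })
  })

  await pool.request().bulk(consumeTable)
}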
Here are the table definitions:

CREATE TABLE Member(
Id int IDENTITY PRIMARY KEY,
name varchar(50),
registerDate datetime NOT NULL,
gender tinyint NOT NULL
);
CREATE TABLE ConsumeRecord(
MemberId Int NOT NULL REFERENCES Member (Id),
Date datetime not null,
price decimal(9,2)
);
Note the more sensible datatypes of the columns (datetime for the dates, tinyint for gender, decimal for the price).

The Option 2 import is then a single batch:
-- @json is the JSON array from the question, passed in as an nvarchar(max) parameter
DECLARE @ids TABLE (jsonIndex nvarchar(5) COLLATE Latin1_General_BIN2 NOT NULL, memberId int NOT NULL);

WITH Source AS (
    SELECT
      j1.[key],
      j2.*
    FROM OPENJSON(@json) j1
    CROSS APPLY OPENJSON(j1.value)
    WITH (
      name varchar(50),
      registerDate datetime,
      gender tinyint
    ) j2
)
MERGE Member m
USING Source s
ON 1 = 0   -- never match, so every row is inserted
WHEN NOT MATCHED THEN
  INSERT (name, registerDate, gender)
  VALUES (s.name, s.registerDate, s.gender)
OUTPUT s.[key], inserted.Id
  INTO @ids(jsonIndex, memberId);

INSERT ConsumeRecord (MemberId, Date, price)
SELECT
  i.memberId,
  j2.date,
  j2.price
FROM OPENJSON(@json) j1
CROSS APPLY OPENJSON(j1.value, '$.consumeRecord')
WITH (
  date datetime,
  price decimal(9,2)
) j2
JOIN @ids i ON i.jsonIndex = j1.[key];
Unfortunately, INSERT only allows you to OUTPUT values from the inserted table, not from any non-inserted source columns. So we need to hack it with a weird MERGE whose ON 1=0 condition never matches, which lets us OUTPUT the source's [key] (the JSON array index) alongside each generated id.
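To run that batch from Node.js with the mssql package, pass the JSON string in as an nvarchar(max) parameter. A minimal sketch, assuming the batch above is stored in a string named insertSql and that config holds your connection settings (both names are placeholders):

const sql = require('mssql')

async function importAll(jsonList) {
  const pool = await sql.connect(config)
  // the @json parameter is what OPENJSON reads inside the batch
  await pool.request()
    .input('json', sql.NVarChar(sql.MAX), JSON.stringify(jsonList))
    .query(insertSql)
}

importAll(jsonList).catch(console.error)

Both statements run in a single round trip; wrap the request in a transaction if you want the Member and ConsumeRecord inserts to succeed or fail together.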