February 3, 2023 at 1:17 pm
Hi there
I have a problem with a dynamicSQL Statement.
In this code, I have some data come in JSON format, with which i want to perform a merge operation on an existing physical table.
I declare the JSON as nvarchar(max) and able to print this out
However when I place this JSON in my dynamic SQL merge statement and try to run it, I get the following errors:
Msg 137, Level 15, State 2, Line 18
Must declare the scalar variable "@JSON".
Msg 319, Level 15, State 1, Line 19
Incorrect syntax near the keyword 'with'. If this statement is a common table expression, an xmlnamespaces clause or a change tracking context clause, the previous statement must be terminated with a semicolon.
Msg 319, Level 15, State 1, Line 26
Incorrect syntax near the keyword 'with'. If this statement is a common table expression, an xmlnamespaces clause or a change tracking context clause, the previous statement must be terminated with a semicolon.
Msg 319, Level 15, State 1, Line 34
Incorrect syntax near the keyword 'with'. If this statement is a common table expression, an xmlnamespaces clause or a change tracking context clause, the previous statement must be terminated with a semicolon.
Can someone help with me this please?
Code is attached
DROP TABLE IF EXISTS [dbo].[ChannelReading_894339_11_8118A4C8-B228-441B-B465-175951523EA4]
GO
CREATE TABLE [dbo].[ChannelReading_894339_11_8118A4C8-B228-441B-B465-175951523EA4](
[ReadingDateTime] [datetime2](7) NULL,
[SIReading] [real] NULL,
[RawReading] [int] NULL
) ON [PRIMARY]
GO
DROP TABLE IF EXISTS [#ChannelReadingDelta]
CREATE TABLE [#ChannelReadingDelta]
(
[ReadingDateTime] [datetime2](7) NULL,
[RawReading] [int] NULL,
[SIReading] [real] NULL
)
DECLARE @FromDate DATETIME2,
@ToDateDATETIME2,
@nINT = 0,
@SQL NVARCHAR(MAX),
@SerialNumber NVARCHAR(60) = '894339' ,
@ChannelID INT = 11,
@JSONNVARCHAR(MAX),
@ChannelReadingTableName NVARCHAR(100) = '[ChannelReading_894339_11_8118A4C8-B228-441B-B465-175951523EA4]'
SET @JSON ='{
"ChannelReadings": [
{
"ReadingsDto": [
{
"Si": 54.03,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:20:57"
},
{
"Si": 53.97,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:22:57"
},
{
"Si": 54.01,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:24:57"
},
{
"Si": 54.06,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:26:57"
},
{
"Si": 54.06,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:28:57"
},
{
"Si": 54.03,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:30:57"
},
{
"Si": 53.97,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:32:57"
},
{
"Si": 54.01,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:34:57"
},
{
"Si": 54.06,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:36:57"
},
{
"Si": 54.06,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:38:57"
}
],
"ChannelId": 11
}
],
"DeviceSerialNumber": "894339",
"DataHashDto": "0xD919E3CD8234BD92103EFD177E2D1D8EC3F4FF8",
"NewDownloadTable": true'
SET @sql = 'MERGE ' + @ChannelReadingTableName + ' T '+ CHAR(13) + CHAR(10) +
' USING
(
SELECT
[TimeStamp] as ReadingDateTime,
Si as SIReading,
[Raw] as RawReading
FROM
OPENJSON(@JSON)
WITH
(
ChannelReadings NVARCHAR(MAX) ''$.ChannelReadings'' AS JSON,
DeviceSerialNumber NVARCHAR(100) ''$.DeviceSerialNumber''
) AS device
OUTER APPLY
OPENJSON(device.ChannelReadings)
WITH
(
ChannelId INT ''$.ChannelId'',
DataHashDto NVARCHAR(MAX) ''$.DataHashDto'',
ReadingsDto NVARCHAR(MAX) ''$.ReadingsDto'' AS JSON
) AS Readings
OUTER APPLY
OPENJSON(Readings.ReadingsDto)
WITH
(
Si DECIMAL(10, 2),
[Raw] DECIMAL(10, 2),
Conversion INT,
[TimeStamp] DATETIME
) channel_list
where
device.DeviceSerialNumber = ' + @SerialNumber + CHAR(13) + CHAR(10) +
' and
Readings.ChannelId = ' + cast(@ChannelID as nvarchar(100)) + CHAR(13) + CHAR(10) +
' ) S on T.ReadingDateTime = S.ReadingDateTime ' + CHAR(13) + CHAR(10) +
-- ' IsNull(ROUND(S.SIReading, 10),0)= IsNull(ROUND(T.SIReading, 10),0) ' + CHAR(13) + CHAR(10) +
' WHEN NOT MATCHED BY TARGET '+ CHAR(13) + CHAR(10) +
' THEN '+ CHAR(13) + CHAR(10) +
' INSERT (ReadingDateTime,SIReading, RawReading)'+ CHAR(13) + CHAR(10) +
' VALUES (S.ReadingDateTime,S.SIReading, S.RawReading) '+ CHAR(13) + CHAR(10) +
' ' + CHAR(13) +
' OUTPUT ' + CHAR(13) +
' Inserted.ReadingDateTime, ' + CHAR(13) +
' Inserted.SIReading, ' + CHAR(13) +
' Inserted.[RawReading] ' + CHAR(13) +
' ' + CHAR(13) +
' INTO #ChannelReadingDelta ([ReadingDateTime], [SIReading], [RawReading]); '
PRINT @sql
EXEC sp_executesql @SQL
February 3, 2023 at 2:15 pm
Several things wrong.
Anyway this compiles and runs without error:
DROP TABLE IF EXISTS [dbo].[ChannelReading_894339_11_8118A4C8-B228-441B-B465-175951523EA4]
GO
CREATE TABLE [dbo].[ChannelReading_894339_11_8118A4C8-B228-441B-B465-175951523EA4](
[ReadingDateTime] [datetime2](7) NULL,
[SIReading] [real] NULL,
[RawReading] [int] NULL
) ON [PRIMARY]
GO
DROP TABLE IF EXISTS [#ChannelReadingDelta]
CREATE TABLE [#ChannelReadingDelta]
(
[ReadingDateTime] [datetime2](7) NULL,
[RawReading] [int] NULL,
[SIReading] [real] NULL
)
DECLARE @FromDate DATETIME2,
@ToDate DATETIME2,
@n INT = 0,
@SQL NVARCHAR(MAX),
@SerialNumber NVARCHAR(60) = '894339' ,
@ChannelID INT = 11,
@JSON NVARCHAR(MAX),
@ChannelReadingTableName NVARCHAR(100) = '[ChannelReading_894339_11_8118A4C8-B228-441B-B465-175951523EA4]'
SET @JSON ='{
"ChannelReadings": [
{
"ReadingsDto": [
{
"Si": 54.03,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:20:57"
},
{
"Si": 53.97,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:22:57"
},
{
"Si": 54.01,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:24:57"
},
{
"Si": 54.06,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:26:57"
},
{
"Si": 54.06,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:28:57"
},
{
"Si": 54.03,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:30:57"
},
{
"Si": 53.97,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:32:57"
},
{
"Si": 54.01,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:34:57"
},
{
"Si": 54.06,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:36:57"
},
{
"Si": 54.06,
"Raw": 0,
"Conversion": 0,
"TimeStamp": "2022-04-22T14:38:57"
}
],
"ChannelId": 11
}
],
"DeviceSerialNumber": "894339",
"DataHashDto": "0xD919E3CD8234BD92103EFD177E2D1D8EC3F4FF8",
"NewDownloadTable": true
}'
SET @sql = 'MERGE ' + @ChannelReadingTableName + ' T '+ CHAR(13) + CHAR(10) +
' USING
(
SELECT
[TimeStamp] as ReadingDateTime,
Si as SIReading,
[Raw] as RawReading
FROM
OPENJSON(@JSON)
WITH
(
ChannelReadings NVARCHAR(MAX) ''$.ChannelReadings'' AS JSON,
DeviceSerialNumber NVARCHAR(100) ''$.DeviceSerialNumber''
) AS device
OUTER APPLY
OPENJSON(device.ChannelReadings)
WITH
(
ChannelId INT ''$.ChannelId'',
DataHashDto NVARCHAR(MAX) ''$.DataHashDto'',
ReadingsDto NVARCHAR(MAX) ''$.ReadingsDto'' AS JSON
) AS Readings
OUTER APPLY
OPENJSON(Readings.ReadingsDto)
WITH
(
Si DECIMAL(10, 2),
[Raw] DECIMAL(10, 2),
Conversion INT,
[TimeStamp] DATETIME
) channel_list
where
device.DeviceSerialNumber = ' + @SerialNumber + CHAR(13) + CHAR(10) +
' and
Readings.ChannelId = ' + cast(@ChannelID as nvarchar(100)) + CHAR(13) + CHAR(10) +
' ) S on T.ReadingDateTime = S.ReadingDateTime ' + CHAR(13) + CHAR(10) +
-- ' IsNull(ROUND(S.SIReading, 10),0)= IsNull(ROUND(T.SIReading, 10),0) ' + CHAR(13) + CHAR(10) +
' WHEN NOT MATCHED BY TARGET '+ CHAR(13) + CHAR(10) +
' THEN '+ CHAR(13) + CHAR(10) +
' INSERT (ReadingDateTime,SIReading, RawReading)'+ CHAR(13) + CHAR(10) +
' VALUES (S.ReadingDateTime,S.SIReading, S.RawReading) '+ CHAR(13) + CHAR(10) +
' ' + CHAR(13) +
' OUTPUT ' + CHAR(13) +
' Inserted.ReadingDateTime, ' + CHAR(13) +
' Inserted.SIReading, ' + CHAR(13) +
' Inserted.[RawReading] ' + CHAR(13) +
' ' + CHAR(13) +
' INTO #ChannelReadingDelta ([ReadingDateTime], [SIReading], [RawReading]); '
PRINT @sql
DECLARE @ParmDefinition NVARCHAR(500);
SET @ParmDefinition = N'@JSON nvarchar(MAX)';
EXEC sp_executesql @SQL, @ParmDefinition, @JSON=@JSON
February 3, 2023 at 2:54 pm
Hi Jonathan
thank you very nuch for that. yes the JSON should have been passed in the parm declaration . I now realize that
Viewing 4 posts - 1 through 3 (of 3 total)
You must be logged in to reply to this topic. Login to reply