September 17, 2015 at 8:35 am
If anyone wonders about performance on the posted solutions.
-- Build a 1,000,000-row sample of numeric yyyymmdd / hhmmss pairs to
-- benchmark the datetime-conversion solutions posted in this thread.
CREATE TABLE Testing
(
VHRGDT numeric(8, 0) NOT NULL, -- date stored as yyyymmdd
VHRGTM numeric(6, 0) NOT NULL  -- time stored as hhmmss
);
-- Explicit column list, and an explicit ANSI CROSS JOIN: the self-join of
-- sys.all_objects is deliberate row multiplication to reach 1M rows.
INSERT INTO Testing (VHRGDT, VHRGTM)
SELECT TOP 1000000
CONVERT( char(8), a.create_date, 112),                   -- style 112 = yyyymmdd
REPLACE( CONVERT( char(8), a.create_date, 114), ':', '') -- style 114 trimmed to hh:mi:ss, colons stripped -> hhmmss
FROM sys.all_objects a
CROSS JOIN sys.all_objects b;
GO
-- Benchmark harness: each contender assigns into @Dummy so that result-set
-- transmission cost is excluded and only the conversion work is timed.
-- The whole batch is executed five times via GO 5 at the bottom.
DECLARE @Dummy datetime;
-- Drew's FORMAT() solution is excluded from the timed run; FORMAT() is a
-- CLR-backed function and dramatically slower than CONVERT-based approaches.
--CHECKPOINT
--DBCC FREEPROCCACHE WITH NO_INFOMSGS;
--DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
--PRINT '-- ============= Drew ==================='
--SET STATISTICS TIME ON;
--select @Dummy = cast(FORMAT(VHRGDT, '0000-00-00') + ' ' + FORMAT(VHRGTM, '00:00:00.000') as datetime)
--from Testing;
--SET STATISTICS TIME OFF;
-- Cold-cache reset before each contender: flush dirty pages, clear the plan
-- cache and the buffer pool so every test starts from the same state.
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= tripleAxe ==================='
SET STATISTICS TIME ON;
-- tripleAxe: zero-pad hhmmss to 6 chars, slice out hh/mm/ss substrings,
-- rebuild total seconds, then DATEADD onto the converted date.
-- NOTE(review): style 113 is "dd mon yyyy hh:mi:ss:mmm", not yyyymmdd; the
-- unseparated 'yyyymmdd' string appears to parse regardless -- confirm.
select @Dummy = DATEADD(SECOND, CONVERT(INT, SUBSTRING(RIGHT('0' +(CONVERT(VARCHAR(6), VHRGTM)), 6), 1, 2)) * 60 * 60 + CONVERT(INT, SUBSTRING(RIGHT('0' +(CONVERT(VARCHAR(6), VHRGTM)), 6), 3, 2)) * 60 + SUBSTRING(RIGHT('0' +(CONVERT(VARCHAR(6), VHRGTM)), 6), 5, 2), CONVERT(DATETIME, CONVERT(VARCHAR(8), VHRGDT), 113))
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= tripleAxe2 ==================='
SET STATISTICS TIME ON;
-- tripleAxe2: STUFF colons into the padded hhmmss string (-> hh:mm:ss),
-- convert date and time separately, then add the two datetime values.
select @Dummy = CONVERT( DATETIME, CONVERT(VARCHAR(8), VHRGDT), 113) + CONVERT(DATETIME, STUFF(STUFF(RIGHT('0'+CONVERT(VARCHAR(6), VHRGTM), 6), 3, 0, ':'), 6,0,':'), 108)
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= Lynn ==================='
SET STATISTICS TIME ON;
-- Lynn: msdb.dbo.agent_datetime is a scalar user-defined function, so it is
-- invoked once per row -- expect it to be the slowest contender by far.
select @Dummy = msdb.dbo.agent_datetime(VHRGDT,VHRGTM)
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= Kristen ==================='
SET STATISTICS TIME ON;
-- Kristen: pure arithmetic. With VHRGTM = hh*10000 + mm*100 + ss, the
-- expression (t*9 + t%10000*6 + t%100*10) equals 90000*hh + 1500*mm + 25*ss,
-- and dividing by 216e4 (2,160,000) yields exactly hh/24 + mm/1440 + ss/86400,
-- i.e. the time as a fraction of a day, added to the converted date.
select @Dummy = CONVERT(datetime, RTRIM(VHRGDT)) + (VHRGTM * 9 + VHRGTM % 10000 * 6 + VHRGTM % 100 * 10) / 216e4
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= Kristen2 ==================='
SET STATISTICS TIME ON;
-- Kristen2: integer div/mod to split yyyymmdd and hhmmss into their parts,
-- handed to DATETIMEFROMPARTS (SQL Server 2012+) -- no string work at all.
select @Dummy = DateTimeFromParts(VHRGDT / 10000, (VHRGDT % 10000) / 100, VHRGDT % 100 , VHRGTM / 10000, (VHRGTM / 100) % 100, VHRGTM % 100, 0)
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= Ed ==================='
SET STATISTICS TIME ON;
-- Ed: adding 10000000 forces fixed-width zero padding ('1hhmmss' -> 8 chars
-- as '10hhmmss'); STUFF inserts the two colons (CHAR(58)) and the final
-- STUFF(...,1,2,'') strips the leading '10', leaving 'hh:mm:ss'.
select @Dummy = CONVERT(DATETIME,CONVERT(VARCHAR(8),VHRGDT,0) + CHAR(32) + STUFF(STUFF(STUFF(CONVERT(VARCHAR(8) ,10000000 + VHRGTM,0),5,0,CHAR(58)),8,0,CHAR(58)),1,2,''),112)
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= Sean ==================='
SET STATISTICS TIME ON;
-- Sean: round-trips the date through the DATE type (STR -> DATE -> CHAR(10))
-- before concatenating -- the extra conversions show up in the timings.
select @Dummy = CAST(CAST(CAST(STR(VHRGDT) AS DATE) AS CHAR(10)) + ' ' + STUFF(STUFF(RIGHT('0' + CAST(VHRGTM AS VARCHAR(10)), 6), 3, 0, ':'), 6, 0, ':') AS DATETIME)
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= Luis ==================='
SET STATISTICS TIME ON;
-- Luis: CAST of the numeric date to char(9) left-aligns it, so the trailing
-- blank doubles as the date/time separator; VHRGTM + 1000000 zero-pads the
-- time and RIGHT(...,6) takes the padded hhmmss before the colons go in.
select @Dummy = CAST( CAST( VHRGDT AS char(9)) + STUFF( STUFF( RIGHT( VHRGTM + 1000000, 6), 5, 0, ':'), 3, 0, ':') AS datetime)
from Testing;
SET STATISTICS TIME OFF;
-- Re-run the whole batch five times for repeatable timings.
GO 5
DROP TABLE Testing
-- ============= tripleAxe ===================
SQL Server Execution Times:
CPU time = 1107 ms, elapsed time = 1270 ms.
CPU time = 1186 ms, elapsed time = 1261 ms.
CPU time = 1138 ms, elapsed time = 1258 ms.
CPU time = 1123 ms, elapsed time = 1282 ms.
CPU time = 1154 ms, elapsed time = 1309 ms.
-- ============= tripleAxe2 ===================
SQL Server Execution Times:
CPU time = 843 ms, elapsed time = 1022 ms.
CPU time = 858 ms, elapsed time = 919 ms.
CPU time = 890 ms, elapsed time = 1039 ms.
CPU time = 905 ms, elapsed time = 973 ms.
CPU time = 936 ms, elapsed time = 1132 ms.
-- ============= Lynn ===================
SQL Server Execution Times:
CPU time = 5959 ms, elapsed time = 6142 ms.
CPU time = 6022 ms, elapsed time = 6341 ms.
CPU time = 5928 ms, elapsed time = 6250 ms.
CPU time = 6037 ms, elapsed time = 6243 ms.
CPU time = 5928 ms, elapsed time = 6200 ms.
-- ============= Kristen ===================
SQL Server Execution Times:
CPU time = 842 ms, elapsed time = 916 ms.
CPU time = 858 ms, elapsed time = 920 ms.
CPU time = 842 ms, elapsed time = 911 ms.
CPU time = 827 ms, elapsed time = 892 ms.
CPU time = 811 ms, elapsed time = 891 ms.
-- ============= Kristen2 ===================
SQL Server Execution Times:
CPU time = 874 ms, elapsed time = 989 ms.
CPU time = 905 ms, elapsed time = 1014 ms.
CPU time = 952 ms, elapsed time = 1121 ms.
CPU time = 843 ms, elapsed time = 937 ms.
CPU time = 890 ms, elapsed time = 993 ms.
-- ============= Ed ===================
SQL Server Execution Times:
CPU time = 952 ms, elapsed time = 1025 ms.
CPU time = 905 ms, elapsed time = 977 ms.
CPU time = 920 ms, elapsed time = 983 ms.
CPU time = 858 ms, elapsed time = 993 ms.
CPU time = 982 ms, elapsed time = 1049 ms.
-- ============= Sean ===================
SQL Server Execution Times:
CPU time = 1872 ms, elapsed time = 1978 ms.
CPU time = 1919 ms, elapsed time = 1992 ms.
CPU time = 1966 ms, elapsed time = 2071 ms.
CPU time = 1872 ms, elapsed time = 1946 ms.
CPU time = 1904 ms, elapsed time = 1994 ms.
-- ============= Luis ===================
SQL Server Execution Times:
CPU time = 967 ms, elapsed time = 1079 ms.
CPU time = 858 ms, elapsed time = 927 ms.
CPU time = 858 ms, elapsed time = 943 ms.
CPU time = 811 ms, elapsed time = 919 ms.
CPU time = 843 ms, elapsed time = 976 ms.
September 17, 2015 at 8:55 am
Jeff Moden (9/17/2015)
Sean Lange (9/17/2015)
The table is small right now because the system is not yet in production but it isn't likely to get too huge. Maybe 100 or so new rows each week is all. Heh... famous last words. 😛
This same data in our current system has only generated around 500,000 rows in almost 20 years. 😛 Of course you know me well enough to know that I am never really satisfied with "good enough" when I think a better solution is available. Hence the reason for my posting this thread in the first place.
Jayanth Kurup made a suggestion above that made me think of an alternate. You say that you can't modify the table but, as many such projects go, do you really "only" mean that you can't change the existing columns? If that's true, then add a persisted computed column with the formula (might be a call to a well written scalar function) to do the conversion. At least that way your queries stand a chance of using an index during temporal searches.
I can't change the table at all. It will violate our support contract. Not to mention it would be possible to get trashed with an update at some point. We are not allowed to do anything other than select statements or we are on our own.
_______________________________________________________________
Need help? Help us help you.
Read the article at http://www.sqlservercentral.com/articles/Best+Practices/61537/ for best practices on asking questions.
Need to split a string? Try Jeff Moden's splitter http://www.sqlservercentral.com/articles/Tally+Table/72993/.
Cross Tabs and Pivots, Part 1 – Converting Rows to Columns - http://www.sqlservercentral.com/articles/T-SQL/63681/
Cross Tabs and Pivots, Part 2 - Dynamic Cross Tabs - http://www.sqlservercentral.com/articles/Crosstab/65048/
Understanding and Using APPLY (Part 1) - http://www.sqlservercentral.com/articles/APPLY/69953/
Understanding and Using APPLY (Part 2) - http://www.sqlservercentral.com/articles/APPLY/69954/
September 17, 2015 at 9:00 am
Luis Cazares (9/17/2015)
If anyone wonders about performance on the posted solutions.
CREATE TABLE Testing
(
VHRGDT numeric(8, 0) NOT NULL,
VHRGTM numeric(6, 0) NOT NULL
)
INSERT INTO Testing
SELECT TOP 1000000
CONVERT( char(8), a.create_date, 112),
REPLACE( CONVERT( char(8), a.create_date, 114), ':', '')
FROM sys.all_objects a, sys.all_objects b;
GO
DECLARE @Dummy datetime;
--CHECKPOINT
--DBCC FREEPROCCACHE WITH NO_INFOMSGS;
--DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
--PRINT '-- ============= Drew ==================='
--SET STATISTICS TIME ON;
--select @Dummy = cast(FORMAT(VHRGDT, '0000-00-00') + ' ' + FORMAT(VHRGTM, '00:00:00.000') as datetime)
--from Testing;
--SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= tripleAxe ==================='
SET STATISTICS TIME ON;
select @Dummy = DATEADD(SECOND, CONVERT(INT, SUBSTRING(RIGHT('0' +(CONVERT(VARCHAR(6), VHRGTM)), 6), 1, 2)) * 60 * 60 + CONVERT(INT, SUBSTRING(RIGHT('0' +(CONVERT(VARCHAR(6), VHRGTM)), 6), 3, 2)) * 60 + SUBSTRING(RIGHT('0' +(CONVERT(VARCHAR(6), VHRGTM)), 6), 5, 2), CONVERT(DATETIME, CONVERT(VARCHAR(8), VHRGDT), 113))
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= tripleAxe2 ==================='
SET STATISTICS TIME ON;
select @Dummy = CONVERT( DATETIME, CONVERT(VARCHAR(8), VHRGDT), 113) + CONVERT(DATETIME, STUFF(STUFF(RIGHT('0'+CONVERT(VARCHAR(6), VHRGTM), 6), 3, 0, ':'), 6,0,':'), 108)
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= Lynn ==================='
SET STATISTICS TIME ON;
select @Dummy = msdb.dbo.agent_datetime(VHRGDT,VHRGTM)
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= Kristen ==================='
SET STATISTICS TIME ON;
select @Dummy = CONVERT(datetime, RTRIM(VHRGDT)) + (VHRGTM * 9 + VHRGTM % 10000 * 6 + VHRGTM % 100 * 10) / 216e4
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= Kristen2 ==================='
SET STATISTICS TIME ON;
select @Dummy = DateTimeFromParts(VHRGDT / 10000, (VHRGDT % 10000) / 100, VHRGDT % 100 , VHRGTM / 10000, (VHRGTM / 100) % 100, VHRGTM % 100, 0)
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= Ed ==================='
SET STATISTICS TIME ON;
select @Dummy = CONVERT(DATETIME,CONVERT(VARCHAR(8),VHRGDT,0) + CHAR(32) + STUFF(STUFF(STUFF(CONVERT(VARCHAR(8) ,10000000 + VHRGTM,0),5,0,CHAR(58)),8,0,CHAR(58)),1,2,''),112)
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= Sean ==================='
SET STATISTICS TIME ON;
select @Dummy = CAST(CAST(CAST(STR(VHRGDT) AS DATE) AS CHAR(10)) + ' ' + STUFF(STUFF(RIGHT('0' + CAST(VHRGTM AS VARCHAR(10)), 6), 3, 0, ':'), 6, 0, ':') AS DATETIME)
from Testing;
SET STATISTICS TIME OFF;
CHECKPOINT
DBCC FREEPROCCACHE WITH NO_INFOMSGS;
DBCC DROPCLEANBUFFERS WITH NO_INFOMSGS;
PRINT '-- ============= Luis ==================='
SET STATISTICS TIME ON;
select @Dummy = CAST( CAST( VHRGDT AS char(9)) + STUFF( STUFF( RIGHT( VHRGTM + 1000000, 6), 5, 0, ':'), 3, 0, ':') AS datetime)
from Testing;
SET STATISTICS TIME OFF;
GO 5
DROP TABLE Testing
-- ============= tripleAxe ===================
SQL Server Execution Times:
CPU time = 1107 ms, elapsed time = 1270 ms.
CPU time = 1186 ms, elapsed time = 1261 ms.
CPU time = 1138 ms, elapsed time = 1258 ms.
CPU time = 1123 ms, elapsed time = 1282 ms.
CPU time = 1154 ms, elapsed time = 1309 ms.
-- ============= tripleAxe2 ===================
SQL Server Execution Times:
CPU time = 843 ms, elapsed time = 1022 ms.
CPU time = 858 ms, elapsed time = 919 ms.
CPU time = 890 ms, elapsed time = 1039 ms.
CPU time = 905 ms, elapsed time = 973 ms.
CPU time = 936 ms, elapsed time = 1132 ms.
-- ============= Lynn ===================
SQL Server Execution Times:
CPU time = 5959 ms, elapsed time = 6142 ms.
CPU time = 6022 ms, elapsed time = 6341 ms.
CPU time = 5928 ms, elapsed time = 6250 ms.
CPU time = 6037 ms, elapsed time = 6243 ms.
CPU time = 5928 ms, elapsed time = 6200 ms.
-- ============= Kristen ===================
SQL Server Execution Times:
CPU time = 842 ms, elapsed time = 916 ms.
CPU time = 858 ms, elapsed time = 920 ms.
CPU time = 842 ms, elapsed time = 911 ms.
CPU time = 827 ms, elapsed time = 892 ms.
CPU time = 811 ms, elapsed time = 891 ms.
-- ============= Kristen2 ===================
SQL Server Execution Times:
CPU time = 874 ms, elapsed time = 989 ms.
CPU time = 905 ms, elapsed time = 1014 ms.
CPU time = 952 ms, elapsed time = 1121 ms.
CPU time = 843 ms, elapsed time = 937 ms.
CPU time = 890 ms, elapsed time = 993 ms.
-- ============= Ed ===================
SQL Server Execution Times:
CPU time = 952 ms, elapsed time = 1025 ms.
CPU time = 905 ms, elapsed time = 977 ms.
CPU time = 920 ms, elapsed time = 983 ms.
CPU time = 858 ms, elapsed time = 993 ms.
CPU time = 982 ms, elapsed time = 1049 ms.
-- ============= Sean ===================
SQL Server Execution Times:
CPU time = 1872 ms, elapsed time = 1978 ms.
CPU time = 1919 ms, elapsed time = 1992 ms.
CPU time = 1966 ms, elapsed time = 2071 ms.
CPU time = 1872 ms, elapsed time = 1946 ms.
CPU time = 1904 ms, elapsed time = 1994 ms.
-- ============= Luis ===================
SQL Server Execution Times:
CPU time = 967 ms, elapsed time = 1079 ms.
CPU time = 858 ms, elapsed time = 927 ms.
CPU time = 858 ms, elapsed time = 943 ms.
CPU time = 811 ms, elapsed time = 919 ms.
CPU time = 843 ms, elapsed time = 976 ms.
Awesome thanks Luis. I have been focusing on other aspects of this process today and hadn't gotten to writing up a test harness for performance yet. I will of course roll this against the actual table structures and see what happens. If the testing is consistent with your findings I will likely go with your solution. While it may not quite be the fastest, the code is a bit simpler and easier to maintain at least imho.
_______________________________________________________________
Need help? Help us help you.
Read the article at http://www.sqlservercentral.com/articles/Best+Practices/61537/ for best practices on asking questions.
Need to split a string? Try Jeff Moden's splitter http://www.sqlservercentral.com/articles/Tally+Table/72993/.
Cross Tabs and Pivots, Part 1 – Converting Rows to Columns - http://www.sqlservercentral.com/articles/T-SQL/63681/
Cross Tabs and Pivots, Part 2 - Dynamic Cross Tabs - http://www.sqlservercentral.com/articles/Crosstab/65048/
Understanding and Using APPLY (Part 1) - http://www.sqlservercentral.com/articles/APPLY/69953/
Understanding and Using APPLY (Part 2) - http://www.sqlservercentral.com/articles/APPLY/69954/
September 17, 2015 at 9:09 am
Sean Lange (9/17/2015)
Awesome thanks Luis. I have been focusing on other aspects of this process today and hadn't gotten to writing up a test harness for performance yet. I will of course roll this against the actual table structures and see what happens. If the testing is consistent with your findings I will likely go with your solution. While it may not quite be the fastest, the code is a bit simpler and easier to maintain at least imho.
You're welcome.
It seems that tripleAxe2, Kristen, Kristen2, Eirikur (included as Ed:w00t:) and mine have similar performance. It should come to personal preference.
September 17, 2015 at 9:13 am
Luis Cazares (9/17/2015)
It should come to personal preference.
... so long as Personal Preference doesn't include the, 7x slower, MS provided one!!!
September 17, 2015 at 9:17 am
Luis Cazares (9/17/2015)
Sean Lange (9/17/2015)
Awesome thanks Luis. I have been focusing on other aspects of this process today and hadn't gotten to writing up a test harness for performance yet. I will of course roll this against the actual table structures and see what happens. If the testing is consistent with your findings I will likely go with your solution. While it may not quite be the fastest, the code is a bit simpler and easier to maintain at least imho.
You're welcome.
It seems that tripleAxe2, Kristen, Kristen2, Eirikur (included as Ed:w00t:) and mine have similar performance. It should come to personal preference.
Oh dear. I did do that. My apologies to Eirikur for incorrectly giving credit for your excellent work to somebody else.
_______________________________________________________________
Need help? Help us help you.
Read the article at http://www.sqlservercentral.com/articles/Best+Practices/61537/ for best practices on asking questions.
Need to split a string? Try Jeff Moden's splitter http://www.sqlservercentral.com/articles/Tally+Table/72993/.
Cross Tabs and Pivots, Part 1 – Converting Rows to Columns - http://www.sqlservercentral.com/articles/T-SQL/63681/
Cross Tabs and Pivots, Part 2 - Dynamic Cross Tabs - http://www.sqlservercentral.com/articles/Crosstab/65048/
Understanding and Using APPLY (Part 1) - http://www.sqlservercentral.com/articles/APPLY/69953/
Understanding and Using APPLY (Part 2) - http://www.sqlservercentral.com/articles/APPLY/69954/
September 17, 2015 at 9:20 am
Kristen-173977 (9/17/2015)
Luis Cazares (9/17/2015)
It should come to personal preference.
... so long as Personal Preference doesn't include the, 7x slower, MS provided one!!!
Heh I would choose my own complicated version over a scalar function. Oddly enough I was unfamiliar with that function until today so the option was worthwhile since I learned something. Lynn is the man coming up with a suggestion that he shot down himself but taught me something I didn't know.
_______________________________________________________________
Need help? Help us help you.
Read the article at http://www.sqlservercentral.com/articles/Best+Practices/61537/ for best practices on asking questions.
Need to split a string? Try Jeff Moden's splitter http://www.sqlservercentral.com/articles/Tally+Table/72993/.
Cross Tabs and Pivots, Part 1 – Converting Rows to Columns - http://www.sqlservercentral.com/articles/T-SQL/63681/
Cross Tabs and Pivots, Part 2 - Dynamic Cross Tabs - http://www.sqlservercentral.com/articles/Crosstab/65048/
Understanding and Using APPLY (Part 1) - http://www.sqlservercentral.com/articles/APPLY/69953/
Understanding and Using APPLY (Part 2) - http://www.sqlservercentral.com/articles/APPLY/69954/
September 17, 2015 at 9:21 am
What the heck... I'll put a dog in the hunt...
CAST(CAST(VHRGDT AS CHAR(8)) + ' ' + STUFF(STUFF(RIGHT('0' + CAST(VHRGTM AS VARCHAR(6)), 6), 5, 0, ':'), 3, 0, ':') AS DATETIME) AS CastToDateTime
Here's how it stacked up using Luis's test harness.
(1000000 row(s) affected)
Beginning execution loop
-- ============= tripleAxe ===================
SQL Server Execution Times:
CPU time = 1326 ms, elapsed time = 1325 ms.
-- ============= tripleAxe2 ===================
SQL Server Execution Times:
CPU time = 967 ms, elapsed time = 979 ms.
-- ============= Lynn ===================
SQL Server Execution Times:
CPU time = 5804 ms, elapsed time = 5935 ms.
-- ============= Kristen ===================
SQL Server Execution Times:
CPU time = 905 ms, elapsed time = 921 ms.
-- ============= Kristen2 ===================
SQL Server Execution Times:
CPU time = 889 ms, elapsed time = 899 ms.
-- ============= Ed ===================
SQL Server Execution Times:
CPU time = 936 ms, elapsed time = 932 ms.
-- ============= Sean ===================
SQL Server Execution Times:
CPU time = 2590 ms, elapsed time = 2630 ms.
-- ============= Luis ===================
SQL Server Execution Times:
CPU time = 951 ms, elapsed time = 946 ms.
-- ============= Jason ===================
SQL Server Execution Times:
CPU time = 905 ms, elapsed time = 911 ms.
-- ============= tripleAxe ===================
SQL Server Execution Times:
CPU time = 1466 ms, elapsed time = 1491 ms.
-- ============= tripleAxe2 ===================
SQL Server Execution Times:
CPU time = 1014 ms, elapsed time = 1014 ms.
-- ============= Lynn ===================
SQL Server Execution Times:
CPU time = 5772 ms, elapsed time = 6057 ms.
-- ============= Kristen ===================
SQL Server Execution Times:
CPU time = 920 ms, elapsed time = 929 ms.
-- ============= Kristen2 ===================
SQL Server Execution Times:
CPU time = 889 ms, elapsed time = 886 ms.
-- ============= Ed ===================
SQL Server Execution Times:
CPU time = 952 ms, elapsed time = 948 ms.
-- ============= Sean ===================
SQL Server Execution Times:
CPU time = 2496 ms, elapsed time = 2501 ms.
-- ============= Luis ===================
SQL Server Execution Times:
CPU time = 936 ms, elapsed time = 936 ms.
-- ============= Jason ===================
SQL Server Execution Times:
CPU time = 921 ms, elapsed time = 915 ms.
-- ============= tripleAxe ===================
SQL Server Execution Times:
CPU time = 1311 ms, elapsed time = 1318 ms.
-- ============= tripleAxe2 ===================
SQL Server Execution Times:
CPU time = 1014 ms, elapsed time = 1017 ms.
-- ============= Lynn ===================
SQL Server Execution Times:
CPU time = 5662 ms, elapsed time = 5854 ms.
-- ============= Kristen ===================
SQL Server Execution Times:
CPU time = 936 ms, elapsed time = 930 ms.
-- ============= Kristen2 ===================
SQL Server Execution Times:
CPU time = 874 ms, elapsed time = 888 ms.
-- ============= Ed ===================
SQL Server Execution Times:
CPU time = 936 ms, elapsed time = 928 ms.
-- ============= Sean ===================
SQL Server Execution Times:
CPU time = 2480 ms, elapsed time = 2483 ms.
-- ============= Luis ===================
SQL Server Execution Times:
CPU time = 999 ms, elapsed time = 995 ms.
-- ============= Jason ===================
SQL Server Execution Times:
CPU time = 951 ms, elapsed time = 938 ms.
-- ============= tripleAxe ===================
SQL Server Execution Times:
CPU time = 1311 ms, elapsed time = 1316 ms.
-- ============= tripleAxe2 ===================
SQL Server Execution Times:
CPU time = 998 ms, elapsed time = 1015 ms.
-- ============= Lynn ===================
SQL Server Execution Times:
CPU time = 5741 ms, elapsed time = 5933 ms.
-- ============= Kristen ===================
SQL Server Execution Times:
CPU time = 951 ms, elapsed time = 966 ms.
-- ============= Kristen2 ===================
SQL Server Execution Times:
CPU time = 889 ms, elapsed time = 898 ms.
-- ============= Ed ===================
SQL Server Execution Times:
CPU time = 936 ms, elapsed time = 937 ms.
-- ============= Sean ===================
SQL Server Execution Times:
CPU time = 2450 ms, elapsed time = 2461 ms.
-- ============= Luis ===================
SQL Server Execution Times:
CPU time = 936 ms, elapsed time = 942 ms.
-- ============= Jason ===================
SQL Server Execution Times:
CPU time = 905 ms, elapsed time = 916 ms.
-- ============= tripleAxe ===================
SQL Server Execution Times:
CPU time = 1357 ms, elapsed time = 1366 ms.
-- ============= tripleAxe2 ===================
SQL Server Execution Times:
CPU time = 1014 ms, elapsed time = 1021 ms.
-- ============= Lynn ===================
SQL Server Execution Times:
CPU time = 5866 ms, elapsed time = 6067 ms.
-- ============= Kristen ===================
SQL Server Execution Times:
CPU time = 936 ms, elapsed time = 927 ms.
-- ============= Kristen2 ===================
SQL Server Execution Times:
CPU time = 889 ms, elapsed time = 882 ms.
-- ============= Ed ===================
SQL Server Execution Times:
CPU time = 998 ms, elapsed time = 993 ms.
-- ============= Sean ===================
SQL Server Execution Times:
CPU time = 2496 ms, elapsed time = 2493 ms.
-- ============= Luis ===================
SQL Server Execution Times:
CPU time = 921 ms, elapsed time = 938 ms.
-- ============= Jason ===================
SQL Server Execution Times:
CPU time = 873 ms, elapsed time = 900 ms.
Batch execution completed 5 times.
September 17, 2015 at 9:33 am
Kristen-173977 (9/17/2015)
Luis Cazares (9/17/2015)
It should come to personal preference.
... so long as Personal Preference doesn't include the, 7x slower, MS provided one!!!
I didn't think that was mine. 😛 The ITVF handles the case where the year is less than 1000. Then again, the performance of the ITVF doesn't match what you have posted. Maybe the best option would be to encapsulate the best-performing one into an ITVF and test it.
September 17, 2015 at 9:35 am
Sean Lange (9/17/2015)
Jeff Moden (9/17/2015)
Sean Lange (9/17/2015)
The table is small right now because the system is not yet in production but it isn't likely to get too huge. Maybe 100 or so new rows each week is all. Heh... famous last words. 😛
This same data in our current system has only generated around 500,000 rows in almost 20 years. 😛 Of course you know me well enough to know that I am never really satisfied with "good enough" when I think a better solution is available. Hence the reason for my posting this thread in the first place.
Jayanth Kurup made a suggestion above that made me think of an alternate. You say that you can't modify the table but, as many such projects go, do you really "only" mean that you can't change the existing columns? If that's true, then add a persisted computed column with the formula (might be a call to a well written scalar function) to do the conversion. At least that way your queries stand a chance of using an index during temporal searches.
I can't change the table at all. It will violate our support contract. Not to mention it would be possible to get trashed with an update at some point. We are not allowed to do anything other than select statements or we are on our own.
I understand exactly what you mean in your last point. I see the development environment hasn't changed in that it drops and recreates the tables whenever you make any minor change to anything. I understand your pain.
September 17, 2015 at 9:47 am
Sean Lange (9/17/2015)
Eric M Russell (9/17/2015)
Someone had mentioned that the date/time conversions can be expensive. Depending on what you're needing to do with the dates and times once they're converted, it might make sense just to create a set of date and time lookup tables. Yes, it's now an additional set of joins, but the tables are relatively small, easily cached in buffer, and they can contain other useful pre-computed columns. Maybe it isn't practical for your given scenario, but I just thought I'd toss this option out there to try on for size. A calendar table is way overkill for this. I am just creating an ETL process and this is for a single query that gets all rows that were created since the last run. All I need is the where predicate and the version I came up with is entirely too verbose for my liking. 😉
If that's all you're using VHRGDT, VHRGTM for, then it seems that you don't even need to bother converting these integers to actual DateTime. You just need to take TOP 1 VHRGDT, VHRGTM .. ORDER BY VHRGDT DESC, VHRGTM DESC from the previous ETL run, perhaps contain them in an ETLRun table having one record per run, and then for next run:
SELECT ... WHERE VHRGDT > @PREV_VHRGDT AND VHRGTM > @PREV_VHRGTM
"Do not seek to follow in the footsteps of the wise. Instead, seek what they sought." - Matsuo Basho
September 17, 2015 at 9:52 am
Eric M Russell (9/17/2015)
Sean Lange (9/17/2015)
Eric M Russell (9/17/2015)
Someone had mentioned that the date/time conversions can be expensive. Depending on what you're needing to do with the dates and times once they're converted, it might make sense just to create a set of date and time lookup tables. Yes, it's now an additional set of joins, but the tables are relatively small, easily cached in buffer, and they can contain other useful pre-computed columns. Maybe it isn't practical for your given scenario, but I just thought I'd toss this option out there to try on for size. A calendar table is way overkill for this. I am just creating an ETL process and this is for a single query that gets all rows that were created since the last run. All I need is the where predicate and the version I came up with is entirely too verbose for my liking. 😉
If that's all you're using VHRGDT, VHRGTM for, then it seems that you don't even need to bother converting these integers to actual DateTime. You just need to take TOP 1 VHRGDT, VHRGTM .. ORDER BY VHRGDT DESC, VHRGTM DESC from the previous ETL run, perhaps contain them in an ETLRun table having one record per run, and then for next run:
SELECT ... WHERE VHRGDT > @PREV_VHRGDT AND VHRGTM > @PREV_VHRGTM
Now that sir is excellent idea. I still have to convert it to a datetime as I export it but in the where predicate this has a lot of merit because it would get rid of the painful index scans. I knew there was a way to make this work better. I will explore this as an option but shouldn't be a problem because I have full control over the database I am using as a control for the ETL processes.
_______________________________________________________________
Need help? Help us help you.
Read the article at http://www.sqlservercentral.com/articles/Best+Practices/61537/ for best practices on asking questions.
Need to split a string? Try Jeff Moden's splitter http://www.sqlservercentral.com/articles/Tally+Table/72993/.
Cross Tabs and Pivots, Part 1 – Converting Rows to Columns - http://www.sqlservercentral.com/articles/T-SQL/63681/
Cross Tabs and Pivots, Part 2 - Dynamic Cross Tabs - http://www.sqlservercentral.com/articles/Crosstab/65048/
Understanding and Using APPLY (Part 1) - http://www.sqlservercentral.com/articles/APPLY/69953/
Understanding and Using APPLY (Part 2) - http://www.sqlservercentral.com/articles/APPLY/69954/
September 17, 2015 at 11:49 pm
Using my normal test harness, Kristen's DATETIMEFROMPARTS solution is a clear winner, more than twice as fast as the next one. Note that the slowest solutions and those generating conversion errors are not included in the test.
😎
Test harness
-- Eirikur's harness: times each solution with bracketing timestamps written
-- into a @timer table variable, rather than SET STATISTICS TIME.
USE tempdb;
GO
SET NOCOUNT ON
/*
numeric date time to datetime test harness
http://www.sqlservercentral.com/Forums/Topic1720268-3412-1.aspx
*/
--/* -- UNCOMMENT THIS LINE TO SKIP GENERATING THE DATASET,
DECLARE @SAMPLE_SIZE INT = 1000000;
IF OBJECT_ID('dbo.TBL_SAMPLE_TIME_INT') IS NOT NULL DROP TABLE dbo.TBL_SAMPLE_TIME_INT;
CREATE TABLE dbo.TBL_SAMPLE_TIME_INT
(
STI_ID INT IDENTITY(1,1) NOT NULL CONSTRAINT PK_DBO_TBL_SAMPLE_TIME_INT_STI_ID PRIMARY KEY CLUSTERED
,STI_DATE_INT INT NOT NULL -- date as yyyymmdd
,STI_TIME_INT INT NOT NULL -- time as hhmmss
);
DECLARE @TTIME TIME = CONVERT(TIME,'00:00:00',0);
DECLARE @TDATE DATE = CONVERT(DATE,'20100101',112);
-- Random sample: CHECKSUM(NEWID()) % N gives a pseudo-random (possibly
-- negative) offset per row; DATEADD on DATE/TIME handles both signs.
-- NOTE(review): 864000 is 10x the 86400 seconds in a day -- looks like a
-- typo, though TIME arithmetic wraps so the generated times are still
-- valid; confirm intent. The style-121 time portion has its colons
-- stripped to produce the hhmmss integer.
INSERT INTO dbo.TBL_SAMPLE_TIME_INT(STI_DATE_INT,STI_TIME_INT)
SELECT TOP(@SAMPLE_SIZE)
CONVERT(INT,CONVERT(VARCHAR(10),DATEADD(DAY,CHECKSUM(NEWID()) % 1460,@TDATE),112),0)
,CONVERT(INT,REPLACE(CONVERT(VARCHAR(8),DATEADD(SECOND,CHECKSUM(NEWID()) % 864000,@TTIME),121),CHAR(58),''),0)
FROM sys.all_columns SAC1
CROSS JOIN sys.all_columns SAC2
CROSS JOIN sys.all_columns SAC3
;
-- */
-- Each test inserts the same label twice (before and after); the final query
-- reports MAX - MIN per label as the elapsed time in microseconds.
DECLARE @timer TABLE (T_TEXT VARCHAR(50) NOT NULL, T_TS DATETIME2(7) NOT NULL DEFAULT (SYSDATETIME()));
DECLARE @DATETIME_BUCKET DATETIME = CONVERT(DATE,'2015-01-01 00:00:00',120);
DECLARE @INT_BUCKET_01 INT = 0;
DECLARE @INT_BUCKET_02 INT = 0;
-- DRY RUN: plain scan with variable assignment only -- establishes the
-- baseline cost of reading the table (and warms the buffer pool).
INSERT INTO @timer(T_TEXT) VALUES ('DRY RUN');
SELECT
@INT_BUCKET_01 = STI.STI_DATE_INT
,@INT_BUCKET_02 = STI.STI_TIME_INT
FROM dbo.TBL_SAMPLE_TIME_INT STI
INSERT INTO @timer(T_TEXT) VALUES ('DRY RUN');
-- EE: zero-pad via 10000000 + time, STUFF in the colons, strip the prefix.
INSERT INTO @timer(T_TEXT) VALUES ('EE');
SELECT
@DATETIME_BUCKET = CONVERT(DATETIME,CONVERT(VARCHAR(8),STI.STI_DATE_INT,0) + CHAR(32)
+ STUFF(STUFF(STUFF(CONVERT(VARCHAR(8)
,10000000 + STI.STI_TIME_INT,0),5,0,CHAR(58)),8,0,CHAR(58)),1,2,''),112)
FROM dbo.TBL_SAMPLE_TIME_INT STI
INSERT INTO @timer(T_TEXT) VALUES ('EE');
-- tripleAxe-1: substring slicing into total seconds + DATEADD onto the date.
INSERT INTO @timer(T_TEXT) VALUES ('tripleAxe-1');
SELECT
@DATETIME_BUCKET = DATEADD(SECOND, CONVERT(INT, SUBSTRING(RIGHT('0' +(CONVERT(VARCHAR(6), STI.STI_TIME_INT)),6),1,2)) * 60 * 60
+ CONVERT(INT, SUBSTRING(RIGHT('0' +(CONVERT(VARCHAR(6), STI.STI_TIME_INT)),6),3,2)) * 60
+ SUBSTRING(RIGHT('0' +(CONVERT(VARCHAR(6), STI.STI_TIME_INT)),6),5,2), CONVERT(DATETIME, CONVERT(VARCHAR(8), STI.STI_DATE_INT),113))
FROM dbo.TBL_SAMPLE_TIME_INT STI
INSERT INTO @timer(T_TEXT) VALUES ('tripleAxe-1');
-- Luis: char(9) left-aligned date supplies the separator blank; +1000000
-- zero-pads the time.
INSERT INTO @timer(T_TEXT) VALUES ('Luis');
SELECT
@DATETIME_BUCKET = CAST( CAST( STI.STI_DATE_INT AS char(9)) + STUFF( STUFF( RIGHT( STI.STI_TIME_INT + 1000000, 6), 5, 0, ':'), 3, 0, ':') AS datetime)
FROM dbo.TBL_SAMPLE_TIME_INT STI
INSERT INTO @timer(T_TEXT) VALUES ('Luis');
-- Kristen: DATETIMEFROMPARTS with integer div/mod -- no string conversions.
INSERT INTO @timer(T_TEXT) VALUES ('Kristen');
SELECT
@DATETIME_BUCKET =
DATETIMEFROMPARTS (
STI.STI_DATE_INT / 10000 -- YEAR
,(STI.STI_DATE_INT % 10000) / 100 -- MONTH
,STI.STI_DATE_INT % 100 -- DAY
,STI.STI_TIME_INT / 10000 -- HOUR
,(STI.STI_TIME_INT % 10000) / 100 -- MINUTE
,STI.STI_TIME_INT % 100 -- SECOND
,0 -- MILLISECOND
)
FROM dbo.TBL_SAMPLE_TIME_INT STI;
INSERT INTO @timer(T_TEXT) VALUES ('Kristen');
-- Report: elapsed microseconds per label, fastest first.
SELECT
T.T_TEXT
,DATEDIFF(MICROSECOND,MIN(T_TS),MAX(T.T_TS)) AS DURATION
FROM @timer T
GROUP BY T.T_TEXT
ORDER BY DURATION ASC;
Results
T_TEXT DURATION
------------- -----------
DRY RUN 144009
Kristen 357020
Luis 783045
EE 849048
tripleAxe-1 1293074
September 18, 2015 at 2:06 am
The front runners were neck-and-neck in Luis's script?
September 18, 2015 at 3:46 am
Kristen-173977 (9/18/2015)
The front runners were neck-and-neck in Luis's script?
I've found the method I use for timing more accurate than the time statistics
😎
Viewing 15 posts - 31 through 45 (of 62 total)
You must be logged in to reply to this topic. Login to reply