Answered by:
Temp tables vs CTE

Question
-
Ok, now I do have 100% proof that CTE work much slower than temp tables. This is a continuation of multiline UDF vs. SP thread. I just ran this test:
-- ============================================================================
-- Benchmark script: the same FIFO inventory-costing report implemented twice --
-- first as a chain of CTEs (the "UDF" version), then with two indexed temp
-- tables (the "SP" version).  Each half PRINTs its elapsed time in ms so the
-- two approaches can be compared under identical filter parameters.
-- NOTE(review): line breaks restored.  In the original paste the script was
-- collapsed onto a few physical lines, so every inline "-- char(4)"-style
-- comment swallowed the code that followed it on the same line.  No tokens
-- were changed, only whitespace and added comments.
-- ============================================================================
DECLARE @cCostValuation char(4),
        @dtEnd DATETIME,
        @iLocation INT,
        @bFilterDCI BIT,
        @cDepartmentFrom char(10),
        @cCategoryFrom char(10),
        @cItemFrom char(10),
        @cDepartmentThru char(10),
        @cCategoryThru char(10),
        @cItemThru char(10),
        @bFilterVendor BIT,
        @vcVendorFrom varchar(25),
        @vcVendorThru varchar(25),
        @bIncludeZeroes bit

DECLARE @StartTime DATETIME
SET @StartTime = CURRENT_TIMESTAMP

SELECT @cCostValuation = 'RWAC',            -- char(4)
       @dtEnd = '20110519 23:59:59',        -- datetime
       @iLocation = null,                   -- int
       @bFilterDCI = 1,                     -- bit
       @cDepartmentFrom = 'RT34HANDW ',     -- char(10)
       @cCategoryFrom = '059OTHER ',        -- char(10)
       @cItemFrom = '5916 ',                -- char(10)
       @cDepartmentThru = 'RT34HANDW ',     -- char(10)
       @cCategoryThru = '059OTHER ',        -- char(10)
       @cItemThru = '5916 ',                -- char(10)
       @bFilterVendor = 0,                  -- bit
       @vcVendorFrom = NULL,                -- varchar(25)
       @vcVendorThru = NULL,                -- varchar(25)
       @bIncludeZeroes = 1                  -- bit

-- UDF Only
-- Inventory: all movements up to @dtEnd passing the location/DCI/vendor range
-- filters, DENSE_RANKed into FIFO order within each item + location.
;with Inventory as
 (SELECT i.pri_key, i.ref_no,
         DENSE_RANK() OVER (PARTITION BY i.department, i.category, i.item, i.invent_ID, i.locatn_ID
                            ORDER BY i.date_time, i.po_link) AS Fifo_Rank,
         i.locatn_id, i.date_time, i.department, i.category, i.item, i.invent_id,
         i.trans_type, i.quantity, i.unit_cost
    FROM i_invent i
    JOIN siriusv_RetailVendorByDCI vv
      ON vv.department = i.department AND vv.category = i.category AND vv.item = i.item
    JOIN i_vendor iv ON iv.vendor_id = vv.vendor_id
   WHERE i.date_time <= @dtEnd
     AND i.locatn_id = ISNULL(@iLocation, i.locatn_id)
     -- CASE @bFilterDCI pattern: when filtering is off each column is compared
     -- to itself, i.e. the predicate is always true (for non-NULL columns)
     AND i.department BETWEEN CASE @bFilterDCI WHEN 0 THEN i.department ELSE ISNULL(@cDepartmentFrom, i.department) END
                          AND CASE @bFilterDCI WHEN 0 THEN i.department ELSE COALESCE(@cDepartmentThru, @cDepartmentFrom, i.department) END
     AND i.category BETWEEN CASE @bFilterDCI WHEN 0 THEN i.category ELSE ISNULL(@cCategoryFrom, i.category) END
                        AND CASE @bFilterDCI WHEN 0 THEN i.category ELSE COALESCE(@cCategoryThru, @cCategoryFrom, i.category) END
     AND i.item BETWEEN CASE @bFilterDCI WHEN 0 THEN i.item ELSE ISNULL(@cItemFrom, i.item) END
                    AND CASE @bFilterDCI WHEN 0 THEN i.item ELSE COALESCE(@cItemThru, @cItemFrom, i.item) END
     AND iv.full_name BETWEEN CASE @bFilterVendor WHEN 0 THEN iv.full_name ELSE ISNULL(@vcVendorFrom, iv.full_name) END
                          AND CASE @bFilterVendor WHEN 0 THEN iv.full_name ELSE COALESCE(@vcVendorThru, @vcVendorFrom, iv.full_name) END
 ),
-- LastCost: last known cost per item, from the siriusfn_RetailLastCost TVF.
LastCost AS
 (SELECT department, category, item, invent_id, LastCost
    FROM siriusfn_RetailLastCost(@cDepartmentFrom, @cCategoryFrom, @cItemFrom, NULL,
                                 @cDepartmentThru, @cCategoryThru, @cItemThru, NULL)
 ),
-- cteInventorySum: net on-hand quantity per item + location.
cteInventorySum AS
 ( SELECT Department, Category, Item, Invent_ID, Locatn_ID, SUM(Quantity) AS TotalInventory
     FROM Inventory
    GROUP BY Department, Category, Item, Invent_ID, Locatn_ID
 ),
cteReverseInSum AS
 ( /* Perform a rolling balance ( in reverse order ) through the inventory movements in */
   SELECT s.Department, s.Category, s.Item, s.Invent_ID, s.Locatn_ID, s.Fifo_Rank,
          ( SELECT SUM(i.Quantity)
              FROM Inventory AS i
             WHERE i.Department = s.Department and i.Category = s.Category and i.Item = s.Item
               and i.Invent_ID = s.Invent_ID and i.Locatn_ID = s.Locatn_ID
               AND i.Trans_Type IN ('P', 'A', 'T')
               AND i.Fifo_Rank >= s.Fifo_Rank ) AS RollingInventory,
          SUM(s.Quantity) AS ThisInventory
     FROM Inventory AS s
    WHERE s.Trans_Type IN ('P', 'A', 'T')
    GROUP BY s.Department, s.Category, s.Item, s.Invent_ID, s.Locatn_ID, s.Fifo_Rank
 ),
-- cteWithLastTranDate: walking layers newest-first, find the FIFO layer at
-- which the rolling receipt total first covers the on-hand quantity; that
-- boundary layer only contributes UseThisInventory units to the costing.
cteWithLastTranDate AS
 ( SELECT w.Department, w.Category, w.Item, w.Invent_ID, w.Locatn_ID, w.TotalInventory,
          coalesce(LastPartialInventory.Fifo_Rank,0) as Fifo_Rank,
          coalesce(LastPartialInventory.InventoryToUse,0) as InventoryToUse,
          coalesce(LastPartialInventory.RunningTotal,0) as RunningTotal,
          w.TotalInventory - COALESCE(LastPartialInventory.RunningTotal,0)
                           + COALESCE(LastPartialInventory.InventoryToUse,0) AS UseThisInventory
     FROM cteInventorySum AS w
    OUTER APPLY ( SELECT TOP(1) z.Fifo_Rank, z.ThisInventory AS InventoryToUse, z.RollingInventory AS RunningTotal
                    FROM cteReverseInSum AS z
                   WHERE z.Department = w.Department and z.Category = w.Category and z.Item = w.Item
                     and z.Invent_ID = w.Invent_ID and z.Locatn_ID = w.Locatn_ID
                     and z.RollingInventory >= w.TotalInventory
                   ORDER BY z.Fifo_Rank DESC ) AS LastPartialInventory
 ),
-- cteSource: cost the on-hand quantity layer by layer, pricing each layer from
-- the most recent receipt at or before it.
cteSource AS
 ( SELECT y.Department, y.Category, y.Item, y.Invent_ID, y.Locatn_ID,
          y.TotalInventory as QuantityOnHand,
          SUM(CASE WHEN e.Fifo_Rank = y.Fifo_Rank THEN y.UseThisInventory ELSE e.Quantity END
              * Price.Unit_Cost) AS CostOfGoodsOnHand,
          LastCost.LastCost
     FROM cteWithLastTranDate AS y
     LEFT JOIN Inventory AS e
       ON e.Department = y.Department AND e.Category = y.Category AND e.Item = y.Item
      AND e.Invent_ID = y.Invent_ID AND e.Locatn_ID = y.Locatn_ID
      AND e.Fifo_Rank >= y.Fifo_Rank AND e.Trans_Type IN ('P', 'A', 'T')
     LEFT JOIN LastCost LastCost
       ON y.Department = LastCost.Department AND y.Category = LastCost.Category
      AND y.Item = LastCost.Item AND y.Invent_ID = LastCost.Invent_ID
    OUTER APPLY ( /* Find the Price of the item in */
                  SELECT TOP(1) p.Unit_Cost
                    FROM Inventory AS p
                   WHERE p.Department = e.Department and p.Category = e.Category and p.Item = e.Item
                     and p.Invent_ID = e.Invent_ID and p.Locatn_ID = e.Locatn_ID
                     and p.Fifo_Rank <= e.Fifo_Rank and p.Trans_Type IN ('P', 'A', 'T')
                   ORDER BY p.Fifo_Rank DESC ) AS Price
    GROUP BY y.Department, y.Category, y.Item, y.Invent_ID, y.Locatn_ID, y.TotalInventory, LastCost.LastCost
 )
SELECT Department, Category, Item, Invent_ID, Locatn_ID, QuantityOnHand,
       coalesce(CostOfGoodsOnHand,0) as CostOfGoodsOnHand,
       case when QuantityOnHand <> 0 and CostOfGoodsOnHand <> 0
            then CostOfGoodsOnHand / QuantityOnHand
            else LastCost end as AverageCost
  from cteSource
 WHERE @bIncludeZeroes = 1 or (@bIncludeZeroes = 0 and CostOfGoodsOnHand <> 0)
 ORDER BY Department, Category, Item, Invent_ID, Locatn_ID

PRINT 'New UDF - FIFO only ' + CAST(DATEDIFF(ms, @StartTime, CURRENT_TIMESTAMP) AS varchar(10))

-- SP Only
--DECLARE @StartTime DATETIME
SET @StartTime = CURRENT_TIMESTAMP

IF OBJECT_ID('TempDb..#tblLastCost', 'U') IS NOT NULL
    DROP TABLE #tblLastCost

CREATE TABLE #tblLastCost
    ( department CHAR(10) COLLATE DATABASE_DEFAULT
    , category CHAR(10) COLLATE DATABASE_DEFAULT
    , item CHAR(10) COLLATE DATABASE_DEFAULT
    , invent_id INT
    , LastCost MONEY
    , PRIMARY KEY ( department, category, item, invent_id )
    )

INSERT INTO #tblLastCost
SELECT department, category, item, invent_id, LastCost
  FROM siriusfn_RetailLastCost(@cDepartmentFrom, @cCategoryFrom, @cItemFrom, NULL,
                               @cDepartmentThru, @cCategoryThru, @cItemThru, NULL)
--print 'Last Cost table created'

IF OBJECT_ID('TempDb..#tbli_invent', 'U') IS NOT NULL
    DROP TABLE #tbli_invent

CREATE TABLE #tbli_Invent
    ( pri_key INT PRIMARY KEY NONCLUSTERED
    , ref_no NUMERIC(17, 0)
    , locatn_id INT
    , date_time DATETIME
    , fifo_rank INT
    , department CHAR(10) COLLATE DATABASE_DEFAULT
    , category CHAR(10) COLLATE DATABASE_DEFAULT
    , item CHAR(10) COLLATE DATABASE_DEFAULT
    , invent_id INT
    , trans_type CHAR(1) COLLATE DATABASE_DEFAULT
    , quantity NUMERIC(8, 2)
    , unit_cost MONEY
    )

-- Two INSERT paths: with vendor filtering the vendor view/table joins are
-- needed; without it they are skipped entirely.
IF @bFilterVendor = 1
    INSERT INTO #tbli_invent
        ( pri_key, ref_no, locatn_id, date_time, fifo_rank, department, category,
          item, invent_id, trans_type, quantity, unit_cost )
    SELECT i.pri_key, i.ref_no, i.locatn_id, i.date_time,
           DENSE_RANK() OVER ( PARTITION BY i.department, i.category, i.item, i.invent_id, i.locatn_id
                               ORDER BY i.date_time, i.po_link ),
           i.department, i.category, i.item, i.invent_id, i.trans_type, i.quantity, i.unit_cost
      FROM i_invent i
      JOIN siriusv_RetailVendorByDCI vv
        ON vv.department = i.department AND vv.category = i.category AND vv.item = i.item
      JOIN i_vendor iv ON iv.vendor_id = vv.vendor_id
     WHERE i.date_time <= @dtEnd
       AND i.locatn_id = ISNULL(@iLocation, i.locatn_id)
       AND i.department BETWEEN CASE @bFilterDCI WHEN 0 THEN i.department ELSE ISNULL(@cDepartmentFrom, i.department) END
                            AND CASE @bFilterDCI WHEN 0 THEN i.department ELSE COALESCE(@cDepartmentThru, @cDepartmentFrom, i.department) END
       AND i.category BETWEEN CASE @bFilterDCI WHEN 0 THEN i.category ELSE ISNULL(@cCategoryFrom, i.category) END
                          AND CASE @bFilterDCI WHEN 0 THEN i.category ELSE COALESCE(@cCategoryThru, @cCategoryFrom, i.category) END
       AND i.item BETWEEN CASE @bFilterDCI WHEN 0 THEN i.item ELSE ISNULL(@cItemFrom, i.item) END
                      AND CASE @bFilterDCI WHEN 0 THEN i.item ELSE COALESCE(@cItemThru, @cItemFrom, i.item) END
       AND iv.full_name BETWEEN CASE @bFilterVendor WHEN 0 THEN iv.full_name ELSE ISNULL(@vcVendorFrom, iv.full_name) END
                            AND CASE @bFilterVendor WHEN 0 THEN iv.full_name ELSE COALESCE(@vcVendorThru, @vcVendorFrom, iv.full_name) END
ELSE
    INSERT INTO #tbli_invent
        ( pri_key, ref_no, locatn_id, date_time, fifo_rank, department, category,
          item, invent_id, trans_type, quantity, unit_cost )
    SELECT i.pri_key, i.ref_no, i.locatn_id, i.date_time,
           DENSE_RANK() OVER ( PARTITION BY i.department, i.category, i.item, i.invent_id, i.locatn_id
                               ORDER BY i.date_time, i.po_link ),
           i.department, i.category, i.item, i.invent_id, i.trans_type, i.quantity, i.unit_cost
      FROM i_invent i
     WHERE i.date_time <= @dtEnd
       AND i.locatn_id = ISNULL(@iLocation, i.locatn_id)
       AND i.department BETWEEN CASE @bFilterDCI WHEN 0 THEN i.department ELSE ISNULL(@cDepartmentFrom, i.department) END
                            AND CASE @bFilterDCI WHEN 0 THEN i.department ELSE COALESCE(@cDepartmentThru, @cDepartmentFrom, i.department) END
       AND i.category BETWEEN CASE @bFilterDCI WHEN 0 THEN i.category ELSE ISNULL(@cCategoryFrom, i.category) END
                          AND CASE @bFilterDCI WHEN 0 THEN i.category ELSE COALESCE(@cCategoryThru, @cCategoryFrom, i.category) END
       AND i.item BETWEEN CASE @bFilterDCI WHEN 0 THEN i.item ELSE ISNULL(@cItemFrom, i.item) END
                      AND CASE @bFilterDCI WHEN 0 THEN i.item ELSE COALESCE(@cItemThru, @cItemFrom, i.item) END
--print 'Created tbli_invent table'
--==============================================
-- Same CTE chain as above, but reading the materialized temp tables instead of
-- re-expanding the Inventory CTE at every reference.
; WITH cteInventorySum AS
 ( SELECT Department, Category, Item, Invent_ID, Locatn_ID, SUM(Quantity) AS TotalInventory
     FROM #tbli_Invent
    GROUP BY Department, Category, Item, Invent_ID, Locatn_ID
 ),
cteReverseInSum AS
 ( /* Perform a rolling balance ( in reverse order ) through the inventory movements in */
   SELECT s.Department, s.Category, s.Item, s.Invent_ID, s.Locatn_ID, s.Fifo_Rank,
          ( SELECT SUM(i.Quantity)
              FROM #tbli_Invent AS i
             WHERE i.Department = s.Department AND i.Category = s.Category AND i.Item = s.Item
               AND i.Invent_ID = s.Invent_ID AND i.Locatn_ID = s.Locatn_ID
               AND i.Trans_Type IN ( 'P', 'A', 'T' )
               AND i.Fifo_Rank >= s.Fifo_Rank ) AS RollingInventory,
          SUM(s.Quantity) AS ThisInventory
     FROM #tbli_Invent AS s
    WHERE s.Trans_Type IN ( 'P', 'A', 'T' )
    GROUP BY s.Department, s.Category, s.Item, s.Invent_ID, s.Locatn_ID, s.Fifo_Rank
 ),
cteWithLastTranDate AS
 ( SELECT w.Department, w.Category, w.Item, w.Invent_ID, w.Locatn_ID, w.TotalInventory,
          COALESCE(LastPartialInventory.Fifo_Rank, 0) AS Fifo_Rank,
          COALESCE(LastPartialInventory.InventoryToUse, 0) AS InventoryToUse,
          COALESCE(LastPartialInventory.RunningTotal, 0) AS RunningTotal,
          w.TotalInventory - COALESCE(LastPartialInventory.RunningTotal, 0)
                           + COALESCE(LastPartialInventory.InventoryToUse, 0) AS UseThisInventory
     FROM cteInventorySum AS w
    OUTER APPLY ( SELECT TOP ( 1 ) z.Fifo_Rank, z.ThisInventory AS InventoryToUse, z.RollingInventory AS RunningTotal
                    FROM cteReverseInSum AS z
                   WHERE z.Department = w.Department AND z.Category = w.Category AND z.Item = w.Item
                     AND z.Invent_ID = w.Invent_ID AND z.Locatn_ID = w.Locatn_ID
                     AND z.RollingInventory >= w.TotalInventory
                   ORDER BY z.Fifo_Rank DESC ) AS LastPartialInventory
 ),
cteSource AS
 ( SELECT y.Department, y.Category, y.Item, y.Invent_ID, y.Locatn_ID,
          y.TotalInventory AS QuantityOnHand,
          SUM(CASE WHEN e.Fifo_Rank = y.Fifo_Rank THEN y.UseThisInventory ELSE e.Quantity END
              * Price.Unit_Cost) AS CostOfGoodsOnHand,
          LastCost.LastCost
     FROM cteWithLastTranDate AS y
     LEFT JOIN #tbli_Invent AS e
       ON e.Department = y.Department AND e.Category = y.Category AND e.Item = y.Item
      AND e.Invent_ID = y.Invent_ID AND e.Locatn_ID = y.Locatn_ID
      AND e.Fifo_Rank >= y.Fifo_Rank AND e.Trans_Type IN ( 'P', 'A', 'T' )
     LEFT JOIN #tblLastCost LastCost
       ON y.department = LastCost.department AND y.category = LastCost.category
      AND y.item = LastCost.item AND y.invent_id = LastCost.invent_id
    OUTER APPLY ( /* Find the Price of the item in */
                  SELECT TOP ( 1 ) p.Unit_Cost
                    FROM #tbli_Invent AS p
                   WHERE p.Department = e.Department AND p.Category = e.Category AND p.Item = e.Item
                     AND p.Invent_ID = e.Invent_ID AND p.Locatn_ID = e.Locatn_ID
                     AND p.Fifo_Rank <= e.Fifo_Rank AND p.Trans_Type IN ( 'P', 'A', 'T' )
                   ORDER BY p.Fifo_Rank DESC ) AS Price
    GROUP BY y.Department, y.Category, y.Item, y.Invent_ID, y.Locatn_ID, y.TotalInventory, LastCost.LastCost
 )
SELECT Department, Category, Item, Invent_ID, Locatn_ID, QuantityOnHand,
       COALESCE(CostOfGoodsOnHand, 0) AS CostOfGoodsOnHand,
       CASE WHEN QuantityOnHand <> 0 AND CostOfGoodsOnHand <> 0
            THEN CostOfGoodsOnHand / QuantityOnHand
            ELSE LastCost END AS AverageCost
  FROM cteSource
 WHERE @bIncludeZeroes = 1 OR ( @bIncludeZeroes = 0 AND CostOfGoodsOnHand <> 0 )
 -- NOTE(review): unlike the CTE version, this ORDER BY omits Invent_ID --
 -- confirm whether the two halves are intended to sort identically.
 ORDER BY Department, category, item, locatn_id

PRINT 'SP - FIFO only ' + CAST(DATEDIFF(ms, @StartTime, CURRENT_TIMESTAMP) AS varchar(10))
with the following result
Warning: Null value is eliminated by an aggregate or other SET operation.
(19 row(s) affected)
New UDF - FIFO only 525056
(1 row(s) affected)
(29532 row(s) affected)
Warning: Null value is eliminated by an aggregate or other SET operation.
(19 row(s) affected)
SP - FIFO only 4383
Do you see any ways to make the function version work faster? Thanks in advance.
For every expert, there is an equal and opposite expert. - Becker's Law
My blogTuesday, May 24, 2011 10:33 PM
Answers
-
First of all, I have to admit that I haven't gone through Naomi's whole script yet, but I already have a different opinion on this:
now I do have 100% proof that CTE work much slower than temp tables I don't quite agree with this statement. In contrast, I will say in some cases, using the CTE will render much faster response time and better I/O access (meaning the better execution plan will be chosen by using a CTE rather than a temp table). I have examples to verify that.
Also, I have read the Uri, and Erland's comments and I will totally agree with Erland.
Even without UDF , CTE works slower than tempdb.....I had a few clients that proofed that
One reason, sql server does not maintain statistics on CTE I think the first sentence is misleading. Again, as I said, in some cases that the CTE will be much faster than using temp table. While the statement sql server does not maintain statistics on CTE I believe it is indeed true, but it is not the reason.
As Erland pointed out, when using the CTE in the outer query, the query optimizer will use the underlying table directly to generate the query plan, and therefore the indexes on the underlying table will be used. When using a CTE, an index seek is often achievable if the underlying table indexes and the outer query are properly designed.
The CTE is very good for modular design of a query, and it is better to reference the CTE only once since, as Erland mentioned, it will expand the CTE to the underlying table every time it is referenced.
- Marked as answer by Naomi N Wednesday, June 22, 2011 5:15 PM
Sunday, May 29, 2011 11:00 AM -
I've been testing it yesterday on a huge database. I had two cases in one script (first the original and then the new implementation). I tried to run several times, always making the time interval smaller - I never got the script to finish. Finally, I only used about 3 days worth of data (no other restrictions) and ran only the new code - it finished in 3 minutes. I then tried to run the old code, and it ended with me disconnecting from the server. I plan to try one more test today, but it looks like the original code cannot finish even for just 3 days. So, I can conclude that my re-write of the code was what this needed.
For every expert, there is an equal and opposite expert. - Becker's Law
My blog- Marked as answer by Naomi N Wednesday, June 22, 2011 6:47 PM
Wednesday, June 22, 2011 2:38 PM -
Even without UDF , CTE works slower than tempdb.....I had a few clients that proofed that
One reason, sql server does not maintain statistics on CTE
Best Regards, Uri Dimant SQL Server MVP http://dimantdatabasesolutions.blogspot.com/ http://sqlblog.com/blogs/uri_dimant/- Marked as answer by Naomi N Wednesday, June 22, 2011 5:14 PM
Sunday, May 29, 2011 7:07 AMAnswerer -
Even without UDF , CTE works slower than tempdb.....I had a few clients that proofed that
One reason, sql server does not maintain statistics on CTE
Uri, you should know better than saying such silly things! There are certainly situations where it is a good idea to materialise an intermediate result into a temp table rather than using a CTE. Not the least if there are multiple references to the CTE in the query. But this has nothing to do with statistics, but merely the fact that SQL Server will almost always compute the CTE each time it is referenced.
Or more precisely, the optimizer works with the expanded query, so it would need to be able to rematch and see that the same query expression appears multiple times in the query to be able to set up a spool for it. But the CTE as such may never be computed, as the optimizer may recast the computation order.
However, there is nothing that says that you will always get better performance with a temp table. Consider:
-- Example: aggregate the orders once per customer in a CTE, then join to the
-- customer table.  Because the optimizer expands the CTE inline, the expected
-- plan only ever touches orders belonging to the filtered (Swedish) customers.
WITH CTE AS
( SELECT CustomerID, COUNT(*) AS cnt
    FROM Orders
   GROUP BY CustomerID
)
SELECT C.CustomerName, CTE.cnt
  FROM Customers C
  -- fixed: the original read "C.CustomerID = O.CustomerID", but no alias "O"
  -- is defined anywhere in this query -- the CTE's column was clearly meant
  JOIN CTE ON C.CustomerID = CTE.CustomerID
 WHERE C.CountryCode = 'SE'
Do you think this query will run faster if we save the CTE into a temp table? (Assume that this is for a world-wide organisation, and the Swedish customers produce at most 1% of the orders.)
The expected plan is that SQL Server first locates the Swedish customers, and then seeks the index on Orders.CustomerID, and thus never reads entries for Dutch or French customers.
Erland Sommarskog, SQL Server MVP, esquel@sommarskog.se- Marked as answer by Naomi N Wednesday, June 22, 2011 5:14 PM
Sunday, May 29, 2011 9:33 AM -
>> Ok, amended statement can be - CTE is much slower than temp tables if CTE is used more than once in the query (as in this particular case and a case mentioned by Uri). <<
This is an optimizer flaw in T-SQL; DB2, Oracle, etc. will decide to materialize a CTE or not. Even better, they will decide to materialize a VIEW that is shared by multiple sessions. Since a lot of reports are based on the same summary data at the same time, this is a huge advantage.
--CELKO-- Books in Celko Series for Morgan-Kaufmann Publishing: Analytics and OLAP in SQL / Data and Databases: Concepts in Practice Data / Measurements and Standards in SQL SQL for Smarties / SQL Programming Style / SQL Puzzles and Answers / Thinking in Sets / Trees and Hierarchies in SQL- Marked as answer by Naomi N Wednesday, June 22, 2011 5:16 PM
Sunday, May 29, 2011 7:42 PM -
-
Ok, amended statement can be - CTE is much slower than temp tables if CTE is used more than once in the query (as in this particular case and a case mentioned by Uri).
A CTE is evaluated every time it is used, like a macro. In addition, a #temptable can be indexed for further performance gains.
Kalman Toth SQL SERVER 2012 & BI TRAINING
New Book: Beginner Database Design & SQL Programming Using Microsoft SQL Server 2012- Marked as answer by Naomi N Wednesday, June 22, 2011 6:47 PM
- Edited by Kalman Toth Monday, October 1, 2012 12:46 AM
Monday, June 6, 2011 2:27 PM
All replies
-
Hi,
It looks like your CTE is very complex and it has to do many complex operations before storing results in memory. Whereas in temp tables that is not the case. Temp tables are stored in tempdb and so there is no issues of memory shortage here.
Try to simplify your CTE and I am sure it will give better results to you. CTE is not the replacement of temp tables. We should use the combination of both for the better optimization of the code/procedures.
Thanks,
Neeraj Tiwary - Please mark the post as answered if it answers your questionFriday, May 27, 2011 10:17 AM -
I'm now playing with these two variations of the original CTE (I haven't yet tested the difference in the whole algorithm):
-- ============================================================================
-- Profiling script: two variants of the "Inventory" CTE measured in isolation
-- with SET STATISTICS IO/TIME.  Variant 1 always joins the vendor view/table;
-- variant 2 splits on @bFilterVendor with UNION ALL so the vendor joins are
-- only taken when vendor filtering is actually requested.
-- NOTE(review): line breaks restored.  In the original paste the script was
-- collapsed onto a few physical lines, so every inline "-- char(4)"-style
-- comment swallowed the code that followed it on the same line.  No tokens
-- were changed, only whitespace and added comments.
-- ============================================================================
DECLARE @cCostValuation char(4),
        @dtEnd DATETIME,
        @iLocation INT,
        @bFilterDCI BIT,
        @cDepartmentFrom char(10),
        @cCategoryFrom char(10),
        @cItemFrom char(10),
        @cDepartmentThru char(10),
        @cCategoryThru char(10),
        @cItemThru char(10),
        @bFilterVendor BIT,
        @vcVendorFrom varchar(25),
        @vcVendorThru varchar(25),
        @bIncludeZeroes bit

DECLARE @StartTime DATETIME
SET @StartTime = CURRENT_TIMESTAMP

SELECT @cCostValuation = 'RWAC',            -- char(4)
       @dtEnd = '20110519 23:59:59',        -- datetime
       @iLocation = null,                   -- int
       @bFilterDCI = 1,                     -- bit
       @cDepartmentFrom = 'RT34HANDW ',     -- char(10)
       @cCategoryFrom = '059OTHER ',        -- char(10)
       @cItemFrom = '5916 ',                -- char(10)
       @cDepartmentThru = 'RT34HANDW ',     -- char(10)
       @cCategoryThru = '059OTHER ',        -- char(10)
       @cItemThru = '5916 ',                -- char(10)
       @bFilterVendor = 0,                  -- bit
       @vcVendorFrom = NULL,                -- varchar(25)
       @vcVendorThru = NULL,                -- varchar(25)
       @bIncludeZeroes = 1                  -- bit

SET STATISTICS IO ON
SET STATISTICS TIME ON

-- Variant 1: vendor view/table always joined, vendor filter neutralized by the
-- CASE @bFilterVendor pattern when @bFilterVendor = 0.
;with Inventory as
 (SELECT i.pri_key, i.ref_no,
         DENSE_RANK() OVER (PARTITION BY i.department, i.category, i.item, i.invent_ID, i.locatn_ID
                            ORDER BY i.date_time, i.po_link) AS Fifo_Rank,
         i.locatn_id, i.date_time, i.department, i.category, i.item, i.invent_id,
         i.trans_type, i.quantity, i.unit_cost
    FROM i_invent i
    JOIN siriusv_RetailVendorByDCI vv
      ON vv.department = i.department AND vv.category = i.category AND vv.item = i.item
    JOIN i_vendor iv ON iv.vendor_id = vv.vendor_id
   WHERE i.date_time <= @dtEnd
     AND i.locatn_id = ISNULL(@iLocation, i.locatn_id)
     AND i.department BETWEEN CASE @bFilterDCI WHEN 0 THEN i.department ELSE ISNULL(@cDepartmentFrom, i.department) END
                          AND CASE @bFilterDCI WHEN 0 THEN i.department ELSE COALESCE(@cDepartmentThru, @cDepartmentFrom, i.department) END
     AND i.category BETWEEN CASE @bFilterDCI WHEN 0 THEN i.category ELSE ISNULL(@cCategoryFrom, i.category) END
                        AND CASE @bFilterDCI WHEN 0 THEN i.category ELSE COALESCE(@cCategoryThru, @cCategoryFrom, i.category) END
     AND i.item BETWEEN CASE @bFilterDCI WHEN 0 THEN i.item ELSE ISNULL(@cItemFrom, i.item) END
                    AND CASE @bFilterDCI WHEN 0 THEN i.item ELSE COALESCE(@cItemThru, @cItemFrom, i.item) END
     AND iv.full_name BETWEEN CASE @bFilterVendor WHEN 0 THEN iv.full_name ELSE ISNULL(@vcVendorFrom, iv.full_name) END
                          AND CASE @bFilterVendor WHEN 0 THEN iv.full_name ELSE COALESCE(@vcVendorThru, @vcVendorFrom, iv.full_name) END
 )
SELECT * FROM Inventory

PRINT 'Vendors separately'

-- Variant 2: UNION ALL split on @bFilterVendor -- the branch without vendor
-- joins runs when @bFilterVendor = 0, the branch with them when it is 1, so
-- the mutually exclusive WHERE guards mean only one branch returns rows.
;with Inventory as
 (SELECT i.pri_key, i.ref_no,
         DENSE_RANK() OVER (PARTITION BY i.department, i.category, i.item, i.invent_ID, i.locatn_ID
                            ORDER BY i.date_time, i.po_link) AS Fifo_Rank,
         i.locatn_id, i.date_time, i.department, i.category, i.item, i.invent_id,
         i.trans_type, i.quantity, i.unit_cost
    FROM i_invent i
   WHERE @bFilterVendor = 0
     AND i.date_time <= @dtEnd
     AND i.locatn_id = ISNULL(@iLocation, i.locatn_id)
     AND i.department BETWEEN CASE @bFilterDCI WHEN 0 THEN i.department ELSE ISNULL(@cDepartmentFrom, i.department) END
                          AND CASE @bFilterDCI WHEN 0 THEN i.department ELSE COALESCE(@cDepartmentThru, @cDepartmentFrom, i.department) END
     AND i.category BETWEEN CASE @bFilterDCI WHEN 0 THEN i.category ELSE ISNULL(@cCategoryFrom, i.category) END
                        AND CASE @bFilterDCI WHEN 0 THEN i.category ELSE COALESCE(@cCategoryThru, @cCategoryFrom, i.category) END
     AND i.item BETWEEN CASE @bFilterDCI WHEN 0 THEN i.item ELSE ISNULL(@cItemFrom, i.item) END
                    AND CASE @bFilterDCI WHEN 0 THEN i.item ELSE COALESCE(@cItemThru, @cItemFrom, i.item) END
  UNION ALL
  SELECT i.pri_key, i.ref_no,
         DENSE_RANK() OVER (PARTITION BY i.department, i.category, i.item, i.invent_ID, i.locatn_ID
                            ORDER BY i.date_time, i.po_link) AS Fifo_Rank,
         i.locatn_id, i.date_time, i.department, i.category, i.item, i.invent_id,
         i.trans_type, i.quantity, i.unit_cost
    FROM i_invent i
    JOIN siriusv_RetailVendorByDCI vv
      ON vv.department = i.department AND vv.category = i.category AND vv.item = i.item
    JOIN i_vendor iv ON iv.vendor_id = vv.vendor_id
   WHERE @bFilterVendor = 1
     AND i.date_time <= @dtEnd
     AND i.locatn_id = ISNULL(@iLocation, i.locatn_id)
     AND i.department BETWEEN CASE @bFilterDCI WHEN 0 THEN i.department ELSE ISNULL(@cDepartmentFrom, i.department) END
                          AND CASE @bFilterDCI WHEN 0 THEN i.department ELSE COALESCE(@cDepartmentThru, @cDepartmentFrom, i.department) END
     AND i.category BETWEEN CASE @bFilterDCI WHEN 0 THEN i.category ELSE ISNULL(@cCategoryFrom, i.category) END
                        AND CASE @bFilterDCI WHEN 0 THEN i.category ELSE COALESCE(@cCategoryThru, @cCategoryFrom, i.category) END
     AND i.item BETWEEN CASE @bFilterDCI WHEN 0 THEN i.item ELSE ISNULL(@cItemFrom, i.item) END
                    AND CASE @bFilterDCI WHEN 0 THEN i.item ELSE COALESCE(@cItemThru, @cItemFrom, i.item) END
     AND iv.full_name BETWEEN ISNULL(@vcVendorFrom, iv.full_name)
                          AND COALESCE(@vcVendorThru, @vcVendorFrom, iv.full_name)
 )
SELECT * FROM Inventory
With the results:
SQL Server parse and compile time:
CPU time = 8571 ms, elapsed time = 8571 ms.
SQL Server Execution Times:
CPU time = 0 ms, elapsed time = 0 ms.
SQL Server Execution Times:
CPU time = 0 ms, elapsed time = 0 ms.
SQL Server Execution Times:
CPU time = 0 ms, elapsed time = 0 ms.
SQL Server Execution Times:
CPU time = 0 ms, elapsed time = 0 ms.
(29532 row(s) affected)
Table 'Worktable'. Scan count 0, logical reads 0, physical reads 0, read-ahead reads 0, lob logical reads 0, lob physical reads 0, lob read-ahead reads 0.
Table 'i_vendor'. Scan count 0, logical reads 60936, physical reads 0, read-ahead reads 0, lob logical reads 0, lob physical reads 0, lob read-ahead reads 0.
Table 'Worktable'. Scan count 12, logical reads 30, physical reads 0, read-ahead reads 0, lob logical reads 0, lob physical reads 0, lob read-ahead reads 0.
Table 'i_invent'. Scan count 11, logical reads 376021, physical reads 3, read-ahead reads 0, lob logical reads 0, lob physical reads 0, lob read-ahead reads 0.
Table 'i_po_hdr'. Scan count 0, logical reads 12, physical reads 2, read-ahead reads 0, lob logical reads 0, lob physical reads 0, lob read-ahead reads 0.
Table 'i_pchord'. Scan count 6, logical reads 333, physical reads 1, read-ahead reads 0, lob logical reads 0, lob physical reads 0, lob read-ahead reads 0.
Table 'i_items'. Scan count 3, logical reads 9, physical reads 0, read-ahead reads 0, lob logical reads 0, lob physical reads 0, lob read-ahead reads 0.
Table 'items'. Scan count 0, logical reads 15, physical reads 0, read-ahead reads 0, lob logical reads 0, lob physical reads 0, lob read-ahead reads 0.
SQL Server Execution Times:
CPU time = 4061 ms, elapsed time = 33085 ms.
Vendors separately
SQL Server Execution Times:
CPU time = 0 ms, elapsed time = 0 ms.
(29532 row(s) affected)
Table 'Worktable'. Scan count 0, logical reads 0, physical reads 0, read-ahead reads 0, lob logical reads 0, lob physical reads 0, lob read-ahead reads 0.
Table 'i_invent'. Scan count 5, logical reads 102019, physical reads 0, read-ahead reads 0, lob logical reads 0, lob physical reads 0, lob read-ahead reads 0.
Table 'Worktable'. Scan count 0, logical reads 0, physical reads 0, read-ahead reads 0, lob logical reads 0, lob physical reads 0, lob read-ahead reads 0.
SQL Server Execution Times:
CPU time = 2829 ms, elapsed time = 47189 ms.
--------------------------------------------------------------- I think it does make sense to keep working in this direction and I may achieve a bit of improvement. Too bad I already posted the new version of UDF (inline vs. multiline) and my colleague already posted an update script. He will not be very happy to create a new update script... :)
For every expert, there is an equal and opposite expert. - Becker's Law
My blogFriday, May 27, 2011 7:12 PM -
Even without UDF , CTE works slower than tempdb.....I had a few clients that proofed that
One reason, sql server does not maintain statistics on CTE
Best Regards, Uri Dimant SQL Server MVP http://dimantdatabasesolutions.blogspot.com/ http://sqlblog.com/blogs/uri_dimant/- Marked as answer by Naomi N Wednesday, June 22, 2011 5:14 PM
Sunday, May 29, 2011 7:07 AMAnswerer -
9 minutes vs. 13 seconds - too big of a difference.
For every expert, there is an equal and opposite expert. - Becker's Law
My blogSunday, May 29, 2011 7:31 AM -
We had 21 minutes and 11 sec
Best Regards, Uri Dimant SQL Server MVP http://dimantdatabasesolutions.blogspot.com/ http://sqlblog.com/blogs/uri_dimant/Sunday, May 29, 2011 7:37 AMAnswerer -
Even without UDF , CTE works slower than tempdb.....I had a few clients that proofed that
One reason, sql server does not maintain statistics on CTE
Uri, you should know better than saying such silly things! There are certainly situations where it is a good idea to materialise an intermediate result into a temp table rather than using a CTE. Not the least if there are multiple references to the CTE in the query. But this has nothing to do with statistics, but merely the fact that SQL Server will almost always compute the CTE each time it is referenced.
Or more precisely, the optimizer works with the expanded query, so it would need to be able to rematch and see that the same query expression appears multiple times in the query to be able to set up a spool for it. But the CTE as such may never be computed, as the optimizer may recast the computation order.
However, there is nothing that says that you will always get better performance with a temp table. Consider:
-- Example: aggregate the orders once per customer in a CTE, then join to the
-- customer table.  Because the optimizer expands the CTE inline, the expected
-- plan only ever touches orders belonging to the filtered (Swedish) customers.
WITH CTE AS
( SELECT CustomerID, COUNT(*) AS cnt
    FROM Orders
   GROUP BY CustomerID
)
SELECT C.CustomerName, CTE.cnt
  FROM Customers C
  -- fixed: the original read "C.CustomerID = O.CustomerID", but no alias "O"
  -- is defined anywhere in this query -- the CTE's column was clearly meant
  JOIN CTE ON C.CustomerID = CTE.CustomerID
 WHERE C.CountryCode = 'SE'
Do you think this query will run faster if we save the CTE into a temp table? (Assume that this is for a world-wide organisation, and the Swedish customers produce at most 1% of the orders.)
The expected plan is that SQL Server first locates the Swedish customers, and then seeks the index on Orders.CustomerID, and thus never reads entries for Dutch or French customers.
Erland Sommarskog, SQL Server MVP, esquel@sommarskog.se- Marked as answer by Naomi N Wednesday, June 22, 2011 5:14 PM
Sunday, May 29, 2011 9:33 AM -
Erland
>>Not the least if there are multiple references to the CTE in >>the query. But this has nothing to do with statistics, but >>merely the fact that SQL Server will almost always compute >>the CTE each time it is referenced.
Yes, in our case we had self-join CTEs that hit performance. Our CTEs are computationally hard or return many, many rows, and they are
accessed multiple times, so for sure then a table, temp table
could perform (much) better. I can not show you right now, but I swear, I observed, that Estimation was greatly improved only when we moved into a temporary table approach.
Best Regards, Uri Dimant SQL Server MVP http://dimantdatabasesolutions.blogspot.com/ http://sqlblog.com/blogs/uri_dimant/Sunday, May 29, 2011 9:52 AMAnswerer -
First of all, I have to admit that I haven't gone through Naomi's whole script yet, but I already have a different opinion on this:
now I do have 100% proof that CTE work much slower than temp tables I don't quite agree with this statement. In contrast, I will say in some cases, using the CTE will render much faster response time and better I/O access (meaning the better execution plan will be chosen by using a CTE rather than a temp table). I have examples to verify that.
Also, I have read the Uri, and Erland's comments and I will totally agree with Erland.
Even without UDF , CTE works slower than tempdb.....I had a few clients that proofed that
One reason, sql server does not maintain statistics on CTE I think the first sentence is misleading. Again, as I said, in some cases that the CTE will be much faster than using temp table. While the statement sql server does not maintain statistics on CTE I believe it is indeed true, but it is not the reason.
As Erland pointed out, when using the CTE in the outer query, the query optimizer will use the underlying table directly to generate the query plan, and therefore the indexes on the underlying table will be used. When using a CTE, an index seek is often achievable if the underlying table's indexes and the outer query are properly designed.
The CTE is very good for modular design of a query, and it is better to reference the CTE only once because, as Erland mentioned, it will be expanded to the underlying table every time it is referenced.
- Marked as answer by Naomi N Wednesday, June 22, 2011 5:15 PM
Sunday, May 29, 2011 11:00 AM -
Yes, in our case we had self-join CTEs that hurt performance. Our CTEs are computationally hard or return many, many rows, and they are accessed multiple times, so for sure then a table, temp table
could perform (much) better. I can not show you right now, but I swear, I observed, that Estimation was greatly improved only when we moved into a temporary table approach.
I don't dispute it for an individual case. But I think it is a gross error to generalise it to a universal rule. When it comes to general performance questions, there is really only one answer: it depends....
Erland Sommarskog, SQL Server MVP, esquel@sommarskog.seSunday, May 29, 2011 12:20 PM -
It depends, ok, I agree, but I still think that statistics play their role in performance considerations... but it looks like, again, it depends
Best Regards, Uri Dimant SQL Server MVP http://dimantdatabasesolutions.blogspot.com/ http://sqlblog.com/blogs/uri_dimant/Sunday, May 29, 2011 12:50 PMAnswerer -
Ok, amended statement can be - CTE is much slower than temp tables if CTE is used more than once in the query (as in this particular case and a case mentioned by Uri).
In the situation at hand, the only solution I see is to change the reports to not use the function as it's currently written. My idea is to change the function to work from temp table named inventory. The report's logic will produce this table first and then I will use amended function.
The downside of this, of course, that I will need to re-work the reports. I need to somehow demonstrate the problem to the management and get the work of re-writing justified.
For every expert, there is an equal and opposite expert. - Becker's Law
My blogSunday, May 29, 2011 3:03 PM -
>> Ok, amended statement can be - CTE is much slower than temp tables if CTE is used more than once in the query (as in this particular case and a case mentioned by Uri). <<
This is an optimizer flaw in T-SQL; DB2, Oracle, etc. will decide whether or not to materialize a CTE. Even better, they will decide to materialize a VIEW that is shared by multiple sessions. Since a lot of reports are based on the same summary data at the same time, this is a huge advantage.
--CELKO-- Books in Celko Series for Morgan-Kaufmann Publishing: Analytics and OLAP in SQL / Data and Databases: Concepts in Practice Data / Measurements and Standards in SQL SQL for Smarties / SQL Programming Style / SQL Puzzles and Answers / Thinking in Sets / Trees and Hierarchies in SQL- Marked as answer by Naomi N Wednesday, June 22, 2011 5:16 PM
Sunday, May 29, 2011 7:42 PM -
The question that I asked just now, might be relevant to this debate.
http://social.msdn.microsoft.com/Forums/en/transactsql/thread/cd2f1c07-fa52-4367-af5c-053562250463
Sunday, May 29, 2011 9:08 PM -
-
Erland,
It is clear that materializing into a temp table works. My question was how to keep this as a function?
For every expert, there is an equal and opposite expert. - Becker's Law
My blogTuesday, May 31, 2011 10:02 PM -
I've made some changes to run this function for particular Department, Category, ID and call using CROSS APPLY.
Here are my test results:
Warning: Null value is eliminated by an aggregate or other SET operation.
New UDF - FIFO only 466946 -- original UDF with some improvements
Warning: Null value is eliminated by an aggregate or other SET operation.
SP - FIFO only 4330 - stored procedure
Warning: Null value is eliminated by an aggregate or other SET operation.
Newest - FIFO only 41600 -- CROSS APPLY with UDF
----------------------------------
The result is in ms.
For every expert, there is an equal and opposite expert. - Becker's Law
My blogTuesday, May 31, 2011 10:28 PM -
> It is clear that materializing into a temp table works. My question was how to keep this as a function?
In a multi-statement UDF, you cannot use temp tables, but table variables could work. The lack of statistics could be an obstacle, though.
I see that this is something that involves FIFO. I've worked extensively with FIFO the last year, but it's all iterative processing. But I'm into computing profit/loss for stocks trading which is likely to be more complex than inventories.
Erland Sommarskog, SQL Server MVP, esquel@sommarskog.seWednesday, June 1, 2011 10:10 PM -
For Cost of Goods On Hand I am using the idea from this Phil Factor competition and actually Peter Larsson helped me to adapt that code instead of the one originally developed by my colleague.
I converted the function from multiline into inline. The original multiline was slow for both methods, new inline is fast for RWAC.
I wish I can get a task to re-write these reports again and this time I will try either creating separate SP for each one or try to apply SP and not a function.
For every expert, there is an equal and opposite expert. - Becker's Law
My blogWednesday, June 1, 2011 10:17 PM -
Any progress?
Kalman Toth, SQL Server & BI Training; SQL 2008 GRAND SLAMMonday, June 6, 2011 4:54 AM -
The progress is that I determined I will need to re-write reports and I made several new stored procedures. I also entered a new ER into our system - it is now awaiting an approval from the management before I can start a re-write process.
The bad news is that one client is already suffering and I am not sure what to suggest, since I have to use the function if I don't want to touch the software's code (and I don't, since we're supposed to release a new version (the one he is using)) to everyone ASAP.
For every expert, there is an equal and opposite expert. - Becker's Law
My blogMonday, June 6, 2011 1:34 PM -
Ok, amended statement can be - CTE is much slower than temp tables if CTE is used more than once in the query (as in this particular case and a case mentioned by Uri).
A CTE is evaluated every time it is used, like a macro. In addition, a #temptable can be indexed for further performance gains.
Kalman Toth SQL SERVER 2012 & BI TRAINING
New Book: Beginner Database Design & SQL Programming Using Microsoft SQL Server 2012- Marked as answer by Naomi N Wednesday, June 22, 2011 6:47 PM
- Edited by Kalman Toth Monday, October 1, 2012 12:46 AM
Monday, June 6, 2011 2:27 PM -
Yes, this is what we re-learned again in this thread and from this experience. It also limits a usage of CTE (if it needs to be referenced several times) in a UDF and essentially makes creating a UDF impossible (unless you want to suffer from the bad performance).
For every expert, there is an equal and opposite expert. - Becker's Law
My blogMonday, June 6, 2011 2:36 PM -
Any progress?
Kalman Toth, SQL Server & Business Intelligence Training; SQL 2008 Grand SlamWednesday, June 22, 2011 2:12 PM -
I've been testing it yesterday on a huge database. I had two cases in one script (first the original and then the new implementation). I tried to run several times always making the time interval smaller - I never got the script to finish. Finally, I only used about 3 days worth of data (no other restrictions) and ran only new code - it finished in 3 minutes. I then tried to run the old code and it ended up me disconnecting from the server. I plan to try one more test today, but it looks like the original code can not finish even for just 3 days. So, I can conclude that my re-write of the code was what this needed.
For every expert, there is an equal and opposite expert. - Becker's Law
My blog- Marked as answer by Naomi N Wednesday, June 22, 2011 6:47 PM
Wednesday, June 22, 2011 2:38 PM -
I've asked my other manager (my main supervisor is on vacation today) to assign me that work and seems like I got them.
I've tried testing just for half a day - the original code still never finished. The newest version of the code finishes in 45 sec. for half a day worth of data.
I marked several helpful responses as answers in this thread - essentially it confirmed that I indeed needed a re-write and also that it's very important to test new functionality on a huge database before making it 'live'.
For every expert, there is an equal and opposite expert. - Becker's Law
My blogWednesday, June 22, 2011 5:20 PM -
UPDATE. Adding indexes to temp tables cut the time in half, although I'm not sure I used the best indexes for my cases.
BTW, I think I didn't index the temp table - let me see if I can get better performance.
Here is my test program, BTW - may be you can give some insights as how to achieve good performance here or what indexes can be helpful:
-- =====================================================================
-- Benchmark: Cost-of-Goods-Sold report, old (inline UDF) vs. new
-- (temp-table + stored-procedure) implementation.
-- Script 1 (before GO): original approach via siriusfn_RetailCostOfGoodsSold.
-- Script 2 (after GO):  new approach materializing #Inventory / #Results.
--
-- BUG FIX: @CategoryFrom was assigned min(Department) instead of
-- min(Category) — a copy/paste error that could make the category range
-- filter empty or wrong and invalidate the timing comparison.
-- =====================================================================
SET NOCOUNT ON;

DECLARE @StartTime DATETIME
SET @StartTime = CURRENT_TIMESTAMP

-- Re-runnable: drop the work table if it survives from a previous run.
IF OBJECT_ID('tempdb..#csrTempMain') IS NOT NULL
    DROP TABLE #csrTempMain

-- Sales rows for the test window, one row per distinct item/transaction.
-- Revenue = extended price net of taxes and fees (units: report currency).
SELECT DISTINCT
    p.department,
    p.category,
    p.item,
    p.invent_id,
    p.trans_no,
    p.extension - p.tax_amount - p.tax_amt2 - p.fee_amount AS Revenue,
    p.salespoint,
    loc.locat_id AS locatn_id
INTO #csrTempMain
FROM transact p
INNER JOIN i_loclnk loc
    ON p.salespoint = loc.salespoint
WHERE p.date_time BETWEEN '20050106 3:47:35 PM' AND '20050106 11:59:59 PM'
  AND loc.locat_id >= 0

DECLARE @cCostValuationMethod CHAR(4),
        @dtStart DATETIME,
        @dtEnd DATETIME
SET @cCostValuationMethod = 'FIFO'
SET @dtStart = '20050106 3:47:35 PM'
SET @dtEnd = '20050106 11:59:59 PM'

DECLARE @DepartmentFrom char(10), @DepartmentTo char(10),
        @CategoryFrom char(10), @CategoryTo char(10),
        @ItemFrom char(10), @ItemTo char(10),
        @bFilterVendor bit,
        @vcVendorFrom varchar(25), @vcVendorTo varchar(25)

-- Derive the tightest DCI (department/category/item) range that still
-- covers every row in the work table, so the UDF scans as little as possible.
SELECT @DepartmentFrom = MIN(Department),
       @DepartmentTo   = MAX(Department),
       @CategoryFrom   = MIN(Category),   -- was MIN(Department): copy/paste bug
       @CategoryTo     = MAX(Category),
       @ItemFrom       = MIN(Item),
       @ItemTo         = MAX(Item)
FROM #csrTempMain

SET @bFilterVendor = 0
SET @vcVendorFrom = NULL
SET @vcVendorTo = NULL

-- OLD METHOD: join directly to the table-valued function.
-- NOTE(review): "p.locatn_id = 0.1" compares an int column to a decimal
-- literal and can never be true — presumably a sentinel for "all
-- locations"; confirm intended value before changing.
SELECT p.department,
       p.category,
       p.item,
       p.invent_id,
       p.locatn_id,
       i.descrip,
       ISNULL(mi.descrip, SPACE(25)) AS m_descrip,
       CASE WHEN p.locatn_id = 0.1 THEN 'ALL Locations ' ELSE il.descrip END AS cLocation,
       CASE WHEN p.invent_id > 0 THEN 'M' ELSE 'I' END AS invt_type,
       SUM(cogs.QuantitySold) AS qty_Sold,
       SUM(p.Revenue) AS ext_Sales,
       SUM(cogs.CostOfGoodsSold) AS cog,
       -- Safe division: HAVING below guarantees a non-zero denominator.
       SUM(cogs.CostOfGoodsSold) / SUM(cogs.QuantitySold) AS AverageUnitCost
FROM #csrTempMain p
INNER JOIN dbo.siriusfn_RetailCostOfGoodsSold(
        @cCostValuationMethod, @dtStart, @dtEnd, NULL, 1,
        @DepartmentFrom, @CategoryFrom, @ItemFrom,
        @DepartmentTo, @CategoryTo, @ItemTo,
        @bFilterVendor, @vcVendorFrom, @vcVendorTo) cogs
    ON cogs.trans_no = p.trans_no
INNER JOIN items i
    ON i.department = p.department
   AND i.category = p.category
   AND i.item = p.item
LEFT JOIN i_items mi
    ON p.invent_id = mi.invent_id
LEFT JOIN i_locatn il
    ON p.locatn_id = il.locat_id
INNER JOIN i_loclnk loc
    ON p.salespoint = loc.salespoint
GROUP BY p.Department, p.Category, p.Item, p.Invent_ID, p.locatn_id,
         i.descrip, mi.descrip, il.descrip
HAVING SUM(cogs.QuantitySold) <> 0
ORDER BY p.locatn_id, p.department, p.category, p.item, p.invent_id

PRINT 'Old method: ' + CAST(DATEDIFF(second, @StartTime, CURRENT_TIMESTAMP) AS varchar(10)) + ' seconds'
GO
-- =====================================================================
-- NEW METHOD: materialize the ranked inventory into #Inventory and run
-- the FIFO costing in a stored procedure writing into #Results.
-- =====================================================================
SET NOCOUNT ON;

DECLARE @StartTime DATETIME
SET @StartTime = CURRENT_TIMESTAMP

IF OBJECT_ID('tempdb..#csrTempMain') IS NOT NULL
    DROP TABLE #csrTempMain

-- Same sales work table as the old method (kept identical so only the
-- costing path differs between the two timings).
SELECT DISTINCT
    p.department,
    p.category,
    p.item,
    p.invent_id,
    p.trans_no,
    p.extension - p.tax_amount - p.tax_amt2 - p.fee_amount AS Revenue,
    p.salespoint,
    loc.locat_id AS locatn_id
INTO #csrTempMain
FROM transact p
INNER JOIN i_loclnk loc
    ON p.salespoint = loc.salespoint
WHERE p.date_time BETWEEN '20050106 3:47:35 PM' AND '20050106 11:59:59 PM'
  AND loc.locat_id >= 0

IF OBJECT_ID('tempdb..#Inventory') IS NOT NULL
    DROP TABLE #Inventory

-- Inventory movements ranked in FIFO order within each
-- department/category/item/invent/location group.
SELECT p.pri_key,
       p.ref_no,
       p.locatn_id,
       p.date_time,
       DENSE_RANK() OVER (
           PARTITION BY p.department, p.category, p.item, p.invent_id, p.locatn_id
           ORDER BY p.date_time, p.po_link) AS fifo_rank,
       p.department,
       p.category,
       p.item,
       p.invent_id,
       p.trans_type,
       p.quantity,
       p.unit_cost
INTO #Inventory
FROM i_invent p
-- join i_loclnk loc on p.salespoint = loc.salespoint  -- location filter intentionally dropped here
WHERE p.date_time BETWEEN '20050106 3:47:35 PM' AND '20050106 11:59:59 PM'
-- and loc.locat_id >= 0

IF OBJECT_ID('tempdb..#Results') IS NOT NULL
    DROP TABLE #Results

-- Landing table for the stored procedure's per-transaction COGS output.
CREATE TABLE #Results (
    trans_no        numeric(17,0),
    department      char(10),
    category        char(10),
    item            char(10),
    invent_id       int,
    locatn_id       int,
    QuantitySold    int,
    CostOfGoodsSold money
)

DECLARE @dtStart DATETIME
SET @dtStart = '20050106 3:47:35 PM'

INSERT INTO #Results
EXECUTE siriussp_CostOfGoodsSold_FIFO @dtStart

-- Same report query as the old method, but joining the pre-computed
-- #Results instead of the table-valued function.
SELECT p.department,
       p.category,
       p.item,
       p.invent_id,
       p.locatn_id,
       i.descrip,
       ISNULL(mi.descrip, SPACE(25)) AS m_descrip,
       CASE WHEN p.locatn_id = 0.1 THEN 'ALL Locations ' ELSE il.descrip END AS cLocation,
       CASE WHEN p.invent_id > 0 THEN 'M' ELSE 'I' END AS invt_type,
       SUM(cogs.QuantitySold) AS qty_Sold,
       SUM(p.Revenue) AS ext_Sales,
       SUM(cogs.CostOfGoodsSold) AS cog,
       SUM(cogs.CostOfGoodsSold) / SUM(cogs.QuantitySold) AS AverageUnitCost
FROM #csrTempMain p
INNER JOIN #Results cogs
    ON cogs.trans_no = p.trans_no
INNER JOIN items i
    ON i.department = p.department
   AND i.category = p.category
   AND i.item = p.item
LEFT JOIN i_items mi
    ON p.invent_id = mi.invent_id
LEFT JOIN i_locatn il
    ON p.locatn_id = il.locat_id
INNER JOIN i_loclnk loc
    ON p.salespoint = loc.salespoint
GROUP BY p.Department, p.Category, p.Item, p.Invent_ID, p.locatn_id,
         i.descrip, mi.descrip, il.descrip
HAVING SUM(cogs.QuantitySold) <> 0
ORDER BY p.locatn_id, p.department, p.category, p.item, p.invent_id

PRINT 'New method: ' + CAST(DATEDIFF(second, @StartTime, CURRENT_TIMESTAMP) AS varchar(10)) + ' seconds'
The first script never finishes - so let's concentrate on the second script (the one after first GO). I appreciate suggestions as how to improve it.Thanks in advance.
For every expert, there is an equal and opposite expert. - Becker's Law
My blogWednesday, June 22, 2011 5:24 PM