TryCatchFinally.net - Some SQL, some .NET, and whatever else

8Sep/160

Query Business Objects repository for list of reports

We're migrating away from Business Objects Enterprise (XI-R2, not the newer 3.X version) and into SQL Reporting Services (it's about time!), and I needed an inventory of everything that was still in BOE. The repository is obfuscated so that it's not human-readable, but thanks to some sleuthing and an 8-year-old post from a Business Objects forum, I was able to come up with a script to decode it, which I wanted to share.

The hard part (decoding the obfuscated text from the database into human-readable format) came from a post by "marklee" on a Business Objects board (his post is about 1/4 of the way down):

http://www.forumtopics.com/busobj/viewtopic.php?t=124272

His query was also written for Oracle, so I converted it to one that would run against SQL Server, added some additional content types based on what we had in our database, and attached a few extra rollups (like showing the root folder, building out the full path to each report, and returning the number of instances of each report that had been generated).

You can download the full script if you'd like, or you can view the contents below. Enjoy!

select 
 objectid, 
 parentid, 
 ownerid, 
 convert(datetime, REPLACE(left(lastmodifytime, 10),' ','-') + ' ' +
			replace(substring(lastmodifytime, 12, 8), ' ', ':') +
			'.' + substring(lastmodifytime, 21, 3)) as lastmodifytime, 
 upper(left(objectname,len(objectname)-2)) ObjectName, 
 typeid, 
 type, 
 convert(bit, 0) as IsInstance,
 si_cuid 
 into #results
 from 
 ( 
 SELECT 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 replace( 
 cast(objname as varchar(2000)) 
 ,'S','v') 
 ,'M','s') 
 ,'A','m') 
 ,')','a') 
 ,'+','b') 
 ,'C','n') 
 ,'-','c') 
 ,'/','d') 
 ,'O','t') 
 ,'E','o') 
 ,'1','e') 
 ,'3','f') 
 ,'G','p') 
 ,'5','g') 
 ,'7','h') 
 ,'W','x') 
 ,'U','w') 
 ,'Q','u') 
 ,'I','q') 
 ,'9','i') 
 ,':','i') 
 ,';','j') 
 ,'K','r') 
 ,'=','k') 
 ,'?','l') 
 ,'[','y') 
 ,']','z') 
 ,'!@',' ') 
 ,'B~S','&') 
 ,'!BO','.') 
 ,'B|C"','(') 
 ,'!B|D',')') 
 ,'M|Z','-') 
 ,'M}L',',') 
 ,'M|N','_') 
 ,'M}Z',':') 
 ,'!B{B','''') 
 ,'`|<','0') 
 ,'`|>','1') 
 ,'`|@','2') 
 ,'`|B','3') 
 ,'`|D','4') 
 ,'`|F','5') 
 ,'`|H','6') 
 ,'`|J','7') 
 ,'`|L','8') 
 ,'`|N','9') 
 ,'{','') 
 ,'!','') 
 ,'"','') 
 ,'@','') 
 ObjectName, 
 convert(varchar(100), case 
	 when TypeID = 1 then 'Folder' 

	 when TypeID = 2 then 'Crystal Report'
	 when TypeID = 260 then 'Web Intelligence Document'
	 when TypeID = 290 then 'Object Package' 
	 when TypeID = 265 then 'Report Instance (TXT)'
	 when TypeID = 262 then 'Webi Report' 
	 when TypeID = 314 then 'Deski Report' 

	 when TypeID = 283 then 'PDF' 
	 when TypeID = 267 then 'Text' 
	 when TypeID = 323 then 'Excel' 
	 when TypeID = 319 then 'Excel (XLS)'
	 when TypeID = 259 then 'Word (DOC)'
	 when TypeID = 279 then 'Report Instance (PDF)'

	 when TypeID = 266 then 'Universe' 
	 when TypeID = 278 then 'Publication' 
	 when TypeID = 299 then 'Connection' 
	 when TypeID = 8  then 'Shortcut' 
	 when TypeID = 20 then 'Groups' 
	 when TypeID = 13 then 'Server' 
	 when TypeID = 16 then 'BO Server' 
	 when TypeID = 21 then 'Event' 
	 when TypeID = 24 then 'License Key' 

	 when TypeID = 19 then 'User type 19' 
	 when TypeID = 18 then 'User type 18' 
	 when TypeID = 47 then 'User type 47' 
	 when TypeID = 48 then 'User type 48' 
 
	 else 'Other' 
 end) Type, 
 * 
 FROM [dbo].[CMS_InfoObjects5] 
 where typeid <> 284 -- appears to be some kind of temporary file
 ) BORepository 

create clustered index #idx_c_results on #results (objectid)
create index #idx_results_parentid on #results (parentid)
create index #idx_results_ownerid on #results (ownerid)

-- Mark the ones that are instances of another object (children of non-folder)
update r
   set r.IsInstance = 1
 from #results r
join #results r2
  on r.ParentID = r2.ObjectID
where r2.TypeID <> 1
  and r.TypeID <> 1




-- Define all the levels of the objects and add them to a lookup table
DECLARE @ObjectLevel INT = 0

select 23 as objectid, 0 as parentid, 0 as ownerid, GETDATE() as lastmodifytime,
		convert(varchar(100), '$') as objectname, convert(int, null) as ChildInstances, convert(datetime, null) as LastInstanceTime,
		1 as typeid, convert(varchar(100), 'Root Folder') as type, @ObjectLevel as objectlevel 
into #objects

while @@rowcount > 0
begin
	;with p as (select parentid, count(*) as instances, max(lastmodifytime) as LastInstanceTime from #results group by parentid)
	update #objects
	   set ChildInstances = p.instances,
		   LastInstanceTime = p.LastInstanceTime
	  from #objects o
	  join p
	    on o.objectid = p.ParentID
	 where o.ChildInstances is null
	   and o.typeid <> 1
	
	SET @ObjectLevel = @ObjectLevel + 1

	insert into #objects
	select r.objectid, r.parentid, r.ownerid, r.lastmodifytime, r.objectname, null, null, r.typeid, r.type, @ObjectLevel as objectlevel
	from #results r
	join #objects o
      on r.parentid = o.objectid
	 and o.typeid = 1
	left
	join #objects o2
	  on r.objectid = o2.objectid
   where o2.objectid is null

end


-- Set up the object paths
select objectid, parentid, lastmodifytime, convert(varchar(4000), '') as ObjectPath,
		objectname, childinstances, lastinstancetime, typeid, type
  into #objectswithpath
  from #objects
 where typeid <> 1

while @@rowcount > 0
BEGIN
	update #objectswithpath
	   set parentid = o.parentid,
			objectpath = o.objectname + '\' + objectpath
	  from #objectswithpath owp
	  join #objects o
	    on owp.parentid = o.objectid
END

select objectpath, objectname, lastmodifytime, ChildInstances, LastInstanceTime, type
 from #objectswithpath
order by ObjectPath, objectname


--select * from #results
--select typeid, type, count(*) from #objects group by typeid, type order by count(*) DESC

/*
select type, case when isnull(ChildInstances,0) = 0 then 0 else 1 end, count(*), max(LastInstanceTime) --objectpath, objectname, lastmodifytime, ChildInstances, LastInstanceTime, type
 from #objectswithpath
 where ObjectPath not like '$\REPORTS OFF LINE\%'
group by type, case when isnull(ChildInstances,0) = 0 then 0 else 1 end
order by type, case when isnull(ChildInstances,0) = 0 then 0 else 1 end
*/

--select * from #objectswithpath where type = 'Word (DOC)'


drop table #objectswithpath
drop table #objects
drop table #results

2Mar/160

SQL Agent job fails with QUOTED_IDENTIFIER error

If you have a SQL Agent job that's failing, but the query succeeds when you run it manually in SQL Management Studio, the cause may be a difference between the SQL Server instance's default connection settings and the defaults SSMS uses. In my case, I was inserting from a stored procedure and received the following error:

[SQLSTATE 42000] (Error 1934) INSERT failed because the following SET options have incorrect settings: 'QUOTED_IDENTIFIER'. Verify that SET options are correct for use with indexed views and/or indexes on computed columns and/or filtered indexes and/or query notifications and/or XML data type methods and/or spatial index operations.

To view the default connection options for your SQL Server instance, right-click on your instance in SSMS and select "Properties", and then browse to "Connections" - you'll see something like the following list (on my server, none of these are enabled by default for new connections):

[Screenshot: server default connection options]

To compare it to the default settings for your installation of SSMS, click on "Tools" and then "Options...", and then browse to "Query Execution" -> "SQL Server" and view the settings under both "Advanced" and "ANSI" (in my case, SSMS was enabling a number of settings by default for my connections that SQL Agent jobs wouldn't have enabled):

[Screenshot: SSMS ANSI query execution settings]

[Screenshot: SSMS Advanced query execution settings]
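Not part of the original troubleshooting, but a handy cross-check: you can also see which SET options an active session actually has in effect (a running SQL Agent job step, for example) by querying sys.dm_exec_sessions:

-- Shows the effective SET options for each user session
SELECT session_id,
       program_name,
       quoted_identifier,
       ansi_nulls,
       ansi_padding,
       arithabort
  FROM sys.dm_exec_sessions
 WHERE is_user_process = 1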

In my case, I just added an explicit "SET QUOTED_IDENTIFIER ON" to the beginning of the script in my SQL Agent job and it resolved the error message.
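As a minimal sketch (the table and procedure names below are placeholders, not from my actual job), the adjusted job step looked something like this:

-- Force the setting for this batch before the insert runs
SET QUOTED_IDENTIFIER ON

-- The rest of the original job step follows, e.g. an insert from a stored
-- procedure into a table that has a filtered index or indexed view on it
INSERT INTO dbo.TableWithFilteredIndex (Column1, Column2)
EXEC dbo.YourStoredProcedure

Hope this helps you too!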

14Jan/150

Querying Active Directory from SQL Server

SQL Server provides some pretty flexible integration with Active Directory through the ADSI Linked Server provider, something that's present by default when you install SQL Server. If you've never used it before, it allows you to connect to a domain controller and query AD the same way you'd query any other linked server. For example, it gives you the option to:

  • Identify logins on SQL Servers or in databases that support financial applications which have no matching AD account (whether they're direct integrated logins, or SQL logins or rows in a "User" table that were set up to match an AD login)
  • Kick off alerts to provision the user in various systems based on their AD group membership
  • Automatically trigger an action when a new account appears in active directory (for example, we auto-provision security badges and send an email alert to our head of security to assign the appropriate rights)

While much of this could also be done from PowerShell, we use the SQL Server Agent to manage many of our scheduled jobs (because it's so handy to have the agent remotely accessible), and sometimes we just need data from AD in a query. To support a number of processes we have in place, we run a synchronization job periodically throughout the day that pulls about two dozen fields for all users and synchronizes them into a table if anything has changed.

Setting up the linked server itself is pretty straightforward (courtesy of http://community.spiceworks.com/how_to/show/27494-create-a-sql-linked-server-to-adsi) - a T-SQL sketch follows the list:

  1. Create the linked server itself
  2. Set the security context (only needed if you want to query AD as something other than the SQL Server service account - by default, any domain user can query AD, so this only matters if the domain is remote or if, for some reason, your SQL Server service account's AD rights have been restricted, like when you're running as "LOCAL SERVICE")
  3. Enable OPENQUERY (Ad Hoc Distributed Queries)
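Here's a rough T-SQL version of those three steps - the linked server name, account, and password are placeholders, and step 2 can be skipped entirely if your service account already has rights to browse AD:

-- 1. Create the ADSI linked server itself
EXEC master.dbo.sp_addlinkedserver
     @server = N'ADLinkedServerName',
     @srvproduct = N'Active Directory Service Interfaces',
     @provider = N'ADsDSOObject',
     @datasrc = N'adsdatasource'

-- 2. (Optional) query AD as a specific account instead of the SQL Server service account
EXEC master.dbo.sp_addlinkedsrvlogin
     @rmtsrvname = N'ADLinkedServerName',
     @useself = N'False',
     @locallogin = NULL,
     @rmtuser = N'CORP\SomeServiceAccount',
     @rmtpassword = N'SomePassword'

-- 3. Enable ad hoc distributed queries
EXEC sp_configure 'show advanced options', 1
RECONFIGURE
EXEC sp_configure 'Ad Hoc Distributed Queries', 1
RECONFIGURE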

You'll notice that setting up the linked server itself doesn't actually specify where Active Directory is located or what domain/forest you'll be querying - that's actually done in the query itself. In each query, you'll need to specify the FQDN (fully-qualified domain name) of the domain (or the OU within it) that you're querying. For example, we'd get all users from a domain by issuing the following query (in this example, "ADLinkedServerName" is the linked server we just created, and our domain is "corp.mycompany.local"):

SELECT EmployeeNumber, Name AS FullName, givenName as FirstName, sn as LastName,
L AS Location, samAccountName as ADAccount
FROM OPENQUERY(ADLinkedServerName,'SELECT Name, L, givenName, sn,
EmployeeNumber, EmployeeID,samAccountName,createtimestamp
FROM ''LDAP://OU=Users,DC=corp,DC=mycompany,DC=local''
WHERE objectClass =''user''') ad

This query will search that OU ("Users", in this case) and everything below it, so changing the FROM to "LDAP://DC=corp,DC=mycompany,DC=local" would fetch the entire directory (all the "user" objects), regardless of what folder they appeared in - if your directory puts users in another OU (like "Associates", for example), you should adjust the query accordingly.

For column names, you can pull any AD properties at all that you’re looking for – even custom ones that aren't part of a standard AD configuration. To get an easy list of AD properties to choose from, I like using ADSIEDIT (part of Microsoft’s Remote Server Administration Tools - download RSAT for Windows 7 or RSAT for Windows 8.1) – just drill down all the way to an object, like a user, right-click on it and select “Properties”, and you can see a list of all the properties on that account. If you’ve got Domain Admin rights, this tool can be used to modify these values too, but for querying, you only need to be a domain user or somebody who has rights to browse AD. Make a note of the particular properties you're interested in - and since AD queries are case-sensitive, note their exact casing as well.

One potential gotcha that I've run into is that the maximum number of results AD will return in a single query can be set as part of domain policy - by default it's 1000 records at once, and it can be configured by setting or adjusting the "PageSize" property on your domain controllers (see https://support.microsoft.com/kb/315071/en-us). There's also a "MaxResultSetSize" property that's set to 256KB by default, but I've never hit it - unless you're pulling every single property back, you'd likely hit the PageSize row limit before you hit the MaxResultSetSize byte limit, but remember that both are there. If you do hit the AD result count limit, it will return the rows up to the limit, but then execution stops with a kind of cryptic error:

Msg 7330, Level 16, State 2, Line 2
Cannot fetch a row from OLE DB provider "ADsDSOObject" for linked server "YOURDOMAIN".

If your domain is larger than the PageSize limit, you'll need to cut your query into multiple return sets of data so you don't exceed the limit on any single query. Since our domain contains about 2400 users, we were able to do it in two queries, broken up like this:

SELECT samAccountName
  FROM OPENQUERY(ADLinkedServerName,'SELECT samAccountName
                                       FROM ''LDAP://OU=Users,DC=corp,DC=mycompany,DC=local''
                                      WHERE objectClass =''user''
                                        AND givenName<''L''') as c
UNION ALL
SELECT samAccountName
  FROM OPENQUERY(ADLinkedServerName,'SELECT samAccountName
                                       FROM ''LDAP://OU=Users,DC=corp,DC=mycompany,DC=local''
                                      WHERE objectClass =''user''
                                        AND givenName>=''L''') as c

Dividing the names at "L" cut the directory roughly in half - if yours were larger, you could divide it by querying each OU separately, or by looping through letters of the alphabet, or whatever makes sense in your setting. You could even do something dynamic: pull as many records as you can, grab the value from the last record returned and use it as the starting point for the next batch, and repeat until you run out of records. Linked servers don’t allow you to dynamically assemble your query at run-time – it has to be hard-coded in the query – but there are some ways around that (like building your OPENQUERY as a string and then executing it via sp_executesql, for example).
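As a rough sketch of that sp_executesql approach (the @cutoff value and variable names are illustrative, not a drop-in solution - the main point is doubling up the quotes before embedding the LDAP query):

DECLARE @cutoff NVARCHAR(10) = N'L'
DECLARE @ad     NVARCHAR(MAX),
        @sql    NVARCHAR(MAX)

-- Build the inner LDAP query as the provider should see it
SET @ad = N'SELECT samAccountName
              FROM ''LDAP://OU=Users,DC=corp,DC=mycompany,DC=local''
             WHERE objectClass =''user''
               AND givenName < ''' + @cutoff + N''''

-- Wrap it in OPENQUERY, doubling the quotes so it survives as a string literal
SET @sql = N'SELECT samAccountName FROM OPENQUERY(ADLinkedServerName, '''
         + REPLACE(@ad, '''', '''''') + N''') ad'

EXEC sp_executesql @sql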

Now that you have your AD records stored in a temp table, you can identify new/changed records and merge them into an existing SQL table using an INSERT/UPDATE/DELETE or MERGE statement, or possibly trigger notifications or some other business process.
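As a sketch of that last step (the table and column names here are hypothetical - a staging temp table #ADUsers and a permanent dbo.Employees table keyed on the AD account name):

MERGE dbo.Employees AS target
USING #ADUsers AS source
   ON target.ADAccount = source.ADAccount
 WHEN MATCHED AND (target.FullName <> source.FullName
               OR  target.Location <> source.Location)
   THEN UPDATE SET target.FullName = source.FullName,
                   target.Location = source.Location
 WHEN NOT MATCHED BY TARGET
   THEN INSERT (ADAccount, FullName, Location)
        VALUES (source.ADAccount, source.FullName, source.Location)
 WHEN NOT MATCHED BY SOURCE
   THEN DELETE;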

I hope this is helpful - if you'd like some more detail, please leave a comment and I'm happy to elaborate where it's necessary!

2Jan/150

Removing expired/unused SSRS subscriptions

SQL Reporting Services doesn't do a very good job of keeping the SQL Agent clean by removing expired or otherwise unusable subscriptions from the job list. To deal with this, we created a script that pulls some details about these old subscriptions, including the report responsible, the last run time and status, and the user who originally scheduled it. If you notice your SQL Agent job list getting excessively long, you can use this query to identify the culprit reports and owners, and then either notify them or remove the old subscriptions yourself (run this on the server that hosts your SSRS databases):

  select c.Name as ReportName,
         s.EventType,
         s.Description as SubscriptionDescription,
         s.LastStatus as LastSubscriptionStatus,
         s.LastRunTime SubscriptionLastRunTime,
         case
            when recurrencetype = 1 then 'One Time'
            when recurrencetype = 2 then 'Hourly'
            when recurrencetype = 4 then 'Daily'
            when recurrencetype = 5 then 'Monthly' 
            when recurrencetype = 6 then 'Month Week' 
            else 'Other'
         end as RecurranceType,
         s.DeliveryExtension,
         u.UserName as SubscriptionSetUpBy,
         s.ModifiedDate as SubscriptionLastModifiedDate
    from [ReportServer].[dbo].[Subscriptions] s
    join [ReportServer].[dbo].[Catalog] c
      on c.ItemID = s.Report_OID
    join [ReportServer].[dbo].[Users] u
      on u.UserID = s.OwnerID
    join [ReportServer].[dbo].[reportschedule] rs
      on c.itemid = rs.reportid 
     and s.subscriptionid = rs.subscriptionid
    join [ReportServer].[dbo].[schedule] sch
      on rs.scheduleid = sch.scheduleid
   where s.EventType <> 'RefreshCache'
     and s.LastRunTime < dateadd(m, -3, getdate())
order by c.name

There are a number of similar scripts out there that pull much of this information together, but there wasn't one that collected all the details we were looking for in one place. From here, you can deal with the subscriptions as you see fit.

Note that you can just remove the old subscriptions by brute force if you'd prefer, and SSRS will clean up the orphaned SQL jobs, but I've preferred to review the list and notify users as we've never had too much volume to deal with. If you want to just delete them straight away, you can do so here:

DELETE ReportServer.dbo.Subscriptions
WHERE InactiveFlags != 0
	OR LastRunTime < dateadd(m, -3, getdate())
10Jan/131

Calculating working hours between two dates

As a follow-up to an earlier post (Return a list of all dates between a start and end date), I needed to find the number of working hours between two timestamps - in this case, to see how long a support ticket had been open before it was initially assigned, but the user didn't want non-work hours to count against them.

To do this, I used the previous script to generate a list of dates and hours, and then marked the rows as work time or not (based on day of week and hour of day, evaluated together). The result was a table that would effectively let me do a SUM to find the value I was looking for. Once I had that table, I could join to it for rows between the two datetimes in question and SUM up the rows that had the "IsWorktime" flag set:

SELECT tt.TicketNumber,
       tt.TicketCreateTime,
       tt.TicketAssignTime,
       SUM(  CONVERT(INT, wh.IsWorktime)) as WorkHoursBeforeAssigned,
       COUNT(CONVERT(INT, wh.IsWorktime)) as TotalHoursBeforeAssigned
  FROM TroubleTickets tt
  JOIN #WorkingHours wh
    ON wh.EvaluateTime BETWEEN tt.TicketCreateTime
                           AND tt.TicketAssignTime
GROUP BY tt.TicketNumber,
         tt.TicketCreateTime,
         tt.TicketAssignTime

In this case, tickets that were created and picked up after hours, without passing any worktime, would show as zero hours old (as they should, since they were interested in working time) - however, I've also included COUNT here to show total hours as well as work hours.

Also, this script only counts for raw day-of-week and hour-of-day working time - it ignores holidays and other special circumstances. I have a script that tracks holidays (American ones, at least), and I'll put that up shortly as well - if you want to take holidays into account, you could incorporate that into your evaluation.
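If you do want to factor holidays in, one approach (sketched here with a hypothetical #Holidays table - it's not part of the script below) is to clear the worktime flag for those dates after the #WorkingHours table has been populated:

-- Hypothetical holiday list; populate it however you track holidays
CREATE TABLE #Holidays (HolidayDate DATE PRIMARY KEY)
INSERT INTO #Holidays (HolidayDate) VALUES ('2013-01-01'), ('2013-07-04')

-- Clear the worktime flag for any hour that falls on a holiday
UPDATE wh
   SET IsWorktime = 0
  FROM #WorkingHours wh
  JOIN #Holidays h
    ON CONVERT(DATE, wh.EvaluateTime) = h.HolidayDate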

Here's the script that builds the working time table (you can also download it here):

-- Set things up before we get started
--------------------------------------
DECLARE @WorkTimeStart		TINYINT,
		@WorkTimeEnd		TINYINT,
		@WorkDayOfWeekStart	TINYINT,
		@WorkDayOfWeekEnd	TINYINT

DECLARE @StartDate			DATETIME,
		@EndDate			DATETIME

CREATE TABLE #WorkingHours (
		EvaluateTime	DATETIME,
		IsWorktime		BIT DEFAULT(0)
)

--------------------------------------

	SET @WorkTimeStart = 7  --7AM
	SET @WorkTimeEnd   = 16 --4PM hour (4-5PM count as working)
	SET @WorkDayOfWeekStart = 2 --Monday
	SET @WorkDayOfWeekEnd   = 6 --Friday

	SET @StartDate	= '2000-01-01 00:00:00'
	SET @EndDate	= '2020-12-31 23:59:59'

--------------------------------------


-- Build the list of timestamps we're working with
;WITH numberlist(number)
   AS (SELECT RANK() over(order by c1.object_id,
                                   c1.column_id,
                                   c2.object_id,
                                   c2.column_id)
		 from sys.columns c1
        cross 
         join sys.columns c2)
INSERT INTO #WorkingHours (EvaluateTime)
SELECT DATEADD(hh, number-1, @StartDate)
  FROM numberlist
 WHERE DATEADD(hh, number-1, @StartDate) <= @EndDate


-- Set the times to worktime if they match criteria
UPDATE #WorkingHours
   SET IsWorktime = CASE WHEN (DATEPART(dw, EvaluateTime)
								BETWEEN @WorkDayOfWeekStart
								AND @WorkDayOfWeekEnd)
							  AND
							  (DATEPART(hh, EvaluateTime)
							   BETWEEN @WorkTimeStart
							   AND @WorkTimeEnd) THEN 1
						 ELSE 0
					END


-- Return the results
 SELECT * FROM #WorkingHours
 ORDER BY EvaluateTime

 DROP TABLE #WorkingHours
19Oct/121

Receiving (or stopping!) SQL Server maintenance plan summary emails

When you use the Maintenance Plan wizard in SQL Management Studio to set up a new plan, you have the option to have the SQL Agent send you a plan execution summary email:

It's a pretty handy email - not just a success/failure on the SQL Agent job, but details about the actual maintenance plan steps and which ones ran and for how long. The problem we ran into was how to actually turn this email off once it was initially enabled in the wizard! It's not anywhere in the SQL Agent job or in the properties of the maintenance plan, or even anywhere obvious in the maintenance plan designer. It's hidden in a tiny button on the toolbar in the designer called "Reporting and Logging":

Clicking there gets you some logging options, including sending a summary email to an operator:

From there, you can change the operator or stop the email summary (or start it, if that's what you're looking for).

26Jul/120

SQL Server Contention Monitor (super-alpha) posted to Codeplex

In response to some conversation in an Experts-Exchange question, I decided to post an application I wrote about four years ago and never did anything with (or added much polish to, as you'll see). It's a small VB.NET application that polls MSSQL servers and displays information about currently blocked SPIDs, as well as the block chain and the age of blocked processes.

It's something I've used when I troubleshoot blocking on a SQL Server, and I generally leave it running in my system tray - it will pop up a balloon when there's a blocked process that lasts longer than some pre-determined threshold, and then optionally pop another balloon when it's resolved. It works well as an early warning system for a critical server, and I've often used it when I'm performing maintenance on a production server that I'm concerned might block legitimate use - running the application lets me know within a few seconds when I've gotten in somebody's way so I can act accordingly.

It's still very much a work in progress and is pretty rough around the edges (and well inside the edges), but I've posted it to a Codeplex project for anybody else who might find it useful. Though I'm working on a few other things right now and haven't done any development with this app in almost four years, I'll keep an eye on the issues list at Codeplex (or feedback here) and try to address anything that comes up.

Without any further ado, here it is!

https://mssqlblockmonitor.codeplex.com/

6Apr/120

Export from SQL Server to XLS and email results

Sometimes you want to take some query results and export them directly to an XLS file - here's how you can set that up in SQL Server. The biggest caveat is that you need to run it from an x86 instance of SQL Server - the x64 instance won't have access to the Jet driver needed to write the Excel file (Microsoft.Jet.OLEDB.4.0), whereas the x86 version will. In fact, we maintain an older x86 instance of SQL Server for random processes like this that need it - x64 is better in almost every case, but we can't seem to completely ditch x86... 🙂

I use a stored proc that I call from a SQL Agent Job, which works great. The actual process is a bit awkward - for starters, you'll need access to xp_cmdshell. SQL Server can't create a new Excel file from scratch, so you have to keep a blank Excel file around, make a copy of it, and then insert into the copy to get your final result.
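If xp_cmdshell isn't already enabled on the instance (it's off by default), a sysadmin will need to turn it on first - this isn't part of the export script itself, just a prerequisite:

-- Enable xp_cmdshell (requires sysadmin)
EXEC sp_configure 'show advanced options', 1
RECONFIGURE
EXEC sp_configure 'xp_cmdshell', 1
RECONFIGURE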

That said, here's the code to generate the XLS file from your query results:

SELECT Column1, Column2, Column3, Column4
  INTO ##YourTempTable
  FROM SomeOtherTable

DECLARE @Folder        VARCHAR(255),
        @DocumentBlank VARCHAR(255),
        @DocumentLong  VARCHAR(255)

SET @Folder = 'C:\Temp\'
SET @DocumentBlank = 'Your Document - Blank'
SET @DocumentLong = 'Your Document - ' + CONVERT(VARCHAR(10), GETDATE(), 120)

DECLARE @CMD NVARCHAR(4000)
SET @CMD = 'COPY "' + @folder + @DocumentBlank + '.xls" "' + @Folder + @DocumentLong + '.xls"'
exec master..xp_cmdshell @CMD

-- Export the Excel sheet
SET @CMD = 'insert into OPENROWSET(''Microsoft.Jet.OLEDB.4.0'',
	''Excel 8.0;Database=' + @Folder + @DocumentLong + '.xls;'',
	''SELECT * FROM [Sheet1$]'')
	select Column1, Column2, Column3, Column4 from ##YourTempTable'

exec sp_executesql @CMD

Once that's exported, you can just set up the email process using sp_send_dbmail and attach the file you just generated:

DECLARE @Body        VARCHAR(2000),
        @Attachments VARCHAR(500)

SET @Attachments = @Folder + @DocumentLong + '.xls'
SET @Body = 'Your file has been generated for ' + CONVERT(VARCHAR(10), GETDATE(), 120)

exec msdb..sp_send_dbmail @profile_name = 'YourMailProfile',
	@Recipients = 'Recipients@YourDomain.biz',
	@subject = 'Your file is ready',
	@Body = @Body,
	@file_attachments = @Attachments
15Mar/120

Return a list of all dates between a start and end date

In some situations, you'll need to work with a list of numbers or dates between some start and end, and you don't have a complete list handy to join to. Using the script below, you can create one to use in your query - if you wanted to use this in-line in another SQL statement (and assuming you're using SQL 2005+), you could either do a subquery or a WITH CTE clause and then join directly to it.

To do it, I'm using a table that has a large number of rows in it even in an empty database (sys.columns), and then doing a cross-join to ensure that I'll have enough rows to satisfy my entire range. This table has about 890 rows in an empty database (or you can use the "model" database if you don't have any user databases handy), meaning that the cross join yields about 800,000 rows - enough for almost 2200 years of days, or about 90 years of hourly increments (change the "dd" in the "DATEADD" statements below to "hh" or even "mi" to use any increment of time you want).

The code:

DECLARE @StartDate DATETIME,
	    @EndDate   DATETIME
	
	SET @StartDate = '2012-12-01'
	SET @EndDate   = '2015-12-31'
	
;WITH numberlist(number)
   AS (SELECT RANK() over(order by c1.object_id,
								   c1.column_id,
								   c2.object_id,
								   c2.column_id)
	     from sys.columns c1
   	    cross
	     join sys.columns c2)
SELECT DATEADD(dd, number-1, @StartDate)
  FROM numberlist
 WHERE DATEADD(dd, number-1, @StartDate) <= @EndDate
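To illustrate the in-line use mentioned above, here's a sketch that joins the date list to a hypothetical dbo.Orders table (not part of the original script) to count orders per day, including days with no orders at all:

DECLARE @StartDate DATETIME,
        @EndDate   DATETIME

	SET @StartDate = '2012-12-01'
	SET @EndDate   = '2012-12-31'

;WITH numberlist(number)
   AS (SELECT RANK() over(order by c1.object_id, c1.column_id,
                                   c2.object_id, c2.column_id)
         from sys.columns c1
        cross join sys.columns c2),
      datelist(TheDate)
   AS (SELECT DATEADD(dd, number-1, @StartDate)
         FROM numberlist
        WHERE DATEADD(dd, number-1, @StartDate) <= @EndDate)
SELECT d.TheDate,
       COUNT(o.OrderID) as OrdersThatDay
  FROM datelist d
  LEFT JOIN dbo.Orders o
    ON o.OrderDate >= d.TheDate
   AND o.OrderDate <  DATEADD(dd, 1, d.TheDate)
 GROUP BY d.TheDate
 ORDER BY d.TheDate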

I'm using dates above, but if you wanted to use INT instead, it's pretty straightforward:

DECLARE @Start INT,
		@End   INT
	
	SET @Start = 1500
	SET @End   = 64000
	
;WITH numberlist(number)
   AS (SELECT RANK() over(order by c1.object_id,
								   c1.column_id,
								   c2.object_id,
								   c2.column_id)
	     from sys.columns c1
   	    cross
	     join sys.columns c2)
SELECT @Start + number - 1
  FROM numberlist
 WHERE @Start + number - 1 <= @End
22Feb/120

Failing maintenance plan on SQL Server 2005 when databases are offline

My backup/maintenance plan recently started failing with the following (not so helpful) error message:

Code: 0xC0024104
Source: Update Statistics
Description: The Execute method on the task returned error code 0x80004002
(Unable to cast object of type 'System.DBNull' to type 'System.String'.).
The Execute method must succeed, and indicate the result using an
"out" parameter.

It turns out the error was the result of one of my databases being offline - the plan was set to work on all databases (the default), but there's a checkbox that tells SQL Server to skip databases that aren't online, and it's not checked by default when you create your maintenance plan. Checking it solved the problem and the maintenance plan ran normally again.

To find the box, open your maintenance plan, and then in each task that runs in your databases, open the properties and click the dropdown, then check the box as in this picture: