Options to Delete with Two Tables


--option 1: EXISTS (qualify the outer column so it cannot bind to a column in calibrations)
delete from jobs 
where exists (select 1 from calibrations c where jobs.id = c.JobId)

--option 2: delete through a join, targeting the alias
delete j
from jobs j inner join calibrations c on j.id = c.JobId

--option 3

delete from jobs 
where id in  (select Jobid from calibrations )

--Beware: if the subquery names a column that does not exist in calibrations,
--it silently resolves to the outer table (jobs.id), the IN matches every row,
--and ALL rows in jobs are deleted
delete from jobs 
where id in  (select Id from calibrations )
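
For reference, a minimal schema these statements assume (hypothetical; note that calibrations has no Id column, which is exactly what makes the last example dangerous):

--hypothetical minimal schema for the examples above
create table jobs (id int primary key)
create table calibrations (JobId int)  --no Id column: an unqualified Id resolves to jobs.id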


Calculate Thanksgiving Date with T-SQL

In the US, Thanksgiving is always celebrated on the fourth Thursday of November.
For any given date, we can compute the Thanksgiving date of that year with T-SQL.

 


declare @anydate datetime='2014-01-30'
declare @dt datetime

--November 1st of @anydate's year: truncate to January 1st, then add 10 months
Select @dt=dateadd(month,10,dateadd(year, datediff(year,0,@anydate),0))

--the weekday arithmetic below assumes the US default SET DATEFIRST 7 (Sunday=1, Thursday=5)
select DateAdd(day, (7+5-DatePart(weekday, @dt))%7, @dt) firstThuInTheMonth
select DATEADD(WEEK, 3, DATEADD(day, Case when DATEPART(dw, @dt)>5 Then 7 Else 0 End + (5-DATEPART(dw, @dt)), @dt)) ThanksgivingDay
Select DATEADD(WEEK, 3, DateAdd(day, (7+5-DatePart(weekday, @dt))%7, @dt)) ThanksgivingDay

/*
Year	U.S. Thanksgiving	
2012	Thursday, November 22	
2013	Thursday, November 28	
2014	Thursday, November 27	
2015	Thursday, November 26	
2016	Thursday, November 24	
2017	Thursday, November 23	
2018	Thursday, November 22	
2019	Thursday, November 28	
*/


 
  
--Robyn Page's NthDayOfWeekOfMonth
CREATE FUNCTION NthDayOfWeekOfMonth (
  @TheYear CHAR(4),      --the year as four characters (e.g. '2014')
  @TheMonth CHAR(3),     --in English (sorry to our EU colleagues), e.g. Jun, Jul, Aug
  @TheDayOfWeek CHAR(3), --one of Mon, Tue, Wed, Thu, Fri, Sat, Sun
  @Nth INT)              --1 for the first occurrence, 2 for the second, 3 for the third
RETURNS DATETIME
WITH EXECUTE AS CALLER
AS
BEGIN
--start of the month plus (7*@Nth)-1 days, backed up to the requested weekday;
--the @@DateFirst and CHARINDEX terms make the math independent of the DATEFIRST setting
RETURN DATEADD(MONTH, DATEDIFF(MONTH, 0, CONVERT(date,'1 '+@TheMonth+' '+@TheYear,113)), 0)+ (7*@Nth)-1
        -(DATEPART(Weekday, DATEADD(MONTH, DATEDIFF(MONTH, 0, CONVERT(date,'1 '+@TheMonth+' '+@TheYear,113)), 0))
        +@@DateFirst+(CHARINDEX(@TheDayOfWeek,'FriThuWedTueMonSunSat')-1)/3)%7
END
GO

		SELECT dbo.NthDayOfWeekOfMonth ('2019','Nov','Thu',4)
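
To cross-check the function against the Thanksgiving table above, a quick sketch:

--verify the function for each year listed above
select y as [Year], dbo.NthDayOfWeekOfMonth(cast(y as char(4)), 'Nov', 'Thu', 4) as Thanksgiving
from (values (2012),(2013),(2014),(2015),(2016),(2017),(2018),(2019)) t(y);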
   
Reference: Robyn Page's SQL Server DATE/TIME Workbench


Get Most Recent Backup Chain in Full, Differential and Log Backups

 
 
use [myDB];

declare @dt datetime = getdate()
declare @dbname sysname = (select DB_NAME())
declare @server_name sysname = 'DEVSQL'

;with mycte as (
SELECT s.database_name,
       s.backup_start_date,
       s.first_lsn, s.last_lsn,
       s.database_backup_lsn,
       s.checkpoint_lsn,
       s.[type],  -- D: Full, I: Differential, L: Log
       m.physical_device_name,
       s.server_name, s.recovery_model,
       s.backup_finish_date,
       row_number() Over(Partition by s.[type] Order by s.backup_finish_date DESC) rn
FROM msdb.dbo.backupset s
INNER JOIN msdb.dbo.backupmediafamily m ON s.media_set_id = m.media_set_id
WHERE s.database_name = @dbname
AND s.server_name = @server_name
--only backups taken since the most recent full backup
AND s.backup_finish_date >= (select top 1 s2.backup_finish_date
                             from msdb.dbo.backupset s2
                             where s2.[type] = 'D' and s2.server_name = @server_name
                               and s2.database_name = @dbname
                               and s2.backup_finish_date <= @dt
                             order by s2.backup_finish_date desc)
AND s.backup_finish_date <= @dt
)
--keep the latest full and differential plus the log backups past the differential's last_lsn
--(assumes at least one differential backup exists after the full)
Select * from mycte
WHERE (rn = 1 and [type] <> 'L')
   Or (last_lsn >= (select last_lsn + 1 from mycte WHERE rn = 1 and [type] = 'I'))
ORDER BY backup_finish_date desc, backup_start_date desc
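
The query returns the chain newest first; to use it you would restore in reverse: the full backup, then the latest differential, then the remaining log backups in order. A minimal sketch with hypothetical file names:

--restore order sketch: full -> latest differential -> log backups (file names are placeholders)
RESTORE DATABASE myDB FROM DISK = N'C:\backup\myDB_full.bak' WITH NORECOVERY;
RESTORE DATABASE myDB FROM DISK = N'C:\backup\myDB_diff.bak' WITH NORECOVERY;
RESTORE LOG myDB FROM DISK = N'C:\backup\myDB_log1.trn' WITH NORECOVERY;
RESTORE LOG myDB FROM DISK = N'C:\backup\myDB_log2.trn' WITH RECOVERY;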

 
 

 

Get max min per group in sequence — A solution for the question

The original question is from MSDN; I came up with a solution, and after a few requirement changes, here is the final version. I hope it helps.

 



if object_id('test','U') is not null drop table test 
Create table test (Car int, X Char(1), [Time] int)

INSERT  INTO test
VALUES  ( 11, 'A', 5 ),
        ( 11, 'B', 6 ),
        ( 11, 'B', 7 ),
        ( 11, 'B', 8 ),		 
        (  11,'C', 9 ),
        (  11,'C', 10 ),
        (  11,'C', 11 ),
		(  11,'A', 12 ),
		(  11,'B', 13 ) ;


with mycte as (
--delta = (overall row number) - (row number within X): it stays constant
--across each consecutive run of the same X value
Select Car, X, [Time] time1, lead([Time],1) Over(Order by [Time]) leadTime
,row_number() Over(order by [Time]) - ROW_NUMBER() OVER (PARTITION BY X ORDER BY [Time]) delta from test)

,mycte1 as (
Select Car, X, time1, leadTime, delta
, ROW_NUMBER() OVER (PARTITION BY delta ORDER BY time1 DESC) rn --rn=1 is the last row of each run
, count(*) OVER (PARTITION BY X, delta) cnt
from mycte)

--stationary stretches (rn>1) plus the moving row that closes each run (rn=1)
Select Car, X, min(time1) minTime, max(leadTime) maxTime, 0 as moving
from mycte1
WHERE rn>1
Group by Car, X, delta
UNION ALL
Select Car, X, min(time1) minTime, max(CASE WHEN cnt>1 Then leadTime Else time1 END) maxTime, 1 as Moving
from mycte1
WHERE rn=1
GROUP BY Car, X, delta
Order by minTime


/*
Car	X	minTime	maxTime	moving
11	A	5	5	1
11	B	6	8	0
11	B	8	9	1
11	C	9	11	0
11	C	11	12	1
11	A	12	12	1
11	B	13	13	1
*/



 

Arrange Column Order In a Select Statement (Dynamic query)

 
--create table test (examID int, col1 int, col4 int, col3 int, col2 int)
--Insert into test values (1, 1,2,3,4),(1,4,3,2,1),(1,9,9,9,9)


Declare @ExamID int=1

DECLARE @colsSorted AS NVARCHAR(2000),
        @sql AS NVARCHAR(4000)

declare @mytable varchar(50)='test', @myschema varchar(50)='dbo'

--build the column list in random order (Order by NewID());
--swap in ORDINAL_POSITION or any custom ordering as needed
select @colsSorted 
  = STUFF((select ', '+quotename(column_name,'[')
           from information_schema.columns 
           where table_name = @mytable and TABLE_SCHEMA=@myschema
             AND COLUMN_NAME<>'ExamID'
           Order by NewID()
           FOR XML PATH (''),type).value('.','varchar(max)'), 1, 2, '')

print @colsSorted

--quotename the schema and table to keep the dynamic SQL safe
Set @sql=N'SELECT '+ @colsSorted + N' FROM ' + quotename(@myschema) + N'.' + quotename(@mytable) + N' WHERE ExamID=' + CAST(@ExamID as varchar(10))

exec sp_executesql @sql;
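
A slightly safer variant passes @ExamID to sp_executesql as a parameter instead of concatenating it into the string (a sketch):

--parameterized variant: the ExamID value travels as a parameter, not as text
Set @sql = N'SELECT ' + @colsSorted + N' FROM ' + quotename(@myschema) + N'.' + quotename(@mytable) + N' WHERE ExamID = @ExamID'
exec sp_executesql @sql, N'@ExamID int', @ExamID = @ExamID;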


---    drop table test


 

Number Table and Split String
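
The query below assumes a test table holding a slash-delimited string column; a minimal setup (hypothetical sample data) might be:

--hypothetical sample data for the split below (rn=4 picks 9.99)
create table test (strCol varchar(100));
insert into test values ('12.50/3.75/8.00/9.99');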

 
--===== Create a number table on the fly (101 x 101 = 10,201 numbers)
;WITH Num1 (n) AS (
SELECT 1 as n
UNION ALL SELECT n+1 as n
FROM Num1 Where n<101),
Num2 (n) AS (SELECT 1 FROM Num1 AS X, Num1 AS Y),
Nums (n) AS (SELECT ROW_NUMBER() OVER(ORDER BY n) FROM Num2)

,mycte as (
--n marks each position right after a delimiter; SUBSTRING grabs up to the next '/'
select strCol, Row_Number() Over(Order By n) rn
, CAST(substring(strCol, n, charindex('/', strCol + '/', n) - n) as MONEY) splitVal
from test
cross apply Nums
Where n <= len(strCol) AND substring('/' + strCol, n, 1) = '/')

--pick the fourth element
Select * from mycte 
WHERE rn=4

Drop table test


 

https://social.msdn.microsoft.com/Forums/sqlserver/en-US/08de9a11-ed07-4e8a-834b-1d96a15900c8/substring-use-to-extract-portion-of-field?forum=transactsql


Island Question Variation

 

if object_id('test','U') is not null drop table test 
create table test (MBR_ID int,  DS varchar(10))
Insert into test values
(1,'2011-07-15'), (1,'2011-07-21'),(1,'2011-07-29'),(1,'2011-08-04'),(1,'2011-08-11'),(1,'2011-08-25'),(1,'2011-09-01'),(1,'2011-09-21')
,(1,'2012-01-11'),(1,'2012-03-15'),(1,'2012-06-04'),(1,'2012-10-11'),(1,'2013-02-14'),(1,'2013-06-18'),(1,'2013-07-08'),(1,'2013-07-15')
,(1,'2013-07-17'),(1,'2013-08-07'),(1,'2013-08-14'),(1,'2013-08-28'),(1,'2013-09-11'),(1,'2013-09-25'),(1,'2013-11-20')
,(1,'2013-12-04'),(1,'2013-12-17'),(1,'2014-01-14'),(1,'2014-01-29'),(1,'2014-02-25'),(1,'2014-02-26'),(1,'2014-03-12')
,(1,'2014-03-19'),(1,'2014-03-22'),(1,'2014-03-24')
 

;with mytest as (
--normalize every date to the end of its month
select MBR_ID, eomonth(ds) as ds from test)

,mycte as (
--a gap of more than two months starts a new island; the month difference is
--negative here, so each gap shifts the running total onto a new group value
SELECT MBR_ID, ds, lag(ds, 1) Over(partition by MBR_ID ORDER BY ds) dslag
 ,Case when datediff(month, lag(ds, 1) Over(partition by MBR_ID ORDER BY ds), ds) <= 2
  Then 0 Else datediff(month, ds, lag(ds, 1) Over(partition by MBR_ID ORDER BY ds)) End diff
FROM mytest)

--running sum of diff yields the island (group) number grn
,mycte1 as (
Select MBR_ID, ds, SUM(diff) Over(Order by ds) grn from mycte)

--collapse each island to its YYYY-MM start and end
,mycte2 as (
select MBR_ID, Convert(varchar(7),min(ds),126) START_DT, Convert(varchar(7),max(ds),126) END_DT 
from mycte1 
Group by MBR_ID, grn)

--sort by the real date
Select MBR_ID, START_DT, END_DT from mycte2 
Order by Cast(START_DT+'-01' as date)
 
 /*
 MBR_ID	START_DT	END_DT
1	2011-07	2011-09
1	2012-01	2012-03
1	2012-06	2012-06
1	2012-10	2012-10
1	2013-02	2013-02
1	2013-06	2014-03

 */


 

https://social.msdn.microsoft.com/Forums/en-US/1a8ff1d3-fdbb-4cb8-b596-249cded7b874/sql-query-help?forum=transactsql


Import Text File with an Extra Column Delimiter

I ran into a text file (generated from Informix(?)) with an extra pipe (|) appended at the end of each row. We need to adjust the import process in the Import/Export Wizard to skip that last pipe.
One way is to define an extra column on the source side and ignore it on the target.
However, I found an easier way:

You can add a pipe (|) directly to the row delimiter (for both the header and the regular columns) and it should work.

If you are using bcp or BULK INSERT, you can define both the column and row delimiters so the import skips the last pipe.
Here is sample code for bcp and BULK INSERT.

 

--- Bulk Insert
  BULK INSERT  [TestFullMode].[dbo].[InfoData2]
  FROM 'C:\temp\myDataPipetail.txt'
  WITH
     (
        FIELDTERMINATOR ='|',
        ROWTERMINATOR ='|\n',
        FirstRow=2
     );

--bcp (quote the terminators so cmd.exe does not treat | as a pipe)
DECLARE @cmd NVARCHAR(4000) 
set @cmd = 'BCP [TestFullMode].[dbo].[InfoData2] IN C:\temp\myDataPipetail.txt -c -T -t"|" -r"|\n" -F 2 -S' + @@servername
exec master..xp_cmdshell @cmd


 

Search a Comma-Separated String (Single Character) without Splitting the Parameter

We can take advantage of the [] wildcard match with LIKE in T-SQL for a single-letter search.
Here is a code snippet:


CREATE TABLE Testa ( id INT, col1 VARCHAR(100))
INSERT INTO Testa VALUES (1,'a,b,c,d')
,(2,'d,e,f,g')
, (3,'a,s,k')
, (4,'t')

declare @s varchar (20)='a,z,t'
--quotename(@s) yields '[a,z,t]', a LIKE character class, so the pattern
--matches any list containing a, z, or t as a single-character element
select *
from Testa
where ','+col1+',' like '%,'+quotename(@s)+',%'

--drop table Testa

 

http://msdn.microsoft.com/en-us/library/ms179884.aspx
http://msdn.microsoft.com/en-us/library/ms179859.aspx


Recover Data from a DELETE or TRUNCATE TABLE without a Backup File

As a last resort, you may be able to recover deleted or truncated data from an intact log file.
Very few people ever need to recover data without a backup in their day-to-day work.
The point is that SQL Server records every operation in its log file; as long as the log records are intact,
recovering the data is a matter of knowing how.
The technique uses the undocumented function fn_dblog and the undocumented command DBCC PAGE to read the raw data inside the log file,
and it takes extra effort to reconstruct the rows.
If you have a full backup and log backups, the process can be a little simpler.
This post by itself may not help you directly, but it points to a few resources for getting your hands dirty.
If you really want to try it now, download Mr. Imran's scripts and give them a try.
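
For a first peek at what the log records, here is a minimal sketch using fn_dblog (an undocumented function, so column names and output can vary by version):

--list log records left by deleted rows (undocumented; for inspection only)
SELECT [Current LSN], Operation, Context, AllocUnitName, [Transaction ID]
FROM fn_dblog(NULL, NULL)
WHERE Operation = 'LOP_DELETE_ROWS';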

I have listed a few pointers in case you are interested in this process.
1. Paul Randal has posted detailed techniques and inside knowledge about the internals of the log file.

Using fn_dblog, fn_dump_dblog, and restoring with STOPBEFOREMARK to an LSN

2. Mladen Prajdić has a good write-up on this topic with code samples:
http://weblogs.sqlteam.com/mladenp/archive/2010/10/12/sql-server-ndash-undelete-a-table-and-restore-a-single.aspx

3. Muhammad Imran posted two stored procedures to recover such lost data.
Recover_Deleted_Data_Proc:
http://raresql.wordpress.com/2011/10/
Recover_Truncated_Data_Proc:
http://raresql.wordpress.com/2012/04/08/how-to-recover-truncated-data-from-sql-server-without-backup/
To get truncated data back in SQL Server 2014, you need to modify the code a little, since the underlying data format changed.

For these operations to work, the log records must still be intact and readable.

In the FULL recovery model, if you have never taken a log backup, you most likely still have the whole log chain, because the log has not been truncated; go ahead and try the two stored procedures posted by Mr. Imran.
If your database is in the SIMPLE recovery model, you may or may not succeed: if the log file is big enough and no checkpoint has run to clear the log yet,
you may be lucky enough to get your data back.

This exercise offers a peek inside the SQL Server log file and a better understanding of SQL Server log management.
The normal route to safeguarding data is still a robust disaster recovery solution with good backups in an unbroken chain;
with one in place, getting lost data back is far more efficient.