Base-64 conversion in SQL Server

-- Decode a base-64 string into its raw bytes.
-- T-SQL has no built-in base-64 function (prior to SQL Server 2022), so this
-- borrows the XQuery xs:base64Binary type constructor instead.
create function dbo.ConvertFromBase64String (@Base64Value varchar(max))
returns varbinary(max)
as
begin
    declare @decoded varbinary(max);

    -- An empty XML instance exposes the XQuery type constructors;
    -- sql:variable() feeds the T-SQL parameter into the XQuery expression.
    set @decoded = cast('' as xml).value(
        'xs:base64Binary(sql:variable("@Base64Value"))'
        , 'varbinary(max)');

    return @decoded;
end;
go

-- Encode raw bytes as a base-64 string, the mirror of ConvertFromBase64String.
create function dbo.ConvertToBase64String (@BinaryValue varbinary(max))
returns varchar(max)
as
begin
    declare @encoded varchar(max);

    -- xs:base64Binary applied to a binary sql:variable yields its
    -- base-64 text form when read back as varchar.
    set @encoded = cast('' as xml).value(
        'xs:base64Binary(sql:variable("@BinaryValue"))'
        , 'varchar(max)');

    return @encoded;
end;
go
-- Round-trip demo: varchar -> varbinary -> base-64 -> varbinary -> varchar.
declare @sourceBytes varbinary(max) = cast('hello' as varbinary(max));
select @sourceBytes; -- 0x68656C6C6F

declare @encoded varchar(max);
select @encoded = dbo.ConvertToBase64String(@sourceBytes);
select @encoded; -- aGVsbG8=

declare @decoded varbinary(max);
select @decoded = dbo.ConvertFromBase64String(@encoded);
select @decoded; -- 0x68656C6C6F

declare @roundTripped varchar(max);
select @roundTripped = cast(@decoded as varchar(max));
select @roundTripped; -- hello

GUID nonclustered index

Need a globally unique key for a table, but don’t want the page splits and fragmentation that come from clustering on a random GUID? And you don’t want the predictable, similar-looking keys that newsequentialid() produces?

The solution is to make a clustered index out of a regular identity integer, and use a GUID as a nonclustered primary key:

/*
Identity is the clustered index, but not the primary key.

Primary key has a non-clustered index.

The records are physically inserted in the database in order by the clustered key, but the primary key as a random GUID is still indexed.
*/

-- Surrogate identity drives the physical (clustered) order; the GUID is the
-- logical primary key. (",..." is a placeholder for the table's real columns.)
create table dbo.SomeTable
(
    CX int not null identity
    ,SomeTableKey uniqueidentifier not null
    ,...
);
go
-- Primary key on the GUID, but explicitly NONclustered, so random key values
-- don't dictate the physical row order.
alter table dbo.SomeTable
add constraint PK_SomeTable primary key nonclustered (SomeTableKey);
go
-- Clustered index on the ever-increasing identity: inserts always append.
create unique clustered index CIX_SomeTable on dbo.SomeTable (CX);
go
-- newid() (random) rather than newsequentialid(): global uniqueness without
-- guessable, similar-looking keys.
alter table dbo.SomeTable
add constraint DF_SomeTable_Key default (newid()) for SomeTableKey;
go

View Job History in SQL Server

-- Shows SQL Agent job outcomes from msdb, optionally filtered by date and/or
-- job name, with the int-encoded run_date/run_time/run_duration columns
-- decoded into readable values. Newest runs first.
create procedure util.ViewJobHistory
(
    @ExecutionDate date = null      -- optional: only runs on this date
    ,@JobName sysname = null        -- optional: only this job
)
as
begin
    set nocount on;

    -- msdb stores run_date as an int in yyyymmdd form; pre-compute the
    -- comparable int once instead of converting per row.
    declare @executionDateInt int;
    set @executionDateInt = year(@ExecutionDate) * 10000
        + month(@ExecutionDate) * 100
        + day(@ExecutionDate);

    select
        JobName
        ,JobSuccess
        -- reassemble run_date (yyyymmdd) + run_time (hmmss) as 'yyyy-MM-dd HH:mm:ss'
        ,RunDateFormatted + ' '
            + left(RunTimePadded, 2)
            + ':' + substring(RunTimePadded, 3, 2)
            + ':' + right(RunTimePadded, 2) [RunDateTime]
        -- run_duration is an int in hhmmss form; flatten it to total seconds
        ,DurationSeconds + (60 * DurationMinutes) + (60 * 60 * DurationHours) [TotalDurationSeconds]
    from
    (
        select
            sj.name [JobName]
            ,case sjh.run_status
                when 1 then 'Success'
                else 'FAIL'    -- NOTE(review): also covers Retry/Canceled/In Progress statuses
            end [JobSuccess]
            ,left(cast(sjh.run_date as varchar(8)), 4)
                + '-'
                + substring(cast(sjh.run_date as varchar(8)), 5, 2)
                + '-'
                + right(cast(sjh.run_date as varchar(8)), 2) [RunDateFormatted]
            -- run_time has no leading zeros (e.g. 90105 = 09:01:05); pad to 6
            ,right('000000' + cast(sjh.run_time as varchar(6)), 6) [RunTimePadded]
            ,sjh.run_duration / 10000 [DurationHours]
            ,(sjh.run_duration - (sjh.run_duration / 10000 * 10000)) / 100 [DurationMinutes]
            ,(sjh.run_duration - (sjh.run_duration / 100 * 100)) [DurationSeconds]
        from msdb.dbo.sysjobs sj
        join msdb.dbo.sysjobhistory sjh on sj.job_id = sjh.job_id
        -- null parameter = no filter on that dimension
        where isnull(@executionDateInt, sjh.run_date) = sjh.run_date
        and sj.name = isnull(@JobName, sj.name)
        -- step_id 0 is the job outcome row, not an individual job step
        and sjh.step_id = 0
    ) x
    -- deterministic, newest-first output (the original had no ORDER BY)
    order by RunDateFormatted desc, RunTimePadded desc;
end;
go

SQL Server Flatten and Unflatten

Assuming you have a table type named SingleColumnText which contains (obviously) a single column of text, named [TextValue]:

-- Concatenates every TextValue in @input (ordered by PK) into one string,
-- separated by @delimiter. Returns null for an empty input set.
--
-- Rewritten with FOR XML PATH instead of the old "SELECT @var = @var + ..."
-- trick: Microsoft documents the ordering of that aggregate-assignment
-- pattern as undefined, even with ORDER BY, so it can silently drop or
-- reorder elements depending on the plan.
create function util.Flatten
(
    @input util.SingleColumnText readonly
    ,@delimiter nvarchar(max)
)
returns nvarchar(max)
as
begin
    declare @result nvarchar(max);

    select @result = stuff(
        (
            select @delimiter + TextValue
            from @input
            order by PK
            for xml path(''), type
        ).value('.', 'nvarchar(max)')   -- ,type + .value() un-escapes &, <, > entitized by FOR XML
        -- strip the leading delimiter; datalength/2 gives the character count
        -- and, unlike len(), does not ignore trailing spaces
        , 1, datalength(@delimiter) / 2, '');

    return @result;
end;
go
-- Splits @input on a single-character @delimiter into a numbered result set:
-- [Idx] (0-based element position) and [TextValue] (the element's text).
-- Requires util.Numbers, a tally table whose column [n] holds sequential
-- integers from 1 — it must contain at least len(@input) rows.
-- NOTE(review): because the filter uses len(@input) (which ignores trailing
-- spaces) and stops at the last character, an input ending in the delimiter
-- yields no trailing empty element — confirm that's the intended contract.
create function util.Unflatten
(
    @input nvarchar(max)
    ,@delimiter nchar(1)
)
returns table
as
    return
    (
        select
            row_number() over (order by n) - 1 [Idx]
            -- element text runs from position n up to (but excluding) the next
            -- delimiter; appending @delimiter to @input guarantees charindex
            -- finds a terminator after the final element
            ,substring(@input, n, charindex(@delimiter, @input + @delimiter, n) - n) [TextValue]
        from util.Numbers
        -- keep only positions that START an element: prepending @delimiter
        -- makes position 1 qualify, and every position immediately after a
        -- delimiter qualifies as well
        where n <= len(@input) 
        and substring(@delimiter + @input, n, 1) = @delimiter
    );
go

Read/write large blob to SQL Server from C#

Rather than try to make it happen in one big command, here’s breaking it out into many commands. Whether or not you use the transaction is up to you and your use case – you could always have a “Completed” column and only set that to true after success, if you wanted to skip a transaction.

First, insert a record, leaving the blob column as an empty byte array, making sure you have access to the primary key of the record you just inserted:

// Insert the row with an empty (0x) blob so it exists and we can capture its
// identity; the contents are streamed in afterwards with .WRITE.
using var conn = new SqlConnection(_config.GetConnectionString("MyDB"));
await conn.OpenAsync().ConfigureAwait(false);
using var tran = (SqlTransaction)await conn.BeginTransactionAsync().ConfigureAwait(false);
int fileID;
using (var comm = conn.CreateCommand())
{
    comm.Transaction = tran;
    // scope_identity() returns numeric(38,0), which ADO.NET surfaces as a
    // boxed decimal; casting that straight to int throws InvalidCastException.
    // Casting to int on the SQL side makes the (int) unbox below valid.
    comm.CommandText = @"
        insert dbo.Files (FileName, FileContents)
        values (@FileName, 0x);
        select cast(scope_identity() as int);
    ";
    comm.Parameters.AddWithValue("@FileName", fileName);
    fileID = (int)(await comm.ExecuteScalarAsync().ConfigureAwait(false));
}

Then, in the same connection, write the bytes, 8000 at a time, appending the blob column, and commit the transaction once they’re all done. The “set FileContents.Write” function is a little clumsy, but it works:

using var fileStream = System.IO.File.OpenRead(file);
byte[] buffer = new byte[8000];
int count;
// Append the file 8000 bytes at a time; FileContents.write(@data, null, null)
// appends the parameter value to the end of the existing blob.
// Read asynchronously — the original used a blocking Read() inside an
// otherwise fully async flow.
while ((count = await fileStream.ReadAsync(buffer, 0, buffer.Length).ConfigureAwait(false)) > 0)
{
    // .WRITE appends the WHOLE parameter value, so the final, partially
    // filled buffer must be trimmed down to the bytes actually read.
    byte[] chunk = new byte[count];
    Array.Copy(buffer, 0, chunk, 0, count);
    using var comm = conn.CreateCommand();
    comm.Transaction = tran;
    comm.CommandText = "update dbo.Files set FileContents.write(@ContentChunk, null, null) where FileID = @FileID";
    comm.Parameters.AddWithValue("@FileID", fileID);
    comm.Parameters.AddWithValue("@ContentChunk", chunk);
    await comm.ExecuteNonQueryAsync().ConfigureAwait(false);
}
await tran.CommitAsync().ConfigureAwait(false);

To read the data, use the substring function:

// Streams a blob back out of dbo.Files in 8000-byte chunks using substring()
// (1-based) on the varbinary(max) column; an empty chunk means end of data.
private async IAsyncEnumerable<byte[]> ReadFileAsync(int fileID)
{
    int startingByte = 1;
    // One connection for the whole read — the original opened and disposed a
    // new SqlConnection for every 8000-byte chunk.
    using var conn = new SqlConnection(_config.GetConnectionString("MyDB"));
    await conn.OpenAsync().ConfigureAwait(false);
    while (true)
    {
        using var comm = conn.CreateCommand();
        comm.CommandText = @"
            select substring(FileContents, @StartingByte, 8000) [FileContents]
            from dbo.Files
            where FileID = @FileID;
        ";
        comm.Parameters.Add(new SqlParameter("@FileID", SqlDbType.Int) { Value = fileID });
        comm.Parameters.Add(new SqlParameter("@StartingByte", SqlDbType.Int) { Value = startingByte });
        using var rdr = await comm.ExecuteReaderAsync().ConfigureAwait(false);
        if (!await rdr.ReadAsync().ConfigureAwait(false))
        {
            break; // no such FileID
        }
        // When FileContents is null, rdr[0] is DBNull — the original's
        // (byte[])rdr[0] cast would throw before its null check ever ran.
        if (!(rdr[0] is byte[] bytes) || bytes.Length == 0)
        {
            break; // null column, or read past the end of the blob
        }
        startingByte += bytes.Length;
        yield return bytes;
    }
}


// Consume the chunk stream and write it out. (The original called
// ReadImportFileAsync(importBatchID), which doesn't match the
// ReadFileAsync(int fileID) method defined above.)
await foreach (var byteArray in ReadFileAsync(fileID))
{
    fileWriter.Write(byteArray, 0, byteArray.Length);
}

Select large text blocks in SSMS

If you have a large string, possibly with line breaks, it can be difficult to read in SSMS. There are built-in workarounds, and you can always inspect the text outside the editor, but for something quick and dirty, put your text into a variable and call the following procedure. The result is a clickable XML value with your text inside a CDATA element, which opens in its own tab.

-- Emits @text as a single clickable XML cell wrapped in a CDATA section, so
-- arbitrarily large strings can be opened in their own SSMS tab. PRINT and
-- RAISERROR truncate long output; this shows a result of any size.
create procedure util.SelectStringAsCdata
(
    @text varchar(max)
)
as
begin
    declare @lineBreak char(2) = char(13) + char(10);

    -- FOR XML EXPLICIT is the only FOR XML mode that can emit CDATA;
    -- the surrounding line breaks keep the text off the <root> tags.
    select
        1 [TAG]
        ,0 [PARENT]
        ,@lineBreak + @text + @lineBreak [root!1!!CDATA]
    for xml explicit;
end;
go

I was hoping to build a function that returns XML, so you could select text like this in a regular query, but apparently SQL Server does not keep CDATA in XML values.

SQL Server Date/Time Formatting

Here are my favorite date formats in SQL Server. These work with older versions, in case you’re not able to use the new format feature:

declare @theDate datetime = getdate();

select
    -- yyyy-MM-dd (ISO 8601 date portion; style 126 truncated to 10 chars)
    convert(char(10), @theDate, 126) 

    -- MM/dd/yyyy (US format, style 101, truncated to the date)
    ,convert(char(10), @theDate, 101)

    -- yyyy-MM-dd HH:mm:ss (ODBC canonical, style 120)
    ,convert(char(19), @theDate, 120)

    -- yyyy-MM-dd HH:mm:ss.fff (ODBC canonical with milliseconds, style 121)
    ,convert(char(23), @theDate, 121)

    -- yyyy-MM-ddTHH:mm:ss.fff (ISO8601, style 126)
    ,convert(char(23), @theDate, 126)

    -- MMM _d yyyy _h:mm:ss:fffAM (style 109: 12-hour clock with AM/PM,
    -- milliseconds after a colon; day/hour space-padded when < 10,
    -- so the length is always 26)
    ,convert(char(26), @theDate, 109)