As Harry mentioned in the comments, one way of doing this is to expand the single row containing the date range into multiple rows, each representing a value at the desired level of granularity (hour, minute, etc.). This is done because SQL Server is not particularly efficient when working with ranges, and also because transaction data may extend over multiple days.
The following example expands the data to minute-level granularity and gives the desired result. Keep in mind that I spent no time trying to optimize the code, so there is definitely room for improvement:
-- Input
-- Peak windows as time-of-day ranges. NOTE(review): [start]/[end] are varchar
-- 'HH:mm'; the comparison on the last line relies on implicit varchar -> time
-- conversion for [start] -- consider explicit casts for clarity.
;with PeakHours as (
select 1 as id, '05:00' as [start], '09:00' as [end]
union all
select 2 as id, '12:00' as [start], '15:00' as [end]
union all
select 3 as id, '17:00' as [start], '22:00' as [end]
)
-- Sample transaction: one row with a started_at/completed_at datetime range.
, data as (
select 1 as id, '2019-05-07 04:00' as started_at, '2019-05-07 16:00' as completed_at
)
-- Convert start and end to UNIX to be able to get ranges
-- NOTE(review): DATEDIFF returns int, so seconds-since-1970 overflows for
-- datetimes on/after 2038-01-19 -- DATEDIFF_BIG would be needed beyond that.
, data2 as (
select *
,DATEDIFF(s, '1970-01-01', started_at) as started_at_unix
,DATEDIFF(s, '1970-01-01', completed_at) as completed_at_unix
from data
)
-- Find min start and max end to cover whole possible range
-- (a single expansion range covering all transactions, even if they overlap)
, data3 as (
select min(started_at_unix) as min_started_at_unix, max(completed_at_unix) as max_completed_at_unix
from data2
)
-- expand data using Tally table technique
-- Each level squares the previous row count: 2, 4, 16, 256, 65536, ~4.3e9 rows.
-- Only TOP (n) rows below are actually materialized.
,lv0 AS (SELECT 0 g UNION ALL SELECT 0)
,lv1 AS (SELECT 0 g FROM lv0 a CROSS JOIN lv0 b)
,lv2 AS (SELECT 0 g FROM lv1 a CROSS JOIN lv1 b)
,lv3 AS (SELECT 0 g FROM lv2 a CROSS JOIN lv2 b)
,lv4 AS (SELECT 0 g FROM lv3 a CROSS JOIN lv3 b)
,lv5 AS (SELECT 0 g FROM lv4 a CROSS JOIN lv4 b)
,Tally (n) AS (SELECT ROW_NUMBER() OVER (ORDER BY (SELECT NULL)) FROM lv5)
-- One row per minute of the overall range. TOP takes (range/60) rows, i.e.
-- offsets 0 .. range/60 - 1, so the final minute (the completed_at boundary)
-- is deliberately excluded: the expansion is half-open [start, end).
-- NOTE(review): if started_at = completed_at, TOP (0) yields no rows and the
-- final sums come out NULL rather than 0 -- verify that is acceptable.
, data_expanded as (
SELECT TOP (select (max_completed_at_unix - min_started_at_unix) / 60 from data3) (n - 1) * 60 + d3.min_started_at_unix as unix_timestamp_min
from Tally as t
cross apply data3 as d3
)
-- Aggregate
-- Each expanded minute inside a transaction contributes 1/60 of an hour to
-- exactly one bucket: peak (matched a PeakHours window) or non-peak.
-- The BETWEEN below is inclusive on both ends, but the end minute was already
-- excluded by the TOP above, so the transaction interval stays half-open.
select
1.0 * sum(case when ph.id is not null then 1 else 0 end) / 60 as peak_hours_total
,1.0 * sum(case when ph.id is null then 1 else 0 end) / 60 as non_peak_hours_total
from data_expanded as de
inner join data2 as d2
on de.unix_timestamp_min between d2.started_at_unix and d2.completed_at_unix
-- dateadd(SECOND, -1, [end]) makes the peak window half-open at second
-- resolution (e.g. 05:00:00 .. 08:59:59), so a minute falling exactly on
-- [end] (09:00) is counted as non-peak, not peak.
left join PeakHours as ph
on cast(dateadd(s, de.unix_timestamp_min, '1970-01-01') as time(0)) between ph.[start] and dateadd(SECOND, -1, cast(ph.[end] as time(0)))