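-- Demo script: fill a temp table with large NVARCHAR(MAX) values, sample it with TABLESAMPLE, then delete a block of rows by key range.
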
DROP TABLE IF EXISTS #BIG_TABLE;

CREATE TABLE #BIG_TABLE (
    ID BIGINT NOT NULL,
    FOR_U NVARCHAR(MAX),
    PRIMARY KEY (ID)
);

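-- Cast the seed character to NVARCHAR(MAX) first; otherwise REPLICATE truncates its result at 8,000 bytes.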
DECLARE @big_string NVARCHAR(MAX) = REPLICATE(CAST(N'Z' AS NVARCHAR(MAX)), 200000);
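-- Load 250,000 rows, each carrying the 200,000-character string; the self cross join of spt_values supplies more than enough source rows.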
INSERT INTO #BIG_TABLE WITH (TABLOCK)
SELECT TOP (250000) ROW_NUMBER() OVER (ORDER BY (SELECT NULL)) RN, @big_string -- 100k is good
FROM master..spt_values t1
CROSS JOIN master..spt_values t2
OPTION (MAXDOP 1);

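-- Write dirty pages to disk and clear the buffer pool so the next query reads from a cold cache.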
CHECKPOINT;
DBCC DROPCLEANBUFFERS;

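-- TABLESAMPLE picks whole pages (roughly 1 percent of them), so the sampled row count varies from run to run; every row in this table is longer than 8,000 characters.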
SELECT COUNT_BIG(*) total_sampled_rows
, COUNT_BIG(CASE WHEN LEN(FOR_U) > 8000 THEN 1 ELSE NULL END) sampled_rows_too_long
FROM #BIG_TABLE TABLESAMPLE (1 PERCENT);

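-- Delete a contiguous range of 500 rows (ID 1 through 500) from the front of the clustered index.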
DECLARE @start_id BIGINT = 1;
DECLARE @end_id BIGINT = 500;

DELETE FROM #BIG_TABLE
WHERE ID BETWEEN @start_id AND @end_id;