Search

Search IconIcon to open search

Wikipedia Data sets

Last updated by Simon Späti

From Wikimedia Downloads (Analytics, Pageviews): statistics compiled using the current Pageview Definition. Available as:

# Others

# Downloading

# Querying with DuckDB

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
-- This is the actual public data format: space-delimited, no header.
-- DESCRIBE shows the inferred schema (four columns: project, page title,
-- view count, bytes served — see the explicit `columns` mapping below).
DESCRIBE
FROM read_csv('https://dumps.wikimedia.org/other/pageviews/2025/2025-10/pageviews-20251001-000000.gz',
    delim=' ',
    header=false,
    compression='gzip')
LIMIT 10;


-- Auto detection: force every field to raw text first, so we can eyeball
-- the data before committing to any column types.
SELECT *
FROM read_csv('https://dumps.wikimedia.org/other/pageviews/2025/2025-10/pageviews-20251001-000000.gz',
              delim=' ',
              header=false,
              compression='gzip',
              all_varchar=true)
LIMIT 20;

-- Manual schema: with the `columns` parameter, the query refers to the
-- column names defined there (not auto-generated ones).
SELECT project, page_title, view_count, bytes_served
FROM read_csv(
    'https://dumps.wikimedia.org/other/pageviews/2025/2025-10/pageviews-20251001-000000.gz',
    delim=' ',
    header=false,
    compression='gzip',
    columns={
        'project': 'VARCHAR',
        'page_title': 'VARCHAR',
        'view_count': 'BIGINT',
        'bytes_served': 'BIGINT'
    })
WHERE project IS NOT NULL
LIMIT 10;


--- Predicate pushdown: DuckDB applies the page_title filter while
--- scanning the CSV, so only matching rows are materialized.
SELECT
    page_title,
    view_count,
    bytes_served
FROM read_csv(
    'https://dumps.wikimedia.org/other/pageviews/2025/2025-10/pageviews-20251001-000000.gz',
    delim=' ',
    header=false,
    compression='gzip',
    columns={
        'project': 'VARCHAR',
        'page_title': 'VARCHAR',
        'view_count': 'BIGINT',
        'bytes_served': 'BIGINT'
    })
-- A hand-picked set of data-engineering article titles.
WHERE page_title IN (
    'DuckDB', 'Apache_Spark', 'Apache_Kafka',
    'Data_engineering', 'Extract,_transform,_load',
    'ClickHouse', 'Snowflake_(company)', 'Apache_Airflow',
    'Databricks', 'Data_warehouse', 'Data_lake',
    'dbt_(software)', 'Apache_Flink', 'PostgreSQL', 'BigQuery'
)
LIMIT 1000;

# ClickHouse way

From WikiStat | ClickHouse Docs:

The dataset contains 0.5 trillion records.

Getting the list of links:

1
2
3
4
5
6
7
# Crawl each monthly index page and collect every pageviews dump filename
# into links.txt (deduplicated). Progress goes to stderr so stdout stays
# a clean list for the pipeline.
for year in {2015..2023}; do
  for month in {01..12}; do
    echo "${year}-${month}" >&2
    curl -sSL "https://dumps.wikimedia.org/other/pageviews/$year/$year-$month/" \
      | grep -oE 'pageviews-[0-9]+-[0-9]+\.gz'
  done
done | sort | uniq | tee links.txt

A shell loop that builds the list of dump URLs for each year and month.

Downloading the data:

1
2
# Turn each bare filename in links.txt into a full download URL
# (\1 = year, \2 = month, taken from pageviews-YYYYMMDD-HHMMSS.gz;
# \0 re-appends the whole filename), then download 3 files in parallel.
# --continue lets the download resume after an interruption.
sed -r 's!pageviews-([0-9]{4})([0-9]{2})[0-9]{2}-[0-9]+\.gz!https://dumps.wikimedia.org/other/pageviews/\1/\1-\2/\0!' \
  links.txt | xargs -P3 wget --continue

(it will take about 3 days)

Creating a table:

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
-- ClickHouse table for hourly Wikipedia pageview counts.
-- Delta+ZSTD on time: timestamps arrive in increasing order, so delta
-- encoding compresses extremely well. LowCardinality suits the small
-- set of distinct project/subproject values.
CREATE TABLE wikistat
(
    time DateTime CODEC(Delta, ZSTD(3)),
    project LowCardinality(String),
    subproject LowCardinality(String),
    path String CODEC(ZSTD(3)),
    hits UInt64 CODEC(ZSTD(3))
)
ENGINE = MergeTree
-- Sorted by (path, time): per-page time-series reads scan contiguous ranges.
ORDER BY (path, time);

Loading the data:

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
# Parse the downloaded dump files locally and stream rows into ClickHouse.
# - time is recovered from each file's path (pageviews-YYYYMMDD-HHMMSS.gz)
# - every line is split by regex into project, optional ".subproject",
#   path, and hit count; lines that don't yield exactly 5 groups are dropped
# - Native format keeps the local->client handoff compact and typed
# NOTE(review): the time replacement produces '\1-\2-\3 \4-\5-\6' — dashes,
# not colons, between the time parts; presumably ::DateTime still parses
# this, but confirm against a sample file before a full 3-day load.
clickhouse-local --query "
  WITH replaceRegexpOne(_path, '^.+pageviews-(\\d{4})(\\d{2})(\\d{2})-(\\d{2})(\\d{2})(\\d{2}).gz$', '\1-\2-\3 \4-\5-\6')::DateTime AS time, 
       extractGroups(line, '^([^ \\.]+)(\\.[^ ]+)? +([^ ]+) +(\\d+) +(\\d+)$') AS values
  SELECT 
    time, 
    values[1] AS project,
    values[2] AS subproject,
    values[3] AS path,
    (values[4])::UInt64 AS hits
  FROM file('pageviews*.gz', LineAsString)
  WHERE length(values) = 5 FORMAT Native
" | clickhouse-client --query "INSERT INTO wikistat FORMAT Native"

Or loading the cleaned data:

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
-- Alternative: load the copies hosted in ClickHouse's public S3 bucket,
-- deriving each row's timestamp from the source file name.
INSERT INTO wikistat WITH
    -- hour of the dump, parsed from e.g. pageviews-20230101-000000.gz
    parseDateTimeBestEffort(extract(_file, '^pageviews-([\\d\\-]+)\\.gz$')) AS time,
    -- raw line layout: "<project>[.<subproject>] <path> <hits> <bytes>"
    splitByChar(' ', line) AS values,
    splitByChar('.', values[1]) AS projects
SELECT
    time,
    projects[1] AS project,
    projects[2] AS subproject,  -- presumably '' when there is no '.' part — verify
    decodeURLComponent(values[2]) AS path,  -- titles are URL-encoded in the dumps
    CAST(values[3], 'UInt64') AS hits
FROM s3(
    'https://clickhouse-public-datasets.s3.amazonaws.com/wikistat/original/pageviews*.gz',
    LineAsString)
WHERE length(values) >= 3  -- skip malformed lines with too few fields

Origin: Find good Data Sets or Sources
References:
Created 2025-11-06