1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
|
# File 'modules/snapshot/manifests/systemdjobs/cirrussearch.pp', line 1
# Installs the cirrussearch dump script and its log directory, and (unless
# $filesonly) one systemd timer job per db shard that runs the dump weekly.
#
# @param user
#   System user that owns the log directory and runs the timer jobs.
# @param filesonly
#   When true, only install the script and log directory; do not set up
#   the systemd timer jobs (useful for e.g. testbed/canary hosts).
class snapshot::systemdjobs::cirrussearch(
    $user = undef,
    $filesonly = false,
) {
    $confsdir = $snapshot::dumps::dirs::confsdir
    $apachedir = $snapshot::dumps::dirs::apachedir

    file { '/var/log/cirrusdump':
        ensure => 'directory',
        # 0755, not 0644: a directory needs the execute bit to be
        # traversable. With 0644 even the owner could not access files
        # inside it, and the dump jobs could not write their logs.
        mode   => '0755',
        owner  => $user,
    }

    $scriptpath = '/usr/local/bin/dumpcirrussearch.sh'
    file { $scriptpath:
        mode   => '0755',
        owner  => 'root',
        group  => 'root',
        source => 'puppet:///modules/snapshot/systemdjobs/dumpcirrussearch.sh',
    }

    if !$filesonly {
        # The dumps take quite some time to complete. Split the dump up into
        # one process per dbshard. The dumps don't have anything to do with db
        # shards, but this is a convenient split of wikis with small wikis
        # grouped together and large wikis separated out. Shards 9 and 10 do
        # not exist (as of nov 2022).
        (range(1, 8) + [11]).each |$shard| {
            $dblist = "${apachedir}/dblists/s${shard}.dblist"
            systemd::timer::job { "cirrussearch-dump-s${shard}":
                ensure             => present,
                description        => 'Regular jobs to build snapshot of cirrus search',
                user               => $user,
                monitoring_enabled => false,
                send_mail          => true,
                environment        => {'MAILTO' => 'ops-dumps@wikimedia.org'},
                command            => "${scriptpath} --config ${confsdir}/wikidump.conf.other --dblist ${dblist}",
                interval           => {'start' => 'OnCalendar', 'interval' => 'Mon *-*-* 16:15:0'},
                # The script reads the config under $confsdir, which is
                # managed by snapshot::dumps::dirs.
                require            => [ File[$scriptpath], Class['snapshot::dumps::dirs'] ],
            }
        }
        # Cleanup historical non-sharded dump
        systemd::timer::job { 'cirrussearch-dump':
            ensure      => absent,
            description => 'Regular jobs to build snapshot of cirrus search',
            user        => $user,
            command     => "${scriptpath} --config ${confsdir}/wikidump.conf.other",
            interval    => {'start' => 'OnCalendar', 'interval' => 'Mon *-*-* 16:15:0'},
        }
    }
}
|