MediaWiki REL1_30
CdnCacheUpdate.php
Go to the documentation of this file.
1<?php
24use Wikimedia\Assert\Assert;
26
/** @var string[] Collection of URLs to purge */
33 protected $urls = [];
34
/**
 * @param string[] $urlArr Collection of URLs to purge; purging is
 *   deferred until doUpdate() runs.
 */
38 public function __construct( array $urlArr ) {
39 $this->urls = $urlArr;
40 }
41
/**
 * Merge this update with $update by concatenating the two URL lists.
 *
 * @param MergeableUpdate $update Must itself be a CdnCacheUpdate;
 *   enforced by the runtime assertion below.
 */
42 public function merge( MergeableUpdate $update ) {
// Fail fast if some other MergeableUpdate subclass is passed in.
44 Assert::parameterType( __CLASS__, $update, '$update' );
45
// Duplicate URLs are tolerated here; purge() de-duplicates later
// via array_unique().
46 $this->urls = array_merge( $this->urls, $update->urls );
47 }
48
/**
 * Create an update object from an array of Title objects, or a
 * TitleArray object.
 *
 * @param Traversable|Title[] $titles Titles whose CDN URLs should be purged
 * @param string[] $urlArr Additional URLs to purge alongside those
 *   derived from $titles
 * @return CdnCacheUpdate
 */
public static function newFromTitles( $titles, $urlArr = [] ) {
	// Prime the link cache for all of the titles in a single batch query.
	( new LinkBatch( $titles ) )->execute();

	// Append the CDN URLs of every title to the caller-supplied list.
	$collected = $urlArr;
	foreach ( $titles as $title ) {
		$collected = array_merge( $collected, $title->getCdnUrls() );
	}

	return new CdnCacheUpdate( $collected );
}
65
/**
 * Create an update object purging only the CDN URLs of a single Title.
 *
 * @param Title $title
 * @return CdnCacheUpdate
 */
71 public static function newSimplePurge( Title $title ) {
72 return new CdnCacheUpdate( $title->getCdnUrls() );
73 }
74
/**
 * Purges the list of URLs passed to the constructor, then — if
 * $wgCdnReboundPurgeDelay is positive — schedules a second, delayed
 * purge of the same URLs.
 *
 * NOTE(review): this extract is missing lines. The statement that
 * brings $wgCdnReboundPurgeDelay into scope (presumably a `global`
 * declaration) and the call whose argument list begins at the
 * Title::makeTitle(...) line below (and which the trailing `) );`
 * closes) are not visible here. Confirm against the full REL1_30
 * source before modifying this method.
 */
78 public function doUpdate() {
80
// Immediate purge of every queued URL.
81 self::purge( $this->urls );
82
// Optionally queue a rebound purge: 'jobReleaseTimestamp' holds the
// earliest wall-clock time the job may run, i.e. no less than
// $wgCdnReboundPurgeDelay seconds from now.
83 if ( $wgCdnReboundPurgeDelay > 0 ) {
85 Title::makeTitle( NS_SPECIAL, 'Badtitle/' . __CLASS__ ),
86 [
87 'urls' => $this->urls,
88 'jobReleaseTimestamp' => time() + $wgCdnReboundPurgeDelay
89 ]
90 ) );
91 }
92 }
93
/**
 * Purges a list of CDN URLs through three channels: an event relayer
 * broadcast, optional HTCP multicast ($wgHTCPRouting), and optional
 * direct per-server purges ($wgSquidServers).
 *
 * NOTE(review): a line is missing from this extract between the
 * signature and the empty-array check — presumably
 * `global $wgSquidServers, $wgHTCPRouting;` — since both globals are
 * read below without a visible declaration. Confirm against the full
 * REL1_30 source.
 *
 * @param string[] $urlArr List of full URLs to purge
 */
101 public static function purge( array $urlArr ) {
103
// Nothing to do for an empty list.
104 if ( !$urlArr ) {
105 return;
106 }
107
108 // Remove duplicate URLs from list
109 $urlArr = array_unique( $urlArr );
110
111 wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) );
112
113 // Reliably broadcast the purge to all edge nodes
// Every event in this batch shares one timestamp, captured once
// before the map below.
114 $relayer = MediaWikiServices::getInstance()->getEventRelayerGroup()
115 ->getRelayer( 'cdn-url-purges' );
116 $ts = microtime( true );
117 $relayer->notifyMulti(
118 'cdn-url-purges',
119 array_map(
120 function ( $url ) use ( $ts ) {
121 return [
122 'url' => $url,
123 'timestamp' => $ts,
124 ];
125 },
126 $urlArr
127 )
128 );
129
130 // Send lossy UDP broadcasting if enabled
131 if ( $wgHTCPRouting ) {
132 self::HTCPPurge( $urlArr );
133 }
134
135 // Do direct server purges if enabled (this does not scale very well)
136 if ( $wgSquidServers ) {
137 // Maximum number of parallel connections per squid
138 $maxSocketsPerSquid = 8;
139 // Number of requests to send per socket
140 // 400 seems to be a good tradeoff, opening a socket takes a while
141 $urlsPerSocket = 400;
// Cap the number of sockets per server at $maxSocketsPerSquid.
142 $socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
143 if ( $socketsPerSquid > $maxSocketsPerSquid ) {
144 $socketsPerSquid = $maxSocketsPerSquid;
145 }
146
// Split the URL list into one chunk per socket; each (server, chunk)
// pair gets its own purge client, all run together by the pool.
147 $pool = new SquidPurgeClientPool;
148 $chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
149 foreach ( $wgSquidServers as $server ) {
150 foreach ( $chunks as $chunk ) {
151 $client = new SquidPurgeClient( $server );
152 foreach ( $chunk as $url ) {
153 $client->queuePurge( $url );
154 }
155 $pool->addClient( $client );
156 }
157 }
158
159 $pool->run();
160 }
161 }
162
/**
 * Send Hyper Text Caching Protocol (HTCP) CLR requests over UDP for
 * each URL, routed per $wgHTCPRouting.
 *
 * NOTE(review): several lines are missing from this extract:
 * - the statement bringing $wgHTCPRouting / $wgHTCPMulticastTTL into
 *   scope (presumably a `global` declaration);
 * - the final argument of the second socket_set_option() call (the
 *   TTL value) — the call is visibly truncated mid-argument-list;
 * - the assignment of $ids (presumably
 *   `$ids = UIDGenerator::newSequentialPerNodeIDs(` given the
 *   argument line that follows), which current()/next() iterate below.
 * Confirm against the full REL1_30 source before modifying.
 *
 * @param string[] $urlArr Collection of URLs to purge
 * @throws MWException On a non-string URL or a malformed routing rule
 */
169 private static function HTCPPurge( array $urlArr ) {
171
172 // HTCP CLR operation
173 $htcpOpCLR = 4;
174
175 // @todo FIXME: PHP doesn't support these socket constants (include/linux/in.h)
176 if ( !defined( "IPPROTO_IP" ) ) {
177 define( "IPPROTO_IP", 0 );
178 define( "IP_MULTICAST_LOOP", 34 );
179 define( "IP_MULTICAST_TTL", 33 );
180 }
181
182 // pfsockopen doesn't work because we need set_sock_opt
183 $conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
// Purging is best-effort: on socket failure, log and bail out rather
// than throwing.
184 if ( !$conn ) {
185 $errstr = socket_strerror( socket_last_error() );
186 wfDebugLog( 'squid', __METHOD__ .
187 ": Error opening UDP socket: $errstr" );
188
189 return;
190 }
191
192 // Set socket options
193 socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
194 if ( $wgHTCPMulticastTTL != 1 ) {
195 // Set multicast time to live (hop count) option on socket
// NOTE(review): the TTL argument of this call was lost in extraction.
196 socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
198 }
199
200 // Get sequential trx IDs for packet loss counting
202 'squidhtcppurge', 32, count( $urlArr ), UIDGenerator::QUICK_VOLATILE
203 );
204
205 foreach ( $urlArr as $url ) {
206 if ( !is_string( $url ) ) {
207 throw new MWException( 'Bad purge URL' );
208 }
// Expand to a fully-qualified URL, then find the routing rule for it.
209 $url = self::expand( $url );
210 $conf = self::getRuleForURL( $url, $wgHTCPRouting );
211 if ( !$conf ) {
212 wfDebugLog( 'squid', __METHOD__ .
213 "No HTCP rule configured for URL {$url} , skipping" );
214 continue;
215 }
216
// A rule may be a single host/port pair or a list of them; wrap the
// single form so the loops below can treat both uniformly.
217 if ( isset( $conf['host'] ) && isset( $conf['port'] ) ) {
218 // Normalize single entries
219 $conf = [ $conf ];
220 }
221 foreach ( $conf as $subconf ) {
222 if ( !isset( $subconf['host'] ) || !isset( $subconf['port'] ) ) {
223 throw new MWException( "Invalid HTCP rule for URL $url\n" );
224 }
225 }
226
227 // Construct a minimal HTCP request diagram
228 // as per RFC 2756
229 // Opcode 'CLR', no response desired, no auth
// Consume one pre-generated transaction ID per URL.
230 $htcpTransID = current( $ids );
231 next( $ids );
232
233 $htcpSpecifier = pack( 'na4na*na8n',
234 4, 'HEAD', strlen( $url ), $url,
235 8, 'HTTP/1.0', 0 );
236
237 $htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
238 $htcpLen = 4 + $htcpDataLen + 2;
239
240 // Note! Squid gets the bit order of the first
241 // word wrong, wrt the RFC. Apparently no other
242 // implementation exists, so adapt to Squid
243 $htcpPacket = pack( 'nxxnCxNxxa*n',
244 $htcpLen, $htcpDataLen, $htcpOpCLR,
245 $htcpTransID, $htcpSpecifier, 2 );
246
247 wfDebugLog( 'squid', __METHOD__ .
248 "Purging URL $url via HTCP" );
// Fire the same packet at every configured host/port destination.
249 foreach ( $conf as $subconf ) {
250 socket_sendto( $conn, $htcpPacket, $htcpLen, 0,
251 $subconf['host'], $subconf['port'] );
252 }
253 }
254 }
255
/**
 * Expand a potentially local URL to a fully-qualified URL using the
 * internal protocol (PROTO_INTERNAL).
 *
 * @param string $url
 * @return string Fully-qualified URL
 */
270 public static function expand( $url ) {
271 return wfExpandUrl( $url, PROTO_INTERNAL );
272 }
273
/**
 * Find the HTCP routing rule to use for a given URL.
 *
 * @param string $url URL to match against each rule's regex
 * @param array $rules Map of regex => routing information
 * @return mixed Routing information of the first matching rule, or
 *   false when no rule matches
 */
private static function getRuleForURL( $url, $rules ) {
	foreach ( $rules as $regex => $routing ) {
		// An empty-string key acts as a catch-all rule.
		if ( $regex === '' ) {
			return $routing;
		}
		if ( preg_match( $regex, $url ) ) {
			return $routing;
		}
	}

	return false;
}
289}
290
295 // Keep class name for b/c
296}
Apache License, Version 2.0, January 2004 — Terms and Conditions for Use, Reproduction, and Distribution. Definitions: "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by this document.
$wgCdnReboundPurgeDelay
If set, any SquidPurge call on a URL or URLs will send a second purge no less than this many seconds ...
$wgHTCPRouting
Routing configuration for HTCP multicast purging.
$wgSquidServers
List of proxy servers to purge on changes; default port is 80.
$wgHTCPMulticastTTL
HTCP multicast TTL.
wfExpandUrl( $url, $defaultProto=PROTO_CURRENT)
Expand a potentially local URL to a fully-qualified URL.
wfDebugLog( $logGroup, $text, $dest='all', array $context=[])
Send a line to a supplementary debug log file, if configured, or main debug log if not.
Handles purging appropriate CDN URLs given a title (or titles)
static newFromTitles( $titles, $urlArr=[])
Create an update object from an array of Title objects, or a TitleArray object.
__construct(array $urlArr)
static HTCPPurge(array $urlArr)
Send Hyper Text Caching Protocol (HTCP) CLR requests.
static getRuleForURL( $url, $rules)
Find the HTCP routing rule to use for a given URL.
string[] $urls
Collection of URLs to purge.
static purge(array $urlArr)
Purges a list of CDN nodes defined in $wgSquidServers.
doUpdate()
Purges the list of URLs passed to the constructor.
static expand( $url)
Expand local URLs to fully-qualified URLs using the internal protocol and host defined in $wgInternal...
static newSimplePurge(Title $title)
merge(MergeableUpdate $update)
Merge this update with $update.
Job to purge a set of URLs from CDN.
static singleton( $wiki=false)
Class representing a list of titles The execute() method checks them all for existence and adds them ...
Definition LinkBatch.php:34
MediaWiki exception.
MediaWikiServices is the service locator for the application scope of MediaWiki.
An HTTP 1.0 client built for the purposes of purging Squid and Varnish.
Represents a title within MediaWiki.
Definition Title.php:39
const QUICK_VOLATILE
static newSequentialPerNodeIDs( $bucket, $bits, $count, $flags=0)
Return IDs that are sequential only for this node and bucket.
When a variable name is used in a function, it is silently declared as a new local variable, masking the global.
Definition design.txt:95
This document is intended to provide useful advice for parties seeking to redistribute MediaWiki to end users It s targeted particularly at maintainers for Linux since it s been observed that distribution packages of MediaWiki often break We ve consistently had to recommend that users seeking support use official tarballs instead of their distribution s and this often solves whatever problem the user is having It would be nice if this could such as
const PROTO_INTERNAL
Definition Defines.php:225
const NS_SPECIAL
Definition Defines.php:54
the array() calling protocol came about after MediaWiki 1.4rc1.
namespace and then decline to actually register it file or subcat img or subcat $title
Definition hooks.txt:962
injection txt This is an overview of how MediaWiki makes use of dependency injection The design described here grew from the discussion of RFC T384 The term dependency this means that anything an object needs to operate should be injected from the the object itself should only know narrow no concrete implementation of the logic it relies on The requirement to inject everything typically results in an architecture that based on two main types of and essentially stateless service objects that use other service objects to operate on the value objects As of the beginning MediaWiki is only starting to use the DI approach Much of the code still relies on global state or direct resulting in a highly cyclical dependency which acts as the top level factory for services in MediaWiki which can be used to gain access to default instances of various services MediaWikiServices however also allows new services to be defined and default services to be redefined Services are defined or redefined by providing a callback the instantiator that will return a new instance of the service When it will create an instance of MediaWikiServices and populate it with the services defined in the files listed by thereby bootstrapping the DI framework Per $wgServiceWiringFiles lists includes ServiceWiring php
Definition injection.txt:37
Interface that deferrable updates should implement.
Interface that deferrable updates can implement.
linkcache txt The LinkCache class maintains a list of article titles and the information about whether or not the article exists in the database This is used to mark up links when displaying a page If the same link appears more than once on any page then it only has to be looked up once In most cases link lookups are done in batches with the LinkBatch class or the equivalent in so the link cache is mostly useful for short snippets of parsed and for links in the navigation areas of the skin The link cache was formerly used to track links used in a document for the purposes of updating the link tables This application is now deprecated To create a you can use the following $titles
Definition linkcache.txt:17
$batch execute()