MediaWiki REL1_33
CdnCacheUpdate.php
<?php

use MediaWiki\MediaWikiServices;
use Wikimedia\Assert\Assert;

/**
 * Handles purging appropriate CDN URLs given a title (or titles).
 */
class CdnCacheUpdate implements MergeableUpdate {
	/** @var string[] Collection of URLs to purge */
	protected $urls = [];

	public function __construct( array $urlArr ) {
		$this->urls = $urlArr;
	}
	public function merge( MergeableUpdate $update ) {
		/** @var CdnCacheUpdate $update */
		Assert::parameterType( __CLASS__, $update, '$update' );

		$this->urls = array_merge( $this->urls, $update->urls );
	}
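	/*
	 * Illustrative sketch (URLs hypothetical): DeferredUpdates coalesces
	 * queued updates of the same class through merge(), so several
	 * CdnCacheUpdate instances queued during one request become a single
	 * batched purge.
	 *
	 * @code
	 * DeferredUpdates::addUpdate( new CdnCacheUpdate( [ 'https://wiki.example.org/wiki/A' ] ) );
	 * DeferredUpdates::addUpdate( new CdnCacheUpdate( [ 'https://wiki.example.org/wiki/B' ] ) );
	 * // At the end of the request, merge() has combined both into one
	 * // update whose $urls is [ A, B ], purged by a single doUpdate() call.
	 * @endcode
	 */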
	/**
	 * Create an update object from an array of Title objects, or a TitleArray object.
	 *
	 * @param Traversable|Title[] $titles
	 * @param string[] $urlArr
	 * @return CdnCacheUpdate
	 */
	public static function newFromTitles( $titles, $urlArr = [] ) {
		( new LinkBatch( $titles ) )->execute();

		foreach ( $titles as $title ) {
			$urlArr = array_merge( $urlArr, $title->getCdnUrls() );
		}

		return new CdnCacheUpdate( $urlArr );
	}
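	/*
	 * Usage sketch (page name hypothetical): build a purge covering the
	 * CDN URLs of a set of titles and queue it as a deferred update.
	 *
	 * @code
	 * $titles = [ Title::newFromText( 'Main_Page' ) ];
	 * DeferredUpdates::addUpdate( CdnCacheUpdate::newFromTitles( $titles ) );
	 * @endcode
	 */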
	/**
	 * Purges the list of URLs passed to the constructor.
	 */
	public function doUpdate() {
		global $wgCdnReboundPurgeDelay;

		self::purge( $this->urls );

		if ( $wgCdnReboundPurgeDelay > 0 ) {
			JobQueueGroup::singleton()->lazyPush( new CdnPurgeJob(
				Title::makeTitle( NS_SPECIAL, 'Badtitle/' . __CLASS__ ),
				[
					'urls' => $this->urls,
					'jobReleaseTimestamp' => time() + $wgCdnReboundPurgeDelay
				]
			) );
		}
	}
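	/*
	 * Configuration sketch (value illustrative): with a rebound delay set,
	 * doUpdate() purges immediately and also queues a CdnPurgeJob that sends
	 * a second purge no less than this many seconds later, catching stale
	 * re-fills served from lagged database replicas in the meantime.
	 *
	 * @code
	 * $wgCdnReboundPurgeDelay = 11; // seconds; 0 (the default) disables the rebound purge
	 * @endcode
	 */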
	/**
	 * Purges a list of CDN nodes defined in $wgSquidServers.
	 *
	 * @param string[] $urlArr List of full URLs to purge
	 */
	public static function purge( array $urlArr ) {
		global $wgSquidServers, $wgHTCPRouting;

		if ( !$urlArr ) {
			return;
		}

		// Remove duplicate URLs from list
		$urlArr = array_unique( $urlArr );

		wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) );

		// Reliably broadcast the purge to all edge nodes
		$relayer = MediaWikiServices::getInstance()->getEventRelayerGroup()
			->getRelayer( 'cdn-url-purges' );
		$ts = microtime( true );
		$relayer->notifyMulti(
			'cdn-url-purges',
			array_map(
				function ( $url ) use ( $ts ) {
					return [
						'url' => $url,
						'timestamp' => $ts,
					];
				},
				$urlArr
			)
		);

		// Send lossy UDP broadcasting if enabled
		if ( $wgHTCPRouting ) {
			self::HTCPPurge( $urlArr );
		}

		// Do direct server purges if enabled (this does not scale very well)
		if ( $wgSquidServers ) {
			// Maximum number of parallel connections per squid
			$maxSocketsPerSquid = 8;
			// Number of requests to send per socket
			// 400 seems to be a good tradeoff, opening a socket takes a while
			$urlsPerSocket = 400;
			$socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
			if ( $socketsPerSquid > $maxSocketsPerSquid ) {
				$socketsPerSquid = $maxSocketsPerSquid;
			}

			$pool = new SquidPurgeClientPool;
			$chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
			foreach ( $wgSquidServers as $server ) {
				foreach ( $chunks as $chunk ) {
					$client = new SquidPurgeClient( $server );
					foreach ( $chunk as $url ) {
						$client->queuePurge( $url );
					}
					$pool->addClient( $client );
				}
			}

			$pool->run();
		}
	}
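	/*
	 * Configuration sketch (hostnames illustrative): every server listed in
	 * $wgSquidServers receives each URL as an HTTP PURGE request, using at
	 * most 8 parallel sockets per server and roughly 400 URLs per socket,
	 * as computed above.
	 *
	 * @code
	 * $wgSquidServers = [ '127.0.0.1:3128', 'cache2.example.org' ]; // default port is 80
	 * @endcode
	 */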
	/**
	 * Send Hyper Text Caching Protocol (HTCP) CLR requests.
	 *
	 * @throws MWException
	 * @param string[] $urlArr Collection of URLs to purge
	 */
	private static function HTCPPurge( array $urlArr ) {
		global $wgHTCPRouting, $wgHTCPMulticastTTL;

		// HTCP CLR operation
		$htcpOpCLR = 4;

		// @todo FIXME: PHP doesn't support these socket constants (include/linux/in.h)
		if ( !defined( "IPPROTO_IP" ) ) {
			define( "IPPROTO_IP", 0 );
			define( "IP_MULTICAST_LOOP", 34 );
			define( "IP_MULTICAST_TTL", 33 );
		}

		// pfsockopen doesn't work because we need set_sock_opt
		$conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
		if ( !$conn ) {
			$errstr = socket_strerror( socket_last_error() );
			wfDebugLog( 'squid', __METHOD__ .
				": Error opening UDP socket: $errstr" );

			return;
		}

		// Set socket options
		socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
		if ( $wgHTCPMulticastTTL != 1 ) {
			// Set multicast time to live (hop count) option on socket
			socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
				$wgHTCPMulticastTTL );
		}

		// Get sequential trx IDs for packet loss counting
		$ids = UIDGenerator::newSequentialPerNodeIDs(
			'squidhtcppurge', 32, count( $urlArr ), UIDGenerator::QUICK_VOLATILE
		);

		foreach ( $urlArr as $url ) {
			if ( !is_string( $url ) ) {
				throw new MWException( 'Bad purge URL' );
			}
			$url = self::expand( $url );
			$conf = self::getRuleForURL( $url, $wgHTCPRouting );
			if ( !$conf ) {
				wfDebugLog( 'squid', __METHOD__ .
					": No HTCP rule configured for URL {$url}, skipping" );
				continue;
			}

			if ( isset( $conf['host'] ) && isset( $conf['port'] ) ) {
				// Normalize single entries
				$conf = [ $conf ];
			}
			foreach ( $conf as $subconf ) {
				if ( !isset( $subconf['host'] ) || !isset( $subconf['port'] ) ) {
					throw new MWException( "Invalid HTCP rule for URL $url\n" );
				}
			}

			// Construct a minimal HTCP request diagram
			// as per RFC 2756
			// Opcode 'CLR', no response desired, no auth
			$htcpTransID = current( $ids );
			next( $ids );

			$htcpSpecifier = pack( 'na4na*na8n',
				4, 'HEAD', strlen( $url ), $url,
				8, 'HTTP/1.0', 0 );

			$htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
			$htcpLen = 4 + $htcpDataLen + 2;

			// Note! Squid gets the bit order of the first
			// word wrong, wrt the RFC. Apparently no other
			// implementation exists, so adapt to Squid
			$htcpPacket = pack( 'nxxnCxNxxa*n',
				$htcpLen, $htcpDataLen, $htcpOpCLR,
				$htcpTransID, $htcpSpecifier, 2 );

			wfDebugLog( 'squid', __METHOD__ .
				": Purging URL $url via HTCP" );
			foreach ( $conf as $subconf ) {
				socket_sendto( $conn, $htcpPacket, $htcpLen, 0,
					$subconf['host'], $subconf['port'] );
			}
		}
	}
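	/*
	 * Routing sketch (addresses illustrative): $wgHTCPRouting maps URL
	 * regexes to one or more host/port destinations; the empty-string key
	 * matches any URL, as getRuleForURL() below shows.
	 *
	 * @code
	 * $wgHTCPRouting = [
	 *     '|^https?://upload\.example\.org|' => [ 'host' => '239.128.0.113', 'port' => 4827 ],
	 *     '' => [ 'host' => '239.128.0.112', 'port' => 4827 ],
	 * ];
	 * @endcode
	 */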
	/** Expand local URLs to fully-qualified URLs using the internal protocol and host defined in $wgInternalServer. */
	public static function expand( $url ) {
		return wfExpandUrl( $url, PROTO_INTERNAL );
	}
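	/*
	 * Behavior sketch (host illustrative): a site-relative URL is qualified
	 * with the internal protocol and host; already-absolute URLs pass through.
	 *
	 * @code
	 * CdnCacheUpdate::expand( '/wiki/Main_Page' );
	 * // => e.g. 'http://internal.example.org/wiki/Main_Page'
	 * @endcode
	 */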
	/**
	 * Find the HTCP routing rule to use for a given URL.
	 *
	 * @param string $url URL to match
	 * @param array $rules Array of rules, see $wgHTCPRouting for format and behavior
	 * @return mixed Element of $rules that matched, or false if nothing matched
	 */
	private static function getRuleForURL( $url, $rules ) {
		foreach ( $rules as $regex => $routing ) {
			if ( $regex === '' || preg_match( $regex, $url ) ) {
				return $routing;
			}
		}

		return false;
	}
}