use MediaWiki\MediaWikiServices;
use Wikimedia\Assert\Assert;

/**
 * Handles purging the appropriate CDN URLs given a title (or titles).
 */
class CdnCacheUpdate implements DeferrableUpdate, MergeableUpdate {
	/** @var string[] Collection of URLs to purge */
	protected $urls = [];

	/**
	 * @param string[] $urlArr Collection of URLs to purge
	 */
	public function __construct( array $urlArr ) {
		$this->urls = $urlArr;
	}
	public function merge( MergeableUpdate $update ) {
		/** @var CdnCacheUpdate $update */
		Assert::parameterType( __CLASS__, $update, '$update' );

		$this->urls = array_merge( $this->urls, $update->urls );
	}
	/**
	 * Create an update object from an array of Title objects, or a TitleArray object.
	 */
	public static function newFromTitles( $titles, $urlArr = [] ) {
		/** @var Title $title */
		foreach ( $titles as $title ) {
			$urlArr = array_merge( $urlArr, $title->getCdnUrls() );
		}

		return new CdnCacheUpdate( $urlArr );
	}
	/**
	 * Purges the list of URLs passed to the constructor.
	 */
	public function doUpdate() {
		global $wgCdnReboundPurgeDelay;

		self::purge( $this->urls );

		if ( $wgCdnReboundPurgeDelay > 0 ) {
			// Queue a delayed rebound purge of the same URLs to catch
			// stale responses that raced the initial purge
			JobQueueGroup::singleton()->lazyPush( new CdnPurgeJob(
				Title::makeTitle( NS_SPECIAL, 'Badtitle/' . __CLASS__ ),
				[
					'urls' => $this->urls,
					'jobReleaseTimestamp' => time() + $wgCdnReboundPurgeDelay
				]
			) );
		}
	}
	/**
	 * Purges a list of CDN nodes defined in $wgSquidServers.
	 *
	 * @param string[] $urlArr List of full URLs to purge
	 */
	public static function purge( array $urlArr ) {
		global $wgSquidServers, $wgHTCPRouting;

		if ( !$urlArr ) {
			return;
		}

		// Remove duplicate URLs from the list
		$urlArr = array_unique( $urlArr );

		wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) );

		// Reliably broadcast the purge to all edge nodes
		$relayer = MediaWikiServices::getInstance()->getEventRelayerGroup()
			->getRelayer( 'cdn-url-purges' );
		$ts = microtime( true );
		$relayer->notifyMulti(
			'cdn-url-purges',
			array_map(
				function ( $url ) use ( $ts ) {
					return [ 'url' => $url, 'timestamp' => $ts ];
				},
				$urlArr
			)
		);

		// Send lossy UDP multicast purges if enabled
		if ( $wgHTCPRouting ) {
			self::HTCPPurge( $urlArr );
		}

		// Do direct server purges if enabled (this does not scale very well)
		if ( $wgSquidServers ) {
			// Maximum number of parallel connections per proxy
			$maxSocketsPerSquid = 8;
			// Number of requests to send per socket; opening a socket is slow,
			// so batching several hundred purges per connection pays off
			$urlsPerSocket = 400;
			$socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
			if ( $socketsPerSquid > $maxSocketsPerSquid ) {
				$socketsPerSquid = $maxSocketsPerSquid;
			}

			$pool = new SquidPurgeClientPool;
			$chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
			foreach ( $wgSquidServers as $server ) {
				foreach ( $chunks as $chunk ) {
					$client = new SquidPurgeClient( $server );
					foreach ( $chunk as $url ) {
						$client->queuePurge( $url );
					}
					$pool->addClient( $client );
				}
			}

			$pool->run();
		}
	}
	/**
	 * Send Hyper Text Caching Protocol (HTCP) CLR requests.
	 *
	 * @throws MWException
	 * @param string[] $urlArr Collection of URLs to purge
	 */
	public static function HTCPPurge( array $urlArr ) {
		global $wgHTCPRouting, $wgHTCPMulticastTTL;

		// HTCP CLR operation
		$htcpOpCLR = 4;

		// @todo FIXME: PHP doesn't support these socket constants (include/linux/in.h)
		if ( !defined( "IPPROTO_IP" ) ) {
			define( "IPPROTO_IP", 0 );
			define( "IP_MULTICAST_LOOP", 34 );
			define( "IP_MULTICAST_TTL", 33 );
		}

		// pfsockopen doesn't work because we need socket_set_option()
		$conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
		if ( !$conn ) {
			$errstr = socket_strerror( socket_last_error() );
			wfDebugLog( 'squid', __METHOD__ .
				": Error opening UDP socket: $errstr" );

			return;
		}

		// Don't loop packets back to the local host
		socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
		if ( $wgHTCPMulticastTTL != 1 ) {
			// Set the multicast time-to-live (hop count) if non-default
			socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
				$wgHTCPMulticastTTL );
		}

		// Get sequential transaction IDs for packet loss counting
		$ids = UIDGenerator::newSequentialPerNodeIDs(
			'squidhtcppurge', 32, count( $urlArr ), UIDGenerator::QUICK_VOLATILE
		);

		foreach ( $urlArr as $url ) {
			if ( !is_string( $url ) ) {
				throw new MWException( 'Bad purge URL' );
			}
			$url = self::expand( $url );
			$conf = self::getRuleForURL( $url, $wgHTCPRouting );
			if ( !$conf ) {
				wfDebugLog( 'squid', __METHOD__ .
					": No HTCP rule configured for URL {$url}, skipping" );
				continue;
			}

			if ( isset( $conf['host'] ) && isset( $conf['port'] ) ) {
				// Normalize single entries
				$conf = [ $conf ];
			}
			foreach ( $conf as $subconf ) {
				if ( !isset( $subconf['host'] ) || !isset( $subconf['port'] ) ) {
					throw new MWException( "Invalid HTCP rule for URL $url\n" );
				}
			}

			// Construct a minimal HTCP request diagram as per RFC 2756:
			// opcode 'CLR', no response desired, no auth
			$htcpTransID = current( $ids );
			next( $ids );

			$htcpSpecifier = pack( 'na4na*na8n',
				4, 'HEAD', strlen( $url ), $url,
				8, 'HTTP/1.0', 0 );

			$htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
			$htcpLen = 4 + $htcpDataLen + 2;

			// Note! Squid gets the bit order of the first word wrong,
			// wrt the RFC. Apparently no other implementation exists,
			// so adapt to Squid
			$htcpPacket = pack( 'nxxnCxNxxa*n',
				$htcpLen, $htcpDataLen, $htcpOpCLR,
				$htcpTransID, $htcpSpecifier, 2 );

			wfDebugLog( 'squid', __METHOD__ .
				": Purging URL $url via HTCP" );
			foreach ( $conf as $subconf ) {
				socket_sendto( $conn, $htcpPacket, $htcpLen, 0,
					$subconf['host'], $subconf['port'] );
			}
		}
	}
	/**
	 * Find the HTCP routing rule to use for a given URL.
	 *
	 * @param string $url URL to match
	 * @param array $rules Array of rules, see $wgHTCPRouting for format and behavior
	 * @return mixed Element of $rules that matched, or false if nothing matched
	 */
	private static function getRuleForURL( $url, $rules ) {
		foreach ( $rules as $regex => $routing ) {
			if ( $regex === '' || preg_match( $regex, $url ) ) {
				return $routing;
			}
		}

		return false;
	}
}
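In normal use this update is queued rather than run directly; DeferredUpdates coalesces queued CdnCacheUpdate instances via merge() before doUpdate() fires. A minimal sketch, assuming a hypothetical $title and an illustrative URL:

	// DeferredUpdates merges these into a single CdnCacheUpdate before running it
	DeferredUpdates::addUpdate( CdnCacheUpdate::newSimplePurge( $title ) );
	DeferredUpdates::addUpdate( new CdnCacheUpdate( [ 'http://wiki.example.org/wiki/Foo' ] ) );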
static HTCPPurge(array $urlArr)
Send Hyper Text Caching Protocol (HTCP) CLR requests.
$wgSquidServers
List of proxy servers to purge on changes; default port is 80.
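A configuration sketch for direct purges against a single local proxy (the address is illustrative):

	$wgSquidServers = [ '127.0.0.1' ]; // port 80 is assumed when none is given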
static purge(array $urlArr)
Purges a list of CDN nodes defined in $wgSquidServers.
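purge() can also be invoked directly with fully-qualified URLs, for example from a maintenance script (the URL is illustrative):

	CdnCacheUpdate::purge( [ 'http://wiki.example.org/wiki/Main_Page' ] );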
DeferrableUpdate
Interface that deferrable updates should implement.
static newSequentialPerNodeIDs($bucket, $bits, $count, $flags=0)
Return IDs that are sequential only for this node and bucket.
static newSimplePurge(Title $title)
Create an update object from a single Title's CDN URLs.
MergeableUpdate
Interface that deferrable updates can implement.
wfExpandUrl($url, $defaultProto=PROTO_CURRENT)
Expand a potentially local URL to a fully-qualified URL.
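A sketch of what expansion does here, assuming the wiki is served from wiki.example.org:

	// A relative URL gains protocol and host; already-absolute URLs pass through
	$full = wfExpandUrl( '/wiki/Foo', PROTO_INTERNAL ); // 'http://wiki.example.org/wiki/Foo'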
static getRuleForURL($url, $rules)
Find the HTCP routing rule to use for a given URL.
wfDebugLog($logGroup, $text, $dest= 'all', array $context=[])
Send a line to a supplementary debug log file, if configured, or main debug log if not...
string[] $urls
Collection of URLs to purge.
SquidPurgeClient
An HTTP 1.0 client built for the purposes of purging Squid and Varnish.
doUpdate()
Purges the list of URLs passed to the constructor.
getCdnUrls()
Get a list of URLs to purge from the CDN cache when this page changes.
merge(MergeableUpdate $update)
Merge this update with $update.
__construct(array $urlArr)
Construct an update holding the given collection of URLs to purge.
static expand($url)
Expand local URLs to fully-qualified URLs using the internal protocol and host defined in $wgInternalServer.
$wgHTCPMulticastTTL
HTCP multicast TTL.
static singleton($wiki=false)
Get a JobQueueGroup instance for the given wiki; used here to queue the rebound purge job.
$wgHTCPRouting
Routing configuration for HTCP multicast purging.
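Rules map URL regexes to multicast destinations; getRuleForURL() returns the first match, and an empty-string pattern acts as a catch-all. An illustrative sketch (hosts and ports are made up):

	$wgHTCPRouting = [
		'|^https?://upload\.example\.org|' => [ 'host' => '239.128.0.113', 'port' => 4827 ],
		'' => [ 'host' => '239.128.0.112', 'port' => 4827 ], // catch-all rule
	];

A rule may also map to a list of host/port arrays, in which case the CLR packet is sent to each destination.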
static makeTitle($ns, $title, $fragment= '', $interwiki= '')
Create a new Title from a namespace index and a DB key.
CdnPurgeJob
Job to purge a set of URLs from CDN.
static newFromTitles($titles, $urlArr=[])
Create an update object from an array of Title objects, or a TitleArray object.
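A usage sketch with hypothetical page names:

	$titles = [
		Title::newFromText( 'Main Page' ),
		Title::newFromText( 'Help:Contents' ),
	];
	DeferredUpdates::addUpdate( CdnCacheUpdate::newFromTitles( $titles ) );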
$wgCdnReboundPurgeDelay
If set, any SquidPurge call on a URL or URLs will send a second purge no less than this many seconds later via the job queue.
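An illustrative setting (the delay value is made up); each purge is then repeated via a CdnPurgeJob no sooner than this many seconds later:

	$wgCdnReboundPurgeDelay = 11; // rebound purge roughly 11 seconds after the first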