3if ( !defined(
'MEDIAWIKI' ) ) {
7use \MediaWiki\MediaWikiServices;
38 $text = str_replace(
'.',
'.', $text );
55 function filter(
array $links,
Title $title =
null, $preventLog =
false, $mode =
'check' ) {
56 $statsd = MediaWikiServices::getInstance()->getStatsdDataFactory();
57 $cache = ObjectCache::getLocalClusterInstance();
74 sha1( implode(
"\n", $links ) ),
78 $knownNonMatchAsOf =
$cache->get( $key );
79 if ( $mode ===
'check' ) {
80 if ( $knownNonMatchAsOf ) {
81 $statsd->increment(
'spamblacklist.check-stash.hit' );
85 $statsd->increment(
'spamblacklist.check-stash.miss' );
87 } elseif ( $mode ===
'stash' ) {
88 if ( $knownNonMatchAsOf && ( time() - $knownNonMatchAsOf ) < self::STASH_AGE_DYING ) {
96 if ( count( $blacklists ) ) {
98 $newLinks = array_map( [ $this,
'antiSpoof' ], $links );
101 if ( $title !==
null ) {
103 $addedLinks = array_diff( $newLinks, $oldLinks );
106 $addedLinks = $newLinks;
109 wfDebugLog(
'SpamBlacklist',
"Old URLs: " . implode(
', ', $oldLinks ) );
110 wfDebugLog(
'SpamBlacklist',
"New URLs: " . implode(
', ', $newLinks ) );
111 wfDebugLog(
'SpamBlacklist',
"Added URLs: " . implode(
', ', $addedLinks ) );
113 if ( !$preventLog ) {
117 $links = implode(
"\n", $addedLinks );
119 # Strip whitelisted URLs from the match
120 if ( is_array( $whitelists ) ) {
121 wfDebugLog(
'SpamBlacklist',
"Excluding whitelisted URLs from " . count( $whitelists ) .
122 " regexes: " . implode(
', ', $whitelists ) .
"\n" );
123 foreach ( $whitelists as $regex ) {
125 $newLinks = preg_replace( $regex,
'', $links );
127 if ( is_string( $newLinks ) ) {
135 wfDebugLog(
'SpamBlacklist',
"Checking text against " . count( $blacklists ) .
136 " regexes: " . implode(
', ', $blacklists ) .
"\n" );
138 foreach ( $blacklists as $regex ) {
141 $check = ( preg_match_all( $regex, $links,
$matches ) > 0 );
148 $fullLineRegex = substr( $regex, 0, strrpos( $regex,
'/' ) ) .
'.*/Sim';
149 preg_match_all( $fullLineRegex, $links, $fullUrls );
150 $imploded = implode(
' ', $fullUrls[0] );
151 wfDebugLog(
'SpamBlacklistHit',
"$ip caught submitting spam: $imploded\n" );
152 if ( !$preventLog ) {
155 if ( $retVal ===
false ) {
158 $retVal = array_merge( $retVal, $fullUrls[1] );
161 if ( is_array( $retVal ) ) {
162 $retVal = array_unique( $retVal );
168 if ( $retVal ===
false ) {
170 $cache->set( $key, time(), self::STASH_TTL );
171 if ( $mode ===
'stash' ) {
172 $statsd->increment(
'spamblacklist.check-stash.store' );
180 global $wgSpamBlacklistEventLogging;
181 return $wgSpamBlacklistEventLogging && class_exists(
'EventLogging' );
196 $removedLinks = array_diff( $oldLinks, $newLinks );
197 foreach ( $addedLinks as $url ) {
201 foreach ( $removedLinks as $url ) {
220 'pageId' => $title->getArticleID(),
221 'pageNamespace' => $title->getNamespace(),
222 'userId' => $user->getId(),
223 'userText' => $user->getName(),
227 $this->urlChangeLog = [];
229 DeferredUpdates::addCallableUpdate(
function () use ( $changes, $baseInfo ) {
230 foreach ( $changes as $change ) {
231 EventLogging::logEvent(
232 'ExternalLinksChange',
248 if ( !isset( $parsed[
'host'] ) ) {
249 wfDebugLog(
'SpamBlacklist',
"Unable to parse $url" );
254 'protocol' => $parsed[
'scheme'],
255 'domain' => $parsed[
'host'],
256 'path' => isset( $parsed[
'path'] ) ? $parsed[
'path'] :
'',
257 'query' => isset( $parsed[
'query'] ) ? $parsed[
'query'] :
'',
258 'fragment' => isset( $parsed[
'fragment'] ) ? $parsed[
'fragment'] :
'',
261 $this->urlChangeLog[] = $info;
273 $cache = ObjectCache::getMainWANInstance();
275 return $cache->getWithSetCallback(
277 $cache->makeKey(
'external-link-list', $title->getLatestRevID() ),
281 $setOpts += Database::getCacheSetOptions(
$dbr );
283 return $dbr->selectFieldValues(
286 [
'el_from' => $title->getArticleID() ],
294 $this->
filter( $entries, $title,
true ,
'stash' );
303 return '/(?:https?:)?\/\/+[a-z0-9_\-.]*(';
313 return ')' . parent::getRegexEnd( $batchSize );
323 global $wgUser, $wgLogSpamBlacklistHits;
324 if ( $wgLogSpamBlacklistHits ) {
326 $logEntry->setPerformer( $wgUser );
327 $logEntry->setTarget( $title );
328 $logEntry->setParameters( [
331 $logid = $logEntry->insert();
332 $log =
new LogPage(
'spamblacklist' );
333 if ( $log->isRestricted() ) {
337 && class_exists(
'CheckUserHooks' )
339 $rc = $logEntry->getRecentChange( $logid );
340 CheckUserHooks::updateCheckUserData( $rc );
345 $logEntry->publish( $logid,
"rc" );
wfParseUrl( $url)
parse_url() work-alike, but non-broken.
wfGetDB( $db, $groups=[], $wiki=false)
Get a Database object.
wfSuppressWarnings( $end=false)
Reference-counted warning suppression.
wfDebugLog( $logGroup, $text, $dest='all', array $context=[])
Send a line to a supplementary debug log file, if configured, or main debug log if not.
if(defined( 'MW_SETUP_CALLBACK')) $fname
Customization point after all loading (constants, functions, classes, DefaultSettings,...
if(! $wgDBerrorLogTZ) $wgRequest
Base class for different kinds of blacklists.
getBlacklists()
Fetch local and (possibly cached) remote blacklists.
getWhitelists()
Returns the (local) whitelist.
Class to simplify the use of log pages.
Class for creating new log entries and inserting them into the database.
logUrlChange( $url, $action)
Queue log data about change for a url addition or removal.
getCurrentLinks(Title $title)
Look up the links currently in the article, so we can ignore them on a second run.
filter(array $links, Title $title=null, $preventLog=false, $mode='check')
doLogging(User $user, Title $title, $revId)
Actually push the url change events post-save.
getRegexStart()
Returns the start of the regex for matches.
warmCachesForFilter(Title $title, array $entries)
logFilterHit( $title, $url)
Logs the filter hit to Special:Log if $wgLogSpamBlacklistHits is enabled.
logUrlChanges( $oldLinks, $newLinks, $addedLinks)
Diff added/removed urls and generate events for them.
getBlacklistType()
Returns the code for the blacklist implementation.
antiSpoof( $text)
Apply some basic anti-spoofing normalization to the links before they get filtered; see the associated bug report for background.
array[] $urlChangeLog
Changes to external links, for logging purposes.
getRegexEnd( $batchSize)
Returns the end of the regex for matches.
Represents a title within MediaWiki.
The User object encapsulates all of the user-specific settings (user_id, name, rights,...
injection.txt: This is an overview of how MediaWiki makes use of dependency injection. The design described here grew from the discussion of RFC T384. The term "dependency injection" means that anything an object needs to operate should be injected from the outside; the object itself should only know narrow interfaces, and no concrete implementation of the logic it relies on. The requirement to inject everything typically results in an architecture based on two main types of objects: simple value objects, and essentially stateless service objects that use other service objects to operate on the value objects. As of the beginning, MediaWiki is only starting to use the DI approach — much of the code still relies on global state or direct instantiation, resulting in a highly cyclical dependency graph. MediaWikiServices acts as the top-level factory for services in MediaWiki and can be used to gain access to default instances of various services. MediaWikiServices, however, also allows new services to be defined and default services to be redefined. Services are defined or redefined by providing a callback function.
The wiki should then use memcached to cache various data. To use multiple servers, just add more items to the array. To increase the weight of a server, make its entry a sub-array, e.g. array("192.168.0.1:11211", 2).