Go to the documentation of this file.
3 if ( !defined(
'MEDIAWIKI' ) ) {
38 $text = str_replace(
'.',
'.', $text );
56 $statsd = MediaWikiServices::getInstance()->getStatsdDataFactory();
74 sha1( implode(
"\n", $links ) ),
78 $knownNonMatchAsOf =
$cache->get( $key );
79 if ( $mode ===
'check' ) {
80 if ( $knownNonMatchAsOf ) {
81 $statsd->increment(
'spamblacklist.check-stash.hit' );
85 $statsd->increment(
'spamblacklist.check-stash.miss' );
87 } elseif ( $mode ===
'stash' ) {
88 if ( $knownNonMatchAsOf && ( time() - $knownNonMatchAsOf ) < self::STASH_AGE_DYING ) {
96 if (
count( $blacklists ) ) {
98 $newLinks = array_map( [ $this,
'antiSpoof' ], $links );
103 $addedLinks = array_diff( $newLinks, $oldLinks );
106 $addedLinks = $newLinks;
109 wfDebugLog(
'SpamBlacklist',
"Old URLs: " . implode(
', ', $oldLinks ) );
110 wfDebugLog(
'SpamBlacklist',
"New URLs: " . implode(
', ', $newLinks ) );
111 wfDebugLog(
'SpamBlacklist',
"Added URLs: " . implode(
', ', $addedLinks ) );
113 if ( !$preventLog ) {
117 $links = implode(
"\n", $addedLinks );
119 # Strip whitelisted URLs from the match
120 if ( is_array( $whitelists ) ) {
121 wfDebugLog(
'SpamBlacklist',
"Excluding whitelisted URLs from " .
count( $whitelists ) .
122 " regexes: " . implode(
', ', $whitelists ) .
"\n" );
123 foreach ( $whitelists
as $regex ) {
124 Wikimedia\suppressWarnings();
125 $newLinks = preg_replace( $regex,
'', $links );
126 Wikimedia\restoreWarnings();
127 if ( is_string( $newLinks ) ) {
135 wfDebugLog(
'SpamBlacklist',
"Checking text against " .
count( $blacklists ) .
136 " regexes: " . implode(
', ', $blacklists ) .
"\n" );
138 foreach ( $blacklists
as $regex ) {
139 Wikimedia\suppressWarnings();
141 $check = ( preg_match_all( $regex, $links,
$matches ) > 0 );
142 Wikimedia\restoreWarnings();
148 $fullLineRegex = substr( $regex, 0, strrpos( $regex,
'/' ) ) .
'.*/Sim';
149 preg_match_all( $fullLineRegex, $links, $fullUrls );
150 $imploded = implode(
' ', $fullUrls[0] );
151 wfDebugLog(
'SpamBlacklistHit',
"$ip caught submitting spam: $imploded\n" );
152 if ( !$preventLog ) {
155 if ( $retVal ===
false ) {
158 $retVal = array_merge( $retVal, $fullUrls[1] );
161 if ( is_array( $retVal ) ) {
162 $retVal = array_unique( $retVal );
168 if ( $retVal ===
false ) {
170 $cache->set( $key, time(), self::STASH_TTL );
171 if ( $mode ===
'stash' ) {
172 $statsd->increment(
'spamblacklist.check-stash.store' );
180 global $wgSpamBlacklistEventLogging;
181 return $wgSpamBlacklistEventLogging &&
197 $removedLinks = array_diff( $oldLinks, $newLinks );
198 foreach ( $addedLinks
as $url ) {
202 foreach ( $removedLinks
as $url ) {
221 'pageId' =>
$title->getArticleID(),
222 'pageNamespace' =>
$title->getNamespace(),
223 'userId' =>
$user->getId(),
224 'userText' =>
$user->getName(),
228 $this->urlChangeLog = [];
231 foreach ( $changes
as $change ) {
232 EventLogging::logEvent(
233 'ExternalLinksChange',
249 if ( !isset( $parsed[
'host'] ) ) {
250 wfDebugLog(
'SpamBlacklist',
"Unable to parse $url" );
255 'protocol' => $parsed[
'scheme'],
256 'domain' => $parsed[
'host'],
257 'path' => $parsed[
'path'] ??
'',
258 'query' => $parsed[
'query'] ??
'',
259 'fragment' => $parsed[
'fragment'] ??
'',
262 $this->urlChangeLog[] = $info;
276 return $cache->getWithSetCallback(
278 $cache->makeKey(
'external-link-list',
$title->getLatestRevID() ),
282 $setOpts += Database::getCacheSetOptions(
$dbr );
284 return $dbr->selectFieldValues(
287 [
'el_from' =>
$title->getArticleID() ],
304 return '/(?:https?:)?\/\/+[a-z0-9_\-.]*(';
314 return ')' . parent::getRegexEnd( $batchSize );
324 global $wgUser, $wgLogSpamBlacklistHits;
325 if ( $wgLogSpamBlacklistHits ) {
327 $logEntry->setPerformer( $wgUser );
328 $logEntry->setTarget(
$title );
329 $logEntry->setParameters( [
332 $logid = $logEntry->insert();
333 $log =
new LogPage(
'spamblacklist' );
334 if ( $log->isRestricted() ) {
340 $rc = $logEntry->getRecentChange( $logid );
341 CheckUserHooks::updateCheckUserData( $rc );
346 $logEntry->publish( $logid,
"rc" );
Return true to allow those checks to occur, and false if checking is done. & $user
logUrlChange( $url, $action)
Queue log data about change for a url addition or removal.
getRegexStart()
Returns the start of the regex for matches.
static getLocalClusterInstance()
Get the main cluster-local cache object.
getBlacklists()
Fetch local and (possibly cached) remote blacklists.
getCurrentLinks(Title $title)
Look up the links currently in the article, so we can ignore them on a second run.
wfDebugLog( $logGroup, $text, $dest='all', array $context=[])
Send a line to a supplementary debug log file, if configured, or main debug log if not.
injection txt This is an overview of how MediaWiki makes use of dependency injection The design described here grew from the discussion of RFC T384 The term dependency this means that anything an object needs to operate should be injected from the the object itself should only know narrow no concrete implementation of the logic it relies on The requirement to inject everything typically results in an architecture that based on two main types of and essentially stateless service objects that use other service objects to operate on the value objects As of the beginning MediaWiki is only starting to use the DI approach Much of the code still relies on global state or direct resulting in a highly cyclical dependency which acts as the top level factory for services in MediaWiki which can be used to gain access to default instances of various services MediaWikiServices however also allows new services to be defined and default services to be redefined Services are defined or redefined by providing a callback the instantiator that will return a new instance of the service When it will create an instance of MediaWikiServices and populate it with the services defined in the files listed by thereby bootstrapping the DI framework Per $wgServiceWiringFiles lists includes ServiceWiring php
getRegexEnd( $batchSize)
Returns the end of the regex for matches.
wfParseUrl( $url)
parse_url() work-alike, but non-broken.
logFilterHit( $title, $url)
Logs the filter hit to Special:Log if $wgLogSpamBlacklistHits is enabled.
namespace and then decline to actually register it file or subcat img or subcat $title
wfGetDB( $db, $groups=[], $wiki=false)
Get a Database object.
Base class for different kinds of blacklists.
Class to simplify the use of log pages.
warmCachesForFilter(Title $title, array $entries)
as see the revision history and available at free of charge, to any person obtaining a copy of this software and associated documentation, to deal in the Software without restriction, including without limitation the rights to use
array[] $urlChangeLog
Changes to external links, for logging purposes.
The wiki should then use memcached to cache various data. To use multiple servers, just add more items to the array. To increase the weight of a server, make its entry an array("192.168.0.1:11211", 2))
if(defined( 'MW_SETUP_CALLBACK')) $fname
Customization point after all loading (constants, functions, classes, DefaultSettings,...
logUrlChanges( $oldLinks, $newLinks, $addedLinks)
Diff added/removed urls and generate events for them.
filter(array $links, Title $title=null, $preventLog=false, $mode='check')
Represents a title within MediaWiki.
static getMainWANInstance()
Get the main WAN cache object.
getBlacklistType()
Returns the code for the blacklist implementation.
This document is intended to provide useful advice for parties seeking to redistribute MediaWiki to end users. It's targeted particularly at maintainers for Linux, since it's been observed that distribution packages of MediaWiki often break. We've consistently had to recommend that users seeking support use official tarballs instead of their distribution's, and this often solves whatever problem the user is having. It would be nice if this could such as
Class for creating new log entries and inserting them into the database.
you have access to all of the normal MediaWiki so you can get a DB use the etc For full docs on the Maintenance class
if(! $wgDBerrorLogTZ) $wgRequest
injection txt This is an overview of how MediaWiki makes use of dependency injection The design described here grew from the discussion of RFC T384 The term dependency this means that anything an object needs to operate should be injected from the the object itself should only know narrow no concrete implementation of the logic it relies on The requirement to inject everything typically results in an architecture that based on two main types of and essentially stateless service objects that use other service objects to operate on the value objects As of the beginning MediaWiki is only starting to use the DI approach Much of the code still relies on global state or direct resulting in a highly cyclical dependency MediaWikiServices
The User object encapsulates all of the user-specific settings (user_id, name, rights,...
static addCallableUpdate( $callable, $stage=self::POSTSEND, $dbw=null)
Add a callable update.
getWhitelists()
Returns the (local) whitelist.
antiSpoof( $text)
Apply some basic anti-spoofing to the links before they get filtered, see.
doLogging(User $user, Title $title, $revId)
Actually push the url change events post-save.