19use Wikimedia\Purtle\RdfWriter;
20use Wikimedia\Purtle\TurtleRdfWriter;
23require_once __DIR__ .
'/Maintenance.php';
93 parent::__construct();
95 $this->
addDescription(
"Generate RDF dump of category changes in a wiki." );
98 $this->
addOption(
'output',
"Output file (default is stdout). Will be overwritten.",
false,
100 $this->
addOption(
'start',
'Starting timestamp (inclusive), in ISO or Mediawiki format.',
102 $this->
addOption(
'end',
'Ending timestamp (exclusive), in ISO or Mediawiki format.',
true,
111 $this->rdfWriter =
new TurtleRdfWriter();
112 $this->categoriesRdf =
new CategoriesRdf( $this->rdfWriter );
125 $this->
error(
"Start timestamp too old, maximum RC age is $wgRCMaxAge!" );
128 $this->
error(
"End timestamp too old, maximum RC age is $wgRCMaxAge!" );
131 $this->startTS =
$startTS->getTimestamp();
132 $this->endTS =
$endTS->getTimestamp();
134 $outFile = $this->
getOption(
'output',
'php://stdout' );
135 if ( $outFile ===
'-' ) {
136 $outFile =
'php://stdout';
139 $output = fopen( $outFile,
'wb' );
141 $this->categoriesRdf->setupPrefixes();
142 $this->rdfWriter->start();
144 $prefixes = $this->
getRdf();
147 $prefixes = preg_replace( [
'/^@/m',
'/\s*[.]$/m' ],
'', $prefixes );
176 $rdfText = $this->
getRdf();
180 return sprintf( self::SPARQL_INSERT, $rdfText );
192 if ( empty( $deleteUrls ) ) {
196 if ( !empty( $pages ) ) {
200 return "# $mark\n" . sprintf( self::SPARQL_DELETE, implode(
' ', $deleteUrls ) ) .
212 $this->categoriesRdf->writeCategoryLinkData( $pages[$row->cl_from], $row->cl_to );
222 $dumpUrl =
'<' . $this->categoriesRdf->getDumpURI() .
'>';
226 $dumpUrl schema:dateModified ?o .
229 $dumpUrl schema:dateModified ?o .
232 $dumpUrl schema:dateModified
"$ts"^^xsd:dateTime .
249 array $extra_tables = []
251 $tables = [
'recentchanges',
'page_props',
'category' ];
252 if ( $extra_tables ) {
261 $it->addJoinConditions(
264 'LEFT JOIN', [
'pp_propname' =>
'hiddencat',
'pp_page = rc_cur_id' ]
267 'LEFT JOIN', [
'cat_title = rc_title' ]
271 $it->setFetchColumns( array_merge( $columns, [
289 $it->addConditions( [
303 $it->addConditions( [
306 'rc_log_type' =>
'move',
309 $it->addJoinConditions( [
310 'page' => [
'INNER JOIN',
'rc_cur_id = page_id' ],
328 $it->addConditions( [
331 'rc_log_type' =>
'delete',
332 'rc_log_action' =>
'delete',
336 'NOT EXISTS (SELECT * FROM page WHERE page_id = rc_cur_id)',
339 $it->setFetchColumns( [
'rc_cur_id',
'rc_title' ] );
350 $it->addConditions( [
353 'rc_log_type' =>
'delete',
354 'rc_log_action' =>
'restore',
357 'EXISTS (SELECT page_id FROM page WHERE page_id = rc_cur_id)',
371 $it->addConditions( [
386 $it->addConditions( [
387 'rc_timestamp >= ' .
$dbr->addQuotes(
$dbr->timestamp( $this->startTS ) ),
388 'rc_timestamp < ' .
$dbr->addQuotes(
$dbr->timestamp( $this->endTS ) ),
398 'USE INDEX' => [
'recentchanges' =>
'new_name_timestamp' ]
412 [
'cl_from',
'cl_to' ],
415 $it->addConditions( [
416 'cl_type' =>
'subcat',
419 $it->setFetchColumns( [
'cl_from',
'cl_to' ] );
420 return new RecursiveIteratorIterator( $it );
428 return $this->rdfWriter->drain();
440 foreach (
$batch as $row ) {
442 $deleteUrls[] =
'<' . $this->categoriesRdf->labelToUrl( $row->rc_title ) .
'>';
443 $this->processed[$row->rc_cur_id] =
true;
454 $this->categoriesRdf->writeCategoryData(
456 $row->pp_propname ===
'hiddencat',
457 (
int)$row->cat_pages - (
int)$row->cat_subcats - (
int)$row->cat_files,
458 (
int)$row->cat_subcats
470 foreach (
$batch as $row ) {
471 $deleteUrls[] =
'<' . $this->categoriesRdf->labelToUrl( $row->rc_title ) .
'>';
473 if ( isset( $this->processed[$row->rc_cur_id] ) ) {
482 $row->rc_title = $row->page_title;
484 $pages[$row->rc_cur_id] = $row->page_title;
485 $this->processed[$row->rc_cur_id] =
true;
497 fwrite(
$output,
"# Restores\n" );
501 foreach (
$batch as $row ) {
502 if ( isset( $this->processed[$row->rc_cur_id] ) ) {
507 $pages[$row->rc_cur_id] = $row->rc_title;
508 $this->processed[$row->rc_cur_id] =
true;
511 if ( empty( $pages ) ) {
526 fwrite(
$output,
"# Additions\n" );
529 foreach (
$batch as $row ) {
530 if ( isset( $this->processed[$row->rc_cur_id] ) ) {
535 $pages[$row->rc_cur_id] = $row->rc_title;
536 $this->processed[$row->rc_cur_id] =
true;
539 if ( empty( $pages ) ) {
561 foreach (
$batch as $row ) {
564 if ( isset( $this->processed[$row->rc_cur_id] ) ) {
569 $pages[$row->rc_cur_id] = $row->rc_title;
570 $this->processed[$row->rc_cur_id] =
true;
571 $deleteUrls[] =
'<' . $this->categoriesRdf->labelToUrl( $row->rc_title ) .
'>';
584 $processedTitle = [];
596 foreach (
$batch as $row ) {
597 $childPages[$row->rc_cur_id] =
true;
598 $parentCats[$row->rc_title] =
true;
604 [
'pp_propname' =>
'hiddencat',
'pp_page = page_id' ],
608 [
'cat_title = page_title' ],
615 if ( !empty( $childPages ) ) {
617 $childRows =
$dbr->select(
618 [
'page',
'page_props',
'category' ],
621 'rc_title' =>
'page_title',
627 [
'page_namespace' =>
NS_CATEGORY,
'page_id' => array_keys( $childPages ) ],
632 foreach ( $childRows as $row ) {
633 if ( isset( $this->processed[$row->page_id] ) ) {
638 $deleteUrls[] =
'<' . $this->categoriesRdf->labelToUrl( $row->rc_title ) .
'>';
639 $this->processed[$row->page_id] =
true;
643 if ( !empty( $parentCats ) ) {
648 [
'page_title = cat_title',
'page_namespace' =>
NS_CATEGORY ],
652 [
'pp_propname' =>
'hiddencat',
'pp_page = page_id' ],
656 $parentRows =
$dbr->select(
657 [
'category',
'page',
'page_props' ],
660 'rc_title' =>
'cat_title',
666 [
'cat_title' => array_keys( $parentCats ) ],
671 foreach ( $parentRows as $row ) {
672 if ( $row->page_id && isset( $this->processed[$row->page_id] ) ) {
676 if ( isset( $processedTitle[$row->rc_title] ) ) {
681 $deleteUrls[] =
'<' . $this->categoriesRdf->labelToUrl( $row->rc_title ) .
'>';
682 if ( $row->page_id ) {
683 $this->processed[$row->page_id] =
true;
685 $processedTitle[$row->rc_title] =
true;
$wgRCMaxAge
RecentChanges items are periodically purged; entries older than this many seconds will be removed.
wfTimestamp( $outputtype=TS_UNIX, $ts=0)
Get a timestamp string in one of various formats.
Helper class to produce RDF representation of categories.
Maintenance script to provide RDF representation of the recent changes in category tree.
writeCategoryData( $row)
Write category data to RDF.
initialize()
Initialize external service classes.
writeParentCategories(IDatabase $dbr, $pages)
Write parent data for a set of categories.
getMovedCatsIterator(IDatabase $dbr)
Fetch moved categories.
addTimestampConditions(BatchRowIterator $it, IDatabase $dbr)
Add timestamp limits to iterator.
execute()
Do the actual work.
addIndex(BatchRowIterator $it)
Need to force the index; somehow, on terbium, the optimizer chooses the wrong one.
getNewCatsIterator(IDatabase $dbr)
Fetch newly created categories.
getInsertRdf()
Get the text of SPARQL INSERT DATA clause.
const SPARQL_DELETE
Delete query.
getDeletedCatsIterator(IDatabase $dbr)
Fetch deleted categories.
getChangedCatsIterator(IDatabase $dbr, $type)
Fetch categorization changes or edits.
const SPARQL_INSERT
Insert query.
const SPARQL_DELETE_INSERT
Delete/Insert query.
setupChangesIterator(IDatabase $dbr, array $columns=[], array $extra_tables=[])
Set up standard iterator for retrieving category changes.
int[] $processed
List of processed page IDs, so we don't try to process the same thing twice.
handleMoves(IDatabase $dbr, $output)
getRdf()
Get accumulated RDF.
handleCategorization(IDatabase $dbr, $output)
Handles categorization changes.
getRestoredCatsIterator(IDatabase $dbr)
Fetch restored categories.
handleDeletes(IDatabase $dbr, $output)
Handle category deletes.
CategoriesRdf $categoriesRdf
Categories RDF helper.
updateTS( $timestamp)
Generate SPARQL Update code for updating dump timestamp.
__construct()
Default constructor.
handleEdits(IDatabase $dbr, $output)
Handle edits for category texts.
getCategoryLinksIterator(IDatabase $dbr, array $ids)
Get iterator for links for categories.
handleRestores(IDatabase $dbr, $output)
handleAdds(IDatabase $dbr, $output)
getCategoriesUpdate(IDatabase $dbr, $deleteUrls, $pages, $mark)
Get SPARQL for updating set of categories.
Library for creating and parsing MW-style timestamps.
Abstract maintenance class for quickly writing and churning out maintenance scripts with minimal effort.
getDB( $db, $groups=[], $wiki=false)
Returns a database to be used by current maintenance script.
addDescription( $text)
Set the description text.
addOption( $name, $description, $required=false, $withArg=false, $shortName=false, $multiOccurrence=false)
Add a parameter to the script.
getOption( $name, $default=null)
Get an option, or return the default.
setBatchSize( $s=0)
Set the batch size.
do that in ParserLimitReportFormat instead use this to modify the parameters of the image all existing parser cache entries will be invalid To avoid you ll need to handle that somehow(e.g. with the RejectParserCacheValue hook) because MediaWiki won 't do it for you. & $defaults error
this hook is for auditing only RecentChangesLinked and Watchlist Do not use this to implement individual filters if they are compatible with the ChangesListFilter and ChangesListFilterGroup structure use sub classes of those in conjunction with the ChangesListSpecialPageStructuredFilters hook This hook can be used to implement filters that do not implement that or custom behavior that is not an individual filter e g Watchlist & $tables
static configuration should be added through ResourceLoaderGetConfigVars instead can be used to get the real title e g db for database replication lag or jobqueue for job queue size converted to pseudo seconds It is possible to add more fields and they will be returned to the user in the API response after the basic globals have been set but before ordinary actions take place $output
require_once RUN_MAINTENANCE_IF_MAIN
The wiki should then use memcached to cache various data To use multiple just add more items to the array To increase the weight of a make its entry a array("192.168.0.1:11211", 2))