<?php
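/**
 * Helper functions for a MediaWiki maintenance script that compresses old
 * revision text: compressOldPages()/compressPage() gzip individual text rows,
 * while compressWithConcat() batches a page's old revisions into a single
 * ConcatenatedGzipHistoryBlob and rewrites the other rows as stubs.
 */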
function compressOldPages( $start = 0, $extdb = '' ) {
	$fname = 'compressOldPages';

	$chunksize = 50;
	print "Starting from old_id $start...\n";
	$dbw = wfGetDB( DB_MASTER );
	do {
		$res = $dbw->select( 'text', array( 'old_id', 'old_flags', 'old_text' ),
			"old_id>=$start", $fname,
			array( 'ORDER BY' => 'old_id', 'LIMIT' => $chunksize, 'FOR UPDATE' ) );
		if ( $dbw->numRows( $res ) == 0 ) {
			break;
		}
		$last = $start;
		while ( $row = $dbw->fetchObject( $res ) ) {
			# print "  {$row->old_id} - {$row->old_namespace}:{$row->old_title}\n";
			compressPage( $row, $extdb );
			$last = $row->old_id;
		}
		$dbw->freeResult( $res );
		$start = $last + 1; # Deletion may leave long empty stretches
		print "$start...\n";
	} while ( true );
}
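/*
 * Usage sketch (an assumption for illustration, not part of the original file):
 * these helpers expect to run inside the MediaWiki maintenance environment, so
 * wfGetDB() and the text table schema are already set up. The option names
 * below are hypothetical.
 *
 *     $start = isset( $options['start'] ) ? intval( $options['start'] ) : 0;
 *     $extdb = isset( $options['extdb'] ) ? $options['extdb'] : '';
 *     compressOldPages( $start, $extdb ); # gzip each eligible text row from $start upward
 */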
function compressPage( $row, $extdb ) {
	$fname = 'compressPage';
	if ( false !== strpos( $row->old_flags, 'gzip' ) || false !== strpos( $row->old_flags, 'object' ) ) {
		# print "Already compressed row {$row->old_id}\n";
		return false;
	}
	$dbw = wfGetDB( DB_MASTER );
	$flags = $row->old_flags ? "{$row->old_flags},gzip" : "gzip";
	$compress = gzdeflate( $row->old_text );

	# Store in external storage if required
	if ( $extdb !== '' ) {
		$storeObj = new ExternalStoreDB;
		$compress = $storeObj->store( $extdb, $compress );
		if ( $compress === false ) {
			print "Unable to store object\n";
			return false;
		}
	}

	# Update text row
	$dbw->update( 'text',
		array(
			'old_flags' => $flags,
			'old_text' => $compress
		), array(
			'old_id' => $row->old_id
		), $fname,
		array( 'LIMIT' => 1 )
	);
	return true;
}
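/*
 * Read-back sketch (an assumption, shown only to make the flag handling concrete;
 * in MediaWiki the actual read path is Revision::getRevisionText(), which
 * interprets old_flags): a row written by compressPage() carries 'gzip' in
 * old_flags and must be inflated before use.
 *
 *     $text = $row->old_text;
 *     if ( false !== strpos( $row->old_flags, 'gzip' ) ) {
 *         $text = gzinflate( $text ); # undo the gzdeflate() applied above
 *     }
 */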
define( 'LS_INDIVIDUAL', 0 );
define( 'LS_CHUNKED', 1 );
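# These constants select how compressWithConcat() loads revision text:
# LS_CHUNKED joins revision against text up front and locks a whole chunk of
# text rows FOR UPDATE, while LS_INDIVIDUAL fetches each text row with its own
# SELECT ... FOR UPDATE inside the chunk loop.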
function compressWithConcat( $startId, $maxChunkSize, $beginDate,
	$endDate, $extdb = "", $maxPageId = false )
{
	$fname = 'compressWithConcat';
	$loadStyle = LS_CHUNKED;

	$dbr = wfGetDB( DB_SLAVE );
	$dbw = wfGetDB( DB_MASTER );

	# Set up external storage
	if ( $extdb != '' ) {
		$storeObj = new ExternalStoreDB;
	}

	# Get all articles by page_id
	if ( !$maxPageId ) {
		$maxPageId = $dbr->selectField( 'page', 'max(page_id)', '', $fname );
	}
	print "Starting from $startId of $maxPageId\n";
	$pageConds = array();

	# For each article, get a list of revisions which fit the criteria

	# No recompression, use a condition on old_flags
	# Don't compress object type entities, because that might produce data loss when
	# overwriting bulk storage concat rows. Don't compress external references, because
	# the script doesn't yet delete rows from external storage.
	$conds = array(
		'old_flags NOT ' . $dbr->buildLike( $dbr->anyString(), 'object', $dbr->anyString() )
		. ' AND old_flags NOT ' . $dbr->buildLike( $dbr->anyString(), 'external', $dbr->anyString() ) );
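	# For reference (an approximation; exact quoting depends on the Database
	# backend), the condition built above expands to something like:
	#   old_flags NOT LIKE '%object%' AND old_flags NOT LIKE '%external%'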
	if ( $beginDate ) {
		if ( !preg_match( '/^\d{14}$/', $beginDate ) ) {
			print "Invalid begin date \"$beginDate\"\n";
			return false;
		}
		$conds[] = "rev_timestamp>'" . $beginDate . "'";
	}
	if ( $endDate ) {
		if ( !preg_match( '/^\d{14}$/', $endDate ) ) {
			print "Invalid end date \"$endDate\"\n";
			return false;
		}
		$conds[] = "rev_timestamp<'" . $endDate . "'";
	}
	if ( $loadStyle == LS_CHUNKED ) {
		$tables = array( 'revision', 'text' );
		$fields = array( 'rev_id', 'rev_text_id', 'old_flags', 'old_text' );
		$conds[] = 'rev_text_id=old_id';
		$revLoadOptions = 'FOR UPDATE';
	} else {
		$tables = array( 'revision' );
		$fields = array( 'rev_id', 'rev_text_id' );
		$revLoadOptions = array();
	}
	# Don't work with current revisions
	# Don't lock the page table for update either -- TS 2006-04-04
	#$tables[] = 'page';
	#$conds[] = 'page_id=rev_page AND rev_id != page_latest';
	for ( $pageId = $startId; $pageId <= $maxPageId; $pageId++ ) {
		wfWaitForSlaves( 5 );

		# Wake up
		$dbr->ping();

		# Get the page row
		$pageRes = $dbr->select( 'page',
			array( 'page_id', 'page_namespace', 'page_title', 'page_latest' ),
			$pageConds + array( 'page_id' => $pageId ), $fname );
		if ( $dbr->numRows( $pageRes ) == 0 ) {
			continue;
		}
		$pageRow = $dbr->fetchObject( $pageRes );

		# Display progress
		$titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title );
		print "$pageId\t" . $titleObj->getPrefixedDBkey() . " ";

		# Load revisions
		$revRes = $dbw->select( $tables, $fields,
			array_merge( array(
				'rev_page' => $pageRow->page_id,
				# Don't operate on the current revision
				# Use < instead of <> in case the current revision has changed
				# since the page select, which wasn't locking
				'rev_id < ' . $pageRow->page_latest
			), $conds ),
			$fname,
			$revLoadOptions
		);
		$revs = array();
		while ( $revRow = $dbw->fetchObject( $revRes ) ) {
			$revs[] = $revRow;
		}

		if ( count( $revs ) < 2 ) {
			# Fewer than two matching revisions, nothing to concatenate
			print "\n";
			continue;
		}
		# For each chunk
		$i = 0;
		while ( $i < count( $revs ) ) {
			if ( $i < count( $revs ) - $maxChunkSize ) {
				$thisChunkSize = $maxChunkSize;
			} else {
				$thisChunkSize = count( $revs ) - $i;
			}

			$chunk = new ConcatenatedGzipHistoryBlob();
			$stubs = array();
			$dbw->begin();
			$usedChunk = false;
			$primaryOldid = $revs[$i]->rev_text_id;
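			# The first revision's text row (the "primary" old_id) will end up holding
			# the whole serialized blob; the other rows in the chunk become stubs that
			# point back at it.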
			# Get the text of each revision and add it to the object
			for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy(); $j++ ) {
				$oldid = $revs[$i + $j]->rev_text_id;

				# Get text
				if ( $loadStyle == LS_INDIVIDUAL ) {
					$textRow = $dbw->selectRow( 'text',
						array( 'old_flags', 'old_text' ),
						array( 'old_id' => $oldid ),
						$fname,
						'FOR UPDATE'
					);
					$text = Revision::getRevisionText( $textRow );
				} else {
					$text = Revision::getRevisionText( $revs[$i + $j] );
				}

				if ( $text === false ) {
					print "\nError, unable to get text in old_id $oldid\n";
					# $dbw->delete( 'old', array( 'old_id' => $oldid ) );
				}

				if ( $extdb == "" && $j == 0 ) {
					$chunk->setText( $text );
					print '.';
				} else {
					# Don't make a stub if it's going to be longer than the article
					# Stubs are typically about 100 bytes
					if ( strlen( $text ) < 120 ) {
						$stub = false;
						print 'x';
					} else {
						$stub = new HistoryBlobStub( $chunk->addItem( $text ) );
						$stub->setLocation( $primaryOldid );
						$stub->setReferrer( $oldid );
						print '.';
						$usedChunk = true;
					}
					$stubs[$j] = $stub;
				}
			}
			$thisChunkSize = $j;
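			# The loop above can stop early if the blob reports it is no longer
			# "happy" (isHappy() returned false), so record how many revisions
			# were actually consumed before advancing $i below.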
			# If we couldn't actually use any stubs because the revisions were too small, do nothing
			if ( $usedChunk ) {
				if ( $extdb != "" ) {
					# Move blob objects to External Storage
					$stored = $storeObj->store( $extdb, serialize( $chunk ) );
					if ( $stored === false ) {
						print "Unable to store object\n";
						return false;
					}
					# Store External Storage URLs instead of Stub placeholders
					foreach ( $stubs as $stub ) {
						if ( $stub === false ) {
							continue;
						}
						# $stored should provide base path to a BLOB
						$url = $stored . "/" . $stub->getHash();
						$dbw->update( 'text',
							array(
								'old_text' => $url,
								'old_flags' => 'external,utf-8',
							), array(
								'old_id' => $stub->getReferrer(),
							)
						);
					}
				} else {
					# Store the main object locally
					$dbw->update( 'text',
						array(
							'old_text' => serialize( $chunk ),
							'old_flags' => 'object,utf-8',
						), array(
							'old_id' => $primaryOldid
						)
					);

					# Store the stub objects
					for ( $j = 1; $j < $thisChunkSize; $j++ ) {
						# Skip revisions that were not stubbed, and never overwrite the primary text row
						if ( $stubs[$j] !== false && $revs[$i + $j]->rev_text_id != $primaryOldid ) {
							$dbw->update( 'text',
								array(
									'old_text' => serialize( $stubs[$j] ),
									'old_flags' => 'object,utf-8',
								), array(
									'old_id' => $revs[$i + $j]->rev_text_id
								)
							);
						}
					}
				}
			}
			# Done, next
			print "/";
			$dbw->commit();
			$i += $thisChunkSize;
			wfWaitForSlaves( 5 );
		}
		print "\n";
	}
	return true;
}
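/*
 * Usage sketch for compressWithConcat() (an assumption for illustration; the
 * real entry point is the surrounding maintenance script, and the argument
 * values below are hypothetical). Dates are 14-digit MediaWiki timestamps.
 *
 *     $ok = compressWithConcat(
 *         1,                  # $startId: first page_id to process
 *         20,                 # $maxChunkSize: revisions per concatenated blob
 *         '20050101000000',   # $beginDate: only revisions after this timestamp
 *         '20060101000000',   # $endDate: only revisions before this timestamp
 *         '',                 # $extdb: empty string keeps blobs in the local text table
 *         false               # $maxPageId: false means "use max(page_id)"
 *     );
 */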