# Store in external storage if required
if ( $extdb !== '' ) {
- $storeObj = new ExternalStoreDB;
+ $esFactory = MediaWikiServices::getInstance()->getExternalStoreFactory();
+ /** @var ExternalStoreDB $storeObj */
+ $storeObj = $esFactory->getStore( 'DB' );
$compress = $storeObj->store( $extdb, $compress );
if ( $compress === false ) {
$this->error( "Unable to store object" );
* @param string $extdb
* @param bool|int $maxPageId
* @return bool
+ * @suppress PhanTypeInvalidDimOffset
*/
private function compressWithConcat( $startId, $maxChunkSize, $beginDate,
$endDate, $extdb = "", $maxPageId = false
# Set up external storage
if ( $extdb != '' ) {
- $storeObj = new ExternalStoreDB;
+ $esFactory = MediaWikiServices::getInstance()->getExternalStoreFactory();
+ /** @var ExternalStoreDB $storeObj */
+ $storeObj = $esFactory->getStore( 'DB' );
}
+ // @phan-suppress-next-line PhanAccessMethodInternal
+ $blobStore = MediaWikiServices::getInstance()
+ ->getBlobStoreFactory()
+ ->newSqlBlobStore();
# Get all articles by page_id
if ( !$maxPageId ) {
for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy(); $j++ ) {
$oldid = $revs[$i + $j]->old_id;
- # Get text
- $text = Revision::getRevisionText( $revs[$i + $j] );
+ # Get text. The full `extractBlob` is unnecessary here, since the query is
+ # built to fetch only non-external-store blobs.
+ $text = $blobStore->decompressData(
+ $revs[$i + $j]->old_text,
+ explode( ',', $revs[$i + $j]->old_flags )
+ );
if ( $text === false ) {
$this->error( "\nError, unable to get text in old_id $oldid" );