From: Kunal Mehta
Date: Fri, 25 Sep 2015 01:27:28 +0000 (-0700)
Subject: Move FileContentsHasher into includes/utils/
X-Git-Tag: 1.31.0-rc.0~9843
X-Git-Url: https://git.heureux-cyclage.org/?a=commitdiff_plain;ds=sidebyside;h=9a3c7b43ea1e440a30dac778eee77627f56687db;hp=bdabeab02a5bb58aafd60f34d905d4b9b016d332;p=lhc%2Fweb%2Fwiklou.git

Move FileContentsHasher into includes/utils/

The class has only two dependencies on MediaWiki (ObjectCache &
wfGlobalCacheKey), so it is suitable for inclusion in the utils
directory.

Change-Id: I85b4c763be2670c40f26d93e75cedcb68eaa7987
---

diff --git a/autoload.php b/autoload.php
index 9cb5b43ad9..f1b0a6cae7 100644
--- a/autoload.php
+++ b/autoload.php
@@ -430,7 +430,7 @@ $wgAutoloadLocalClasses = array(
 	'FileBackendStoreShardListIterator' => __DIR__ . '/includes/filebackend/FileBackendStore.php',
 	'FileBasedSiteLookup' => __DIR__ . '/includes/site/FileBasedSiteLookup.php',
 	'FileCacheBase' => __DIR__ . '/includes/cache/FileCacheBase.php',
-	'FileContentsHasher' => __DIR__ . '/includes/FileContentsHasher.php',
+	'FileContentsHasher' => __DIR__ . '/includes/utils/FileContentsHasher.php',
 	'FileDeleteForm' => __DIR__ . '/includes/FileDeleteForm.php',
 	'FileDependency' => __DIR__ . '/includes/cache/CacheDependency.php',
 	'FileDuplicateSearchPage' => __DIR__ . '/includes/specials/SpecialFileDuplicateSearch.php',
diff --git a/includes/FileContentsHasher.php b/includes/FileContentsHasher.php
deleted file mode 100644
index 67eb9d29f0..0000000000
--- a/includes/FileContentsHasher.php
+++ /dev/null
@@ -1,111 +0,0 @@
-		$this->cache = ObjectCache::newAccelerator( 'hash' );
-	}
-
-	/**
-	 * Get the singleton instance of this class.
-	 *
-	 * @return FileContentsHasher
-	 */
-	public static function singleton() {
-		if ( !self::$instance ) {
-			self::$instance = new self;
-		}
-
-		return self::$instance;
-	}
-
-	/**
-	 * Get a hash of a file's contents, either by retrieving a previously-
-	 * computed hash from the cache, or by computing a hash from the file.
-	 *
-	 * @private
-	 * @param string $filePath Full path to the file.
-	 * @param string $algo Name of selected hashing algorithm.
-	 * @return string|bool Hash of file contents, or false if the file could not be read.
-	 */
-	public function getFileContentsHashInternal( $filePath, $algo = 'md4' ) {
-		$mtime = MediaWiki\quietCall( 'filemtime', $filePath );
-		if ( $mtime === false ) {
-			return false;
-		}
-
-		$cacheKey = wfGlobalCacheKey( __CLASS__, $filePath, $mtime, $algo );
-		$hash = $this->cache->get( $cacheKey );
-
-		if ( $hash ) {
-			return $hash;
-		}
-
-		$contents = MediaWiki\quietCall( 'file_get_contents', $filePath );
-		if ( $contents === false ) {
-			return false;
-		}
-
-		$hash = hash( $algo, $contents );
-		$this->cache->set( $cacheKey, $hash, 60 * 60 * 24 ); // 24h
-
-		return $hash;
-	}
-
-	/**
-	 * Get a hash of the combined contents of one or more files, either by
-	 * retrieving a previously-computed hash from the cache, or by computing
-	 * a hash from the files.
-	 *
-	 * @param string|string[] $filePaths One or more file paths.
-	 * @param string $algo Name of selected hashing algorithm.
-	 * @return string|bool Hash of files' contents, or false if any file could not be read.
-	 */
-	public static function getFileContentsHash( $filePaths, $algo = 'md4' ) {
-		$instance = self::singleton();
-
-		if ( !is_array( $filePaths ) ) {
-			$filePaths = (array) $filePaths;
-		}
-
-		if ( count( $filePaths ) === 1 ) {
-			return $instance->getFileContentsHashInternal( $filePaths[0], $algo );
-		}
-
-		sort( $filePaths );
-		$hashes = array_map( function ( $filePath ) use ( $instance, $algo ) {
-			return $instance->getFileContentsHashInternal( $filePath, $algo ) ?: '';
-		}, $filePaths );
-
-		$hashes = implode( '', $hashes );
-		return $hashes ? hash( $algo, $hashes ) : false;
-	}
-}
diff --git a/includes/utils/FileContentsHasher.php b/includes/utils/FileContentsHasher.php
new file mode 100644
index 0000000000..67eb9d29f0
--- /dev/null
+++ b/includes/utils/FileContentsHasher.php
@@ -0,0 +1,111 @@
+		$this->cache = ObjectCache::newAccelerator( 'hash' );
+	}
+
+	/**
+	 * Get the singleton instance of this class.
+	 *
+	 * @return FileContentsHasher
+	 */
+	public static function singleton() {
+		if ( !self::$instance ) {
+			self::$instance = new self;
+		}
+
+		return self::$instance;
+	}
+
+	/**
+	 * Get a hash of a file's contents, either by retrieving a previously-
+	 * computed hash from the cache, or by computing a hash from the file.
+	 *
+	 * @private
+	 * @param string $filePath Full path to the file.
+	 * @param string $algo Name of selected hashing algorithm.
+	 * @return string|bool Hash of file contents, or false if the file could not be read.
+	 */
+	public function getFileContentsHashInternal( $filePath, $algo = 'md4' ) {
+		$mtime = MediaWiki\quietCall( 'filemtime', $filePath );
+		if ( $mtime === false ) {
+			return false;
+		}
+
+		$cacheKey = wfGlobalCacheKey( __CLASS__, $filePath, $mtime, $algo );
+		$hash = $this->cache->get( $cacheKey );
+
+		if ( $hash ) {
+			return $hash;
+		}
+
+		$contents = MediaWiki\quietCall( 'file_get_contents', $filePath );
+		if ( $contents === false ) {
+			return false;
+		}
+
+		$hash = hash( $algo, $contents );
+		$this->cache->set( $cacheKey, $hash, 60 * 60 * 24 ); // 24h
+
+		return $hash;
+	}
+
+	/**
+	 * Get a hash of the combined contents of one or more files, either by
+	 * retrieving a previously-computed hash from the cache, or by computing
+	 * a hash from the files.
+	 *
+	 * @param string|string[] $filePaths One or more file paths.
+	 * @param string $algo Name of selected hashing algorithm.
+	 * @return string|bool Hash of files' contents, or false if any file could not be read.
+	 */
+	public static function getFileContentsHash( $filePaths, $algo = 'md4' ) {
+		$instance = self::singleton();
+
+		if ( !is_array( $filePaths ) ) {
+			$filePaths = (array) $filePaths;
+		}
+
+		if ( count( $filePaths ) === 1 ) {
+			return $instance->getFileContentsHashInternal( $filePaths[0], $algo );
+		}
+
+		sort( $filePaths );
+		$hashes = array_map( function ( $filePath ) use ( $instance, $algo ) {
+			return $instance->getFileContentsHashInternal( $filePath, $algo ) ?: '';
+		}, $filePaths );
+
+		$hashes = implode( '', $hashes );
+		return $hashes ? hash( $algo, $hashes ) : false;
+	}
+}
diff --git a/tests/phpunit/includes/FileContentsHasherTest.php b/tests/phpunit/includes/FileContentsHasherTest.php
deleted file mode 100644
index eb63649788..0000000000
--- a/tests/phpunit/includes/FileContentsHasherTest.php
+++ /dev/null
@@ -1,55 +0,0 @@
-			array( $this->provideSingleFile() )
-		);
-	}
-
-	/**
-	 * @covers FileContentsHasher::getFileContentsHash
-	 * @covers FileContentsHasher::getFileContentsHashInternal
-	 * @dataProvider provideSingleFile
-	 */
-	public function testSingleFileHash( $fileName, $contents ) {
-		foreach ( array( 'md4', 'md5' ) as $algo ) {
-			$expectedHash = hash( $algo, $contents );
-			$actualHash = FileContentsHasher::getFileContentsHash( $fileName, $algo );
-			$this->assertEquals( $expectedHash, $actualHash );
-			$actualHashRepeat = FileContentsHasher::getFileContentsHash( $fileName, $algo );
-			$this->assertEquals( $expectedHash, $actualHashRepeat );
-		}
-	}
-
-	/**
-	 * @covers FileContentsHasher::getFileContentsHash
-	 * @covers FileContentsHasher::getFileContentsHashInternal
-	 * @dataProvider provideMultipleFiles
-	 */
-	public function testMultipleFileHash( $files ) {
-		$fileNames = array();
-		$hashes = array();
-		foreach ( $files as $fileInfo ) {
-			list( $fileName, $contents ) = $fileInfo;
-			$fileNames[] = $fileName;
-			$hashes[] = md5( $contents );
-		}
-
-		$expectedHash = md5( implode( '', $hashes ) );
-		$actualHash = FileContentsHasher::getFileContentsHash( $fileNames, 'md5' );
-		$this->assertEquals( $expectedHash, $actualHash );
-		$actualHashRepeat = FileContentsHasher::getFileContentsHash( $fileNames, 'md5' );
-		$this->assertEquals( $expectedHash, $actualHashRepeat );
-	}
-}
diff --git a/tests/phpunit/includes/utils/FileContentsHasherTest.php b/tests/phpunit/includes/utils/FileContentsHasherTest.php
new file mode 100644
index 0000000000..a03e1fc416
--- /dev/null
+++ b/tests/phpunit/includes/utils/FileContentsHasherTest.php
@@ -0,0 +1,55 @@
+			array( $this->provideSingleFile() )
+		);
+	}
+
+	/**
+	 * @covers FileContentsHasher::getFileContentsHash
+	 * @covers FileContentsHasher::getFileContentsHashInternal
+	 * @dataProvider provideSingleFile
+	 */
+	public function testSingleFileHash( $fileName, $contents ) {
+		foreach ( array( 'md4', 'md5' ) as $algo ) {
+			$expectedHash = hash( $algo, $contents );
+			$actualHash = FileContentsHasher::getFileContentsHash( $fileName, $algo );
+			$this->assertEquals( $expectedHash, $actualHash );
+			$actualHashRepeat = FileContentsHasher::getFileContentsHash( $fileName, $algo );
+			$this->assertEquals( $expectedHash, $actualHashRepeat );
+		}
+	}
+
+	/**
+	 * @covers FileContentsHasher::getFileContentsHash
+	 * @covers FileContentsHasher::getFileContentsHashInternal
+	 * @dataProvider provideMultipleFiles
+	 */
+	public function testMultipleFileHash( $files ) {
+		$fileNames = array();
+		$hashes = array();
+		foreach ( $files as $fileInfo ) {
+			list( $fileName, $contents ) = $fileInfo;
+			$fileNames[] = $fileName;
+			$hashes[] = md5( $contents );
+		}
+
+		$expectedHash = md5( implode( '', $hashes ) );
+		$actualHash = FileContentsHasher::getFileContentsHash( $fileNames, 'md5' );
+		$this->assertEquals( $expectedHash, $actualHash );
+		$actualHashRepeat = FileContentsHasher::getFileContentsHash( $fileNames, 'md5' );
+		$this->assertEquals( $expectedHash, $actualHashRepeat );
+	}
+}
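
For reference, a minimal usage sketch of the moved class. It is illustrative only: it assumes an initialized MediaWiki environment (for example, a maintenance script) so that the autoloader, ObjectCache, and wfGlobalCacheKey() are available, and the file paths are placeholders rather than files touched by this change.

<?php
// Hash one file with the default algorithm (md4). The result is cached in the
// local 'hash' accelerator cache, keyed on path, mtime and algorithm, so a
// repeat call for an unchanged file does not re-hash its contents.
$hash = FileContentsHasher::getFileContentsHash( '/srv/mediawiki/resources/example.js' );

// Hash the combined contents of several files; the paths are sorted
// internally, so argument order does not affect the result.
$combined = FileContentsHasher::getFileContentsHash(
	array(
		'/srv/mediawiki/resources/a.js',
		'/srv/mediawiki/resources/b.js',
	),
	'md5'
);

if ( $combined === false ) {
	// At least one of the files could not be read.
}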