Commit

fix: same file displayed multiple times
eldertek committed Dec 27, 2024
1 parent bd7b0bd commit a07cece
Showing 2 changed files with 27 additions and 4 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -3,6 +3,7 @@
 - Added ability to exclude specific folders from duplicate scanning via settings page
 ### Fixed
 - Handle when there is no file to delete in bulk deletion
+- Fix an issue where the same file was displayed multiple times in a duplicate group
 
 ## 1.3.1 - 2024-12-26
 ### Added
30 changes: 26 additions & 4 deletions lib/Service/FileDuplicateService.php
@@ -56,12 +56,29 @@ public function enrich(FileDuplicate $duplicate): FileDuplicate
         $files = $duplicate->getFiles();
         $this->logger->debug('Enriching duplicate with hash: {hash}', ['hash' => $duplicate->getHash()]);
 
+        // Track unique node IDs to prevent showing the same file multiple times
+        $seenNodeIds = [];
+        $uniqueFiles = [];
+
         // Iterate through each FileInfo object to enrich it
         foreach ($files as $key => $fileInfo) {
             // Enrich the FileInfo object
             $files[$key] = $this->fileInfoService->enrich($fileInfo);
 
-            // Normalize path by removing /admin/files/ prefix
+            // Skip if we've already seen this node ID (same physical file)
+            if ($files[$key]->getNodeId() && isset($seenNodeIds[$files[$key]->getNodeId()])) {
+                $this->logger->debug('Skipping duplicate node ID: {nodeId} for path: {path}', [
+                    'nodeId' => $files[$key]->getNodeId(),
+                    'path' => $fileInfo->getPath()
+                ]);
+                continue;
+            }
+
+            if ($files[$key]->getNodeId()) {
+                $seenNodeIds[$files[$key]->getNodeId()] = true;
+            }
+
+            // Store normalized path for logging
             $normalizedPath = preg_replace('#^/[^/]+/files/#', '/', $fileInfo->getPath());
             $this->logger->debug('Normalized path for file: {original} -> {normalized}', [
                 'original' => $fileInfo->getPath(),
@@ -75,16 +92,21 @@ public function enrich(FileDuplicate $duplicate): FileDuplicate
                 'path' => $normalizedPath,
                 'status' => $protectionInfo['isProtected'] ? 'protected' : 'not protected'
             ]);
+
+            $uniqueFiles[] = $files[$key];
         }
 
         // Sort the enriched FileInfo objects
-        uasort($files, function (FileInfo $a, FileInfo $b) {
+        uasort($uniqueFiles, function (FileInfo $a, FileInfo $b) {
             return strnatcmp($a->getPath(), $b->getPath());
         });
 
         // Set the sorted and enriched FileInfo objects back to the duplicate
-        $duplicate->setFiles(array_values($files));
-        $this->logger->debug('Finished enriching duplicate with hash: {hash}', ['hash' => $duplicate->getHash()]);
+        $duplicate->setFiles(array_values($uniqueFiles));
+        $this->logger->debug('Finished enriching duplicate with hash: {hash}, found {count} unique files', [
+            'hash' => $duplicate->getHash(),
+            'count' => count($uniqueFiles)
+        ]);
 
         return $duplicate;
     }
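For context, below is a minimal standalone sketch of the pattern this commit applies: keep only the first entry per node ID (the same physical file can apparently surface under several paths), then natural-sort the survivors by path. The dedupeByNodeId() helper and the plain-array entry shape are hypothetical stand-ins for the app's FileInfo objects, not part of the real codebase.

<?php

/**
 * Hypothetical stand-in for the service logic: each entry mimics a FileInfo
 * with a node ID and a path.
 *
 * @param array<int, array{nodeId: int|null, path: string}> $files
 * @return array<int, array{nodeId: int|null, path: string}>
 */
function dedupeByNodeId(array $files): array
{
    $seenNodeIds = [];
    $uniqueFiles = [];

    foreach ($files as $file) {
        $nodeId = $file['nodeId'];

        // Skip entries whose node ID was already seen (same physical file).
        if ($nodeId !== null && isset($seenNodeIds[$nodeId])) {
            continue;
        }
        if ($nodeId !== null) {
            $seenNodeIds[$nodeId] = true;
        }
        $uniqueFiles[] = $file;
    }

    // Natural sort by path, mirroring the service's strnatcmp() comparator.
    uasort($uniqueFiles, fn (array $a, array $b) => strnatcmp($a['path'], $b['path']));

    return array_values($uniqueFiles);
}

// Two entries share node ID 42, so only the first occurrence is kept.
$result = dedupeByNodeId([
    ['nodeId' => 42, 'path' => '/alice/files/photo (2).jpg'],
    ['nodeId' => 42, 'path' => '/admin/files/photo (2).jpg'],
    ['nodeId' => 7,  'path' => '/alice/files/photo (10).jpg'],
]);
print_r($result); // "photo (2).jpg" sorts before "photo (10).jpg" thanks to strnatcmp()

// The same normalization the service uses for logging strips the "/<user>/files/" prefix:
$normalized = preg_replace('#^/[^/]+/files/#', '/', '/admin/files/photo (2).jpg'); // "/photo (2).jpg"

Keying the seen-set on the node ID rather than the path is what makes the deduplication work: two path strings can differ while still pointing at the same node, whereas the node ID identifies the underlying file.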
