
Commit

v1.1.5
eldertek committed Feb 15, 2024
1 parent 5977f11 commit 9119422
Showing 5 changed files with 59 additions and 62 deletions.
6 changes: 5 additions & 1 deletion CHANGELOG.md
@@ -1,10 +1,14 @@
## 1.1.5 - 2024-02-12
### Added
- Limit number of errors/success messages to 2
- Limit number of fetched duplicates to 20
- Limit number of fetched duplicates to 50
### Fixed
- Fix an issue where the limit passed to the API was limiting files, not entities
- Fix an issue where nodeid was not correctly returned by the API
- Fix an issue where the duplicates returned were not the user's own
- Fix [#45](https://github.com/eldertek/duplicatefinder/issues/45)
- Fix [#44](https://github.com/eldertek/duplicatefinder/issues/44)
- Fix [#43](https://github.com/eldertek/duplicatefinder/issues/43)
- Fix [#41](https://github.com/eldertek/duplicatefinder/issues/41)
- Fix [#40](https://github.com/eldertek/duplicatefinder/issues/40)
- Fix [#38](https://github.com/eldertek/duplicatefinder/issues/38)
2 changes: 1 addition & 1 deletion js/duplicatefinder-main.js

Large diffs are not rendered by default.

27 changes: 14 additions & 13 deletions lib/Db/FileDuplicateMapper.php
@@ -45,24 +45,25 @@ public function findAll(
?array $orderBy = [['hash'], ['type']]
): array {
$qb = $this->db->getQueryBuilder();
$qb->select('d.id as id', 'type', 'hash', 'acknowledged')
->from($this->getTableName(), 'd');

$qb->select('d.*')
->from($this->getTableName(), 'd')
->leftJoin('d', 'duplicatefinder_finfo', 'f', 'd.hash = f.file_hash');

if (!is_null($user)) {
$qb->andWhere($qb->expr()->eq('f.owner', $qb->createNamedParameter($user)));
}

if ($limit !== null) {
$qb->setMaxResults($limit); // Set the limit of rows to fetch
$qb->setMaxResults($limit);
}
if ($offset !== null) {
$qb->setFirstResult($offset); // Set the offset to start fetching rows
$qb->setFirstResult($offset);
}

$qb->addOrderBy('id');

if ($orderBy !== null) {
foreach ($orderBy as $order) {
$qb->addOrderBy($order[0], isset($order[1]) ? $order[1] : null);
}
unset($order);

foreach ($orderBy as $order) {
$qb->addOrderBy($order[0], $order[1] ?? 'ASC');
}

return $this->findEntities($qb);
}

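Read as a whole, the added lines rewrite findAll() so the duplicates table is left-joined against the per-file info table and results can be restricted to the requesting user. Below is a rough consolidation of what the method body looks like after this commit; it is a sketch assembled from the diff, the parameters before $orderBy are inferred from how they are used, and it assumes $orderBy is non-null as in its default value.

```php
public function findAll(
    ?string $user = null,
    ?int $limit = null,
    ?int $offset = null,
    ?array $orderBy = [['hash'], ['type']]
): array {
    $qb = $this->db->getQueryBuilder();

    // Join the duplicate rows with the file-info table on the hash so the
    // owner column becomes available for filtering.
    $qb->select('d.*')
        ->from($this->getTableName(), 'd')
        ->leftJoin('d', 'duplicatefinder_finfo', 'f', 'd.hash = f.file_hash');

    // Only return duplicates that belong to the requesting user.
    if (!is_null($user)) {
        $qb->andWhere($qb->expr()->eq('f.owner', $qb->createNamedParameter($user)));
    }

    if ($limit !== null) {
        $qb->setMaxResults($limit);
    }
    if ($offset !== null) {
        $qb->setFirstResult($offset);
    }

    // Stable base ordering by id, then any caller-supplied ordering; the
    // direction falls back to ASC when none is given.
    $qb->addOrderBy('id');
    foreach ($orderBy as $order) {
        $qb->addOrderBy($order[0], $order[1] ?? 'ASC');
    }

    return $this->findEntities($qb);
}
```

Filtering on f.owner at query time is what makes the returned duplicates the user's own, per the changelog entry above.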
32 changes: 19 additions & 13 deletions lib/Service/FileDuplicateService.php
@@ -73,19 +73,19 @@ public function findAll(
bool $enrich = false,
?array $orderBy = [['hash'], ['type']]
): array {
$limit = $pageSize; // Set the number of records per page
$offset = ($page - 1) * $pageSize; // Calculate the offset
$result = [];
$isLastFetched = false;
$entities = [];

while (empty($entities) && !$isLastFetched) {
$offset = ($page - 1) * $pageSize; // Calculate the offset based on the current page
$entities = $this->mapper->findAll($user, $pageSize, $offset, $orderBy);

$result = array();
$entities = null;
do {
$entities = $this->mapper->findAll($user, $limit, $offset, $orderBy);
foreach ($entities as $entity) {
$entity = $this->stripFilesWithoutAccessRights($entity, $user);
if ($enrich) {
$entity = $this->enrich($entity);
}
$offset = $entity->id;
if (count($entity->getFiles()) > 1) {
if ($type === 'acknowledged' && $entity->isAcknowledged()) {
$result[] = $entity;
@@ -94,14 +94,20 @@ public function findAll(
} else if ($type === 'all') {
$result[] = $entity;
}
if (count($result) === $limit) {
break;
}
}
}
unset($entity);
} while (count($result) < $limit && count($entities) === $limit);
return array("entities" => $result, "pageKey" => $offset, "isLastFetched" => count($entities) !== $limit);

$isLastFetched = count($entities) < $pageSize; // Determine if this is the last page
if (empty($entities) && !$isLastFetched) {
$page++; // Move to the next page if no entities found and not the last page
}
}

return [
"entities" => $result,
"pageKey" => $offset,
"isLastFetched" => $isLastFetched
];
}

private function stripFilesWithoutAccessRights(
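The interleaved old and new lines above are easier to follow as one piece: the new findAll() body computes the offset from the requested page, fetches that page of duplicates for the user, drops entries the user cannot access or that no longer reference more than one file, and returns the filtered entities together with the page key and a last-page flag. A sketch of the new body assembled from the added lines, with the assumption that the lines collapsed between the two hunks hold the usual branch for unacknowledged duplicates:

```php
// Body of FileDuplicateService::findAll(); $type, $user, $page, $pageSize,
// $enrich and $orderBy are the method parameters shown in the diff context.
$result = [];
$isLastFetched = false;
$entities = [];

while (empty($entities) && !$isLastFetched) {
    // Fetch one page of duplicates for this user.
    $offset = ($page - 1) * $pageSize;
    $entities = $this->mapper->findAll($user, $pageSize, $offset, $orderBy);

    foreach ($entities as $entity) {
        $entity = $this->stripFilesWithoutAccessRights($entity, $user);
        if ($enrich) {
            $entity = $this->enrich($entity);
        }
        // Keep only entries that still reference more than one file and
        // that match the requested type.
        if (count($entity->getFiles()) > 1) {
            if ($type === 'acknowledged' && $entity->isAcknowledged()) {
                $result[] = $entity;
            } elseif ($type === 'unacknowledged' && !$entity->isAcknowledged()) {
                // Assumed branch; it falls in the collapsed lines of the diff.
                $result[] = $entity;
            } elseif ($type === 'all') {
                $result[] = $entity;
            }
        }
    }

    // A page shorter than the page size means there is nothing left to fetch.
    $isLastFetched = count($entities) < $pageSize;
    if (empty($entities) && !$isLastFetched) {
        $page++; // The page yielded nothing usable; try the next one.
    }
}

return [
    "entities" => $result,
    "pageKey" => $offset,
    "isLastFetched" => $isLastFetched
];
```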
54 changes: 20 additions & 34 deletions src/App.vue
@@ -109,7 +109,7 @@ export default {
currentDuplicateId: null,
updating: false,
fetchingLimit: 1,
fetchingLimit: 50,
loading: true,
loadingDots: '',
loadingInterval: null,
@@ -156,26 +156,10 @@
},
async mounted() {
this.startLoadingAnimation();
this.fetchAllPages('acknowledged', this.fetchingLimit, true);
this.fetchAllPages('unacknowledged', this.fetchingLimit, true);
this.fetchAllPages('acknowledged', this.fetchingLimit);
this.fetchAllPages('unacknowledged', this.fetchingLimit);
this.stopLoadingAnimation();
},
watch: {
// Watcher for acknowledgedDuplicates list
acknowledgedDuplicates(newVal) {
if (newVal.length <= 2) {
// Trigger re-fetch for acknowledged list with the existing limit, not initial fetch
this.fetchAllPages('acknowledged', this.fetchingLimit, false);
}
},
// Watcher for unacknowledgedDuplicates list
unacknowledgedDuplicates(newVal) {
if (newVal.length <= 2) {
// Trigger re-fetch for unacknowledged list with the existing limit, not initial fetch
this.fetchAllPages('unacknowledged', this.fetchingLimit, false);
}
}
},
methods: {
openFileInViewer(file) {
// Ensure the viewer script is loaded and OCA.Viewer is available
@@ -240,14 +224,13 @@
clearInterval(this.loadingInterval);
this.loadingDots = ''; // Reset loading dots
},
async fetchAllPages(type, limit, initial) {
async fetchAllPages(type, limit) {
let currentPage = 1;
let url;
this.loading = true;
let totalFetched = 0; // Initialize total fetched entities counter
do {
// Determine URL based on the type
if (type === 'acknowledged') {
url = generateUrl(`/apps/duplicatefinder/api/duplicates/acknowledged?page=${currentPage}&limit=${limit}`);
} else if (type === 'unacknowledged') {
@@ -264,22 +247,19 @@
const newDuplicates = response.data.entities;
const pagination = response.data.pagination;
// Decide to append new duplicates or replace based on `initial` flag
if (initial) {
// Replace or set the list if it's the initial fetch
this[type + 'Duplicates'] = newDuplicates.slice(0, limit);
} else {
// Append new duplicates and ensure unique entries, if needed
this[type + 'Duplicates'] = [...this[type + 'Duplicates'], ...newDuplicates].slice(0, limit);
}
// Filter out duplicates that already exist in the current list
const filteredNewDuplicates = newDuplicates.filter(newDup =>
!this[type + 'Duplicates'].some(existingDup => existingDup.id === newDup.id)
);
this[type + 'Duplicates'] = [...this[type + 'Duplicates'], ...filteredNewDuplicates.slice(0, limit - totalFetched)];
totalFetched += filteredNewDuplicates.length; // Update total fetched entities based on filtered list
// Update pagination details
this[`totalPages${type.charAt(0).toUpperCase() + type.slice(1)}`] = pagination.totalPages;
currentPage++;
// Break out if the limit is reached
if (this[type + 'Duplicates'].length >= limit) {
console.log(`Limit of ${limit} reached for ${type} duplicates, stopping fetch.`);
// Stop fetching if we have reached the limit
if (totalFetched >= limit) {
break;
}
} catch (e) {
@@ -298,12 +278,15 @@
await axios.post(generateUrl(`/apps/duplicatefinder/api/duplicates/acknowledge/${hash}`));
this.showSuccess(t('duplicatefinder', 'Duplicate acknowledged successfully'));
// Fetch all pages again to get the latest data
// Move the duplicate from the unacknowledged list to the acknowledged list
const index = this.unacknowledgedDuplicates.findIndex(dup => dup.id === this.currentDuplicateId);
const [removedItem] = this.unacknowledgedDuplicates.splice(index, 1);
this.acknowledgedDuplicates.push(removedItem);
this.fetchAllPages('unacknowledged', 5);
// Switch to the next unacknowledged duplicate in the list
if (this.unacknowledgedDuplicates[index]) {
this.openDuplicate(this.unacknowledgedDuplicates[index]);
@@ -322,6 +305,7 @@
await axios.post(generateUrl(`/apps/duplicatefinder/api/duplicates/unacknowledge/${hash}`));
this.showSuccess(t('duplicatefinder', 'Duplicate unacknowledged successfully'));
this.fetchAllPages('acknowledged', 5);
// Move the duplicate from the acknowledged list to the unacknowledged list
const index = this.acknowledgedDuplicates.findIndex(dup => dup.id === this.currentDuplicateId);
@@ -397,8 +381,10 @@
let currentList = null;
if (this.unacknowledgedDuplicates.some(dup => dup.id === this.currentDuplicateId)) {
currentList = this.unacknowledgedDuplicates;
this.fetchAllPages('unacknowledged', 5);
} else if (this.acknowledgedDuplicates.some(dup => dup.id === this.currentDuplicateId)) {
currentList = this.acknowledgedDuplicates;
this.fetchAllPages('acknowledged', 5);
}
// If only one file remains for the current hash, remove the hash
