From 4901092f136d4b9a51cc04727904e40682271c1f Mon Sep 17 00:00:00 2001
From: Nimantha Harshana Perera
Date: Mon, 4 Dec 2017 12:14:12 +0530
Subject: [PATCH 1/2] Update extension.driver.php

---
 extension.driver.php | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/extension.driver.php b/extension.driver.php
index fea88f1..337e357 100644
--- a/extension.driver.php
+++ b/extension.driver.php
@@ -22,7 +22,7 @@ public function fetchNavigation()
         }
     }
 
-    public function update()
+    public function update($previousVersion = false)
     {
         if (file_exists(TMP.'/importcsv.csv')) {
             @unlink(TMP.'/importcsv.csv');

From 1d1a001401bb2e79f8745c199bfb88cab7852058 Mon Sep 17 00:00:00 2001
From: Nimantha Harshana Perera
Date: Tue, 13 Nov 2018 18:24:19 +0530
Subject: [PATCH 2/2] Fix duplicate entries in the exported CSV

By default, the entries are sorted by the core fetch function, which
produces duplicate entries in the exported CSV. This is a tricky fix
(it might not be the best approach, as I don't have a complete picture
of the extension).
---
 content/content.index.php | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/content/content.index.php b/content/content.index.php
index 4cde94a..8b2ac2b 100644
--- a/content/content.index.php
+++ b/content/content.index.php
@@ -389,7 +389,7 @@ private function __exportPage()
 
         for($offset = 0; $offset < $total; $offset += 100) {
-            $entries = EntryManager::fetch(null, $sectionID, 100, $offset, $where, $joins);
+            $entries = EntryManager::fetch(null, $sectionID, 100, $offset, $where, $joins, false, true, null, false);
             foreach ($entries as $entry) {
                 $line = array();
                 foreach ($fields as $field) {
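
Reviewer note on patch 2/2 (an assumption, not verified against this extension's
Symphony version): EntryManager::fetch() accepts optional arguments after $joins,
and the four values added here appear to map to the grouping, entry-building,
element-name and sort switches, with the trailing false turning off the core
sorting that the commit message blames for the duplicate rows. A minimal annotated
sketch of the changed call, with parameter names assumed from Symphony's core:

    // Hypothetical annotated form of the new call; the parameter names are
    // assumptions taken from Symphony's EntryManager::fetch() and may vary
    // between versions.
    $entries = EntryManager::fetch(
        null,        // $entry_id      - no specific entry; fetch by section
        $sectionID,  // $section_id    - the section being exported
        100,         // $limit         - page size used by the export loop
        $offset,     // $start         - current offset into the section
        $where,      // $where         - extra WHERE fragments for the export
        $joins,      // $joins         - extra JOIN fragments for the export
        false,       // $group         - keep the default (no grouping)
        true,        // $buildentries  - keep the default (build Entry objects)
        null,        // $element_names - keep the default (fetch all fields)
        false        // $enable_sort   - disable core sorting to avoid duplicates
    );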