From 3e7354d31cea6b3273449a3963cf71b0e9fe6a6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Tue, 23 Sep 2014 08:51:08 +0200 Subject: [PATCH 001/180] Removed dead code. Fixed indentation (and obvious bug because of it). Removed some System.out logging, fixed other. --- build.xml | 2 +- src/plugins/Library/SpiderIndexUploader.java | 287 ++++++++++-------- .../Library/client/FreenetArchiver.java | 25 +- src/plugins/Library/ui/MainPage.java | 15 +- src/plugins/Library/util/BTreeMap.java | 4 - .../Library/util/SkeletonBTreeMap.java | 27 +- .../util/concurrent/ObjectProcessor.java | 1 - 7 files changed, 174 insertions(+), 187 deletions(-) diff --git a/build.xml b/build.xml index fa7a6670..f555e14f 100644 --- a/build.xml +++ b/build.xml @@ -14,7 +14,7 @@ - + diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index 4ab7fa88..44e6a91f 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -81,23 +81,29 @@ public class SpiderIndexUploader { /** The temporary on-disk index. We merge stuff into this until it exceeds a threshold size, then * we create a new diskIdx and merge the old one into the idxFreenet. */ ProtoIndex idxDisk; + /** idxDisk gets merged into idxFreenet this long after the last merge completed. */ static final long MAX_TIME = 24*60*60*1000L; + /** idxDisk gets merged into idxFreenet after this many incoming updates from Spider. */ static final int MAX_UPDATES = 16; + /** idxDisk gets merged into idxFreenet after it has grown to this many terms. * Note that the entire main tree of terms (not the sub-trees with the positions and urls in) must * fit into memory during the merge process. */ static final int MAX_TERMS = 100*1000; - /** idxDisk gets merged into idxFreenet after it has grown to this many terms. 
- * Note that the entire main tree of terms (not the sub-trees with the positions and urls in) must - * fit into memory during the merge process. */ - static final int MAX_TERMS_NOT_UPLOADED = 10*1000; + + /** idxDisk gets merged into idxFreenet after it has grown to this many terms. + * Note that the entire main tree of terms (not the sub-trees with the positions and urls in) must + * fit into memory during the merge process. */ + static final int MAX_TERMS_NOT_UPLOADED = 10*1000; + /** Maximum size of a single entry, in TermPageEntry count, on disk. If we exceed this we force an * insert-to-freenet and move on to a new disk index. The problem is that the merge to Freenet has * to keep the whole of each entry in RAM. This is only true for the data being merged in - the * on-disk index - and not for the data on Freenet, which is pulled on demand. SCALABILITY */ static final int MAX_DISK_ENTRY_SIZE = 10000; + /** Like pushNumber, the number of the current disk dir, used to create idxDiskDir. */ private int dirNumber; static final String DISK_DIR_PREFIX = "library-temp-index-"; @@ -276,7 +282,25 @@ private void maybeMergeToFreenet(MutableBoolean maxDiskEntrySizeExceeded) { final ProtoIndex diskToMerge = idxDisk; final File dir = idxDiskDir; - System.out.println("Exceeded threshold, starting new disk index and starting merge from disk to Freenet..."); + System.out.println("" + + idxDisk.ttab.size() + " terms in index, " + + mergedToDisk + " merges, " + + (lastMergedToFreenet <= 0 + ? 
"never merged to Freenet" + : ("last merged to Freenet "+TimeUtil.formatTime(System.currentTimeMillis() - lastMergedToFreenet)) + "ago")); + + System.out.print("Exceeded threshold for "); + if (lastMergedToFreenet > 0 && idxDisk.ttab.size() > MAX_TERMS) + System.out.print("terms, "); + if (idxDisk.ttab.size() > MAX_TERMS_NOT_UPLOADED) + System.out.print("not uploaded terms, "); + if (mergedToDisk > MAX_UPDATES) + System.out.print("updates, "); + if (termTooBig) + System.out.print("term too big, "); + if (lastMergedToFreenet > 0 && (System.currentTimeMillis() - lastMergedToFreenet) > MAX_TIME) + System.out.print("time since last merge, "); + System.out.println("starting new disk index and starting merge from disk to Freenet..."); mergedToDisk = 0; lastMergedToFreenet = -1; idxDisk = null; @@ -394,10 +418,9 @@ private Closure>, TaskAbortException> SkeletonBTreeSet tree = entry.getValue(); if(logMINOR) Logger.minor(this, "Processing: "+key+" : "+tree); if(tree != null) - System.out.println("Merging data (on disk) in term "+key); + Logger.debug(this, "Merging data (on disk) in term "+key); else - System.out.println("Adding new term to disk index: "+key); - //System.out.println("handling " + key + ((tree == null)? " (new)":" (old)")); + Logger.debug(this, "Adding new term to disk index: "+key); if (tree == null) { entry.setValue(tree = makeEntryTree(leafsrlDisk)); } @@ -412,7 +435,6 @@ private Closure>, TaskAbortException> newtrees.remove(key); assert(tree.isBare()); if(logMINOR) Logger.minor(this, "Updated: "+key+" : "+tree); - //System.out.println("handled " + key); } }; } @@ -588,7 +610,7 @@ protected void mergeToFreenet(File diskDir) { * @param diskDir The folder the on-disk index is stored in. 
*/ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { - System.out.println("Merging on-disk index to Freenet: "+diskDir); + System.out.println("Merging on-disk index to Freenet: "+diskDir); if(lastUploadURI == null) { lastUploadURI = readURIFrom(new File(LAST_URL_FILENAME)); } @@ -635,16 +657,15 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { arch.waitForAsyncInserts(); long mergeEndTime = System.currentTimeMillis(); - System.out.print(entriesAdded + " entries merged in " + (mergeEndTime-mergeStartTime) + " ms, root at " + task4.meta + ", "); + System.out.println(entriesAdded + " entries merged in " + (mergeEndTime-mergeStartTime) + " ms, root at " + task4.meta); FreenetURI uri = (FreenetURI)task4.meta; lastUploadURI = uri; - System.out.println("Uploaded new index to "+uri); if(writeURITo(new File(LAST_URL_FILENAME), uri)) { - newtrees.deflate(); - diskToMerge = null; - terms = null; - System.out.println("Finished with disk index "+diskDir); - FileUtil.removeAll(diskDir); + newtrees.deflate(); + diskToMerge = null; + terms = null; + System.out.println("Finished with disk index "+diskDir); + FileUtil.removeAll(diskDir); } // Create the USK to redirect to the CHK at the top of the index. 
@@ -661,25 +682,25 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { } private void uploadUSKForFreenetIndex(FreenetURI uri) { - FreenetURI privUSK = spiderIndexURIs.getPrivateUSK(); - try { - FreenetURI tmp = pr.getHLSimpleClient().insertRedirect(privUSK, uri); - long ed; - synchronized(freenetMergeSync) { - ed = spiderIndexURIs.setEdition(tmp.getEdition()+1); - } - System.out.println("Uploaded index as USK to "+tmp); - - writeStringTo(new File(EDITION_FILENAME), Long.toString(ed)); - - } catch (InsertException e) { - System.err.println("Failed to upload USK for index update: "+e); - e.printStackTrace(); - Logger.error(this, "Failed to upload USK for index update", e); - } - } + FreenetURI privUSK = spiderIndexURIs.getPrivateUSK(); + try { + FreenetURI tmp = pr.getHLSimpleClient().insertRedirect(privUSK, uri); + long ed; + synchronized(freenetMergeSync) { + ed = spiderIndexURIs.setEdition(tmp.getEdition()+1); + } + System.out.println("Uploaded index as USK to "+tmp); + + writeStringTo(new File(EDITION_FILENAME), Long.toString(ed)); + + } catch (InsertException e) { + System.err.println("Failed to upload USK for index update: "+e); + e.printStackTrace(); + Logger.error(this, "Failed to upload USK for index update", e); + } + } - /** Create a Closure which will merge the subtrees from one index (on disk) into the subtrees + /** Create a Closure which will merge the subtrees from one index (on disk) into the subtrees * of another index (on Freenet). It will be called with each subtree from the on-Freenet * index, and will merge data from the relevant on-disk subtree. Both subtrees are initially * deflated, and should be deflated when we leave the method, to avoid running out of memory. 
@@ -687,113 +708,111 @@ private void uploadUSKForFreenetIndex(FreenetURI uri) { * @return */ private Closure>, TaskAbortException> createMergeFromTreeClosure(final SkeletonBTreeMap> newtrees) { - return new - Closure>, TaskAbortException>() { - /*@Override**/ public void invoke(Map.Entry> entry) throws TaskAbortException { - String key = entry.getKey(); - SkeletonBTreeSet tree = entry.getValue(); - if(logMINOR) Logger.minor(this, "Processing: "+key+" : "+tree); - //System.out.println("handling " + key + ((tree == null)? " (new)":" (old)")); - boolean newTree = false; - if (tree == null) { - entry.setValue(tree = makeEntryTree(leafsrl)); - newTree = true; - } - assert(tree.isBare()); - SortedSet data; - // Can't be run in parallel. - synchronized(inflateSync) { - newtrees.inflate(key, true); - SkeletonBTreeSet entries; - entries = newtrees.get(key); - // CONCURRENCY: Because the lower-level trees are packed by the top tree, the bottom - // trees (SkeletonBTreeSet's) are not independant of each other. When the newtrees - // inflate above runs, it can deflate a tree that is still in use by another instance - // of this callback. Therefore we must COPY IT AND DEFLATE IT INSIDE THE LOCK. - entries.inflate(); - data = new TreeSet(entries); - entries.deflate(); - assert(entries.isBare()); - } - if(tree != null) - - if(newTree) { - tree.addAll(data); - assert(tree.size() == data.size()); - System.out.println("Added data to Freenet for term "+key+" : "+data.size()); - } else { - int oldSize = tree.size(); - tree.update(data, null); - // Note that it is possible for data.size() + oldSize != tree.size(), because we might be merging data we've already merged. - // But most of the time it will add up. 
- System.out.println("Merged data to Freenet in term "+key+" : "+data.size()+" + "+oldSize+" -> "+tree.size()); - } - tree.deflate(); - assert(tree.isBare()); - if(logMINOR) Logger.minor(this, "Updated: "+key+" : "+tree); - //System.out.println("handled " + key); - } - }; - } + return new + Closure>, TaskAbortException>() { + /*@Override**/ public void invoke(Map.Entry> entry) throws TaskAbortException { + String key = entry.getKey(); + SkeletonBTreeSet tree = entry.getValue(); + if (logMINOR) Logger.minor(this, "Processing: "+key+" : "+tree); + boolean newTree = false; + if (tree == null) { + entry.setValue(tree = makeEntryTree(leafsrl)); + newTree = true; + } + assert(tree.isBare()); + SortedSet data; + // Can't be run in parallel. + synchronized(inflateSync) { + newtrees.inflate(key, true); + SkeletonBTreeSet entries; + entries = newtrees.get(key); + // CONCURRENCY: Because the lower-level trees are packed by the top tree, the bottom + // trees (SkeletonBTreeSet's) are not independant of each other. When the newtrees + // inflate above runs, it can deflate a tree that is still in use by another instance + // of this callback. Therefore we must COPY IT AND DEFLATE IT INSIDE THE LOCK. + entries.inflate(); + data = new TreeSet(entries); + entries.deflate(); + assert(entries.isBare()); + } + if (tree != null) { + if (newTree) { + tree.addAll(data); + assert(tree.size() == data.size()); + Logger.debug(this, "Added data to Freenet for term "+key+" : "+data.size()); + } else { + int oldSize = tree.size(); + tree.update(data, null); + // Note that it is possible for data.size() + oldSize != tree.size(), because we might be merging data we've already merged. + // But most of the time it will add up. 
+ Logger.debug(this, "Merged data to Freenet in term "+key+" : "+data.size()+" + "+oldSize+" -> "+tree.size()); + } + tree.deflate(); + assert(tree.isBare()); + if(logMINOR) Logger.minor(this, "Updated: "+key+" : "+tree); + } + } + }; + } - /** Update the overall metadata for the on-Freenet index from the on-disk index. */ + /** Update the overall metadata for the on-Freenet index from the on-disk index. */ private void updateOverallMetadata(ProtoIndex diskToMerge) { - idxFreenet.setName(diskToMerge.getName()); - idxFreenet.setOwnerEmail(diskToMerge.getOwnerEmail()); - idxFreenet.setOwner(diskToMerge.getOwner()); - // This is roughly accurate, it might not be exactly so if we process a bit out of order. - idxFreenet.setTotalPages(diskToMerge.getTotalPages() + Math.max(0,idxFreenet.getTotalPages())); - } + idxFreenet.setName(diskToMerge.getName()); + idxFreenet.setOwnerEmail(diskToMerge.getOwnerEmail()); + idxFreenet.setOwner(diskToMerge.getOwner()); + // This is roughly accurate, it might not be exactly so if we process a bit out of order. + idxFreenet.setTotalPages(diskToMerge.getTotalPages() + Math.max(0,idxFreenet.getTotalPages())); + } - /** Setup the serialisers for uploading to Freenet. These convert tree nodes to and from blocks + /** Setup the serialisers for uploading to Freenet. These convert tree nodes to and from blocks * on Freenet, essentially. 
*/ private void makeFreenetSerialisers() { - if(srl == null) { - srl = ProtoIndexSerialiser.forIndex(lastUploadURI, RequestStarter.BULK_SPLITFILE_PRIORITY_CLASS); - LiveArchiver,SimpleProgress> archiver = - (LiveArchiver,SimpleProgress>)(srl.getChildSerialiser()); - leafsrl = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_DEFAULT, archiver); - if(lastUploadURI == null) { - try { - idxFreenet = new ProtoIndex(new FreenetURI("CHK@"), "test", null, null, 0L); - } catch (java.net.MalformedURLException e) { - throw new AssertionError(e); - } - // FIXME more hacks: It's essential that we use the same FreenetArchiver instance here. - leafsrl.setSerialiserFor(idxFreenet); - } else { - try { - PullTask pull = new PullTask(lastUploadURI); - System.out.println("Pulling previous index "+lastUploadURI+" so can update it."); - srl.pull(pull); - System.out.println("Pulled previous index "+lastUploadURI+" - updating..."); - idxFreenet = pull.data; - if(idxFreenet.getSerialiser().getLeafSerialiser() != archiver) - throw new IllegalStateException("Different serialiser: "+idxFreenet.getSerialiser()+" should be "+leafsrl); - } catch (TaskAbortException e) { - Logger.error(this, "Failed to download previous index for spider update: "+e, e); - System.err.println("Failed to download previous index for spider update: "+e); - e.printStackTrace(); - synchronized(freenetMergeSync) { - pushBroken = true; - } - return; - } - } - } - } + if(srl == null) { + srl = ProtoIndexSerialiser.forIndex(lastUploadURI, RequestStarter.BULK_SPLITFILE_PRIORITY_CLASS); + LiveArchiver,SimpleProgress> archiver = + (LiveArchiver,SimpleProgress>)(srl.getChildSerialiser()); + leafsrl = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_DEFAULT, archiver); + if(lastUploadURI == null) { + try { + idxFreenet = new ProtoIndex(new FreenetURI("CHK@"), "test", null, null, 0L); + } catch (java.net.MalformedURLException e) { + throw new AssertionError(e); + } + // FIXME more hacks: It's 
essential that we use the same FreenetArchiver instance here. + leafsrl.setSerialiserFor(idxFreenet); + } else { + try { + PullTask pull = new PullTask(lastUploadURI); + System.out.println("Pulling previous index "+lastUploadURI+" so can update it."); + srl.pull(pull); + System.out.println("Pulled previous index "+lastUploadURI+" - updating..."); + idxFreenet = pull.data; + if(idxFreenet.getSerialiser().getLeafSerialiser() != archiver) + throw new IllegalStateException("Different serialiser: "+idxFreenet.getSerialiser()+" should be "+leafsrl); + } catch (TaskAbortException e) { + Logger.error(this, "Failed to download previous index for spider update: "+e, e); + System.err.println("Failed to download previous index for spider update: "+e); + e.printStackTrace(); + synchronized(freenetMergeSync) { + pushBroken = true; + } + return; + } + } + } + } - /** Set up the on-disk cache, which keeps a copy of everything we upload to Freenet, so we + /** Set up the on-disk cache, which keeps a copy of everything we upload to Freenet, so we * won't need to re-download it, which can be very slow and doesn't always succeed. 
*/ - private void setupFreenetCacheDir() { - if(FreenetArchiver.getCacheDir() == null) { - File dir = new File("library-spider-pushed-data-cache"); - dir.mkdir(); - FreenetArchiver.setCacheDir(dir); - } - } + private void setupFreenetCacheDir() { + if(FreenetArchiver.getCacheDir() == null) { + File dir = new File("library-spider-pushed-data-cache"); + dir.mkdir(); + FreenetArchiver.setCacheDir(dir); + } + } - protected static SkeletonBTreeSet makeEntryTree(ProtoIndexComponentSerialiser leafsrl) { + protected static SkeletonBTreeSet makeEntryTree(ProtoIndexComponentSerialiser leafsrl) { SkeletonBTreeSet tree = new SkeletonBTreeSet(ProtoIndex.BTREE_NODE_MIN); leafsrl.setSerialiserFor(tree); return tree; diff --git a/src/plugins/Library/client/FreenetArchiver.java b/src/plugins/Library/client/FreenetArchiver.java index 102b2a43..e00b04aa 100644 --- a/src/plugins/Library/client/FreenetArchiver.java +++ b/src/plugins/Library/client/FreenetArchiver.java @@ -289,17 +289,8 @@ public FreenetArchiver(NodeC } InsertBlock ib = new InsertBlock(tempB, new ClientMetadata(default_mime), target); - System.out.println("Inserting block for FreenetArchiver..."); long startTime = System.currentTimeMillis(); - // code for async insert - maybe be useful elsewhere - //ClientContext cctx = core.clientContext; - //InsertContext ictx = hlsc.getInsertContext(true); - //PutWaiter pw = new PutWaiter(); - //ClientPutter pu = hlsc.insert(ib, false, null, false, ictx, pw); - //pu.setPriorityClass(RequestStarter.INTERACTIVE_PRIORITY_CLASS, cctx, null); - //FreenetURI uri = pw.waitForCompletion(); - // bookkeeping. 
detects bugs in the SplitfileProgressEvent handler ProgressParts prog_old = null; if(progress != null) @@ -440,7 +431,9 @@ public synchronized void setPutter(ClientPutter put) { synchronized(FreenetArchiver.this) { if(semiAsyncPushes.add(this)) totalBytesPushing += size; - System.out.println("Pushing "+totalBytesPushing+" bytes on "+semiAsyncPushes.size()+" inserters"); + System.out.println("Added insert of " + size + " bytes, now pushing: " + + semiAsyncPushes.size() + + " (" + SizeUtil.formatSize(totalBytesPushing) + ")."); } } @@ -471,7 +464,9 @@ public synchronized Bucket getGeneratedMetadata() { @Override public void onFailure(InsertException e, BaseClientPutter state) { - System.out.println("Failed background insert ("+generatedURI+"), now running: "+semiAsyncPushes.size()+" ("+SizeUtil.formatSize(totalBytesPushing)+")."); + System.out.println("Failed background insert (" + generatedURI + "), now pushing: " + + semiAsyncPushes.size() + + " (" + SizeUtil.formatSize(totalBytesPushing) + ")."); synchronized(this) { failed = e; notifyAll(); @@ -502,12 +497,14 @@ public void onSuccess(BaseClientPutter state) { synchronized(FreenetArchiver.this) { if(semiAsyncPushes.remove(this)) totalBytesPushing -= size; - System.out.println("Completed background insert ("+generatedURI+") in "+(System.currentTimeMillis()-startTime)+"ms, now running: "+semiAsyncPushes.size()+" ("+SizeUtil.formatSize(totalBytesPushing)+")."); + System.out.println("Completed background insert (" + generatedURI + ") in " + + (System.currentTimeMillis()-startTime) + "ms, now pushing: " + + semiAsyncPushes.size() + + " (" + SizeUtil.formatSize(totalBytesPushing) + ")."); FreenetArchiver.this.notifyAll(); } if(ib != null) ib.free(); -// if(progress != null) progress.addPartKnown(0, true); } @@ -595,7 +592,7 @@ public void waitForAsyncInserts() throws TaskAbortException { System.out.println("Asynchronous inserts completed."); return; // Completed all pushes. 
} - System.out.println("Waiting for "+semiAsyncPushes.size()+" asynchronous inserts ("+SizeUtil.formatSize(totalBytesPushing)+")..."); + try { wait(); } catch (InterruptedException e) { diff --git a/src/plugins/Library/ui/MainPage.java b/src/plugins/Library/ui/MainPage.java index ddbb17c1..bf80afd6 100644 --- a/src/plugins/Library/ui/MainPage.java +++ b/src/plugins/Library/ui/MainPage.java @@ -147,22 +147,18 @@ public static MainPage processPostRequest(HTTPRequest request, HTMLNode contentN page.indexstring = ""; for (String bm : library.bookmarkKeys()){ String bmid = (Library.BOOKMARK_PREFIX + bm).trim(); - //Logger.normal(this, "checking for ~" + bm + " - "+request.isPartSet("~"+bm)); if(request.isPartSet("~"+bm)){ page.indexstring += bmid + " "; page.selectedBMIndexes.add(bmid); } } // Get other index list - //Logger.normal(page, "extra indexes : "+request.getIntPart("extraindexcount", 0)); for (int i = 0; i < request.getIntPart("extraindexcount", 0); i++) { if (request.isPartSet("index"+i)){ String otherindexuri = request.getPartAsStringFailsafe("index"+i, 256); - //Logger.normal(page, "Added index : "+otherindexuri); page.indexstring += otherindexuri + " "; page.selectedOtherIndexes.add(otherindexuri); - }//else - //Logger.normal(page, "other index #"+i+" unchecked"); + } } for (String string : etcIndexes) { if(string.length()>0){ @@ -285,10 +281,8 @@ public void writeContent(HTMLNode contentNode, MultiValueTable h if (search.isDone()) { if(search.hasGeneratedResultNode()){ contentNode.addChild(search.getHTMLNode()); - //Logger.normal(this, "Got pre generated result node."); }else try { - //Logger.normal(this, "Blocking to generate resultnode."); ResultNodeGenerator nodegenerator = new ResultNodeGenerator(search.getResult(), groupusk, showold, true); // js is being switch on always currently due to detection being off nodegenerator.run(); contentNode.addChild(nodegenerator.getPageEntryNode()); @@ -310,8 +304,6 @@ public void writeContent(HTMLNode contentNode, 
MultiValueTable h // refresh will GET so use a request id if (!js) { headers.put("Refresh", "2;url=" + refreshURL); - //contentNode.addChild("script", new String[]{"type", "src"}, new String[]{"text/javascript", path() + "static/" + (js ? "script.js" : "detect.js") + "?request="+search.hashCode()+(showold?"&showold=on":"")}).addChild("%", " "); - //contentNode.addChild("script", new String[]{"type", "src"}, new String[]{"text/javascript", path() + "static/" + (js ? "script.js" : "detect.js") + "?request="+search.hashCode()+(showold?"&showold=on":"")}).addChild("%", " "); } } }catch(TaskAbortException e) { @@ -351,10 +343,9 @@ private HTMLNode searchBox(){ // Shows the list of bookmarked indexes TODO show descriptions on mouseover ?? HTMLNode indexeslist = searchBox.addChild("ul", "class", "index-bookmark-list", "Select indexes"); for (String bm : library.bookmarkKeys()){ - //Logger.normal(this, "Checking for bm="+Library.BOOKMARK_PREFIX+bm+" in \""+indexuri + " = " + selectedBMIndexes.contains(Library.BOOKMARK_PREFIX+bm)+" "+indexuri.contains(Library.BOOKMARK_PREFIX+bm)); HTMLNode bmItem = indexeslist.addChild("li"); - bmItem.addChild("input", new String[]{"name", "type", "value", "title", (selectedBMIndexes.contains(Library.BOOKMARK_PREFIX+bm) ? "checked" : "size" )}, new String[]{"~"+bm, "checkbox", Library.BOOKMARK_PREFIX+bm, "Index uri : "+library.getBookmark(bm), "1" } , bm); - bmItem.addChild("input", new String[]{"name", "type", "value", "title", "class"}, new String[]{Commands.removebookmark+bm, "submit", "X", "Delete this bookmark", "index-bookmark-delete" }); + bmItem.addChild("input", new String[]{"name", "type", "value", "title", (selectedBMIndexes.contains(Library.BOOKMARK_PREFIX+bm) ? 
"checked" : "size" )}, new String[]{"~"+bm, "checkbox", Library.BOOKMARK_PREFIX+bm, "Index uri : "+library.getBookmark(bm), "1" } , bm); + bmItem.addChild("input", new String[]{"name", "type", "value", "title", "class"}, new String[]{Commands.removebookmark+bm, "submit", "X", "Delete this bookmark", "index-bookmark-delete" }); } int i=0; for (String uri : selectedOtherIndexes) { diff --git a/src/plugins/Library/util/BTreeMap.java b/src/plugins/Library/util/BTreeMap.java index 11b21aeb..94bb7c43 100644 --- a/src/plugins/Library/util/BTreeMap.java +++ b/src/plugins/Library/util/BTreeMap.java @@ -953,10 +953,6 @@ final void verifyNodeIntegrity(Node node) { verify(node.nodeSize() + 1 == node.rnodes.size()); verify(node.nodeSize() + 1 == node.lnodes.size()); } - /* DEBUG if (node._size > 0 && node._size != s) { - System.out.println(node._size + " vs " + s); - System.out.println(node.toTreeString("\t")); - }*/ verify(node._size < 0 || node._size == s); verify(node.nodeSize() <= ENT_MAX); diff --git a/src/plugins/Library/util/SkeletonBTreeMap.java b/src/plugins/Library/util/SkeletonBTreeMap.java index 84628770..5c43966d 100644 --- a/src/plugins/Library/util/SkeletonBTreeMap.java +++ b/src/plugins/Library/util/SkeletonBTreeMap.java @@ -579,8 +579,6 @@ public interface SkeletonMap new LinkedBlockingQueue, TaskAbortException>>(0x10), new HashMap, SkeletonNode>() ); - //System.out.println("Using scheduler"); - //int DEBUG_pushed = 0, DEBUG_popped = 0; try { nodequeue.add((SkeletonNode)root); @@ -589,8 +587,6 @@ public interface SkeletonMap // operation fails do { - //System.out.println("pushed: " + DEBUG_pushed + "; popped: " + DEBUG_popped); - // handle the inflated tasks and attach them to the tree. // THREAD progress tracker should prevent this from being run twice for the // same node, but what if we didn't use a progress tracker? hmm... 
@@ -654,8 +650,6 @@ public interface SkeletonMap throw new TaskAbortException("interrupted", e); } finally { proc_pull.close(); - //System.out.println("pushed: " + DEBUG_pushed + "; popped: " + DEBUG_popped); - //assert(DEBUG_pushed == DEBUG_popped); } } @@ -1111,7 +1105,6 @@ public void run() { reassignKeyToSweeper(key, parVClo); } - //System.out.println("parent:"+parent.getRange()+"\nseps:"+keys+"\nheld:"+held); parNClo.open(); // for each split-node, create a sweeper that will run when all its (k,v) @@ -1122,12 +1115,7 @@ public void run() { // reassign appropriate keys to the split-node's sweeper SortedSet subheld = subSet(held, n.lkey, n.rkey); - //try { assert(subheld.isEmpty() || compareL(n.lkey, subheld.first()) < 0 && compareR(subheld.last(), n.rkey) < 0); - //} catch (AssertionError e) { - // System.out.println(n.lkey + " " + subheld.first() + " " + subheld.last() + " " + n.rkey); - // throw e; - //} for (K key: subheld) { reassignKeyToSweeper(key, vClo); } @@ -1336,16 +1324,15 @@ private void handleLocalRemove(SkeletonNode n, K key, TrackingSweeper 10)) { count = 0; -// if(ccount++ > 10) { - System.out.println(/*System.identityHashCode(this) + " " + */proc_val + " " + proc_pull + " " + proc_push+ " "+proc_deflate); -// ccount = 0; -// } + Logger.debug(this, + "SkeletonBTreeMap update " + + proc_val + " " + + proc_pull + " " + + proc_push + " " + + proc_deflate); notifier.waitUpdate(1000); } progress = false; @@ -1369,8 +1356,6 @@ private void handleLocalRemove(SkeletonNode n, K key, TrackingSweeper res = proc_deflate.accept(); DeflateNode sw = res._0; diff --git a/src/plugins/Library/util/concurrent/ObjectProcessor.java b/src/plugins/Library/util/concurrent/ObjectProcessor.java index 7ad7907a..9899da0a 100644 --- a/src/plugins/Library/util/concurrent/ObjectProcessor.java +++ b/src/plugins/Library/util/concurrent/ObjectProcessor.java @@ -328,7 +328,6 @@ private static synchronized void ensureAutoHandler() { } catch (InterruptedException e) { // TODO LOW 
log this somewhere } - // System.out.println("pending " + pending.size()); if (t > 0) { continue; } synchronized (ObjectProcessor.class) { From bca4c8fd24ec8557b8d8cf2f4c890c8db9af339f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Tue, 23 Sep 2014 09:07:41 +0200 Subject: [PATCH 002/180] More code cleanups. --- src/plugins/Library/Library.java | 144 +-------------------------- src/plugins/Library/ui/MainPage.java | 14 +-- 2 files changed, 10 insertions(+), 148 deletions(-) diff --git a/src/plugins/Library/Library.java b/src/plugins/Library/Library.java index 6ce83230..df933cc8 100644 --- a/src/plugins/Library/Library.java +++ b/src/plugins/Library/Library.java @@ -241,16 +241,6 @@ public synchronized void saveState(){ * search for multiple terms in the same btree, but for now, turning off caching is the only viable option. */ -// /** -// ** Holds all the read-indexes. -// */ -// private Map rtab = new HashMap(); -// -// /** -// ** Holds all the writeable indexes. -// */ -// private Map wtab = new HashMap(); -// /** ** Holds all the bookmarks (aliases into the rtab). */ @@ -329,10 +319,10 @@ public Class getIndexType(FreenetURI indexuri) throws FetchException { public Class getIndexTypeFromMIME(String mime) { if (mime.equals(ProtoIndex.MIME_TYPE)) { - //return "YAML index"; + // YAML index return ProtoIndex.class; } else if (mime.equals(XMLIndex.MIME_TYPE)) { - //return "XML index"; + // XML index return XMLIndex.class; } else { throw new UnsupportedOperationException("Unknown mime-type for index: "+mime); @@ -340,103 +330,6 @@ public Class getIndexTypeFromMIME(String mime) { } -/* - KEYEXPLORER slightly more efficient version that depends on KeyExplorer - - /** - ** Get the index type giving a {@code FreenetURI}. This should have been - ** passed through {@link KeyExplorerUtils#sanitizeURI(List, String)} at - ** some point - ie. it must not contain a metastring (end with "/") or be - ** a USK. 
- * / - public Class getIndexType(FreenetURI uri) - throws FetchException, IOException, MetadataParseException, LowLevelGetException, KeyListenerConstructionException { - GetResult getresult = KeyExplorerUtils.simpleGet(pr, uri); - byte[] data = BucketTools.toByteArray(getresult.getData()); - - if (getresult.isMetaData()) { - try { - Metadata md = Metadata.construct(data); - - if (md.isArchiveManifest()) { - if (md.getArchiveType() == ARCHIVE_TYPE.TAR) { - return getIndexTypeFromManifest(uri, false, true); - - } else if (md.getArchiveType() == ARCHIVE_TYPE.ZIP) { - return getIndexTypeFromManifest(uri, true, false); - - } else { - throw new UnsupportedOperationException("not implemented - unknown archive manifest"); - } - - } else if (md.isSimpleManifest()) { - return getIndexTypeFromManifest(uri, false, false); - } - - return getIndexTypeFromSimpleMetadata(md); - - } catch (MetadataParseException e) { - throw new RuntimeException(e); - } - } else { - throw new UnsupportedOperationException("Found data instead of metadata; I do not have enough intelligence to decode this."); - } - } - - public Class getIndexTypeFromSimpleMetadata(Metadata md) { - String mime = md.getMIMEType(); - if (mime.equals(ProtoIndex.MIME_TYPE)) { - //return "YAML index"; - return ProtoIndex.class; - } else if (mime.equals(XMLIndex.MIME_TYPE)) { - //return "XML index"; - return XMLIndex.class; - } else { - throw new UnsupportedOperationException("Unknown mime-type for index"); - } - } - - public Class getIndexTypeFromManifest(FreenetURI furi, boolean zip, boolean tar) - throws FetchException, IOException, MetadataParseException, LowLevelGetException, KeyListenerConstructionException { - - boolean automf = true, deep = true, ml = true; - Metadata md = null; - - if (zip) - md = KeyExplorerUtils.zipManifestGet(pr, furi); - else if (tar) - md = KeyExplorerUtils.tarManifestGet(pr, furi, ".metadata"); - else { - md = KeyExplorerUtils.simpleManifestGet(pr, furi); - if (ml) { - md = 
KeyExplorerUtils.splitManifestGet(pr, md); - } - } - - if (md.isSimpleManifest()) { - // a subdir - HashMap docs = md.getDocuments(); - Metadata defaultDoc = md.getDefaultDocument(); - - if (defaultDoc != null) { - //return "(default doc method) " + getIndexTypeFromSimpleMetadata(defaultDoc); - return getIndexTypeFromSimpleMetadata(defaultDoc); - } - - if (docs.containsKey(ProtoIndex.DEFAULT_FILE)) { - //return "(doclist method) YAML index"; - return ProtoIndex.class; - } else if (docs.containsKey(XMLIndex.DEFAULT_FILE)) { - //return "(doclist method) XML index"; - return XMLIndex.class; - } else { - throw new UnsupportedOperationException("Could not find a supported index in the document-listings for " + furi.toString()); - } - } - - throw new UnsupportedOperationException("Parsed metadata but did not reach a simple manifest: " + furi.toString()); - } -*/ public Class getIndexType(File f) { if (f.getName().endsWith(ProtoIndexSerialiser.FILE_EXTENSION)) return ProtoIndex.class; @@ -452,8 +345,6 @@ public Object getAddressTypeFromString(String indexuri) { // OPT HIGH if it already ends with eg. *Index.DEFAULT_FILE, don't strip // the MetaString, and have getIndexType behave accordingly FreenetURI tempURI = new FreenetURI(indexuri); -// if (tempURI.hasMetaStrings()) { tempURI = tempURI.setMetaString(null); } -// if (tempURI.isUSK()) { tempURI = tempURI.sskForUSK(); } return tempURI; } catch (MalformedURLException e) { File file = new File(indexuri); @@ -606,14 +497,6 @@ public final ArrayList getIndices(String indexuris) throws InvalidSearchE return indices; } - // See comments near rtab. Can't use in parallel so not acceptable. 
-// /** -// * Method to get all of the instatiated Indexes -// */ -// public final Iterable getAllIndices() { -// return rtab.values(); -// } -// public final Index getIndex(String indexuri) throws InvalidSearchException, TaskAbortException { return getIndex(indexuri, null); } @@ -644,10 +527,6 @@ public final Index getIndex(String indexuri, String origIndexName) throws Invali throw new InvalidSearchException("Index bookmark '"+indexuri+" does not exist"); } - // See comments near rtab. Can't use in parallel so caching is dangerous. -// if (rtab.containsKey(indexuri)) -// return rtab.get(indexuri); -// Class indextype; Index index; Object indexkey; @@ -686,33 +565,16 @@ public final Index getIndex(String indexuri, String origIndexName) throws Invali throw new AssertionError(); } - // See comments near rtab. Can't use in parallel so caching is dangerous. - //rtab.put(indexuri, index); Logger.normal(this, "Loaded index type " + indextype.getName() + " at " + indexuri); return index; } catch (FetchException e) { - throw new TaskAbortException("Failed to fetch index " + indexuri+" : "+e, e, true); // can retry -/* KEYEXPLORER - } catch (IOException e) { - throw new TaskAbortException("Failed to fetch index " + indexuri, e, true); // can retry - - } catch (LowLevelGetException e) { - throw new TaskAbortException("Failed to fetch index " + indexuri, e, true); // can retry - - } catch (KeyListenerConstructionException e) { - throw new TaskAbortException("Failed to fetch index " + indexuri, e, true); // can retry - - } catch (MetadataParseException e) { - throw new TaskAbortException("Failed to parse index " + indexuri, e); -*/ + throw new TaskAbortException("Failed to fetch index " + indexuri+" : "+e, e, true); } catch (UnsupportedOperationException e) { throw new TaskAbortException("Failed to parse index " + indexuri+" : "+e, e); - } catch (RuntimeException e) { throw new TaskAbortException("Failed to load index " + indexuri+" : "+e, e); - } } diff --git 
a/src/plugins/Library/ui/MainPage.java b/src/plugins/Library/ui/MainPage.java index bf80afd6..fb683fe6 100644 --- a/src/plugins/Library/ui/MainPage.java +++ b/src/plugins/Library/ui/MainPage.java @@ -342,7 +342,7 @@ private HTMLNode searchBox(){ searchBox.addChild("input", new String[]{"type","name"}, new String[]{"hidden","js"}); // Shows the list of bookmarked indexes TODO show descriptions on mouseover ?? HTMLNode indexeslist = searchBox.addChild("ul", "class", "index-bookmark-list", "Select indexes"); - for (String bm : library.bookmarkKeys()){ + for (String bm : library.bookmarkKeys()) { HTMLNode bmItem = indexeslist.addChild("li"); bmItem.addChild("input", new String[]{"name", "type", "value", "title", (selectedBMIndexes.contains(Library.BOOKMARK_PREFIX+bm) ? "checked" : "size" )}, new String[]{"~"+bm, "checkbox", Library.BOOKMARK_PREFIX+bm, "Index uri : "+library.getBookmark(bm), "1" } , bm); bmItem.addChild("input", new String[]{"name", "type", "value", "title", "class"}, new String[]{Commands.removebookmark+bm, "submit", "X", "Delete this bookmark", "index-bookmark-delete" }); @@ -351,9 +351,9 @@ private HTMLNode searchBox(){ for (String uri : selectedOtherIndexes) { HTMLNode removeItem = indexeslist.addChild("li"); String showuri; - try{ + try { showuri = (new FreenetURI(uri)).toShortString(); - }catch(MalformedURLException e){ + } catch (MalformedURLException e) { showuri = uri; } removeItem.addChild("input", new String[]{"type", "name", "value", "checked"}, new String[]{"checkbox", "index"+i, uri, "checked", } , showuri); @@ -434,7 +434,7 @@ public static void addError(HTMLNode node, Throwable error, StringBuilder messag error1.addChild("br"); error1.addChild("#", " -- "+ste.toString()); } - if(error.getCause()!=null) + if (error.getCause()!=null) addError(error1, error.getCause(), messages); } } @@ -447,17 +447,17 @@ public static void addError(HTMLNode node, Throwable error, StringBuilder messag */ public static HTMLNode progressBar(Progress progress, 
boolean canFail) throws TaskAbortException { synchronized (progress){ - if( progress instanceof CompositeProgress && ((CompositeProgress) progress).getSubProgress()!=null && ((CompositeProgress) progress).getSubProgress().iterator().hasNext()){ + if (progress instanceof CompositeProgress && ((CompositeProgress) progress).getSubProgress()!=null && ((CompositeProgress) progress).getSubProgress().iterator().hasNext()){ // Put together progress bars for all the subProgress HTMLNode block = new HTMLNode("#"); block.addChild("tr").addChild("td", "colspan", "6", progress.getSubject() + " : "+progress.getStatus()); TaskAbortException firstError = null; boolean anySuccess = false; - if(canFail && progress instanceof Search) { + if (canFail && progress instanceof Search) { if(!(((Search)progress).innerCanFailAndStillComplete())) canFail = false; } else canFail = false; - if(((CompositeProgress) progress).getSubProgress() != null) + if (((CompositeProgress) progress).getSubProgress() != null) for (Progress progress1 : ((CompositeProgress) progress).getSubProgress()) { try { block.addChild(progressBar(progress1, canFail)); From 2fec59b710f76ac43703e33006d10e5fdeee207f Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 10:00:57 +0000 Subject: [PATCH 003/180] Setting up the structure. Doing the first refacturing. 
--HG-- branch : eclipse-separation --- .classpath | 3 +-- README | 12 ++++++++++++ shared/.classpath | 8 ++++++++ shared/.project | 17 +++++++++++++++++ .../library}/util/exec/AbstractExecution.java | 2 +- .../util/exec/BaseCompositeProgress.java | 2 +- .../library}/util/exec/ChainedProgress.java | 2 +- .../library}/util/exec/CompositeProgress.java | 2 +- .../freenet/library}/util/exec/Execution.java | 2 +- .../library}/util/exec/ExecutionAcceptor.java | 2 +- .../freenet/library}/util/exec/Progress.java | 2 +- .../library}/util/exec/ProgressParts.java | 2 +- .../library}/util/exec/SimpleProgress.java | 2 +- .../library}/util/exec/TaskAbortException.java | 2 +- .../util/exec/TaskCompleteException.java | 2 +- .../util/exec/TaskInProgressException.java | 2 +- .../library}/util/exec/package-info.java | 2 +- src/plugins/Library/Index.java | 2 +- src/plugins/Library/Library.java | 2 +- src/plugins/Library/Main.java | 4 ++-- src/plugins/Library/SpiderIndexUploader.java | 4 ++-- src/plugins/Library/VirtualIndex.java | 2 +- src/plugins/Library/WriteableIndex.java | 2 +- src/plugins/Library/client/FreenetArchiver.java | 6 +++--- src/plugins/Library/index/ProtoIndex.java | 12 ++++++------ .../index/ProtoIndexComponentSerialiser.java | 12 ++++++------ .../Library/index/ProtoIndexSerialiser.java | 4 ++-- src/plugins/Library/index/xml/FindRequest.java | 12 ++++++------ src/plugins/Library/index/xml/XMLIndex.java | 4 ++-- src/plugins/Library/io/serial/Archiver.java | 3 ++- src/plugins/Library/io/serial/FileArchiver.java | 5 +++-- .../Library/io/serial/IterableSerialiser.java | 2 +- src/plugins/Library/io/serial/LiveArchiver.java | 4 ++-- .../Library/io/serial/MapSerialiser.java | 3 ++- src/plugins/Library/io/serial/Packer.java | 4 ++-- .../Library/io/serial/ParallelSerialiser.java | 9 +++++---- .../Library/io/serial/ProgressTracker.java | 5 +++-- .../Library/io/serial/ScheduledSerialiser.java | 3 ++- src/plugins/Library/io/serial/Serialiser.java | 3 ++- 
src/plugins/Library/search/ResultSet.java | 4 ++-- src/plugins/Library/search/Search.java | 14 +++++++------- src/plugins/Library/ui/MainPage.java | 10 +++++----- src/plugins/Library/ui/TestInterface.java | 2 +- src/plugins/Library/util/Skeleton.java | 2 +- src/plugins/Library/util/SkeletonBTreeMap.java | 13 ++++++------- src/plugins/Library/util/SkeletonBTreeSet.java | 5 +++-- src/plugins/Library/util/SkeletonMap.java | 3 ++- src/plugins/Library/util/SkeletonTreeMap.java | 4 ++-- .../util/TaskAbortExceptionConvertor.java | 2 +- test/plugins/Library/Tester.java | 3 +++ test/plugins/Library/index/BIndexTest.java | 2 ++ test/plugins/Library/index/TermEntryTest.java | 2 +- test/plugins/Library/io/serial/PackerTest.java | 3 ++- uploader/.classpath | 8 ++++++++ uploader/.project | 17 +++++++++++++++++ 55 files changed, 171 insertions(+), 96 deletions(-) create mode 100644 shared/.classpath create mode 100644 shared/.project rename {src/plugins/Library => shared/src/freenet/library}/util/exec/AbstractExecution.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/util/exec/BaseCompositeProgress.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/util/exec/ChainedProgress.java (97%) rename {src/plugins/Library => shared/src/freenet/library}/util/exec/CompositeProgress.java (97%) rename {src/plugins/Library => shared/src/freenet/library}/util/exec/Execution.java (98%) rename {src/plugins/Library => shared/src/freenet/library}/util/exec/ExecutionAcceptor.java (93%) rename {src/plugins/Library => shared/src/freenet/library}/util/exec/Progress.java (97%) rename {src/plugins/Library => shared/src/freenet/library}/util/exec/ProgressParts.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/util/exec/SimpleProgress.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/util/exec/TaskAbortException.java (98%) rename {src/plugins/Library => shared/src/freenet/library}/util/exec/TaskCompleteException.java (93%) 
rename {src/plugins/Library => shared/src/freenet/library}/util/exec/TaskInProgressException.java (96%) rename {src/plugins/Library => shared/src/freenet/library}/util/exec/package-info.java (89%) create mode 100644 uploader/.classpath create mode 100644 uploader/.project diff --git a/.classpath b/.classpath index 97403d05..24982e93 100644 --- a/.classpath +++ b/.classpath @@ -4,8 +4,7 @@ - - + diff --git a/README b/README index 6da0a190..c1cdeaee 100644 --- a/README +++ b/README @@ -56,3 +56,15 @@ this may change soon : +== Ongoing work to split == + +The plugin is in src, test (for historical reasons). + +The uploader is in uploader/src and uploader/test depending on fcp and +shared parts. + +The shared parts are in shared/src and shared/test. shared means shared +between the plugin and the uploader. + +Plan: These are three separate eclipse project so eclipse will help +monitoring the dependencies. diff --git a/shared/.classpath b/shared/.classpath new file mode 100644 index 00000000..1ba3543a --- /dev/null +++ b/shared/.classpath @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/shared/.project b/shared/.project new file mode 100644 index 00000000..9d309433 --- /dev/null +++ b/shared/.project @@ -0,0 +1,17 @@ + + + library-shared + + + + + + org.eclipse.jdt.core.javabuilder + + + + + + org.eclipse.jdt.core.javanature + + diff --git a/src/plugins/Library/util/exec/AbstractExecution.java b/shared/src/freenet/library/util/exec/AbstractExecution.java similarity index 99% rename from src/plugins/Library/util/exec/AbstractExecution.java rename to shared/src/freenet/library/util/exec/AbstractExecution.java index 7b8a4c0f..137d0579 100644 --- a/src/plugins/Library/util/exec/AbstractExecution.java +++ b/shared/src/freenet/library/util/exec/AbstractExecution.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.util.exec; +package freenet.library.util.exec; import java.util.Date; import java.util.Set; diff --git a/src/plugins/Library/util/exec/BaseCompositeProgress.java b/shared/src/freenet/library/util/exec/BaseCompositeProgress.java similarity index 99% rename from src/plugins/Library/util/exec/BaseCompositeProgress.java rename to shared/src/freenet/library/util/exec/BaseCompositeProgress.java index 050f3f02..a2c98fb6 100644 --- a/src/plugins/Library/util/exec/BaseCompositeProgress.java +++ b/shared/src/freenet/library/util/exec/BaseCompositeProgress.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.exec; +package freenet.library.util.exec; /** ** A progress that accumulates its data from the given group of progresses. diff --git a/src/plugins/Library/util/exec/ChainedProgress.java b/shared/src/freenet/library/util/exec/ChainedProgress.java similarity index 97% rename from src/plugins/Library/util/exec/ChainedProgress.java rename to shared/src/freenet/library/util/exec/ChainedProgress.java index 2529064d..8c9e2ed5 100644 --- a/src/plugins/Library/util/exec/ChainedProgress.java +++ b/shared/src/freenet/library/util/exec/ChainedProgress.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.util.exec; +package freenet.library.util.exec; /** ** Interface representing a {@link Progress} made up a sequence of progresses, diff --git a/src/plugins/Library/util/exec/CompositeProgress.java b/shared/src/freenet/library/util/exec/CompositeProgress.java similarity index 97% rename from src/plugins/Library/util/exec/CompositeProgress.java rename to shared/src/freenet/library/util/exec/CompositeProgress.java index 37353c05..791d025e 100644 --- a/src/plugins/Library/util/exec/CompositeProgress.java +++ b/shared/src/freenet/library/util/exec/CompositeProgress.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.exec; +package freenet.library.util.exec; /** ** Interface representing a {@link Progress} made up of several subprogresses, diff --git a/src/plugins/Library/util/exec/Execution.java b/shared/src/freenet/library/util/exec/Execution.java similarity index 98% rename from src/plugins/Library/util/exec/Execution.java rename to shared/src/freenet/library/util/exec/Execution.java index aee8b54f..ab62d379 100644 --- a/src/plugins/Library/util/exec/Execution.java +++ b/shared/src/freenet/library/util/exec/Execution.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.util.exec; +package freenet.library.util.exec; import java.util.Date; //import java.util.concurrent.Future; diff --git a/src/plugins/Library/util/exec/ExecutionAcceptor.java b/shared/src/freenet/library/util/exec/ExecutionAcceptor.java similarity index 93% rename from src/plugins/Library/util/exec/ExecutionAcceptor.java rename to shared/src/freenet/library/util/exec/ExecutionAcceptor.java index 1bb7b3e0..bcbd1189 100644 --- a/src/plugins/Library/util/exec/ExecutionAcceptor.java +++ b/shared/src/freenet/library/util/exec/ExecutionAcceptor.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.exec; +package freenet.library.util.exec; /** ** Accepts state-changes on an execution. diff --git a/src/plugins/Library/util/exec/Progress.java b/shared/src/freenet/library/util/exec/Progress.java similarity index 97% rename from src/plugins/Library/util/exec/Progress.java rename to shared/src/freenet/library/util/exec/Progress.java index 95110c10..d970210d 100644 --- a/src/plugins/Library/util/exec/Progress.java +++ b/shared/src/freenet/library/util/exec/Progress.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.exec; +package freenet.library.util.exec; /** ** An abstraction of the progress of a task. 
diff --git a/src/plugins/Library/util/exec/ProgressParts.java b/shared/src/freenet/library/util/exec/ProgressParts.java similarity index 99% rename from src/plugins/Library/util/exec/ProgressParts.java rename to shared/src/freenet/library/util/exec/ProgressParts.java index f1438d02..c4915b9a 100644 --- a/src/plugins/Library/util/exec/ProgressParts.java +++ b/shared/src/freenet/library/util/exec/ProgressParts.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.exec; +package freenet.library.util.exec; import java.util.Formatter; diff --git a/src/plugins/Library/util/exec/SimpleProgress.java b/shared/src/freenet/library/util/exec/SimpleProgress.java similarity index 99% rename from src/plugins/Library/util/exec/SimpleProgress.java rename to shared/src/freenet/library/util/exec/SimpleProgress.java index de1f7426..17f6d537 100644 --- a/src/plugins/Library/util/exec/SimpleProgress.java +++ b/shared/src/freenet/library/util/exec/SimpleProgress.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.exec; +package freenet.library.util.exec; /** ** Basic progress implementation. 
The number of parts known is implicily equal diff --git a/src/plugins/Library/util/exec/TaskAbortException.java b/shared/src/freenet/library/util/exec/TaskAbortException.java similarity index 98% rename from src/plugins/Library/util/exec/TaskAbortException.java rename to shared/src/freenet/library/util/exec/TaskAbortException.java index 4100fd8b..0a00edff 100644 --- a/src/plugins/Library/util/exec/TaskAbortException.java +++ b/shared/src/freenet/library/util/exec/TaskAbortException.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.exec; +package freenet.library.util.exec; /** ** Thrown when a task aborts. DOCUMENT diff --git a/src/plugins/Library/util/exec/TaskCompleteException.java b/shared/src/freenet/library/util/exec/TaskCompleteException.java similarity index 93% rename from src/plugins/Library/util/exec/TaskCompleteException.java rename to shared/src/freenet/library/util/exec/TaskCompleteException.java index b81de820..3dfd9111 100644 --- a/src/plugins/Library/util/exec/TaskCompleteException.java +++ b/shared/src/freenet/library/util/exec/TaskCompleteException.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.exec; +package freenet.library.util.exec; /** ** Thrown when a task aborts due to the task already having been done, eg. 
by diff --git a/src/plugins/Library/util/exec/TaskInProgressException.java b/shared/src/freenet/library/util/exec/TaskInProgressException.java similarity index 96% rename from src/plugins/Library/util/exec/TaskInProgressException.java rename to shared/src/freenet/library/util/exec/TaskInProgressException.java index 5ec800b1..a15805ad 100644 --- a/src/plugins/Library/util/exec/TaskInProgressException.java +++ b/shared/src/freenet/library/util/exec/TaskInProgressException.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.exec; +package freenet.library.util.exec; /** ** Thrown when a task is already in progress elsewhere. diff --git a/src/plugins/Library/util/exec/package-info.java b/shared/src/freenet/library/util/exec/package-info.java similarity index 89% rename from src/plugins/Library/util/exec/package-info.java rename to shared/src/freenet/library/util/exec/package-info.java index 24a5a855..0bf1883d 100644 --- a/src/plugins/Library/util/exec/package-info.java +++ b/shared/src/freenet/library/util/exec/package-info.java @@ -6,4 +6,4 @@ ** ** @author infinity0 */ -package plugins.Library.util.exec; +package freenet.library.util.exec; diff --git a/src/plugins/Library/Index.java b/src/plugins/Library/Index.java index 196ffb1a..e76ffe6a 100644 --- a/src/plugins/Library/Index.java +++ b/src/plugins/Library/Index.java @@ -5,9 +5,9 @@ import plugins.Library.index.TermEntry; import plugins.Library.index.URIEntry; -import plugins.Library.util.exec.Execution; import freenet.keys.FreenetURI; +import freenet.library.util.exec.Execution; import java.util.Set; diff --git a/src/plugins/Library/Library.java b/src/plugins/Library/Library.java index df933cc8..5b8a84c4 100644 --- a/src/plugins/Library/Library.java +++ b/src/plugins/Library/Library.java @@ -26,7 +26,6 @@ import 
plugins.Library.io.ObjectStreamWriter; import plugins.Library.io.serial.Serialiser.PullTask; import plugins.Library.search.InvalidSearchException; -import plugins.Library.util.exec.TaskAbortException; import freenet.client.FetchContext; import freenet.client.FetchException; @@ -46,6 +45,7 @@ import freenet.client.events.ExpectedMIMEEvent; import freenet.keys.FreenetURI; import freenet.keys.USK; +import freenet.library.util.exec.TaskAbortException; import freenet.node.NodeClientCore; import freenet.node.RequestClient; import freenet.node.RequestStarter; diff --git a/src/plugins/Library/Main.java b/src/plugins/Library/Main.java index 600cb330..ba52dcbf 100644 --- a/src/plugins/Library/Main.java +++ b/src/plugins/Library/Main.java @@ -37,8 +37,6 @@ import plugins.Library.util.SkeletonBTreeSet; import plugins.Library.util.TaskAbortExceptionConvertor; import plugins.Library.util.concurrent.Executors; -import plugins.Library.util.exec.SimpleProgress; -import plugins.Library.util.exec.TaskAbortException; import plugins.Library.util.func.Closure; import freenet.pluginmanager.FredPlugin; @@ -53,6 +51,8 @@ import freenet.keys.FreenetURI; import freenet.keys.InsertableClientSSK; import freenet.l10n.BaseL10n.LANGUAGE; +import freenet.library.util.exec.SimpleProgress; +import freenet.library.util.exec.TaskAbortException; import freenet.pluginmanager.FredPluginFCP; import freenet.support.Logger; diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index 44e6a91f..1e1a8b64 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -33,11 +33,11 @@ import plugins.Library.util.SkeletonBTreeMap; import plugins.Library.util.SkeletonBTreeSet; import plugins.Library.util.TaskAbortExceptionConvertor; -import plugins.Library.util.exec.SimpleProgress; -import plugins.Library.util.exec.TaskAbortException; import plugins.Library.util.func.Closure; import freenet.client.InsertException; 
import freenet.keys.FreenetURI; +import freenet.library.util.exec.SimpleProgress; +import freenet.library.util.exec.TaskAbortException; import freenet.node.RequestStarter; import freenet.pluginmanager.PluginNotFoundException; import freenet.pluginmanager.PluginReplySender; diff --git a/src/plugins/Library/VirtualIndex.java b/src/plugins/Library/VirtualIndex.java index 9fae7c91..9648fef4 100644 --- a/src/plugins/Library/VirtualIndex.java +++ b/src/plugins/Library/VirtualIndex.java @@ -3,9 +3,9 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library; +import freenet.library.util.exec.Execution; import plugins.Library.index.TermEntry; import plugins.Library.index.URIEntry; -import plugins.Library.util.exec.Execution; /** ** Represents a virtual index that gets its data from another plugin. diff --git a/src/plugins/Library/WriteableIndex.java b/src/plugins/Library/WriteableIndex.java index 5debe962..158656f1 100644 --- a/src/plugins/Library/WriteableIndex.java +++ b/src/plugins/Library/WriteableIndex.java @@ -5,9 +5,9 @@ import plugins.Library.index.TermEntry; import plugins.Library.index.URIEntry; -import plugins.Library.util.exec.Execution; import freenet.keys.FreenetURI; +import freenet.library.util.exec.Execution; import java.util.Collection; import java.util.Set; diff --git a/src/plugins/Library/client/FreenetArchiver.java b/src/plugins/Library/client/FreenetArchiver.java index e00b04aa..fabd1744 100644 --- a/src/plugins/Library/client/FreenetArchiver.java +++ b/src/plugins/Library/client/FreenetArchiver.java @@ -14,9 +14,6 @@ import plugins.Library.io.ObjectStreamReader; import plugins.Library.io.ObjectStreamWriter; import plugins.Library.io.serial.LiveArchiver; -import plugins.Library.util.exec.ProgressParts; -import plugins.Library.util.exec.SimpleProgress; -import plugins.Library.util.exec.TaskAbortException; import freenet.client.ClientMetadata; import freenet.client.FetchException; @@ -37,6 +34,9 @@ import freenet.crypt.SHA256; 
import freenet.keys.CHKBlock; import freenet.keys.FreenetURI; +import freenet.library.util.exec.ProgressParts; +import freenet.library.util.exec.SimpleProgress; +import freenet.library.util.exec.TaskAbortException; import freenet.node.NodeClientCore; import freenet.node.RequestClient; import freenet.node.RequestStarter; diff --git a/src/plugins/Library/index/ProtoIndex.java b/src/plugins/Library/index/ProtoIndex.java index 002e03bc..c9f9e3a6 100644 --- a/src/plugins/Library/index/ProtoIndex.java +++ b/src/plugins/Library/index/ProtoIndex.java @@ -11,15 +11,15 @@ import plugins.Library.util.SkeletonBTreeMap; import plugins.Library.util.SkeletonBTreeSet; import plugins.Library.util.DataNotLoadedException; -import plugins.Library.util.exec.Progress; -import plugins.Library.util.exec.ProgressParts; -import plugins.Library.util.exec.ChainedProgress; -import plugins.Library.util.exec.Execution; -import plugins.Library.util.exec.AbstractExecution; -import plugins.Library.util.exec.TaskAbortException; import plugins.Library.util.concurrent.Executors; import freenet.keys.FreenetURI; +import freenet.library.util.exec.AbstractExecution; +import freenet.library.util.exec.ChainedProgress; +import freenet.library.util.exec.Execution; +import freenet.library.util.exec.Progress; +import freenet.library.util.exec.ProgressParts; +import freenet.library.util.exec.TaskAbortException; import freenet.support.Logger; import java.util.AbstractSet; diff --git a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java index 73d6f4f4..0ca61866 100644 --- a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java @@ -8,12 +8,6 @@ import plugins.Library.util.SkeletonTreeMap; import plugins.Library.util.SkeletonBTreeMap; import plugins.Library.util.SkeletonBTreeSet; -import plugins.Library.util.exec.ProgressParts; -import plugins.Library.util.exec.Progress; 
-import plugins.Library.util.exec.SimpleProgress; -import plugins.Library.util.exec.BaseCompositeProgress; -import plugins.Library.util.exec.TaskAbortException; -import plugins.Library.util.exec.TaskInProgressException; import plugins.Library.io.serial.Serialiser.*; import plugins.Library.io.serial.Serialiser; import plugins.Library.io.serial.Translator; @@ -30,6 +24,12 @@ import plugins.Library.io.YamlReaderWriter; import freenet.keys.FreenetURI; +import freenet.library.util.exec.BaseCompositeProgress; +import freenet.library.util.exec.Progress; +import freenet.library.util.exec.ProgressParts; +import freenet.library.util.exec.SimpleProgress; +import freenet.library.util.exec.TaskAbortException; +import freenet.library.util.exec.TaskInProgressException; import freenet.node.RequestStarter; import java.io.File; diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java b/src/plugins/Library/index/ProtoIndexSerialiser.java index 09c1a10a..5b4c7f5b 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexSerialiser.java @@ -7,8 +7,6 @@ import plugins.Library.client.FreenetArchiver; import plugins.Library.util.SkeletonBTreeMap; import plugins.Library.util.SkeletonBTreeSet; -import plugins.Library.util.exec.SimpleProgress; -import plugins.Library.util.exec.TaskAbortException; import plugins.Library.io.serial.Serialiser.*; import plugins.Library.io.serial.LiveArchiver; import plugins.Library.io.serial.Serialiser; @@ -19,6 +17,8 @@ import plugins.Library.io.DataFormatException; import freenet.keys.FreenetURI; +import freenet.library.util.exec.SimpleProgress; +import freenet.library.util.exec.TaskAbortException; import java.util.Collection; import java.util.Set; diff --git a/src/plugins/Library/index/xml/FindRequest.java b/src/plugins/Library/index/xml/FindRequest.java index 74550fcb..563946ce 100644 --- a/src/plugins/Library/index/xml/FindRequest.java +++ b/src/plugins/Library/index/xml/FindRequest.java @@ -10,18 
+10,18 @@ import freenet.client.events.SendingToNetworkEvent; import freenet.client.events.SplitfileCompatibilityModeEvent; import freenet.client.events.SplitfileProgressEvent; +import freenet.library.util.exec.AbstractExecution; +import freenet.library.util.exec.ChainedProgress; +import freenet.library.util.exec.Execution; +import freenet.library.util.exec.Progress; +import freenet.library.util.exec.ProgressParts; +import freenet.library.util.exec.TaskAbortException; import freenet.support.Logger; import java.util.Collections; import java.util.ArrayList; import java.util.Set; -import plugins.Library.util.exec.AbstractExecution; -import plugins.Library.util.exec.ChainedProgress; -import plugins.Library.util.exec.Execution; -import plugins.Library.util.exec.Progress; -import plugins.Library.util.exec.ProgressParts; -import plugins.Library.util.exec.TaskAbortException; import plugins.Library.index.TermPageEntry; import plugins.Library.index.TermEntry; diff --git a/src/plugins/Library/index/xml/XMLIndex.java b/src/plugins/Library/index/xml/XMLIndex.java index b3851397..afa4b569 100644 --- a/src/plugins/Library/index/xml/XMLIndex.java +++ b/src/plugins/Library/index/xml/XMLIndex.java @@ -9,8 +9,6 @@ import plugins.Library.index.TermPageEntry; import plugins.Library.index.URIEntry; import plugins.Library.search.InvalidSearchException; -import plugins.Library.util.exec.Execution; -import plugins.Library.util.exec.TaskAbortException; import freenet.support.Fields; import freenet.support.Logger; @@ -28,6 +26,8 @@ import freenet.node.RequestStarter; import freenet.node.RequestClient; import freenet.keys.FreenetURI; +import freenet.library.util.exec.Execution; +import freenet.library.util.exec.TaskAbortException; import freenet.pluginmanager.PluginRespirator; import freenet.support.Executor; diff --git a/src/plugins/Library/io/serial/Archiver.java b/src/plugins/Library/io/serial/Archiver.java index 62ddd136..7241a771 100644 --- a/src/plugins/Library/io/serial/Archiver.java 
+++ b/src/plugins/Library/io/serial/Archiver.java @@ -4,11 +4,12 @@ package plugins.Library.io.serial; import plugins.Library.io.serial.Serialiser.*; -import plugins.Library.util.exec.TaskAbortException; import java.util.Collection; import java.util.Map; +import freenet.library.util.exec.TaskAbortException; + /** ** An interface that handles a single {@link Serialiser.Task}. ** diff --git a/src/plugins/Library/io/serial/FileArchiver.java b/src/plugins/Library/io/serial/FileArchiver.java index 34290471..8adc1375 100644 --- a/src/plugins/Library/io/serial/FileArchiver.java +++ b/src/plugins/Library/io/serial/FileArchiver.java @@ -9,11 +9,12 @@ import java.io.IOException; import java.nio.channels.FileLock; +import freenet.library.util.exec.SimpleProgress; +import freenet.library.util.exec.TaskAbortException; + import plugins.Library.io.ObjectStreamReader; import plugins.Library.io.ObjectStreamWriter; import plugins.Library.io.serial.Serialiser.Task; -import plugins.Library.util.exec.SimpleProgress; -import plugins.Library.util.exec.TaskAbortException; /** ** Converts between a map of {@link String} to {@link Object}, and a file on diff --git a/src/plugins/Library/io/serial/IterableSerialiser.java b/src/plugins/Library/io/serial/IterableSerialiser.java index ee457833..c0a3c2f9 100644 --- a/src/plugins/Library/io/serial/IterableSerialiser.java +++ b/src/plugins/Library/io/serial/IterableSerialiser.java @@ -3,8 +3,8 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.io.serial; +import freenet.library.util.exec.TaskAbortException; import plugins.Library.io.serial.Serialiser.*; -import plugins.Library.util.exec.TaskAbortException; /** ** An interface that handles an iterable group of {@link Serialiser.Task}s. 
diff --git a/src/plugins/Library/io/serial/LiveArchiver.java b/src/plugins/Library/io/serial/LiveArchiver.java index 85d03422..36c88d7a 100644 --- a/src/plugins/Library/io/serial/LiveArchiver.java +++ b/src/plugins/Library/io/serial/LiveArchiver.java @@ -3,9 +3,9 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.io.serial; +import freenet.library.util.exec.Progress; +import freenet.library.util.exec.TaskAbortException; import plugins.Library.io.serial.Serialiser.*; -import plugins.Library.util.exec.Progress; -import plugins.Library.util.exec.TaskAbortException; /** ** An interface that handles a single {@link Serialiser.Task} and sends live diff --git a/src/plugins/Library/io/serial/MapSerialiser.java b/src/plugins/Library/io/serial/MapSerialiser.java index 89a65888..9a39208e 100644 --- a/src/plugins/Library/io/serial/MapSerialiser.java +++ b/src/plugins/Library/io/serial/MapSerialiser.java @@ -4,10 +4,11 @@ package plugins.Library.io.serial; import plugins.Library.io.serial.Serialiser.*; -import plugins.Library.util.exec.TaskAbortException; import java.util.Map; +import freenet.library.util.exec.TaskAbortException; + /** ** An interface that handles a map of {@link Serialiser.Task}s. 
As well as the ** metadata associated with each individual task, each map also has an diff --git a/src/plugins/Library/io/serial/Packer.java b/src/plugins/Library/io/serial/Packer.java index 363155ec..520fdfcb 100644 --- a/src/plugins/Library/io/serial/Packer.java +++ b/src/plugins/Library/io/serial/Packer.java @@ -6,8 +6,6 @@ import plugins.Library.io.serial.Serialiser.*; import plugins.Library.util.IdentityComparator; import plugins.Library.util.concurrent.ObjectProcessor; -import plugins.Library.util.exec.TaskAbortException; -import plugins.Library.util.exec.TaskCompleteException; import java.util.Collections; import java.util.Collection; @@ -22,6 +20,8 @@ import java.util.HashSet; import java.util.TreeSet; +import freenet.library.util.exec.TaskAbortException; +import freenet.library.util.exec.TaskCompleteException; import freenet.support.Logger; /** diff --git a/src/plugins/Library/io/serial/ParallelSerialiser.java b/src/plugins/Library/io/serial/ParallelSerialiser.java index f59cd10a..b5160923 100644 --- a/src/plugins/Library/io/serial/ParallelSerialiser.java +++ b/src/plugins/Library/io/serial/ParallelSerialiser.java @@ -8,10 +8,6 @@ import plugins.Library.util.concurrent.Scheduler; import plugins.Library.util.concurrent.ObjectProcessor; import plugins.Library.util.concurrent.Executors; -import plugins.Library.util.exec.Progress; -import plugins.Library.util.exec.TaskAbortException; -import plugins.Library.util.exec.TaskInProgressException; -import plugins.Library.util.exec.TaskCompleteException; import plugins.Library.util.func.SafeClosure; import static plugins.Library.util.func.Tuples.X2; // also imports the class @@ -27,6 +23,11 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.ConcurrentMap; +import freenet.library.util.exec.Progress; +import freenet.library.util.exec.TaskAbortException; +import freenet.library.util.exec.TaskCompleteException; +import freenet.library.util.exec.TaskInProgressException; + /** ** An {@link 
IterableSerialiser} that uses threads to handle tasks given to it ** in parallel, and keeps track of task progress. diff --git a/src/plugins/Library/io/serial/ProgressTracker.java b/src/plugins/Library/io/serial/ProgressTracker.java index 8769193c..340ef9df 100644 --- a/src/plugins/Library/io/serial/ProgressTracker.java +++ b/src/plugins/Library/io/serial/ProgressTracker.java @@ -4,14 +4,15 @@ package plugins.Library.io.serial; import plugins.Library.io.serial.Serialiser.*; -import plugins.Library.util.exec.Progress; -import plugins.Library.util.exec.TaskInProgressException; import plugins.Library.util.CompositeIterable; import java.util.Iterator; import java.util.Map; import java.util.WeakHashMap; +import freenet.library.util.exec.Progress; +import freenet.library.util.exec.TaskInProgressException; + /** ** Keeps track of a task's progress and provides methods to retrieve this data. ** For this to function properly, the data/meta for push/pull tasks MUST NOT diff --git a/src/plugins/Library/io/serial/ScheduledSerialiser.java b/src/plugins/Library/io/serial/ScheduledSerialiser.java index 09020c8d..3c413a12 100644 --- a/src/plugins/Library/io/serial/ScheduledSerialiser.java +++ b/src/plugins/Library/io/serial/ScheduledSerialiser.java @@ -6,12 +6,13 @@ import plugins.Library.io.serial.Serialiser.*; import plugins.Library.util.concurrent.Scheduler; import plugins.Library.util.concurrent.ObjectProcessor; -import plugins.Library.util.exec.TaskAbortException; import plugins.Library.util.func.Tuples.X2; import java.util.Map; import java.util.concurrent.BlockingQueue; +import freenet.library.util.exec.TaskAbortException; + /** ** An interface for asynchronous task execution. The methods return objects ** for managing and scheduling tasks. 
diff --git a/src/plugins/Library/io/serial/Serialiser.java b/src/plugins/Library/io/serial/Serialiser.java index 736d41cf..bc8042d2 100644 --- a/src/plugins/Library/io/serial/Serialiser.java +++ b/src/plugins/Library/io/serial/Serialiser.java @@ -3,11 +3,12 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.io.serial; -import plugins.Library.util.exec.Progress; import java.util.Collection; import java.util.Map; +import freenet.library.util.exec.Progress; + /** ** An empty marker interface for serialisation classes. It defines some nested ** subclasses that acts as a unified interface between these classes. diff --git a/src/plugins/Library/search/ResultSet.java b/src/plugins/Library/search/ResultSet.java index b9df2990..2a2916f0 100644 --- a/src/plugins/Library/search/ResultSet.java +++ b/src/plugins/Library/search/ResultSet.java @@ -7,9 +7,9 @@ import plugins.Library.index.TermIndexEntry; import plugins.Library.index.TermTermEntry; import plugins.Library.index.TermPageEntry; -import plugins.Library.util.exec.Execution; -import plugins.Library.util.exec.TaskAbortException; +import freenet.library.util.exec.Execution; +import freenet.library.util.exec.TaskAbortException; import freenet.support.Logger; import java.util.Iterator; diff --git a/src/plugins/Library/search/Search.java b/src/plugins/Library/search/Search.java index f01a79a9..9edd9c51 100644 --- a/src/plugins/Library/search/Search.java +++ b/src/plugins/Library/search/Search.java @@ -18,12 +18,12 @@ import plugins.Library.index.TermEntry; import plugins.Library.search.ResultSet.ResultOperation; import plugins.Library.ui.ResultNodeGenerator; -import plugins.Library.util.exec.AbstractExecution; -import plugins.Library.util.exec.CompositeProgress; -import plugins.Library.util.exec.Execution; -import plugins.Library.util.exec.Progress; -import plugins.Library.util.exec.ProgressParts; -import plugins.Library.util.exec.TaskAbortException; +import 
freenet.library.util.exec.AbstractExecution; +import freenet.library.util.exec.CompositeProgress; +import freenet.library.util.exec.Execution; +import freenet.library.util.exec.Progress; +import freenet.library.util.exec.ProgressParts; +import freenet.library.util.exec.TaskAbortException; import freenet.support.Executor; import freenet.support.HTMLNode; import freenet.support.Logger; @@ -497,7 +497,7 @@ public boolean hasGeneratedResultNode(){ /** * After this finishes running, the status of this Search object will be correct, stimulates the creation of the result if all subreqquests are complete and the result isn't made - * @throws plugins.Library.util.exec.TaskAbortException + * @throws freenet.library.util.exec.TaskAbortException */ private synchronized void setStatus() throws TaskAbortException{ switch (status){ diff --git a/src/plugins/Library/ui/MainPage.java b/src/plugins/Library/ui/MainPage.java index fb683fe6..4995b092 100644 --- a/src/plugins/Library/ui/MainPage.java +++ b/src/plugins/Library/ui/MainPage.java @@ -6,13 +6,13 @@ import plugins.Library.Library; import plugins.Library.search.InvalidSearchException; import plugins.Library.search.Search; -import plugins.Library.util.exec.ChainedProgress; -import plugins.Library.util.exec.CompositeProgress; -import plugins.Library.util.exec.Progress; -import plugins.Library.util.exec.ProgressParts; -import plugins.Library.util.exec.TaskAbortException; import freenet.keys.FreenetURI; +import freenet.library.util.exec.ChainedProgress; +import freenet.library.util.exec.CompositeProgress; +import freenet.library.util.exec.Progress; +import freenet.library.util.exec.ProgressParts; +import freenet.library.util.exec.TaskAbortException; import freenet.pluginmanager.PluginRespirator; import freenet.support.HTMLNode; import freenet.support.Logger; diff --git a/src/plugins/Library/ui/TestInterface.java b/src/plugins/Library/ui/TestInterface.java index 86e19e79..92e306c5 100644 --- 
a/src/plugins/Library/ui/TestInterface.java +++ b/src/plugins/Library/ui/TestInterface.java @@ -3,10 +3,10 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.ui; -import plugins.Library.util.exec.Execution; import plugins.Library.search.Search; import plugins.Library.*; +import freenet.library.util.exec.Execution; import freenet.support.Logger; import java.util.HashMap; diff --git a/src/plugins/Library/util/Skeleton.java b/src/plugins/Library/util/Skeleton.java index 652e808a..54ede848 100644 --- a/src/plugins/Library/util/Skeleton.java +++ b/src/plugins/Library/util/Skeleton.java @@ -3,8 +3,8 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.util; +import freenet.library.util.exec.TaskAbortException; import plugins.Library.io.serial.Serialiser; -import plugins.Library.util.exec.TaskAbortException; /** ** Defines an interface for an extension of a data structure which is only diff --git a/src/plugins/Library/util/SkeletonBTreeMap.java b/src/plugins/Library/util/SkeletonBTreeMap.java index 5c43966d..c67bd7a5 100644 --- a/src/plugins/Library/util/SkeletonBTreeMap.java +++ b/src/plugins/Library/util/SkeletonBTreeMap.java @@ -9,8 +9,6 @@ import plugins.Library.io.serial.MapSerialiser; import plugins.Library.io.serial.Translator; import plugins.Library.io.DataFormatException; -import plugins.Library.util.exec.TaskAbortException; -import plugins.Library.util.exec.TaskCompleteException; import plugins.Library.util.func.Tuples.X2; import plugins.Library.util.func.Tuples.X3; @@ -37,12 +35,8 @@ import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import plugins.Library.util.exec.Progress; -import plugins.Library.util.exec.ProgressParts; -import plugins.Library.util.exec.BaseCompositeProgress; import plugins.Library.io.serial.Serialiser; import plugins.Library.io.serial.ProgressTracker; -import 
plugins.Library.util.exec.TaskCompleteException; import plugins.Library.util.BTreeMap.Node; import plugins.Library.util.concurrent.Scheduler; @@ -53,6 +47,11 @@ import java.util.TreeMap; import java.util.HashMap; +import freenet.library.util.exec.BaseCompositeProgress; +import freenet.library.util.exec.Progress; +import freenet.library.util.exec.ProgressParts; +import freenet.library.util.exec.TaskAbortException; +import freenet.library.util.exec.TaskCompleteException; import freenet.support.Logger; import plugins.Library.util.Sorted; import plugins.Library.util.concurrent.BoundedPriorityBlockingQueue; @@ -596,7 +595,7 @@ public interface SkeletonMap SkeletonNode parent = res._1; TaskAbortException ex = res._2; if (ex != null) { - assert(!(ex instanceof plugins.Library.util.exec.TaskInProgressException)); // by contract of ScheduledSerialiser + assert(!(ex instanceof freenet.library.util.exec.TaskInProgressException)); // by contract of ScheduledSerialiser if (!(ex instanceof TaskCompleteException)) { // TODO LOW maybe dump it somewhere else and throw it at the end... throw ex; diff --git a/src/plugins/Library/util/SkeletonBTreeSet.java b/src/plugins/Library/util/SkeletonBTreeSet.java index 33647eec..2f9230ec 100644 --- a/src/plugins/Library/util/SkeletonBTreeSet.java +++ b/src/plugins/Library/util/SkeletonBTreeSet.java @@ -8,7 +8,6 @@ import plugins.Library.io.serial.IterableSerialiser; import plugins.Library.io.serial.MapSerialiser; import plugins.Library.io.serial.Translator; -import plugins.Library.util.exec.TaskAbortException; import java.util.Comparator; import java.util.Collection; @@ -17,6 +16,8 @@ import java.util.SortedSet; import java.util.ArrayList; +import freenet.library.util.exec.TaskAbortException; + /** ** {@link Skeleton} of a {@link BTreeSet}. 
DOCUMENT ** @@ -67,7 +68,7 @@ public void inflate() throws TaskAbortException { } // TODO NORM tidy this - see SkeletonBTreeMap.inflate() for details - public plugins.Library.util.exec.BaseCompositeProgress getProgressInflate() { + public freenet.library.util.exec.BaseCompositeProgress getProgressInflate() { return ((SkeletonBTreeMap)bkmap).pr_inf; } diff --git a/src/plugins/Library/util/SkeletonMap.java b/src/plugins/Library/util/SkeletonMap.java index 0e4bc7a1..10285884 100644 --- a/src/plugins/Library/util/SkeletonMap.java +++ b/src/plugins/Library/util/SkeletonMap.java @@ -4,10 +4,11 @@ package plugins.Library.util; import plugins.Library.io.serial.MapSerialiser; -import plugins.Library.util.exec.TaskAbortException; import java.util.Map; +import freenet.library.util.exec.TaskAbortException; + /** ** A {@link Skeleton} of a {@link Map}. ** diff --git a/src/plugins/Library/util/SkeletonTreeMap.java b/src/plugins/Library/util/SkeletonTreeMap.java index 866fb2bc..9167656b 100644 --- a/src/plugins/Library/util/SkeletonTreeMap.java +++ b/src/plugins/Library/util/SkeletonTreeMap.java @@ -7,8 +7,6 @@ import plugins.Library.io.serial.Translator; import plugins.Library.io.serial.MapSerialiser; import plugins.Library.io.DataFormatException; -import plugins.Library.util.exec.TaskAbortException; -import plugins.Library.util.exec.TaskCompleteException; import java.util.Iterator; import java.util.Comparator; @@ -22,6 +20,8 @@ import java.util.TreeMap; import java.util.HashMap; +import freenet.library.util.exec.TaskAbortException; +import freenet.library.util.exec.TaskCompleteException; import freenet.support.Logger; /** diff --git a/src/plugins/Library/util/TaskAbortExceptionConvertor.java b/src/plugins/Library/util/TaskAbortExceptionConvertor.java index 5cefdd02..cb1d4dd7 100644 --- a/src/plugins/Library/util/TaskAbortExceptionConvertor.java +++ b/src/plugins/Library/util/TaskAbortExceptionConvertor.java @@ -1,7 +1,7 @@ package plugins.Library.util; +import 
freenet.library.util.exec.TaskAbortException; import plugins.Library.util.concurrent.ExceptionConvertor; -import plugins.Library.util.exec.TaskAbortException; public class TaskAbortExceptionConvertor implements ExceptionConvertor { diff --git a/test/plugins/Library/Tester.java b/test/plugins/Library/Tester.java index bb16a4a8..711ec1c1 100644 --- a/test/plugins/Library/Tester.java +++ b/test/plugins/Library/Tester.java @@ -14,6 +14,9 @@ import plugins.Library.*; import freenet.keys.FreenetURI; +import freenet.library.util.exec.ProgressParts; +import freenet.library.util.exec.SimpleProgress; +import freenet.library.util.exec.TaskAbortException; import freenet.node.RequestStarter; import java.util.*; diff --git a/test/plugins/Library/index/BIndexTest.java b/test/plugins/Library/index/BIndexTest.java index e3fadcf6..24b8ab24 100644 --- a/test/plugins/Library/index/BIndexTest.java +++ b/test/plugins/Library/index/BIndexTest.java @@ -14,6 +14,8 @@ import plugins.Library.index.*; import freenet.keys.FreenetURI; +import freenet.library.util.exec.Execution; +import freenet.library.util.exec.TaskAbortException; import java.util.*; import java.io.*; diff --git a/test/plugins/Library/index/TermEntryTest.java b/test/plugins/Library/index/TermEntryTest.java index fec1256f..ecefd54e 100644 --- a/test/plugins/Library/index/TermEntryTest.java +++ b/test/plugins/Library/index/TermEntryTest.java @@ -7,12 +7,12 @@ import plugins.Library.io.serial.Serialiser.*; import plugins.Library.io.serial.FileArchiver; -import plugins.Library.util.exec.TaskAbortException; import plugins.Library.io.serial.Packer; import plugins.Library.io.YamlReaderWriter; import freenet.keys.FreenetURI; +import freenet.library.util.exec.TaskAbortException; import java.util.Arrays; import java.util.List; diff --git a/test/plugins/Library/io/serial/PackerTest.java b/test/plugins/Library/io/serial/PackerTest.java index 7687f763..ce23c749 100644 --- a/test/plugins/Library/io/serial/PackerTest.java +++ 
b/test/plugins/Library/io/serial/PackerTest.java @@ -7,7 +7,6 @@ import plugins.Library.util.Generators; import plugins.Library.util.SkeletonTreeMap; -import plugins.Library.util.exec.TaskAbortException; import plugins.Library.io.serial.Packer.Bin; import plugins.Library.io.serial.Serialiser.*; @@ -17,6 +16,8 @@ import java.util.HashSet; import java.util.HashMap; +import freenet.library.util.exec.TaskAbortException; + /** ** PRIORITY actually write some tests for this... ** diff --git a/uploader/.classpath b/uploader/.classpath new file mode 100644 index 00000000..62f50c7b --- /dev/null +++ b/uploader/.classpath @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/uploader/.project b/uploader/.project new file mode 100644 index 00000000..28ae6247 --- /dev/null +++ b/uploader/.project @@ -0,0 +1,17 @@ + + + library-uploader + + + + + + org.eclipse.jdt.core.javabuilder + + + + + + org.eclipse.jdt.core.javanature + + From 16c5d0747dd22b4cec4ea1c625a8d2d888b35529 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 10:10:02 +0000 Subject: [PATCH 004/180] Ignore built files. --HG-- branch : eclipse-separation --- .hgignore | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .hgignore diff --git a/.hgignore b/.hgignore new file mode 100644 index 00000000..8905237c --- /dev/null +++ b/.hgignore @@ -0,0 +1,3 @@ +shared/bin +build +lib/snakeyaml-1.5.jar From 0953c9f85b42935a93b2a656c8df6eb1e6b7a228 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 10:14:51 +0000 Subject: [PATCH 005/180] Second part of the moving. 
--HG-- branch : eclipse-separation --- .../src/freenet/library}/util/func/Closure.java | 2 +- .../src/freenet/library}/util/func/SafeClosure.java | 2 +- .../src/freenet/library}/util/func/Tuples.java | 2 +- .../src/freenet/library}/util/func/package-info.java | 2 +- src/plugins/Library/Main.java | 2 +- src/plugins/Library/SpiderIndexUploader.java | 2 +- src/plugins/Library/io/serial/ParallelSerialiser.java | 4 ++-- src/plugins/Library/io/serial/ScheduledSerialiser.java | 2 +- src/plugins/Library/util/BTreeMap.java | 5 +++-- src/plugins/Library/util/SkeletonBTreeMap.java | 8 ++++---- src/plugins/Library/util/concurrent/Executors.java | 3 ++- src/plugins/Library/util/concurrent/ObjectProcessor.java | 8 ++++---- test/plugins/Library/Tester.java | 3 +-- test/plugins/Library/index/BIndexTest.java | 2 +- 14 files changed, 24 insertions(+), 23 deletions(-) rename {src/plugins/Library => shared/src/freenet/library}/util/func/Closure.java (98%) rename {src/plugins/Library => shared/src/freenet/library}/util/func/SafeClosure.java (94%) rename {src/plugins/Library => shared/src/freenet/library}/util/func/Tuples.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/util/func/package-info.java (92%) diff --git a/src/plugins/Library/util/func/Closure.java b/shared/src/freenet/library/util/func/Closure.java similarity index 98% rename from src/plugins/Library/util/func/Closure.java rename to shared/src/freenet/library/util/func/Closure.java index d32ca796..3afbf5d4 100644 --- a/src/plugins/Library/util/func/Closure.java +++ b/shared/src/freenet/library/util/func/Closure.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.func; +package freenet.library.util.func; /** ** An object representing a partially-evaluated subroutine. 
This is useful in diff --git a/src/plugins/Library/util/func/SafeClosure.java b/shared/src/freenet/library/util/func/SafeClosure.java similarity index 94% rename from src/plugins/Library/util/func/SafeClosure.java rename to shared/src/freenet/library/util/func/SafeClosure.java index 1235781c..e601a80b 100644 --- a/src/plugins/Library/util/func/SafeClosure.java +++ b/shared/src/freenet/library/util/func/SafeClosure.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.func; +package freenet.library.util.func; /** ** A {@link Closure} that cannot throw a checked exception. diff --git a/src/plugins/Library/util/func/Tuples.java b/shared/src/freenet/library/util/func/Tuples.java similarity index 99% rename from src/plugins/Library/util/func/Tuples.java rename to shared/src/freenet/library/util/func/Tuples.java index ec080174..84ee760f 100644 --- a/src/plugins/Library/util/func/Tuples.java +++ b/shared/src/freenet/library/util/func/Tuples.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.func; +package freenet.library.util.func; /** ** Tuple classes that retain type-safety through generics. 
These classes can diff --git a/src/plugins/Library/util/func/package-info.java b/shared/src/freenet/library/util/func/package-info.java similarity index 92% rename from src/plugins/Library/util/func/package-info.java rename to shared/src/freenet/library/util/func/package-info.java index a801b916..55d76a1b 100644 --- a/src/plugins/Library/util/func/package-info.java +++ b/shared/src/freenet/library/util/func/package-info.java @@ -9,4 +9,4 @@ ** @see Functional ** programming */ -package plugins.Library.util.func; +package freenet.library.util.func; diff --git a/src/plugins/Library/Main.java b/src/plugins/Library/Main.java index ba52dcbf..aba3f74a 100644 --- a/src/plugins/Library/Main.java +++ b/src/plugins/Library/Main.java @@ -37,7 +37,6 @@ import plugins.Library.util.SkeletonBTreeSet; import plugins.Library.util.TaskAbortExceptionConvertor; import plugins.Library.util.concurrent.Executors; -import plugins.Library.util.func.Closure; import freenet.pluginmanager.FredPlugin; import freenet.pluginmanager.FredPluginL10n; @@ -53,6 +52,7 @@ import freenet.l10n.BaseL10n.LANGUAGE; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; +import freenet.library.util.func.Closure; import freenet.pluginmanager.FredPluginFCP; import freenet.support.Logger; diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index 1e1a8b64..c27e2d5d 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -33,11 +33,11 @@ import plugins.Library.util.SkeletonBTreeMap; import plugins.Library.util.SkeletonBTreeSet; import plugins.Library.util.TaskAbortExceptionConvertor; -import plugins.Library.util.func.Closure; import freenet.client.InsertException; import freenet.keys.FreenetURI; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; +import freenet.library.util.func.Closure; import 
freenet.node.RequestStarter; import freenet.pluginmanager.PluginNotFoundException; import freenet.pluginmanager.PluginReplySender; diff --git a/src/plugins/Library/io/serial/ParallelSerialiser.java b/src/plugins/Library/io/serial/ParallelSerialiser.java index b5160923..cb900089 100644 --- a/src/plugins/Library/io/serial/ParallelSerialiser.java +++ b/src/plugins/Library/io/serial/ParallelSerialiser.java @@ -8,8 +8,7 @@ import plugins.Library.util.concurrent.Scheduler; import plugins.Library.util.concurrent.ObjectProcessor; import plugins.Library.util.concurrent.Executors; -import plugins.Library.util.func.SafeClosure; -import static plugins.Library.util.func.Tuples.X2; // also imports the class +import static freenet.library.util.func.Tuples.X2; import java.util.Iterator; import java.util.List; @@ -27,6 +26,7 @@ import freenet.library.util.exec.TaskAbortException; import freenet.library.util.exec.TaskCompleteException; import freenet.library.util.exec.TaskInProgressException; +import freenet.library.util.func.SafeClosure; /** ** An {@link IterableSerialiser} that uses threads to handle tasks given to it diff --git a/src/plugins/Library/io/serial/ScheduledSerialiser.java b/src/plugins/Library/io/serial/ScheduledSerialiser.java index 3c413a12..1e2e7741 100644 --- a/src/plugins/Library/io/serial/ScheduledSerialiser.java +++ b/src/plugins/Library/io/serial/ScheduledSerialiser.java @@ -6,12 +6,12 @@ import plugins.Library.io.serial.Serialiser.*; import plugins.Library.util.concurrent.Scheduler; import plugins.Library.util.concurrent.ObjectProcessor; -import plugins.Library.util.func.Tuples.X2; import java.util.Map; import java.util.concurrent.BlockingQueue; import freenet.library.util.exec.TaskAbortException; +import freenet.library.util.func.Tuples.X2; /** ** An interface for asynchronous task execution. 
The methods return objects diff --git a/src/plugins/Library/util/BTreeMap.java b/src/plugins/Library/util/BTreeMap.java index 94bb7c43..b95d61c1 100644 --- a/src/plugins/Library/util/BTreeMap.java +++ b/src/plugins/Library/util/BTreeMap.java @@ -4,8 +4,6 @@ package plugins.Library.util; import plugins.Library.util.CompositeIterable; -import plugins.Library.util.func.Tuples.X2; -import plugins.Library.util.func.Tuples.X3; import java.util.Comparator; import java.util.Iterator; @@ -23,6 +21,9 @@ import java.util.NoSuchElementException; import java.util.ConcurrentModificationException; +import freenet.library.util.func.Tuples.X2; +import freenet.library.util.func.Tuples.X3; + /** ** General purpose B-tree implementation. '''This class is not a general-use ** {@link SortedMap}'''; for that use {@link TreeMap}. diff --git a/src/plugins/Library/util/SkeletonBTreeMap.java b/src/plugins/Library/util/SkeletonBTreeMap.java index c67bd7a5..61f4139b 100644 --- a/src/plugins/Library/util/SkeletonBTreeMap.java +++ b/src/plugins/Library/util/SkeletonBTreeMap.java @@ -9,8 +9,6 @@ import plugins.Library.io.serial.MapSerialiser; import plugins.Library.io.serial.Translator; import plugins.Library.io.DataFormatException; -import plugins.Library.util.func.Tuples.X2; -import plugins.Library.util.func.Tuples.X3; import java.util.AbstractSet; import java.util.Comparator; @@ -52,6 +50,10 @@ import freenet.library.util.exec.ProgressParts; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.exec.TaskCompleteException; +import freenet.library.util.func.Closure; +import freenet.library.util.func.SafeClosure; +import freenet.library.util.func.Tuples.X2; +import freenet.library.util.func.Tuples.X3; import freenet.support.Logger; import plugins.Library.util.Sorted; import plugins.Library.util.concurrent.BoundedPriorityBlockingQueue; @@ -61,8 +63,6 @@ import plugins.Library.util.concurrent.Executors; import plugins.Library.util.event.TrackingSweeper; import 
plugins.Library.util.event.CountingSweeper; -import plugins.Library.util.func.Closure; -import plugins.Library.util.func.SafeClosure; import static plugins.Library.util.Maps.$K; /** diff --git a/src/plugins/Library/util/concurrent/Executors.java b/src/plugins/Library/util/concurrent/Executors.java index b8d9513f..03743243 100644 --- a/src/plugins/Library/util/concurrent/Executors.java +++ b/src/plugins/Library/util/concurrent/Executors.java @@ -3,7 +3,6 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.util.concurrent; -import plugins.Library.util.func.SafeClosure; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; @@ -12,6 +11,8 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; +import freenet.library.util.func.SafeClosure; + /** ** Class providing various {@link Executor}s. ** diff --git a/src/plugins/Library/util/concurrent/ObjectProcessor.java b/src/plugins/Library/util/concurrent/ObjectProcessor.java index 9899da0a..bf61cff4 100644 --- a/src/plugins/Library/util/concurrent/ObjectProcessor.java +++ b/src/plugins/Library/util/concurrent/ObjectProcessor.java @@ -3,8 +3,8 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.util.concurrent; -import static plugins.Library.util.func.Tuples.X2; -import static plugins.Library.util.func.Tuples.X3; +import static freenet.library.util.func.Tuples.X2; +import static freenet.library.util.func.Tuples.X3; import java.lang.ref.WeakReference; import java.util.Iterator; @@ -15,8 +15,8 @@ import java.util.concurrent.Executor; import java.util.concurrent.RejectedExecutionException; -import plugins.Library.util.func.Closure; -import plugins.Library.util.func.SafeClosure; +import freenet.library.util.func.Closure; +import freenet.library.util.func.SafeClosure; import freenet.support.Logger; /** diff --git a/test/plugins/Library/Tester.java b/test/plugins/Library/Tester.java index 711ec1c1..be1ca37c 100644 --- a/test/plugins/Library/Tester.java +++ b/test/plugins/Library/Tester.java @@ -4,8 +4,6 @@ package plugins.Library; import plugins.Library.client.*; -import plugins.Library.util.exec.*; -import plugins.Library.util.func.Closure; import plugins.Library.index.*; import plugins.Library.io.*; import plugins.Library.io.serial.*; @@ -17,6 +15,7 @@ import freenet.library.util.exec.ProgressParts; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; +import freenet.library.util.func.Closure; import freenet.node.RequestStarter; import java.util.*; diff --git a/test/plugins/Library/index/BIndexTest.java b/test/plugins/Library/index/BIndexTest.java index 24b8ab24..cb6512c6 100644 --- a/test/plugins/Library/index/BIndexTest.java +++ b/test/plugins/Library/index/BIndexTest.java @@ -8,7 +8,6 @@ import plugins.Library.util.*; import plugins.Library.util.func.*; -import plugins.Library.util.exec.*; import plugins.Library.io.serial.*; import plugins.Library.io.serial.Serialiser.*; import plugins.Library.index.*; @@ -16,6 +15,7 @@ import freenet.keys.FreenetURI; import freenet.library.util.exec.Execution; import freenet.library.util.exec.TaskAbortException; +import 
freenet.library.util.func.Closure; import java.util.*; import java.io.*; From e34d7765c3ff3ea34bbddbd34fef191e00b4a175 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 10:23:12 +0000 Subject: [PATCH 006/180] Moving another directory. --HG-- branch : eclipse-separation --- shared/.classpath | 1 + .../src/freenet/library}/util/event/AbstractSweeper.java | 2 +- .../src/freenet/library}/util/event/CountingSweeper.java | 2 +- .../src/freenet/library}/util/event/Sweeper.java | 2 +- .../src/freenet/library}/util/event/TrackingSweeper.java | 2 +- .../test/freenet/library}/util/event/SweepersTest.java | 6 +++++- src/plugins/Library/util/SkeletonBTreeMap.java | 4 ++-- 7 files changed, 12 insertions(+), 7 deletions(-) rename {src/plugins/Library => shared/src/freenet/library}/util/event/AbstractSweeper.java (98%) rename {src/plugins/Library => shared/src/freenet/library}/util/event/CountingSweeper.java (97%) rename {src/plugins/Library => shared/src/freenet/library}/util/event/Sweeper.java (98%) rename {src/plugins/Library => shared/src/freenet/library}/util/event/TrackingSweeper.java (98%) rename {test/plugins/Library => shared/test/freenet/library}/util/event/SweepersTest.java (97%) diff --git a/shared/.classpath b/shared/.classpath index 1ba3543a..b8e179ae 100644 --- a/shared/.classpath +++ b/shared/.classpath @@ -4,5 +4,6 @@ + diff --git a/src/plugins/Library/util/event/AbstractSweeper.java b/shared/src/freenet/library/util/event/AbstractSweeper.java similarity index 98% rename from src/plugins/Library/util/event/AbstractSweeper.java rename to shared/src/freenet/library/util/event/AbstractSweeper.java index 3eaa8d03..6f78ea4f 100644 --- a/src/plugins/Library/util/event/AbstractSweeper.java +++ b/shared/src/freenet/library/util/event/AbstractSweeper.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). 
See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.event; +package freenet.library.util.event; /** ** A partial implementation of {@link Sweeper}, defining some high-level diff --git a/src/plugins/Library/util/event/CountingSweeper.java b/shared/src/freenet/library/util/event/CountingSweeper.java similarity index 97% rename from src/plugins/Library/util/event/CountingSweeper.java rename to shared/src/freenet/library/util/event/CountingSweeper.java index f883209d..ddd1602f 100644 --- a/src/plugins/Library/util/event/CountingSweeper.java +++ b/shared/src/freenet/library/util/event/CountingSweeper.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.event; +package freenet.library.util.event; /** ** A {@link Sweeper} which only counts the number of objects added. It does diff --git a/src/plugins/Library/util/event/Sweeper.java b/shared/src/freenet/library/util/event/Sweeper.java similarity index 98% rename from src/plugins/Library/util/event/Sweeper.java rename to shared/src/freenet/library/util/event/Sweeper.java index ea8de10c..cf15e9a7 100644 --- a/src/plugins/Library/util/event/Sweeper.java +++ b/shared/src/freenet/library/util/event/Sweeper.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.event; +package freenet.library.util.event; /** ** A class which holds a group of objects, eg. 
for the purposes of delaying an diff --git a/src/plugins/Library/util/event/TrackingSweeper.java b/shared/src/freenet/library/util/event/TrackingSweeper.java similarity index 98% rename from src/plugins/Library/util/event/TrackingSweeper.java rename to shared/src/freenet/library/util/event/TrackingSweeper.java index a1e00849..444817f6 100644 --- a/src/plugins/Library/util/event/TrackingSweeper.java +++ b/shared/src/freenet/library/util/event/TrackingSweeper.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.event; +package freenet.library.util.event; import java.util.Collections; import java.util.Iterator; diff --git a/test/plugins/Library/util/event/SweepersTest.java b/shared/test/freenet/library/util/event/SweepersTest.java similarity index 97% rename from test/plugins/Library/util/event/SweepersTest.java rename to shared/test/freenet/library/util/event/SweepersTest.java index 54010e5f..d9936abb 100644 --- a/test/plugins/Library/util/event/SweepersTest.java +++ b/shared/test/freenet/library/util/event/SweepersTest.java @@ -1,13 +1,17 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.util.event; +package freenet.library.util.event; import junit.framework.TestCase; import java.util.Iterator; import java.util.HashSet; +import freenet.library.util.event.CountingSweeper; +import freenet.library.util.event.Sweeper; +import freenet.library.util.event.TrackingSweeper; + /** ** @author infinity0 */ diff --git a/src/plugins/Library/util/SkeletonBTreeMap.java b/src/plugins/Library/util/SkeletonBTreeMap.java index 61f4139b..8d20e43a 100644 --- a/src/plugins/Library/util/SkeletonBTreeMap.java +++ b/src/plugins/Library/util/SkeletonBTreeMap.java @@ -45,6 +45,8 @@ import java.util.TreeMap; import java.util.HashMap; +import freenet.library.util.event.CountingSweeper; +import freenet.library.util.event.TrackingSweeper; import freenet.library.util.exec.BaseCompositeProgress; import freenet.library.util.exec.Progress; import freenet.library.util.exec.ProgressParts; @@ -61,8 +63,6 @@ import plugins.Library.util.concurrent.Notifier; import plugins.Library.util.concurrent.ObjectProcessor; import plugins.Library.util.concurrent.Executors; -import plugins.Library.util.event.TrackingSweeper; -import plugins.Library.util.event.CountingSweeper; import static plugins.Library.util.Maps.$K; /** From 0bf0a90bcad0098b5ea62c96f92eb7898af4ef5f Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 10:27:26 +0000 Subject: [PATCH 007/180] Moved most of plugins.Library.util.concurrent. 
--HG-- branch : eclipse-separation --- .../util/concurrent/BoundedPriorityBlockingQueue.java | 2 +- .../library}/util/concurrent/ExceptionConvertor.java | 2 +- .../freenet/library}/util/concurrent/Executors.java | 2 +- .../src/freenet/library}/util/concurrent/Notifier.java | 2 +- .../freenet/library}/util/concurrent/Scheduler.java | 2 +- src/plugins/Library/Main.java | 2 +- src/plugins/Library/index/ProtoIndex.java | 2 +- src/plugins/Library/io/serial/ParallelSerialiser.java | 4 ++-- src/plugins/Library/io/serial/ScheduledSerialiser.java | 2 +- src/plugins/Library/util/SkeletonBTreeMap.java | 10 +++++----- .../Library/util/TaskAbortExceptionConvertor.java | 2 +- .../Library/util/concurrent/ObjectProcessor.java | 3 +++ test/plugins/Library/index/BIndexTest.java | 1 - 13 files changed, 19 insertions(+), 17 deletions(-) rename {src/plugins/Library => shared/src/freenet/library}/util/concurrent/BoundedPriorityBlockingQueue.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/util/concurrent/ExceptionConvertor.java (71%) rename {src/plugins/Library => shared/src/freenet/library}/util/concurrent/Executors.java (98%) rename {src/plugins/Library => shared/src/freenet/library}/util/concurrent/Notifier.java (92%) rename {src/plugins/Library => shared/src/freenet/library}/util/concurrent/Scheduler.java (93%) diff --git a/src/plugins/Library/util/concurrent/BoundedPriorityBlockingQueue.java b/shared/src/freenet/library/util/concurrent/BoundedPriorityBlockingQueue.java similarity index 99% rename from src/plugins/Library/util/concurrent/BoundedPriorityBlockingQueue.java rename to shared/src/freenet/library/util/concurrent/BoundedPriorityBlockingQueue.java index d9e4c802..82ff974a 100644 --- a/src/plugins/Library/util/concurrent/BoundedPriorityBlockingQueue.java +++ b/shared/src/freenet/library/util/concurrent/BoundedPriorityBlockingQueue.java @@ -1,4 +1,4 @@ -package plugins.Library.util.concurrent; +package freenet.library.util.concurrent; import 
java.util.Collection; import java.util.Comparator; diff --git a/src/plugins/Library/util/concurrent/ExceptionConvertor.java b/shared/src/freenet/library/util/concurrent/ExceptionConvertor.java similarity index 71% rename from src/plugins/Library/util/concurrent/ExceptionConvertor.java rename to shared/src/freenet/library/util/concurrent/ExceptionConvertor.java index 62454895..2d0ce93f 100644 --- a/src/plugins/Library/util/concurrent/ExceptionConvertor.java +++ b/shared/src/freenet/library/util/concurrent/ExceptionConvertor.java @@ -1,4 +1,4 @@ -package plugins.Library.util.concurrent; +package freenet.library.util.concurrent; public interface ExceptionConvertor { diff --git a/src/plugins/Library/util/concurrent/Executors.java b/shared/src/freenet/library/util/concurrent/Executors.java similarity index 98% rename from src/plugins/Library/util/concurrent/Executors.java rename to shared/src/freenet/library/util/concurrent/Executors.java index 03743243..254dd26c 100644 --- a/src/plugins/Library/util/concurrent/Executors.java +++ b/shared/src/freenet/library/util/concurrent/Executors.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.util.concurrent; +package freenet.library.util.concurrent; import java.util.concurrent.Executor; diff --git a/src/plugins/Library/util/concurrent/Notifier.java b/shared/src/freenet/library/util/concurrent/Notifier.java similarity index 92% rename from src/plugins/Library/util/concurrent/Notifier.java rename to shared/src/freenet/library/util/concurrent/Notifier.java index c1a6fd7f..5e102f8c 100644 --- a/src/plugins/Library/util/concurrent/Notifier.java +++ b/shared/src/freenet/library/util/concurrent/Notifier.java @@ -1,4 +1,4 @@ -package plugins.Library.util.concurrent; +package freenet.library.util.concurrent; public class Notifier { diff --git a/src/plugins/Library/util/concurrent/Scheduler.java b/shared/src/freenet/library/util/concurrent/Scheduler.java similarity index 93% rename from src/plugins/Library/util/concurrent/Scheduler.java rename to shared/src/freenet/library/util/concurrent/Scheduler.java index e1759445..00cbe530 100644 --- a/src/plugins/Library/util/concurrent/Scheduler.java +++ b/shared/src/freenet/library/util/concurrent/Scheduler.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util.concurrent; +package freenet.library.util.concurrent; /** ** An interface for a general class that accepts objects to be acted on. 
diff --git a/src/plugins/Library/Main.java b/src/plugins/Library/Main.java index aba3f74a..9bb28693 100644 --- a/src/plugins/Library/Main.java +++ b/src/plugins/Library/Main.java @@ -36,7 +36,6 @@ import plugins.Library.util.SkeletonBTreeMap; import plugins.Library.util.SkeletonBTreeSet; import plugins.Library.util.TaskAbortExceptionConvertor; -import plugins.Library.util.concurrent.Executors; import freenet.pluginmanager.FredPlugin; import freenet.pluginmanager.FredPluginL10n; @@ -50,6 +49,7 @@ import freenet.keys.FreenetURI; import freenet.keys.InsertableClientSSK; import freenet.l10n.BaseL10n.LANGUAGE; +import freenet.library.util.concurrent.Executors; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.func.Closure; diff --git a/src/plugins/Library/index/ProtoIndex.java b/src/plugins/Library/index/ProtoIndex.java index c9f9e3a6..0e2230da 100644 --- a/src/plugins/Library/index/ProtoIndex.java +++ b/src/plugins/Library/index/ProtoIndex.java @@ -11,9 +11,9 @@ import plugins.Library.util.SkeletonBTreeMap; import plugins.Library.util.SkeletonBTreeSet; import plugins.Library.util.DataNotLoadedException; -import plugins.Library.util.concurrent.Executors; import freenet.keys.FreenetURI; +import freenet.library.util.concurrent.Executors; import freenet.library.util.exec.AbstractExecution; import freenet.library.util.exec.ChainedProgress; import freenet.library.util.exec.Execution; diff --git a/src/plugins/Library/io/serial/ParallelSerialiser.java b/src/plugins/Library/io/serial/ParallelSerialiser.java index cb900089..218c4142 100644 --- a/src/plugins/Library/io/serial/ParallelSerialiser.java +++ b/src/plugins/Library/io/serial/ParallelSerialiser.java @@ -5,9 +5,7 @@ import plugins.Library.io.serial.Serialiser.*; import plugins.Library.util.TaskAbortExceptionConvertor; -import plugins.Library.util.concurrent.Scheduler; import plugins.Library.util.concurrent.ObjectProcessor; -import 
plugins.Library.util.concurrent.Executors; import static freenet.library.util.func.Tuples.X2; import java.util.Iterator; @@ -22,6 +20,8 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.ConcurrentMap; +import freenet.library.util.concurrent.Executors; +import freenet.library.util.concurrent.Scheduler; import freenet.library.util.exec.Progress; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.exec.TaskCompleteException; diff --git a/src/plugins/Library/io/serial/ScheduledSerialiser.java b/src/plugins/Library/io/serial/ScheduledSerialiser.java index 1e2e7741..d007daaa 100644 --- a/src/plugins/Library/io/serial/ScheduledSerialiser.java +++ b/src/plugins/Library/io/serial/ScheduledSerialiser.java @@ -4,12 +4,12 @@ package plugins.Library.io.serial; import plugins.Library.io.serial.Serialiser.*; -import plugins.Library.util.concurrent.Scheduler; import plugins.Library.util.concurrent.ObjectProcessor; import java.util.Map; import java.util.concurrent.BlockingQueue; +import freenet.library.util.concurrent.Scheduler; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.func.Tuples.X2; diff --git a/src/plugins/Library/util/SkeletonBTreeMap.java b/src/plugins/Library/util/SkeletonBTreeMap.java index 8d20e43a..ccef3b1f 100644 --- a/src/plugins/Library/util/SkeletonBTreeMap.java +++ b/src/plugins/Library/util/SkeletonBTreeMap.java @@ -36,7 +36,6 @@ import plugins.Library.io.serial.Serialiser; import plugins.Library.io.serial.ProgressTracker; import plugins.Library.util.BTreeMap.Node; -import plugins.Library.util.concurrent.Scheduler; import java.util.Collections; import java.util.SortedSet; @@ -45,6 +44,11 @@ import java.util.TreeMap; import java.util.HashMap; +import freenet.library.util.concurrent.BoundedPriorityBlockingQueue; +import freenet.library.util.concurrent.ExceptionConvertor; +import freenet.library.util.concurrent.Executors; +import freenet.library.util.concurrent.Notifier; +import 
freenet.library.util.concurrent.Scheduler; import freenet.library.util.event.CountingSweeper; import freenet.library.util.event.TrackingSweeper; import freenet.library.util.exec.BaseCompositeProgress; @@ -58,11 +62,7 @@ import freenet.library.util.func.Tuples.X3; import freenet.support.Logger; import plugins.Library.util.Sorted; -import plugins.Library.util.concurrent.BoundedPriorityBlockingQueue; -import plugins.Library.util.concurrent.ExceptionConvertor; -import plugins.Library.util.concurrent.Notifier; import plugins.Library.util.concurrent.ObjectProcessor; -import plugins.Library.util.concurrent.Executors; import static plugins.Library.util.Maps.$K; /** diff --git a/src/plugins/Library/util/TaskAbortExceptionConvertor.java b/src/plugins/Library/util/TaskAbortExceptionConvertor.java index cb1d4dd7..35fdea62 100644 --- a/src/plugins/Library/util/TaskAbortExceptionConvertor.java +++ b/src/plugins/Library/util/TaskAbortExceptionConvertor.java @@ -1,7 +1,7 @@ package plugins.Library.util; +import freenet.library.util.concurrent.ExceptionConvertor; import freenet.library.util.exec.TaskAbortException; -import plugins.Library.util.concurrent.ExceptionConvertor; public class TaskAbortExceptionConvertor implements ExceptionConvertor { diff --git a/src/plugins/Library/util/concurrent/ObjectProcessor.java b/src/plugins/Library/util/concurrent/ObjectProcessor.java index bf61cff4..14897347 100644 --- a/src/plugins/Library/util/concurrent/ObjectProcessor.java +++ b/src/plugins/Library/util/concurrent/ObjectProcessor.java @@ -15,6 +15,9 @@ import java.util.concurrent.Executor; import java.util.concurrent.RejectedExecutionException; +import freenet.library.util.concurrent.ExceptionConvertor; +import freenet.library.util.concurrent.Notifier; +import freenet.library.util.concurrent.Scheduler; import freenet.library.util.func.Closure; import freenet.library.util.func.SafeClosure; import freenet.support.Logger; diff --git a/test/plugins/Library/index/BIndexTest.java 
b/test/plugins/Library/index/BIndexTest.java index cb6512c6..0c35a2ef 100644 --- a/test/plugins/Library/index/BIndexTest.java +++ b/test/plugins/Library/index/BIndexTest.java @@ -7,7 +7,6 @@ import static plugins.Library.util.Generators.rand; import plugins.Library.util.*; -import plugins.Library.util.func.*; import plugins.Library.io.serial.*; import plugins.Library.io.serial.Serialiser.*; import plugins.Library.index.*; From 561424f67edc7a98fc48304e4af71c1e5ea0c7e9 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 10:32:08 +0000 Subject: [PATCH 008/180] Moved the rest of plugin.Library.util.concurrent. --HG-- branch : eclipse-separation --- .../library}/util/concurrent/ObjectProcessor.java | 15 +-------------- src/plugins/Library/io/serial/Packer.java | 2 +- .../Library/io/serial/ParallelSerialiser.java | 2 +- .../Library/io/serial/ScheduledSerialiser.java | 2 +- src/plugins/Library/util/SkeletonBTreeMap.java | 2 +- 5 files changed, 5 insertions(+), 18 deletions(-) rename {src/plugins/Library => shared/src/freenet/library}/util/concurrent/ObjectProcessor.java (96%) diff --git a/src/plugins/Library/util/concurrent/ObjectProcessor.java b/shared/src/freenet/library/util/concurrent/ObjectProcessor.java similarity index 96% rename from src/plugins/Library/util/concurrent/ObjectProcessor.java rename to shared/src/freenet/library/util/concurrent/ObjectProcessor.java index 14897347..21d7261a 100644 --- a/src/plugins/Library/util/concurrent/ObjectProcessor.java +++ b/shared/src/freenet/library/util/concurrent/ObjectProcessor.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.util.concurrent; +package freenet.library.util.concurrent; import static freenet.library.util.func.Tuples.X2; import static freenet.library.util.func.Tuples.X3; @@ -15,12 +15,8 @@ import java.util.concurrent.Executor; import java.util.concurrent.RejectedExecutionException; -import freenet.library.util.concurrent.ExceptionConvertor; -import freenet.library.util.concurrent.Notifier; -import freenet.library.util.concurrent.Scheduler; import freenet.library.util.func.Closure; import freenet.library.util.func.SafeClosure; -import freenet.support.Logger; /** ** A class that wraps around an {@link Executor}, for processing any given type @@ -48,13 +44,6 @@ public class ObjectProcessor implements Scheduler { protected int dispatched = 0; protected int completed = 0; protected int started = 0; - - private static volatile boolean logMINOR; - private static volatile boolean logDEBUG; - - static { - Logger.registerClass(ObjectProcessor.class); - } // Most ObjectProcessor's are likely to be autostart()'ed, and this way we can // still use ConcurrentMap for pending while having garbage collection. 
@@ -278,11 +267,9 @@ protected Runnable createJobFor(final T item) { /*@Override**/ public void run() { X ex = null; synchronized(ObjectProcessor.this) { ++started; } - RuntimeException ee = null; try { clo.invoke(item); } // FIXME NORM this could throw RuntimeException catch (RuntimeException e) { - Logger.error(this, "Caught "+e, e); System.err.println("In ObjProc-"+name+" : "+e); e.printStackTrace(); ex = convertor.convert(e); diff --git a/src/plugins/Library/io/serial/Packer.java b/src/plugins/Library/io/serial/Packer.java index 520fdfcb..d2ed31e6 100644 --- a/src/plugins/Library/io/serial/Packer.java +++ b/src/plugins/Library/io/serial/Packer.java @@ -5,7 +5,6 @@ import plugins.Library.io.serial.Serialiser.*; import plugins.Library.util.IdentityComparator; -import plugins.Library.util.concurrent.ObjectProcessor; import java.util.Collections; import java.util.Collection; @@ -20,6 +19,7 @@ import java.util.HashSet; import java.util.TreeSet; +import freenet.library.util.concurrent.ObjectProcessor; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.exec.TaskCompleteException; import freenet.support.Logger; diff --git a/src/plugins/Library/io/serial/ParallelSerialiser.java b/src/plugins/Library/io/serial/ParallelSerialiser.java index 218c4142..400585c9 100644 --- a/src/plugins/Library/io/serial/ParallelSerialiser.java +++ b/src/plugins/Library/io/serial/ParallelSerialiser.java @@ -5,7 +5,6 @@ import plugins.Library.io.serial.Serialiser.*; import plugins.Library.util.TaskAbortExceptionConvertor; -import plugins.Library.util.concurrent.ObjectProcessor; import static freenet.library.util.func.Tuples.X2; import java.util.Iterator; @@ -21,6 +20,7 @@ import java.util.concurrent.ConcurrentMap; import freenet.library.util.concurrent.Executors; +import freenet.library.util.concurrent.ObjectProcessor; import freenet.library.util.concurrent.Scheduler; import freenet.library.util.exec.Progress; import freenet.library.util.exec.TaskAbortException; 
diff --git a/src/plugins/Library/io/serial/ScheduledSerialiser.java b/src/plugins/Library/io/serial/ScheduledSerialiser.java index d007daaa..231d6083 100644 --- a/src/plugins/Library/io/serial/ScheduledSerialiser.java +++ b/src/plugins/Library/io/serial/ScheduledSerialiser.java @@ -4,11 +4,11 @@ package plugins.Library.io.serial; import plugins.Library.io.serial.Serialiser.*; -import plugins.Library.util.concurrent.ObjectProcessor; import java.util.Map; import java.util.concurrent.BlockingQueue; +import freenet.library.util.concurrent.ObjectProcessor; import freenet.library.util.concurrent.Scheduler; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.func.Tuples.X2; diff --git a/src/plugins/Library/util/SkeletonBTreeMap.java b/src/plugins/Library/util/SkeletonBTreeMap.java index ccef3b1f..217f8019 100644 --- a/src/plugins/Library/util/SkeletonBTreeMap.java +++ b/src/plugins/Library/util/SkeletonBTreeMap.java @@ -48,6 +48,7 @@ import freenet.library.util.concurrent.ExceptionConvertor; import freenet.library.util.concurrent.Executors; import freenet.library.util.concurrent.Notifier; +import freenet.library.util.concurrent.ObjectProcessor; import freenet.library.util.concurrent.Scheduler; import freenet.library.util.event.CountingSweeper; import freenet.library.util.event.TrackingSweeper; @@ -62,7 +63,6 @@ import freenet.library.util.func.Tuples.X3; import freenet.support.Logger; import plugins.Library.util.Sorted; -import plugins.Library.util.concurrent.ObjectProcessor; import static plugins.Library.util.Maps.$K; /** From 32daec1bd9a478b1a430c4a84a1d217be52be68c Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 11:45:27 +0000 Subject: [PATCH 009/180] Moved first (and simple) part of the plugins.Library.util. 
--HG-- branch : eclipse-separation --- .../library}/util/CompositeIterable.java | 2 +- .../src/freenet/library}/util/Integers.java | 2 +- .../src/freenet/library}/util/Maps.java | 4 ++-- .../src/freenet/library}/util/PrefixTree.java | 2 +- .../src/freenet/library}/util/Sorted.java | 2 +- .../freenet/library}/util/SortedArraySet.java | 2 +- .../freenet/library}/util/SortedMapSet.java | 2 +- .../freenet/library}/util/SortedSetMap.java | 2 +- .../util/TaskAbortExceptionConvertor.java | 2 +- .../freenet/library}/util/package-info.java | 2 +- .../freenet/library}/util/IntegersTest.java | 7 ++++-- .../library}/util/SortedArraySetTest.java | 4 +++- .../library}/util/SortedMapTestSkeleton.java | 12 ++++++++-- .../freenet/library}/util/SortedTest.java | 23 ++++++++++++++----- src/plugins/Library/Main.java | 2 +- src/plugins/Library/SpiderIndexUploader.java | 2 +- .../Library/io/serial/ParallelSerialiser.java | 2 +- .../Library/io/serial/ProgressTracker.java | 2 +- src/plugins/Library/util/BTreeMap.java | 4 +++- src/plugins/Library/util/BTreeSet.java | 2 ++ src/plugins/Library/util/BytePrefixKey.java | 5 ++-- .../Library/util/SkeletonBTreeMap.java | 5 ++-- .../Library/util/SkeletonBTreeSet.java | 2 ++ test/plugins/Library/Tester.java | 1 + test/plugins/Library/index/BIndexTest.java | 1 + test/plugins/Library/util/BTreeMapTest.java | 3 +++ .../Library/util/SkeletonTreeMapTest.java | 3 +++ 27 files changed, 71 insertions(+), 31 deletions(-) rename {src/plugins/Library => shared/src/freenet/library}/util/CompositeIterable.java (98%) rename {src/plugins/Library => shared/src/freenet/library}/util/Integers.java (98%) rename {src/plugins/Library => shared/src/freenet/library}/util/Maps.java (97%) rename {src/plugins/Library => shared/src/freenet/library}/util/PrefixTree.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/util/Sorted.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/util/SortedArraySet.java (99%) rename {src/plugins/Library => 
shared/src/freenet/library}/util/SortedMapSet.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/util/SortedSetMap.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/util/TaskAbortExceptionConvertor.java (91%) rename {src/plugins/Library => shared/src/freenet/library}/util/package-info.java (92%) rename {test/plugins/Library => shared/test/freenet/library}/util/IntegersTest.java (92%) rename {test/plugins/Library => shared/test/freenet/library}/util/SortedArraySetTest.java (95%) rename {test/plugins/Library => shared/test/freenet/library}/util/SortedMapTestSkeleton.java (94%) rename {test/plugins/Library => shared/test/freenet/library}/util/SortedTest.java (91%) diff --git a/src/plugins/Library/util/CompositeIterable.java b/shared/src/freenet/library/util/CompositeIterable.java similarity index 98% rename from src/plugins/Library/util/CompositeIterable.java rename to shared/src/freenet/library/util/CompositeIterable.java index 1fa8cd20..f99ca7b6 100644 --- a/src/plugins/Library/util/CompositeIterable.java +++ b/shared/src/freenet/library/util/CompositeIterable.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; import java.util.Iterator; diff --git a/src/plugins/Library/util/Integers.java b/shared/src/freenet/library/util/Integers.java similarity index 98% rename from src/plugins/Library/util/Integers.java rename to shared/src/freenet/library/util/Integers.java index c81d7d5b..44a0e821 100644 --- a/src/plugins/Library/util/Integers.java +++ b/shared/src/freenet/library/util/Integers.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.util; +package freenet.library.util; import java.util.Iterator; import java.util.NoSuchElementException; diff --git a/src/plugins/Library/util/Maps.java b/shared/src/freenet/library/util/Maps.java similarity index 97% rename from src/plugins/Library/util/Maps.java rename to shared/src/freenet/library/util/Maps.java index 34678982..9f264779 100644 --- a/src/plugins/Library/util/Maps.java +++ b/shared/src/freenet/library/util/Maps.java @@ -1,9 +1,9 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; -import static plugins.Library.util.Maps.$; +import static freenet.library.util.Maps.$; import java.util.Map.Entry; // WORKAROUND javadoc bug #4464323 import java.util.Map; diff --git a/src/plugins/Library/util/PrefixTree.java b/shared/src/freenet/library/util/PrefixTree.java similarity index 99% rename from src/plugins/Library/util/PrefixTree.java rename to shared/src/freenet/library/util/PrefixTree.java index 252023c9..0ddf50bb 100644 --- a/src/plugins/Library/util/PrefixTree.java +++ b/shared/src/freenet/library/util/PrefixTree.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.util; +package freenet.library.util; import java.util.Set; import java.util.Iterator; diff --git a/src/plugins/Library/util/Sorted.java b/shared/src/freenet/library/util/Sorted.java similarity index 99% rename from src/plugins/Library/util/Sorted.java rename to shared/src/freenet/library/util/Sorted.java index 0b390d51..62d608ff 100644 --- a/src/plugins/Library/util/Sorted.java +++ b/shared/src/freenet/library/util/Sorted.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; import java.util.Collections; import java.util.Comparator; diff --git a/src/plugins/Library/util/SortedArraySet.java b/shared/src/freenet/library/util/SortedArraySet.java similarity index 99% rename from src/plugins/Library/util/SortedArraySet.java rename to shared/src/freenet/library/util/SortedArraySet.java index bf9564e6..853c33ea 100644 --- a/src/plugins/Library/util/SortedArraySet.java +++ b/shared/src/freenet/library/util/SortedArraySet.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version) {. See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; import freenet.support.Fields; // JDK6: use this instead of Arrays.binarySearch diff --git a/src/plugins/Library/util/SortedMapSet.java b/shared/src/freenet/library/util/SortedMapSet.java similarity index 99% rename from src/plugins/Library/util/SortedMapSet.java rename to shared/src/freenet/library/util/SortedMapSet.java index 82fff6fb..0ade6419 100644 --- a/src/plugins/Library/util/SortedMapSet.java +++ b/shared/src/freenet/library/util/SortedMapSet.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. 
It is distributed under the GNU General * Public License, version 2 (or at your option any later version) {. See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; import java.util.Comparator; import java.util.Iterator; diff --git a/src/plugins/Library/util/SortedSetMap.java b/shared/src/freenet/library/util/SortedSetMap.java similarity index 99% rename from src/plugins/Library/util/SortedSetMap.java rename to shared/src/freenet/library/util/SortedSetMap.java index ff14de5e..30ae565b 100644 --- a/src/plugins/Library/util/SortedSetMap.java +++ b/shared/src/freenet/library/util/SortedSetMap.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version) {. See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; import java.util.Comparator; import java.util.Iterator; diff --git a/src/plugins/Library/util/TaskAbortExceptionConvertor.java b/shared/src/freenet/library/util/TaskAbortExceptionConvertor.java similarity index 91% rename from src/plugins/Library/util/TaskAbortExceptionConvertor.java rename to shared/src/freenet/library/util/TaskAbortExceptionConvertor.java index 35fdea62..81c3ede3 100644 --- a/src/plugins/Library/util/TaskAbortExceptionConvertor.java +++ b/shared/src/freenet/library/util/TaskAbortExceptionConvertor.java @@ -1,4 +1,4 @@ -package plugins.Library.util; +package freenet.library.util; import freenet.library.util.concurrent.ExceptionConvertor; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/util/package-info.java b/shared/src/freenet/library/util/package-info.java similarity index 92% rename from src/plugins/Library/util/package-info.java rename to shared/src/freenet/library/util/package-info.java index fcc63aa7..6fe9200d 100644 --- a/src/plugins/Library/util/package-info.java +++ 
b/shared/src/freenet/library/util/package-info.java @@ -7,4 +7,4 @@ ** ** @author infinity0 */ -package plugins.Library.util; +package freenet.library.util; diff --git a/test/plugins/Library/util/IntegersTest.java b/shared/test/freenet/library/util/IntegersTest.java similarity index 92% rename from test/plugins/Library/util/IntegersTest.java rename to shared/test/freenet/library/util/IntegersTest.java index 5b035517..6a29edfa 100644 --- a/test/plugins/Library/util/IntegersTest.java +++ b/shared/test/freenet/library/util/IntegersTest.java @@ -1,14 +1,15 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; import junit.framework.TestCase; -import static plugins.Library.util.Generators.rand; import java.util.Random; import java.util.Arrays; +import freenet.library.util.Integers; + /** ** @author infinity0 */ @@ -30,6 +31,8 @@ public class IntegersTest extends TestCase { {2,1,2,2,1,2} }; + final private static Random rand = new Random(); + public void testAllocateEvenlyPredefined() { for (int i=0; i makeTestMap(); + final private static Random rand = new Random(); + + private static String rndStr() { + return UUID.randomUUID().toString(); + } + public void fillTestMap() { testmap = makeTestMap(); for (int i=0; i<0x1000; ++i) { - testmap.put(Generators.rndStr(), Generators.rand.nextInt()); + testmap.put(rndStr(), rand.nextInt()); } } diff --git a/test/plugins/Library/util/SortedTest.java b/shared/test/freenet/library/util/SortedTest.java similarity index 91% rename from test/plugins/Library/util/SortedTest.java rename to shared/test/freenet/library/util/SortedTest.java index 2dffc627..098d25df 100644 --- a/test/plugins/Library/util/SortedTest.java +++ b/shared/test/freenet/library/util/SortedTest.java @@ -1,12 +1,9 @@ /* This code is part of Freenet. 
It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; import junit.framework.TestCase; -import static plugins.Library.util.Generators.rand; - -import plugins.Library.util.Sorted.Inclusivity; import java.util.Random; import java.util.Iterator; @@ -14,6 +11,10 @@ import java.util.List; import java.util.SortedSet; import java.util.TreeSet; +import java.util.UUID; + +import freenet.library.util.Sorted; +import freenet.library.util.Sorted.Inclusivity; /** ** @author infinity0 @@ -55,6 +56,16 @@ public class SortedTest extends TestCase { {8,16,24,32} }; + final private static Random rand = new Random(); + + private static String rndStr() { + return UUID.randomUUID().toString(); + } + + private static String rndKey() { + return rndStr().substring(0,8); + } + public void testSplitPredefined() { SortedSet sep = new TreeSet(Arrays.asList(split_sep)); @@ -152,13 +163,13 @@ public void verifySplit(SortedSet subj, SortedSet sep) { protected void randomSelectSplit(int n, int k) { SortedSet subj = new TreeSet(); - for (int i=0; i sep = new TreeSet(); List candsep = Sorted.select(subj, k); assertTrue(candsep.size() == k); for (String key: candsep) { - sep.add((rand.nextInt(2) == 0)? key: Generators.rndKey()); + sep.add((rand.nextInt(2) == 0)? 
key: rndKey()); } assertTrue(sep.size() == k); diff --git a/src/plugins/Library/Main.java b/src/plugins/Library/Main.java index 9bb28693..210de5de 100644 --- a/src/plugins/Library/Main.java +++ b/src/plugins/Library/Main.java @@ -35,7 +35,6 @@ import plugins.Library.ui.WebInterface; import plugins.Library.util.SkeletonBTreeMap; import plugins.Library.util.SkeletonBTreeSet; -import plugins.Library.util.TaskAbortExceptionConvertor; import freenet.pluginmanager.FredPlugin; import freenet.pluginmanager.FredPluginL10n; @@ -49,6 +48,7 @@ import freenet.keys.FreenetURI; import freenet.keys.InsertableClientSSK; import freenet.l10n.BaseL10n.LANGUAGE; +import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.concurrent.Executors; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index c27e2d5d..d3a8f519 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -32,9 +32,9 @@ import plugins.Library.io.serial.Serialiser.PushTask; import plugins.Library.util.SkeletonBTreeMap; import plugins.Library.util.SkeletonBTreeSet; -import plugins.Library.util.TaskAbortExceptionConvertor; import freenet.client.InsertException; import freenet.keys.FreenetURI; +import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.func.Closure; diff --git a/src/plugins/Library/io/serial/ParallelSerialiser.java b/src/plugins/Library/io/serial/ParallelSerialiser.java index 400585c9..3f2deeb3 100644 --- a/src/plugins/Library/io/serial/ParallelSerialiser.java +++ b/src/plugins/Library/io/serial/ParallelSerialiser.java @@ -4,7 +4,6 @@ package plugins.Library.io.serial; import plugins.Library.io.serial.Serialiser.*; -import 
plugins.Library.util.TaskAbortExceptionConvertor; import static freenet.library.util.func.Tuples.X2; import java.util.Iterator; @@ -19,6 +18,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.ConcurrentMap; +import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.concurrent.Executors; import freenet.library.util.concurrent.ObjectProcessor; import freenet.library.util.concurrent.Scheduler; diff --git a/src/plugins/Library/io/serial/ProgressTracker.java b/src/plugins/Library/io/serial/ProgressTracker.java index 340ef9df..6785fcd1 100644 --- a/src/plugins/Library/io/serial/ProgressTracker.java +++ b/src/plugins/Library/io/serial/ProgressTracker.java @@ -4,12 +4,12 @@ package plugins.Library.io.serial; import plugins.Library.io.serial.Serialiser.*; -import plugins.Library.util.CompositeIterable; import java.util.Iterator; import java.util.Map; import java.util.WeakHashMap; +import freenet.library.util.CompositeIterable; import freenet.library.util.exec.Progress; import freenet.library.util.exec.TaskInProgressException; diff --git a/src/plugins/Library/util/BTreeMap.java b/src/plugins/Library/util/BTreeMap.java index b95d61c1..69f87fb7 100644 --- a/src/plugins/Library/util/BTreeMap.java +++ b/src/plugins/Library/util/BTreeMap.java @@ -3,7 +3,6 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.util; -import plugins.Library.util.CompositeIterable; import java.util.Comparator; import java.util.Iterator; @@ -21,6 +20,9 @@ import java.util.NoSuchElementException; import java.util.ConcurrentModificationException; +import freenet.library.util.CompositeIterable; +import freenet.library.util.Integers; +import freenet.library.util.Sorted; import freenet.library.util.func.Tuples.X2; import freenet.library.util.func.Tuples.X3; diff --git a/src/plugins/Library/util/BTreeSet.java b/src/plugins/Library/util/BTreeSet.java index 75e24054..fd20f336 100644 --- a/src/plugins/Library/util/BTreeSet.java +++ b/src/plugins/Library/util/BTreeSet.java @@ -7,6 +7,8 @@ import java.util.Set; import java.util.SortedSet; +import freenet.library.util.SortedMapSet; + /** ** A B-tree set implementation backed by a {@link BTreeMap}. DOCUMENT ** diff --git a/src/plugins/Library/util/BytePrefixKey.java b/src/plugins/Library/util/BytePrefixKey.java index ae0efad0..abab406e 100644 --- a/src/plugins/Library/util/BytePrefixKey.java +++ b/src/plugins/Library/util/BytePrefixKey.java @@ -3,11 +3,12 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.util; -import plugins.Library.util.PrefixTree.PrefixKey; -import plugins.Library.util.PrefixTree.AbstractPrefixKey; import java.util.Arrays; +import freenet.library.util.PrefixTree.AbstractPrefixKey; +import freenet.library.util.PrefixTree.PrefixKey; + /** ** A PrefixKey backed by an array of bytes. 
** diff --git a/src/plugins/Library/util/SkeletonBTreeMap.java b/src/plugins/Library/util/SkeletonBTreeMap.java index 217f8019..b0309bef 100644 --- a/src/plugins/Library/util/SkeletonBTreeMap.java +++ b/src/plugins/Library/util/SkeletonBTreeMap.java @@ -44,6 +44,8 @@ import java.util.TreeMap; import java.util.HashMap; +import freenet.library.util.Sorted; +import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.concurrent.BoundedPriorityBlockingQueue; import freenet.library.util.concurrent.ExceptionConvertor; import freenet.library.util.concurrent.Executors; @@ -62,8 +64,7 @@ import freenet.library.util.func.Tuples.X2; import freenet.library.util.func.Tuples.X3; import freenet.support.Logger; -import plugins.Library.util.Sorted; -import static plugins.Library.util.Maps.$K; +import static freenet.library.util.Maps.$K; /** ** {@link Skeleton} of a {@link BTreeMap}. DOCUMENT diff --git a/src/plugins/Library/util/SkeletonBTreeSet.java b/src/plugins/Library/util/SkeletonBTreeSet.java index 2f9230ec..9e250c5d 100644 --- a/src/plugins/Library/util/SkeletonBTreeSet.java +++ b/src/plugins/Library/util/SkeletonBTreeSet.java @@ -16,6 +16,8 @@ import java.util.SortedSet; import java.util.ArrayList; +import freenet.library.util.SortedSetMap; +import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.exec.TaskAbortException; /** diff --git a/test/plugins/Library/Tester.java b/test/plugins/Library/Tester.java index be1ca37c..da7be9b6 100644 --- a/test/plugins/Library/Tester.java +++ b/test/plugins/Library/Tester.java @@ -12,6 +12,7 @@ import plugins.Library.*; import freenet.keys.FreenetURI; +import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.exec.ProgressParts; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; diff --git a/test/plugins/Library/index/BIndexTest.java b/test/plugins/Library/index/BIndexTest.java index 0c35a2ef..d74883e8 100644 
--- a/test/plugins/Library/index/BIndexTest.java +++ b/test/plugins/Library/index/BIndexTest.java @@ -12,6 +12,7 @@ import plugins.Library.index.*; import freenet.keys.FreenetURI; +import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.exec.Execution; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.func.Closure; diff --git a/test/plugins/Library/util/BTreeMapTest.java b/test/plugins/Library/util/BTreeMapTest.java index c555a59a..845b3c59 100644 --- a/test/plugins/Library/util/BTreeMapTest.java +++ b/test/plugins/Library/util/BTreeMapTest.java @@ -7,6 +7,9 @@ import java.util.*; +import freenet.library.util.SortedMapTestSkeleton; + + /** ** @author infinity0 */ diff --git a/test/plugins/Library/util/SkeletonTreeMapTest.java b/test/plugins/Library/util/SkeletonTreeMapTest.java index 5573a2e1..efec1d93 100644 --- a/test/plugins/Library/util/SkeletonTreeMapTest.java +++ b/test/plugins/Library/util/SkeletonTreeMapTest.java @@ -8,6 +8,9 @@ import java.util.Map; import java.util.SortedMap; +import freenet.library.util.SortedMapTestSkeleton; + + /** ** @author infinity0 */ From c3c35a3481068aa851b911541f2802b2a7f01d7a Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 11:55:57 +0000 Subject: [PATCH 010/180] Moved two more classes from plugins.Library.util. 
--HG-- branch : eclipse-separation --- .../src/freenet/library}/util/BytePrefixKey.java | 4 +--- .../src/freenet/library}/util/IdentityComparator.java | 2 +- src/plugins/Library/index/URIKey.java | 2 +- src/plugins/Library/io/serial/Packer.java | 2 +- src/plugins/Library/ui/RelevanceComparator.java | 2 +- test/plugins/Library/util/BytePrefixKeyTest.java | 2 ++ 6 files changed, 7 insertions(+), 7 deletions(-) rename {src/plugins/Library => shared/src/freenet/library}/util/BytePrefixKey.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/util/IdentityComparator.java (99%) diff --git a/src/plugins/Library/util/BytePrefixKey.java b/shared/src/freenet/library/util/BytePrefixKey.java similarity index 99% rename from src/plugins/Library/util/BytePrefixKey.java rename to shared/src/freenet/library/util/BytePrefixKey.java index abab406e..4fd2f940 100644 --- a/src/plugins/Library/util/BytePrefixKey.java +++ b/shared/src/freenet/library/util/BytePrefixKey.java @@ -1,11 +1,9 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; - +package freenet.library.util; import java.util.Arrays; - import freenet.library.util.PrefixTree.AbstractPrefixKey; import freenet.library.util.PrefixTree.PrefixKey; diff --git a/src/plugins/Library/util/IdentityComparator.java b/shared/src/freenet/library/util/IdentityComparator.java similarity index 99% rename from src/plugins/Library/util/IdentityComparator.java rename to shared/src/freenet/library/util/IdentityComparator.java index 4320731b..54c02213 100644 --- a/src/plugins/Library/util/IdentityComparator.java +++ b/shared/src/freenet/library/util/IdentityComparator.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). 
See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; import java.util.Comparator; import java.util.SortedSet; diff --git a/src/plugins/Library/index/URIKey.java b/src/plugins/Library/index/URIKey.java index f4c217a1..a217f888 100644 --- a/src/plugins/Library/index/URIKey.java +++ b/src/plugins/Library/index/URIKey.java @@ -3,7 +3,7 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.index; -import plugins.Library.util.BytePrefixKey; +import freenet.library.util.BytePrefixKey; import freenet.keys.FreenetURI; import freenet.keys.BaseClientKey; diff --git a/src/plugins/Library/io/serial/Packer.java b/src/plugins/Library/io/serial/Packer.java index d2ed31e6..42c34289 100644 --- a/src/plugins/Library/io/serial/Packer.java +++ b/src/plugins/Library/io/serial/Packer.java @@ -4,7 +4,6 @@ package plugins.Library.io.serial; import plugins.Library.io.serial.Serialiser.*; -import plugins.Library.util.IdentityComparator; import java.util.Collections; import java.util.Collection; @@ -19,6 +18,7 @@ import java.util.HashSet; import java.util.TreeSet; +import freenet.library.util.IdentityComparator; import freenet.library.util.concurrent.ObjectProcessor; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.exec.TaskCompleteException; diff --git a/src/plugins/Library/ui/RelevanceComparator.java b/src/plugins/Library/ui/RelevanceComparator.java index 179eb91c..84ab9fc0 100644 --- a/src/plugins/Library/ui/RelevanceComparator.java +++ b/src/plugins/Library/ui/RelevanceComparator.java @@ -4,8 +4,8 @@ package plugins.Library.ui; +import freenet.library.util.IdentityComparator; import plugins.Library.index.TermEntry; -import plugins.Library.util.IdentityComparator; /** * Compares the relevance of two TermEntrys, extends IdentityComparator so that two unique entries will not return a comparison of 0 diff --git a/test/plugins/Library/util/BytePrefixKeyTest.java 
b/test/plugins/Library/util/BytePrefixKeyTest.java index b951fb7b..fdab1f67 100644 --- a/test/plugins/Library/util/BytePrefixKeyTest.java +++ b/test/plugins/Library/util/BytePrefixKeyTest.java @@ -9,6 +9,8 @@ import java.util.Random; import java.util.Arrays; +import freenet.library.util.BytePrefixKey; + /** ** @author infinity0 */ From 3a024bed3286ad340c93e2e40a5d1fffd4fb246c Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 12:06:16 +0000 Subject: [PATCH 011/180] Moved some serialisers from plugins.Library.io.serial. --HG-- branch : eclipse-separation --- .../freenet/library}/io/DataFormatException.java | 2 +- .../src/freenet/library}/io/serial/Archiver.java | 4 ++-- .../library}/io/serial/IterableSerialiser.java | 4 ++-- .../library}/io/serial/MapSerialiser.java | 4 ++-- .../library}/io/serial/ProgressTracker.java | 4 ++-- .../library}/io/serial/ScheduledSerialiser.java | 5 ++--- .../freenet/library}/io/serial/Serialiser.java | 2 +- .../freenet/library}/io/serial/Translator.java | 4 ++-- src/plugins/Library/Library.java | 2 +- src/plugins/Library/Main.java | 4 ++-- src/plugins/Library/SpiderIndexUploader.java | 4 ++-- src/plugins/Library/index/ProtoIndex.java | 4 ++-- .../index/ProtoIndexComponentSerialiser.java | 16 ++++++++-------- .../Library/index/ProtoIndexSerialiser.java | 10 +++++----- .../Library/index/TermEntryReaderWriter.java | 2 +- src/plugins/Library/io/YamlReaderWriter.java | 2 +- src/plugins/Library/io/serial/FileArchiver.java | 3 ++- src/plugins/Library/io/serial/LiveArchiver.java | 4 +++- src/plugins/Library/io/serial/Packer.java | 6 +++++- .../Library/io/serial/ParallelSerialiser.java | 6 +++++- src/plugins/Library/io/serial/package-info.java | 4 ++-- src/plugins/Library/util/BTreeMap.java | 6 +++--- src/plugins/Library/util/Skeleton.java | 2 +- src/plugins/Library/util/SkeletonBTreeMap.java | 16 ++++++++-------- src/plugins/Library/util/SkeletonBTreeSet.java | 10 +++++----- src/plugins/Library/util/SkeletonMap.java | 2 +- 
src/plugins/Library/util/SkeletonTreeMap.java | 8 ++++---- test/plugins/Library/Tester.java | 2 +- test/plugins/Library/index/BIndexTest.java | 2 +- test/plugins/Library/index/TermEntryTest.java | 2 +- test/plugins/Library/io/serial/PackerTest.java | 3 ++- 31 files changed, 80 insertions(+), 69 deletions(-) rename {src/plugins/Library => shared/src/freenet/library}/io/DataFormatException.java (97%) rename {src/plugins/Library => shared/src/freenet/library}/io/serial/Archiver.java (95%) rename {src/plugins/Library => shared/src/freenet/library}/io/serial/IterableSerialiser.java (96%) rename {src/plugins/Library => shared/src/freenet/library}/io/serial/MapSerialiser.java (95%) rename {src/plugins/Library => shared/src/freenet/library}/io/serial/ProgressTracker.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/io/serial/ScheduledSerialiser.java (95%) rename {src/plugins/Library => shared/src/freenet/library}/io/serial/Serialiser.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/io/serial/Translator.java (94%) diff --git a/src/plugins/Library/io/DataFormatException.java b/shared/src/freenet/library/io/DataFormatException.java similarity index 97% rename from src/plugins/Library/io/DataFormatException.java rename to shared/src/freenet/library/io/DataFormatException.java index 6f7aeb4b..bc8dc839 100644 --- a/src/plugins/Library/io/DataFormatException.java +++ b/shared/src/freenet/library/io/DataFormatException.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.io; +package freenet.library.io; /** ** Thrown when data is not in a recognised format. 
diff --git a/src/plugins/Library/io/serial/Archiver.java b/shared/src/freenet/library/io/serial/Archiver.java similarity index 95% rename from src/plugins/Library/io/serial/Archiver.java rename to shared/src/freenet/library/io/serial/Archiver.java index 7241a771..62bf589e 100644 --- a/src/plugins/Library/io/serial/Archiver.java +++ b/shared/src/freenet/library/io/serial/Archiver.java @@ -1,13 +1,13 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.io.serial; +package freenet.library.io.serial; -import plugins.Library.io.serial.Serialiser.*; import java.util.Collection; import java.util.Map; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.TaskAbortException; /** diff --git a/src/plugins/Library/io/serial/IterableSerialiser.java b/shared/src/freenet/library/io/serial/IterableSerialiser.java similarity index 96% rename from src/plugins/Library/io/serial/IterableSerialiser.java rename to shared/src/freenet/library/io/serial/IterableSerialiser.java index c0a3c2f9..1ba10c3b 100644 --- a/src/plugins/Library/io/serial/IterableSerialiser.java +++ b/shared/src/freenet/library/io/serial/IterableSerialiser.java @@ -1,10 +1,10 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.io.serial; +package freenet.library.io.serial; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.TaskAbortException; -import plugins.Library.io.serial.Serialiser.*; /** ** An interface that handles an iterable group of {@link Serialiser.Task}s. 
diff --git a/src/plugins/Library/io/serial/MapSerialiser.java b/shared/src/freenet/library/io/serial/MapSerialiser.java similarity index 95% rename from src/plugins/Library/io/serial/MapSerialiser.java rename to shared/src/freenet/library/io/serial/MapSerialiser.java index 9a39208e..f84fb449 100644 --- a/src/plugins/Library/io/serial/MapSerialiser.java +++ b/shared/src/freenet/library/io/serial/MapSerialiser.java @@ -1,12 +1,12 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.io.serial; +package freenet.library.io.serial; -import plugins.Library.io.serial.Serialiser.*; import java.util.Map; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.TaskAbortException; /** diff --git a/src/plugins/Library/io/serial/ProgressTracker.java b/shared/src/freenet/library/io/serial/ProgressTracker.java similarity index 99% rename from src/plugins/Library/io/serial/ProgressTracker.java rename to shared/src/freenet/library/io/serial/ProgressTracker.java index 6785fcd1..bb405b9a 100644 --- a/src/plugins/Library/io/serial/ProgressTracker.java +++ b/shared/src/freenet/library/io/serial/ProgressTracker.java @@ -1,14 +1,14 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.io.serial; +package freenet.library.io.serial; -import plugins.Library.io.serial.Serialiser.*; import java.util.Iterator; import java.util.Map; import java.util.WeakHashMap; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.CompositeIterable; import freenet.library.util.exec.Progress; import freenet.library.util.exec.TaskInProgressException; diff --git a/src/plugins/Library/io/serial/ScheduledSerialiser.java b/shared/src/freenet/library/io/serial/ScheduledSerialiser.java similarity index 95% rename from src/plugins/Library/io/serial/ScheduledSerialiser.java rename to shared/src/freenet/library/io/serial/ScheduledSerialiser.java index 231d6083..109a3450 100644 --- a/src/plugins/Library/io/serial/ScheduledSerialiser.java +++ b/shared/src/freenet/library/io/serial/ScheduledSerialiser.java @@ -1,13 +1,12 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.io.serial; - -import plugins.Library.io.serial.Serialiser.*; +package freenet.library.io.serial; import java.util.Map; import java.util.concurrent.BlockingQueue; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.concurrent.ObjectProcessor; import freenet.library.util.concurrent.Scheduler; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/io/serial/Serialiser.java b/shared/src/freenet/library/io/serial/Serialiser.java similarity index 99% rename from src/plugins/Library/io/serial/Serialiser.java rename to shared/src/freenet/library/io/serial/Serialiser.java index bc8042d2..992fed59 100644 --- a/src/plugins/Library/io/serial/Serialiser.java +++ b/shared/src/freenet/library/io/serial/Serialiser.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. 
It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.io.serial; +package freenet.library.io.serial; import java.util.Collection; diff --git a/src/plugins/Library/io/serial/Translator.java b/shared/src/freenet/library/io/serial/Translator.java similarity index 94% rename from src/plugins/Library/io/serial/Translator.java rename to shared/src/freenet/library/io/serial/Translator.java index a6ffd35a..94fa2602 100644 --- a/src/plugins/Library/io/serial/Translator.java +++ b/shared/src/freenet/library/io/serial/Translator.java @@ -1,9 +1,9 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.io.serial; +package freenet.library.io.serial; -import plugins.Library.io.DataFormatException; +import freenet.library.io.DataFormatException; /** ** A class that translates an object into one of another type. 
Used mostly in diff --git a/src/plugins/Library/Library.java b/src/plugins/Library/Library.java index 5b8a84c4..aa77e76c 100644 --- a/src/plugins/Library/Library.java +++ b/src/plugins/Library/Library.java @@ -24,7 +24,6 @@ import plugins.Library.index.xml.XMLIndex; import plugins.Library.io.ObjectStreamReader; import plugins.Library.io.ObjectStreamWriter; -import plugins.Library.io.serial.Serialiser.PullTask; import plugins.Library.search.InvalidSearchException; import freenet.client.FetchContext; @@ -45,6 +44,7 @@ import freenet.client.events.ExpectedMIMEEvent; import freenet.keys.FreenetURI; import freenet.keys.USK; +import freenet.library.io.serial.Serialiser.PullTask; import freenet.library.util.exec.TaskAbortException; import freenet.node.NodeClientCore; import freenet.node.RequestClient; diff --git a/src/plugins/Library/Main.java b/src/plugins/Library/Main.java index 210de5de..e0daff54 100644 --- a/src/plugins/Library/Main.java +++ b/src/plugins/Library/Main.java @@ -48,6 +48,8 @@ import freenet.keys.FreenetURI; import freenet.keys.InsertableClientSSK; import freenet.l10n.BaseL10n.LANGUAGE; +import freenet.library.io.serial.Serialiser.PullTask; +import freenet.library.io.serial.Serialiser.PushTask; import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.concurrent.Executors; import freenet.library.util.exec.SimpleProgress; @@ -71,8 +73,6 @@ import plugins.Library.index.TermEntryReaderWriter; import plugins.Library.index.xml.LibrarianHandler; import plugins.Library.io.serial.LiveArchiver; -import plugins.Library.io.serial.Serialiser.PullTask; -import plugins.Library.io.serial.Serialiser.PushTask; /** * Library class is the api for others to use search facilities, it is used by the interfaces diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index d3a8f519..9dce27a9 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ 
-28,12 +28,12 @@ import plugins.Library.index.TermEntry; import plugins.Library.index.TermEntryReaderWriter; import plugins.Library.io.serial.LiveArchiver; -import plugins.Library.io.serial.Serialiser.PullTask; -import plugins.Library.io.serial.Serialiser.PushTask; import plugins.Library.util.SkeletonBTreeMap; import plugins.Library.util.SkeletonBTreeSet; import freenet.client.InsertException; import freenet.keys.FreenetURI; +import freenet.library.io.serial.Serialiser.PullTask; +import freenet.library.io.serial.Serialiser.PushTask; import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/index/ProtoIndex.java b/src/plugins/Library/index/ProtoIndex.java index 0e2230da..52f516f7 100644 --- a/src/plugins/Library/index/ProtoIndex.java +++ b/src/plugins/Library/index/ProtoIndex.java @@ -4,8 +4,6 @@ package plugins.Library.index; import plugins.Library.Index; -import plugins.Library.io.serial.Serialiser; -import plugins.Library.io.serial.ProgressTracker; import plugins.Library.util.Skeleton; import plugins.Library.util.SkeletonTreeMap; import plugins.Library.util.SkeletonBTreeMap; @@ -13,6 +11,8 @@ import plugins.Library.util.DataNotLoadedException; import freenet.keys.FreenetURI; +import freenet.library.io.serial.ProgressTracker; +import freenet.library.io.serial.Serialiser; import freenet.library.util.concurrent.Executors; import freenet.library.util.exec.AbstractExecution; import freenet.library.util.exec.ChainedProgress; diff --git a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java index 0ca61866..58b0ae93 100644 --- a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java @@ -8,22 +8,22 @@ import plugins.Library.util.SkeletonTreeMap; import plugins.Library.util.SkeletonBTreeMap; import 
plugins.Library.util.SkeletonBTreeSet; -import plugins.Library.io.serial.Serialiser.*; -import plugins.Library.io.serial.Serialiser; -import plugins.Library.io.serial.Translator; -import plugins.Library.io.serial.ProgressTracker; -import plugins.Library.io.serial.Archiver; -import plugins.Library.io.serial.IterableSerialiser; -import plugins.Library.io.serial.MapSerialiser; import plugins.Library.io.serial.LiveArchiver; import plugins.Library.io.serial.ParallelSerialiser; import plugins.Library.io.serial.Packer; import plugins.Library.io.serial.Packer.Scale; // WORKAROUND javadoc bug #4464323 import plugins.Library.io.serial.FileArchiver; -import plugins.Library.io.DataFormatException; import plugins.Library.io.YamlReaderWriter; import freenet.keys.FreenetURI; +import freenet.library.io.DataFormatException; +import freenet.library.io.serial.Archiver; +import freenet.library.io.serial.IterableSerialiser; +import freenet.library.io.serial.MapSerialiser; +import freenet.library.io.serial.ProgressTracker; +import freenet.library.io.serial.Serialiser; +import freenet.library.io.serial.Translator; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.BaseCompositeProgress; import freenet.library.util.exec.Progress; import freenet.library.util.exec.ProgressParts; diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java b/src/plugins/Library/index/ProtoIndexSerialiser.java index 5b4c7f5b..a9b6ea6b 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexSerialiser.java @@ -7,16 +7,16 @@ import plugins.Library.client.FreenetArchiver; import plugins.Library.util.SkeletonBTreeMap; import plugins.Library.util.SkeletonBTreeSet; -import plugins.Library.io.serial.Serialiser.*; import plugins.Library.io.serial.LiveArchiver; -import plugins.Library.io.serial.Serialiser; -import plugins.Library.io.serial.Translator; -import plugins.Library.io.serial.Archiver; import 
plugins.Library.io.serial.FileArchiver; import plugins.Library.io.YamlReaderWriter; -import plugins.Library.io.DataFormatException; import freenet.keys.FreenetURI; +import freenet.library.io.DataFormatException; +import freenet.library.io.serial.Archiver; +import freenet.library.io.serial.Serialiser; +import freenet.library.io.serial.Translator; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/index/TermEntryReaderWriter.java b/src/plugins/Library/index/TermEntryReaderWriter.java index 1b73fd36..e19c9a24 100644 --- a/src/plugins/Library/index/TermEntryReaderWriter.java +++ b/src/plugins/Library/index/TermEntryReaderWriter.java @@ -3,11 +3,11 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.index; -import plugins.Library.io.DataFormatException; import plugins.Library.io.ObjectStreamReader; import plugins.Library.io.ObjectStreamWriter; import freenet.keys.FreenetURI; +import freenet.library.io.DataFormatException; import java.util.Map; import java.util.HashMap; diff --git a/src/plugins/Library/io/YamlReaderWriter.java b/src/plugins/Library/io/YamlReaderWriter.java index 4c3118e6..60fe8753 100644 --- a/src/plugins/Library/io/YamlReaderWriter.java +++ b/src/plugins/Library/io/YamlReaderWriter.java @@ -3,7 +3,6 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.io; -import plugins.Library.io.DataFormatException; import org.yaml.snakeyaml.Yaml; import org.yaml.snakeyaml.error.YAMLException; @@ -38,6 +37,7 @@ import plugins.Library.index.TermIndexEntry; import plugins.Library.index.TermTermEntry; import freenet.keys.FreenetURI; +import freenet.library.io.DataFormatException; /** diff --git a/src/plugins/Library/io/serial/FileArchiver.java b/src/plugins/Library/io/serial/FileArchiver.java index 8adc1375..35aa5496 100644 --- a/src/plugins/Library/io/serial/FileArchiver.java +++ b/src/plugins/Library/io/serial/FileArchiver.java @@ -9,12 +9,13 @@ import java.io.IOException; import java.nio.channels.FileLock; +import freenet.library.io.serial.Archiver; +import freenet.library.io.serial.Serialiser.Task; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; import plugins.Library.io.ObjectStreamReader; import plugins.Library.io.ObjectStreamWriter; -import plugins.Library.io.serial.Serialiser.Task; /** ** Converts between a map of {@link String} to {@link Object}, and a file on diff --git a/src/plugins/Library/io/serial/LiveArchiver.java b/src/plugins/Library/io/serial/LiveArchiver.java index 36c88d7a..8df32175 100644 --- a/src/plugins/Library/io/serial/LiveArchiver.java +++ b/src/plugins/Library/io/serial/LiveArchiver.java @@ -3,9 +3,11 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.io.serial; +import freenet.library.io.serial.Archiver; +import freenet.library.io.serial.Serialiser; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.Progress; import freenet.library.util.exec.TaskAbortException; -import plugins.Library.io.serial.Serialiser.*; /** ** An interface that handles a single {@link Serialiser.Task} and sends live diff --git a/src/plugins/Library/io/serial/Packer.java b/src/plugins/Library/io/serial/Packer.java index 42c34289..d361e682 100644 --- a/src/plugins/Library/io/serial/Packer.java +++ b/src/plugins/Library/io/serial/Packer.java @@ -3,7 +3,6 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.io.serial; -import plugins.Library.io.serial.Serialiser.*; import java.util.Collections; import java.util.Collection; @@ -18,6 +17,11 @@ import java.util.HashSet; import java.util.TreeSet; +import freenet.library.io.serial.IterableSerialiser; +import freenet.library.io.serial.MapSerialiser; +import freenet.library.io.serial.ProgressTracker; +import freenet.library.io.serial.Serialiser; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.IdentityComparator; import freenet.library.util.concurrent.ObjectProcessor; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/io/serial/ParallelSerialiser.java b/src/plugins/Library/io/serial/ParallelSerialiser.java index 3f2deeb3..93f88d35 100644 --- a/src/plugins/Library/io/serial/ParallelSerialiser.java +++ b/src/plugins/Library/io/serial/ParallelSerialiser.java @@ -3,7 +3,6 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.io.serial; -import plugins.Library.io.serial.Serialiser.*; import static freenet.library.util.func.Tuples.X2; import java.util.Iterator; @@ -18,6 +17,11 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.ConcurrentMap; +import freenet.library.io.serial.IterableSerialiser; +import freenet.library.io.serial.ProgressTracker; +import freenet.library.io.serial.ScheduledSerialiser; +import freenet.library.io.serial.Serialiser; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.concurrent.Executors; import freenet.library.util.concurrent.ObjectProcessor; diff --git a/src/plugins/Library/io/serial/package-info.java b/src/plugins/Library/io/serial/package-info.java index 5bafbe64..7ee8846a 100644 --- a/src/plugins/Library/io/serial/package-info.java +++ b/src/plugins/Library/io/serial/package-info.java @@ -3,8 +3,8 @@ * http://www.gnu.org/ for further details of the GPL. */ /** ** Contains the serialisation engine and related classes and interfaces, such -** as {@link plugins.Library.io.serial.ProgressTracker} and {@link -** plugins.Library.io.serial.Translator} +** as {@link freenet.library.io.serial.ProgressTracker} and {@link +** freenet.library.io.serial.Translator} ** ** @author infinity0 */ diff --git a/src/plugins/Library/util/BTreeMap.java b/src/plugins/Library/util/BTreeMap.java index 69f87fb7..626f1861 100644 --- a/src/plugins/Library/util/BTreeMap.java +++ b/src/plugins/Library/util/BTreeMap.java @@ -275,15 +275,15 @@ public void remove() { ** ** This is fine in the context of {@link BTreeMap}, but serialisation makes ** it extremely awkard. 
Passing the tree requires us to extend {@link - ** plugins.Library.io.serial.Translator} to take a context paramater, and + ** freenet.library.io.serial.Translator} to take a context paramater, and ** passing the comparator requires us to add checks to ensure that the ** comparators are equal when the node is re-attached to the tree. ** ** And even if this is implemented, nodes still need to know their parent - ** tree (to set {@link plugins.Library.io.serial.Serialiser}s etc) and so you + ** tree (to set {@link freenet.library.io.serial.Serialiser}s etc) and so you ** would need to code a secondary initialisation scheme to be called after ** object construction, and after execution returns from {@link - ** plugins.Library.io.serial.Translator#rev(Object)}. All in all, this is more + ** freenet.library.io.serial.Translator#rev(Object)}. All in all, this is more ** trouble than it's worth. ** ** So I've gone with the non-static class, which means a new {@code diff --git a/src/plugins/Library/util/Skeleton.java b/src/plugins/Library/util/Skeleton.java index 54ede848..956179b5 100644 --- a/src/plugins/Library/util/Skeleton.java +++ b/src/plugins/Library/util/Skeleton.java @@ -3,8 +3,8 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.util; +import freenet.library.io.serial.Serialiser; import freenet.library.util.exec.TaskAbortException; -import plugins.Library.io.serial.Serialiser; /** ** Defines an interface for an extension of a data structure which is only diff --git a/src/plugins/Library/util/SkeletonBTreeMap.java b/src/plugins/Library/util/SkeletonBTreeMap.java index b0309bef..f1347ca3 100644 --- a/src/plugins/Library/util/SkeletonBTreeMap.java +++ b/src/plugins/Library/util/SkeletonBTreeMap.java @@ -3,12 +3,6 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.util; -import plugins.Library.io.serial.Serialiser.*; -import plugins.Library.io.serial.IterableSerialiser; -import plugins.Library.io.serial.ScheduledSerialiser; -import plugins.Library.io.serial.MapSerialiser; -import plugins.Library.io.serial.Translator; -import plugins.Library.io.DataFormatException; import java.util.AbstractSet; import java.util.Comparator; @@ -33,8 +27,6 @@ import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import plugins.Library.io.serial.Serialiser; -import plugins.Library.io.serial.ProgressTracker; import plugins.Library.util.BTreeMap.Node; import java.util.Collections; @@ -44,6 +36,14 @@ import java.util.TreeMap; import java.util.HashMap; +import freenet.library.io.DataFormatException; +import freenet.library.io.serial.IterableSerialiser; +import freenet.library.io.serial.MapSerialiser; +import freenet.library.io.serial.ProgressTracker; +import freenet.library.io.serial.ScheduledSerialiser; +import freenet.library.io.serial.Serialiser; +import freenet.library.io.serial.Translator; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.Sorted; import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.concurrent.BoundedPriorityBlockingQueue; diff --git a/src/plugins/Library/util/SkeletonBTreeSet.java b/src/plugins/Library/util/SkeletonBTreeSet.java index 9e250c5d..a9f705f1 100644 --- a/src/plugins/Library/util/SkeletonBTreeSet.java +++ b/src/plugins/Library/util/SkeletonBTreeSet.java @@ -3,11 +3,6 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.util; -import plugins.Library.io.DataFormatException; -import plugins.Library.io.serial.Serialiser.*; -import plugins.Library.io.serial.IterableSerialiser; -import plugins.Library.io.serial.MapSerialiser; -import plugins.Library.io.serial.Translator; import java.util.Comparator; import java.util.Collection; @@ -16,6 +11,11 @@ import java.util.SortedSet; import java.util.ArrayList; +import freenet.library.io.DataFormatException; +import freenet.library.io.serial.IterableSerialiser; +import freenet.library.io.serial.MapSerialiser; +import freenet.library.io.serial.Translator; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.SortedSetMap; import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/util/SkeletonMap.java b/src/plugins/Library/util/SkeletonMap.java index 10285884..b9865ec1 100644 --- a/src/plugins/Library/util/SkeletonMap.java +++ b/src/plugins/Library/util/SkeletonMap.java @@ -3,10 +3,10 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.util; -import plugins.Library.io.serial.MapSerialiser; import java.util.Map; +import freenet.library.io.serial.MapSerialiser; import freenet.library.util.exec.TaskAbortException; /** diff --git a/src/plugins/Library/util/SkeletonTreeMap.java b/src/plugins/Library/util/SkeletonTreeMap.java index 9167656b..91bf2efd 100644 --- a/src/plugins/Library/util/SkeletonTreeMap.java +++ b/src/plugins/Library/util/SkeletonTreeMap.java @@ -3,10 +3,6 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.util; -import plugins.Library.io.serial.Serialiser.*; -import plugins.Library.io.serial.Translator; -import plugins.Library.io.serial.MapSerialiser; -import plugins.Library.io.DataFormatException; import java.util.Iterator; import java.util.Comparator; @@ -20,6 +16,10 @@ import java.util.TreeMap; import java.util.HashMap; +import freenet.library.io.DataFormatException; +import freenet.library.io.serial.MapSerialiser; +import freenet.library.io.serial.Translator; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.exec.TaskCompleteException; import freenet.support.Logger; diff --git a/test/plugins/Library/Tester.java b/test/plugins/Library/Tester.java index da7be9b6..e53954a8 100644 --- a/test/plugins/Library/Tester.java +++ b/test/plugins/Library/Tester.java @@ -7,11 +7,11 @@ import plugins.Library.index.*; import plugins.Library.io.*; import plugins.Library.io.serial.*; -import plugins.Library.io.serial.Serialiser.*; import plugins.Library.util.*; import plugins.Library.*; import freenet.keys.FreenetURI; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.exec.ProgressParts; import freenet.library.util.exec.SimpleProgress; diff --git a/test/plugins/Library/index/BIndexTest.java b/test/plugins/Library/index/BIndexTest.java index d74883e8..a39f3b10 100644 --- a/test/plugins/Library/index/BIndexTest.java +++ b/test/plugins/Library/index/BIndexTest.java @@ -8,10 +8,10 @@ import plugins.Library.util.*; import plugins.Library.io.serial.*; -import plugins.Library.io.serial.Serialiser.*; import plugins.Library.index.*; import freenet.keys.FreenetURI; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.exec.Execution; import freenet.library.util.exec.TaskAbortException; diff --git 
a/test/plugins/Library/index/TermEntryTest.java b/test/plugins/Library/index/TermEntryTest.java index ecefd54e..d49e89cb 100644 --- a/test/plugins/Library/index/TermEntryTest.java +++ b/test/plugins/Library/index/TermEntryTest.java @@ -5,13 +5,13 @@ import junit.framework.TestCase; -import plugins.Library.io.serial.Serialiser.*; import plugins.Library.io.serial.FileArchiver; import plugins.Library.io.serial.Packer; import plugins.Library.io.YamlReaderWriter; import freenet.keys.FreenetURI; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.TaskAbortException; import java.util.Arrays; diff --git a/test/plugins/Library/io/serial/PackerTest.java b/test/plugins/Library/io/serial/PackerTest.java index ce23c749..ee29eb3d 100644 --- a/test/plugins/Library/io/serial/PackerTest.java +++ b/test/plugins/Library/io/serial/PackerTest.java @@ -8,7 +8,6 @@ import plugins.Library.util.Generators; import plugins.Library.util.SkeletonTreeMap; import plugins.Library.io.serial.Packer.Bin; -import plugins.Library.io.serial.Serialiser.*; import java.util.Map; import java.util.List; @@ -16,6 +15,8 @@ import java.util.HashSet; import java.util.HashMap; +import freenet.library.io.serial.IterableSerialiser; +import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.TaskAbortException; /** From 590818de35b10ce25430117dc2b85c65d0e02134 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 12:15:27 +0000 Subject: [PATCH 012/180] Cleanups to allow moving of the classes. Logging is left commented out for when another logging mechanism is eventually added. 
--HG-- branch : eclipse-separation --- src/plugins/Library/util/BTreeMap.java | 2 -- .../Library/util/SkeletonBTreeMap.java | 33 ++++++++----------- .../Library/util/SkeletonBTreeSet.java | 2 -- src/plugins/Library/util/SkeletonTreeMap.java | 1 - 4 files changed, 13 insertions(+), 25 deletions(-) diff --git a/src/plugins/Library/util/BTreeMap.java b/src/plugins/Library/util/BTreeMap.java index 626f1861..6d6a6d7e 100644 --- a/src/plugins/Library/util/BTreeMap.java +++ b/src/plugins/Library/util/BTreeMap.java @@ -6,7 +6,6 @@ import java.util.Comparator; import java.util.Iterator; -import java.util.Collection; import java.util.List; import java.util.Set; import java.util.Map; @@ -18,7 +17,6 @@ import java.util.HashMap; import java.util.Stack; import java.util.NoSuchElementException; -import java.util.ConcurrentModificationException; import freenet.library.util.CompositeIterable; import freenet.library.util.Integers; diff --git a/src/plugins/Library/util/SkeletonBTreeMap.java b/src/plugins/Library/util/SkeletonBTreeMap.java index f1347ca3..8837f35c 100644 --- a/src/plugins/Library/util/SkeletonBTreeMap.java +++ b/src/plugins/Library/util/SkeletonBTreeMap.java @@ -19,15 +19,11 @@ // TODO NORM tidy this import java.util.Queue; import java.util.PriorityQueue; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executor; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import plugins.Library.util.BTreeMap.Node; import java.util.Collections; import java.util.SortedSet; @@ -51,11 +47,9 @@ import freenet.library.util.concurrent.Executors; import freenet.library.util.concurrent.Notifier; import freenet.library.util.concurrent.ObjectProcessor; -import freenet.library.util.concurrent.Scheduler; import 
freenet.library.util.event.CountingSweeper; import freenet.library.util.event.TrackingSweeper; import freenet.library.util.exec.BaseCompositeProgress; -import freenet.library.util.exec.Progress; import freenet.library.util.exec.ProgressParts; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.exec.TaskCompleteException; @@ -63,7 +57,6 @@ import freenet.library.util.func.SafeClosure; import freenet.library.util.func.Tuples.X2; import freenet.library.util.func.Tuples.X3; -import freenet.support.Logger; import static freenet.library.util.Maps.$K; /** @@ -150,9 +143,9 @@ public void setSerialiser(IterableSerialiser n, MapSerialiser> CMP_PULL = new Comparator>() { /*@Override**/ public int compare(PullTask t1, PullTask t2) { @@ -968,7 +961,7 @@ public void run() { public void invoke(Map.Entry en) { assert(node.entries.containsKey(en.getKey())); node.entries.put(en.getKey(), en.getValue()); - if(logMINOR) Logger.minor(this, "New value for key "+en.getKey()+" : "+en.getValue()+" in "+node+" parent = "+parNClo); + // if(logMINOR) Logger.minor(this, "New value for key "+en.getKey()+" : "+en.getValue()+" in "+node+" parent = "+parNClo); } public void deflate() throws TaskAbortException { @@ -1156,7 +1149,7 @@ private void reassignKeyToSweeper(K key, DeflateNode clo) { //assert(((UpdateValue)value_closures.get(key)).node == node); proc_val.update($K(key, (V)null), clo); // FIXME what if it has already run??? 
- if(logMINOR) Logger.minor(this, "Reassigning key "+key+" to "+clo+" on "+this+" parent="+parent+" parent split node "+parNClo+" parent deflate node "+parVClo); + // if(logMINOR) Logger.minor(this, "Reassigning key "+key+" to "+clo+" on "+this+" parent="+parent+" parent split node "+parNClo+" parent deflate node "+parVClo); // nodeVClo.release(key); // this is unnecessary since nodeVClo() will only be used if we did not // split its node (and never called this method) @@ -1264,7 +1257,7 @@ public void invoke(SkeletonNode node) { } catch (ClassCastException e) { // This has been seen in practice. I have no idea what it means. // FIXME HIGH !!! - Logger.error(this, "Node is already loaded?!?!?!: "+node.selectNode(rng.first())); + // Logger.error(this, "Node is already loaded?!?!?!: "+node.selectNode(rng.first())); continue; } PullTask task = new PullTask(n); @@ -1296,7 +1289,7 @@ public void invoke(SkeletonNode node) { private void handleLocalPut(SkeletonNode n, K key, DeflateNode vClo) { V oldval = n.entries.put(key, null); vClo.acquire(key); - if(logMINOR) Logger.minor(this, "handleLocalPut for key "+key+" old value "+oldval+" for deflate node "+vClo+" - passing to proc_val"); + // if(logMINOR) Logger.minor(this, "handleLocalPut for key "+key+" old value "+oldval+" for deflate node "+vClo+" - passing to proc_val"); ObjectProcessor.submitSafe(proc_val, $K(key, oldval), vClo); } @@ -1327,12 +1320,12 @@ private void handleLocalRemove(SkeletonNode n, K key, TrackingSweeper 10)) { count = 0; - Logger.debug(this, - "SkeletonBTreeMap update " + - proc_val + " " + - proc_pull + " " + - proc_push + " " + - proc_deflate); + // Logger.debug(this, + // "SkeletonBTreeMap update " + + // proc_val + " " + + // proc_pull + " " + + // proc_push + " " + + // proc_deflate); notifier.waitUpdate(1000); } progress = false; diff --git a/src/plugins/Library/util/SkeletonBTreeSet.java b/src/plugins/Library/util/SkeletonBTreeSet.java index a9f705f1..ef8a57eb 100644 --- 
a/src/plugins/Library/util/SkeletonBTreeSet.java +++ b/src/plugins/Library/util/SkeletonBTreeSet.java @@ -7,7 +7,6 @@ import java.util.Comparator; import java.util.Collection; import java.util.Map; -import java.util.SortedMap; import java.util.SortedSet; import java.util.ArrayList; @@ -15,7 +14,6 @@ import freenet.library.io.serial.IterableSerialiser; import freenet.library.io.serial.MapSerialiser; import freenet.library.io.serial.Translator; -import freenet.library.io.serial.Serialiser.*; import freenet.library.util.SortedSetMap; import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/util/SkeletonTreeMap.java b/src/plugins/Library/util/SkeletonTreeMap.java index 91bf2efd..a21e6c5a 100644 --- a/src/plugins/Library/util/SkeletonTreeMap.java +++ b/src/plugins/Library/util/SkeletonTreeMap.java @@ -22,7 +22,6 @@ import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.exec.TaskCompleteException; -import freenet.support.Logger; /** ** A {@link SkeletonMap} of a {@link TreeMap}. DOCUMENT From 807716650c7672828e7b409f8e6700288f6751dc Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 12:26:57 +0000 Subject: [PATCH 013/180] Moved the rest of the classes from plugins.Library.util. 
--HG-- branch : eclipse-separation --- .../src/freenet/library}/util/BTreeMap.java | 5 +---- .../src/freenet/library}/util/BTreeSet.java | 3 +-- .../library}/util/DataNotLoadedException.java | 2 +- .../src/freenet/library}/util/Skeleton.java | 2 +- .../library}/util/SkeletonBTreeMap.java | 4 +--- .../library}/util/SkeletonBTreeSet.java | 4 +--- .../src/freenet/library}/util/SkeletonMap.java | 2 +- .../freenet/library}/util/SkeletonTreeMap.java | 2 +- .../freenet/library}/util/BTreeMapTest.java | 18 ++++++++++++------ .../library}/util/BytePrefixKeyTest.java | 5 +++-- .../library}/util/SkeletonTreeMapTest.java | 18 +++++++++++++----- src/plugins/Library/Main.java | 4 ++-- src/plugins/Library/SpiderIndexUploader.java | 4 ++-- src/plugins/Library/index/ProtoIndex.java | 10 +++++----- .../index/ProtoIndexComponentSerialiser.java | 6 +++--- .../Library/index/ProtoIndexSerialiser.java | 4 ++-- test/plugins/Library/Tester.java | 1 + test/plugins/Library/index/BIndexTest.java | 2 ++ test/plugins/Library/io/serial/PackerTest.java | 2 +- 19 files changed, 54 insertions(+), 44 deletions(-) rename {src/plugins/Library => shared/src/freenet/library}/util/BTreeMap.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/util/BTreeSet.java (96%) rename {src/plugins/Library => shared/src/freenet/library}/util/DataNotLoadedException.java (98%) rename {src/plugins/Library => shared/src/freenet/library}/util/Skeleton.java (98%) rename {src/plugins/Library => shared/src/freenet/library}/util/SkeletonBTreeMap.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/util/SkeletonBTreeSet.java (97%) rename {src/plugins/Library => shared/src/freenet/library}/util/SkeletonMap.java (98%) rename {src/plugins/Library => shared/src/freenet/library}/util/SkeletonTreeMap.java (99%) rename {test/plugins/Library => shared/test/freenet/library}/util/BTreeMapTest.java (89%) rename {test/plugins/Library => shared/test/freenet/library}/util/BytePrefixKeyTest.java (88%) rename 
{test/plugins/Library => shared/test/freenet/library}/util/SkeletonTreeMapTest.java (89%) diff --git a/src/plugins/Library/util/BTreeMap.java b/shared/src/freenet/library/util/BTreeMap.java similarity index 99% rename from src/plugins/Library/util/BTreeMap.java rename to shared/src/freenet/library/util/BTreeMap.java index 6d6a6d7e..99e4c547 100644 --- a/src/plugins/Library/util/BTreeMap.java +++ b/shared/src/freenet/library/util/BTreeMap.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; import java.util.Comparator; @@ -18,9 +18,6 @@ import java.util.Stack; import java.util.NoSuchElementException; -import freenet.library.util.CompositeIterable; -import freenet.library.util.Integers; -import freenet.library.util.Sorted; import freenet.library.util.func.Tuples.X2; import freenet.library.util.func.Tuples.X3; diff --git a/src/plugins/Library/util/BTreeSet.java b/shared/src/freenet/library/util/BTreeSet.java similarity index 96% rename from src/plugins/Library/util/BTreeSet.java rename to shared/src/freenet/library/util/BTreeSet.java index fd20f336..dfd6592c 100644 --- a/src/plugins/Library/util/BTreeSet.java +++ b/shared/src/freenet/library/util/BTreeSet.java @@ -1,13 +1,12 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version) {. See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; import java.util.Comparator; import java.util.Set; import java.util.SortedSet; -import freenet.library.util.SortedMapSet; /** ** A B-tree set implementation backed by a {@link BTreeMap}. 
DOCUMENT diff --git a/src/plugins/Library/util/DataNotLoadedException.java b/shared/src/freenet/library/util/DataNotLoadedException.java similarity index 98% rename from src/plugins/Library/util/DataNotLoadedException.java rename to shared/src/freenet/library/util/DataNotLoadedException.java index 1212c5ea..2ef1556d 100644 --- a/src/plugins/Library/util/DataNotLoadedException.java +++ b/shared/src/freenet/library/util/DataNotLoadedException.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; /** ** Thrown when data hasn't been loaded yet, eg. when a {@link Skeleton} hasn't diff --git a/src/plugins/Library/util/Skeleton.java b/shared/src/freenet/library/util/Skeleton.java similarity index 98% rename from src/plugins/Library/util/Skeleton.java rename to shared/src/freenet/library/util/Skeleton.java index 956179b5..39c4f0fc 100644 --- a/src/plugins/Library/util/Skeleton.java +++ b/shared/src/freenet/library/util/Skeleton.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; import freenet.library.io.serial.Serialiser; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/util/SkeletonBTreeMap.java b/shared/src/freenet/library/util/SkeletonBTreeMap.java similarity index 99% rename from src/plugins/Library/util/SkeletonBTreeMap.java rename to shared/src/freenet/library/util/SkeletonBTreeMap.java index 8837f35c..34e0ce2d 100644 --- a/src/plugins/Library/util/SkeletonBTreeMap.java +++ b/shared/src/freenet/library/util/SkeletonBTreeMap.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. 
It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; import java.util.AbstractSet; @@ -40,8 +40,6 @@ import freenet.library.io.serial.Serialiser; import freenet.library.io.serial.Translator; import freenet.library.io.serial.Serialiser.*; -import freenet.library.util.Sorted; -import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.concurrent.BoundedPriorityBlockingQueue; import freenet.library.util.concurrent.ExceptionConvertor; import freenet.library.util.concurrent.Executors; diff --git a/src/plugins/Library/util/SkeletonBTreeSet.java b/shared/src/freenet/library/util/SkeletonBTreeSet.java similarity index 97% rename from src/plugins/Library/util/SkeletonBTreeSet.java rename to shared/src/freenet/library/util/SkeletonBTreeSet.java index ef8a57eb..a5e198de 100644 --- a/src/plugins/Library/util/SkeletonBTreeSet.java +++ b/shared/src/freenet/library/util/SkeletonBTreeSet.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.util; +package freenet.library.util; import java.util.Comparator; @@ -14,8 +14,6 @@ import freenet.library.io.serial.IterableSerialiser; import freenet.library.io.serial.MapSerialiser; import freenet.library.io.serial.Translator; -import freenet.library.util.SortedSetMap; -import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.exec.TaskAbortException; /** diff --git a/src/plugins/Library/util/SkeletonMap.java b/shared/src/freenet/library/util/SkeletonMap.java similarity index 98% rename from src/plugins/Library/util/SkeletonMap.java rename to shared/src/freenet/library/util/SkeletonMap.java index b9865ec1..ead0d3a8 100644 --- a/src/plugins/Library/util/SkeletonMap.java +++ b/shared/src/freenet/library/util/SkeletonMap.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; +package freenet.library.util; import java.util.Map; diff --git a/src/plugins/Library/util/SkeletonTreeMap.java b/shared/src/freenet/library/util/SkeletonTreeMap.java similarity index 99% rename from src/plugins/Library/util/SkeletonTreeMap.java rename to shared/src/freenet/library/util/SkeletonTreeMap.java index a21e6c5a..65cb4ee0 100644 --- a/src/plugins/Library/util/SkeletonTreeMap.java +++ b/shared/src/freenet/library/util/SkeletonTreeMap.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.util; +package freenet.library.util; import java.util.Iterator; diff --git a/test/plugins/Library/util/BTreeMapTest.java b/shared/test/freenet/library/util/BTreeMapTest.java similarity index 89% rename from test/plugins/Library/util/BTreeMapTest.java rename to shared/test/freenet/library/util/BTreeMapTest.java index 845b3c59..fea5ab48 100644 --- a/test/plugins/Library/util/BTreeMapTest.java +++ b/shared/test/freenet/library/util/BTreeMapTest.java @@ -1,13 +1,11 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.util; - -import junit.framework.TestCase; +package freenet.library.util; import java.util.*; -import freenet.library.util.SortedMapTestSkeleton; +import freenet.library.util.BTreeMap; /** @@ -22,6 +20,14 @@ public class BTreeMapTest extends SortedMapTestSkeleton { final public static int sz0 = 0x400; final public static int sz1 = sz0<<2; //sz0<<6; + private static String rndStr() { + return UUID.randomUUID().toString(); + } + + private static String rndKey() { + return rndStr().substring(0,8); + } + public void testBasic() { BTreeMap testmap = new BTreeMap(0x40); @@ -29,7 +35,7 @@ public void testBasic() { try { for (int i=0; i backmap = new TreeMap(); for (int i=0; i skelmap; + private static String rndStr() { + return UUID.randomUUID().toString(); + } + + private static String rndKey() { + return rndStr().substring(0,8); + } + protected void setUp() { skelmap = new SkeletonTreeMap(); for (int i=0; i<1024; ++i) { - skelmap.putGhost(Generators.rndKey(), Boolean.FALSE); + skelmap.putGhost(rndKey(), Boolean.FALSE); } } diff --git a/src/plugins/Library/Main.java b/src/plugins/Library/Main.java index e0daff54..49dc8589 100644 --- a/src/plugins/Library/Main.java +++ b/src/plugins/Library/Main.java @@ -33,8 +33,6 @@ import plugins.Library.index.TermPageEntry; 
import plugins.Library.search.Search; import plugins.Library.ui.WebInterface; -import plugins.Library.util.SkeletonBTreeMap; -import plugins.Library.util.SkeletonBTreeSet; import freenet.pluginmanager.FredPlugin; import freenet.pluginmanager.FredPluginL10n; @@ -50,6 +48,8 @@ import freenet.l10n.BaseL10n.LANGUAGE; import freenet.library.io.serial.Serialiser.PullTask; import freenet.library.io.serial.Serialiser.PushTask; +import freenet.library.util.SkeletonBTreeMap; +import freenet.library.util.SkeletonBTreeSet; import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.concurrent.Executors; import freenet.library.util.exec.SimpleProgress; diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index 9dce27a9..b5ee1749 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -28,12 +28,12 @@ import plugins.Library.index.TermEntry; import plugins.Library.index.TermEntryReaderWriter; import plugins.Library.io.serial.LiveArchiver; -import plugins.Library.util.SkeletonBTreeMap; -import plugins.Library.util.SkeletonBTreeSet; import freenet.client.InsertException; import freenet.keys.FreenetURI; import freenet.library.io.serial.Serialiser.PullTask; import freenet.library.io.serial.Serialiser.PushTask; +import freenet.library.util.SkeletonBTreeMap; +import freenet.library.util.SkeletonBTreeSet; import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/index/ProtoIndex.java b/src/plugins/Library/index/ProtoIndex.java index 52f516f7..5ada5104 100644 --- a/src/plugins/Library/index/ProtoIndex.java +++ b/src/plugins/Library/index/ProtoIndex.java @@ -4,15 +4,15 @@ package plugins.Library.index; import plugins.Library.Index; -import plugins.Library.util.Skeleton; -import plugins.Library.util.SkeletonTreeMap; -import 
plugins.Library.util.SkeletonBTreeMap; -import plugins.Library.util.SkeletonBTreeSet; -import plugins.Library.util.DataNotLoadedException; import freenet.keys.FreenetURI; import freenet.library.io.serial.ProgressTracker; import freenet.library.io.serial.Serialiser; +import freenet.library.util.DataNotLoadedException; +import freenet.library.util.Skeleton; +import freenet.library.util.SkeletonBTreeMap; +import freenet.library.util.SkeletonBTreeSet; +import freenet.library.util.SkeletonTreeMap; import freenet.library.util.concurrent.Executors; import freenet.library.util.exec.AbstractExecution; import freenet.library.util.exec.ChainedProgress; diff --git a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java index 58b0ae93..3ad5b696 100644 --- a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java @@ -5,9 +5,6 @@ import plugins.Library.Library; import plugins.Library.client.FreenetArchiver; -import plugins.Library.util.SkeletonTreeMap; -import plugins.Library.util.SkeletonBTreeMap; -import plugins.Library.util.SkeletonBTreeSet; import plugins.Library.io.serial.LiveArchiver; import plugins.Library.io.serial.ParallelSerialiser; import plugins.Library.io.serial.Packer; @@ -24,6 +21,9 @@ import freenet.library.io.serial.Serialiser; import freenet.library.io.serial.Translator; import freenet.library.io.serial.Serialiser.*; +import freenet.library.util.SkeletonBTreeMap; +import freenet.library.util.SkeletonBTreeSet; +import freenet.library.util.SkeletonTreeMap; import freenet.library.util.exec.BaseCompositeProgress; import freenet.library.util.exec.Progress; import freenet.library.util.exec.ProgressParts; diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java b/src/plugins/Library/index/ProtoIndexSerialiser.java index a9b6ea6b..664ba488 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ 
b/src/plugins/Library/index/ProtoIndexSerialiser.java @@ -5,8 +5,6 @@ import plugins.Library.Library; import plugins.Library.client.FreenetArchiver; -import plugins.Library.util.SkeletonBTreeMap; -import plugins.Library.util.SkeletonBTreeSet; import plugins.Library.io.serial.LiveArchiver; import plugins.Library.io.serial.FileArchiver; import plugins.Library.io.YamlReaderWriter; @@ -17,6 +15,8 @@ import freenet.library.io.serial.Serialiser; import freenet.library.io.serial.Translator; import freenet.library.io.serial.Serialiser.*; +import freenet.library.util.SkeletonBTreeMap; +import freenet.library.util.SkeletonBTreeSet; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; diff --git a/test/plugins/Library/Tester.java b/test/plugins/Library/Tester.java index e53954a8..18f633bf 100644 --- a/test/plugins/Library/Tester.java +++ b/test/plugins/Library/Tester.java @@ -12,6 +12,7 @@ import freenet.keys.FreenetURI; import freenet.library.io.serial.Serialiser.*; +import freenet.library.util.SkeletonBTreeSet; import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.exec.ProgressParts; import freenet.library.util.exec.SimpleProgress; diff --git a/test/plugins/Library/index/BIndexTest.java b/test/plugins/Library/index/BIndexTest.java index a39f3b10..2fe8bf0a 100644 --- a/test/plugins/Library/index/BIndexTest.java +++ b/test/plugins/Library/index/BIndexTest.java @@ -12,6 +12,8 @@ import freenet.keys.FreenetURI; import freenet.library.io.serial.Serialiser.*; +import freenet.library.util.SkeletonBTreeMap; +import freenet.library.util.SkeletonBTreeSet; import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.exec.Execution; import freenet.library.util.exec.TaskAbortException; diff --git a/test/plugins/Library/io/serial/PackerTest.java b/test/plugins/Library/io/serial/PackerTest.java index ee29eb3d..901c0e06 100644 --- a/test/plugins/Library/io/serial/PackerTest.java +++ 
b/test/plugins/Library/io/serial/PackerTest.java @@ -6,7 +6,6 @@ import junit.framework.TestCase; import plugins.Library.util.Generators; -import plugins.Library.util.SkeletonTreeMap; import plugins.Library.io.serial.Packer.Bin; import java.util.Map; @@ -17,6 +16,7 @@ import freenet.library.io.serial.IterableSerialiser; import freenet.library.io.serial.Serialiser.*; +import freenet.library.util.SkeletonTreeMap; import freenet.library.util.exec.TaskAbortException; /** From bed5daa945c1c1b5b037234b8838ed16378ce567 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 12:30:29 +0000 Subject: [PATCH 014/180] Removed logging to allow for the move. Logging was commented out. --HG-- branch : eclipse-separation --- src/plugins/Library/io/serial/Packer.java | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/src/plugins/Library/io/serial/Packer.java b/src/plugins/Library/io/serial/Packer.java index d361e682..dc7eaa1c 100644 --- a/src/plugins/Library/io/serial/Packer.java +++ b/src/plugins/Library/io/serial/Packer.java @@ -7,7 +7,6 @@ import java.util.Collections; import java.util.Collection; import java.util.Iterator; -import java.util.Comparator; import java.util.List; import java.util.Set; import java.util.Map; @@ -23,10 +22,8 @@ import freenet.library.io.serial.Serialiser; import freenet.library.io.serial.Serialiser.*; import freenet.library.util.IdentityComparator; -import freenet.library.util.concurrent.ObjectProcessor; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.exec.TaskCompleteException; -import freenet.support.Logger; /** ** {@link MapSerialiser} that packs a map of weighable elements (eg. 
objects @@ -75,9 +72,9 @@ public class Packer private static volatile boolean logMINOR; private static volatile boolean logDEBUG; - static { - Logger.registerClass(Packer.class); - } + // static { + // Logger.registerClass(Packer.class); + // } /** ** Maximum weight of a bin (except one; see {@link #push(Map, Object)} for @@ -608,7 +605,7 @@ protected void pullUnloaded(Map> tasks, Object meta) throws TaskA try { // read local copy of aggression int agg = getAggression(); - if(logDEBUG) Logger.debug(this, "Aggression = "+agg+" tasks size = "+tasks.size()); + // if(logDEBUG) Logger.debug(this, "Aggression = "+agg+" tasks size = "+tasks.size()); IDGenerator gen = generator(); Inventory inv = new Inventory(this, tasks); From 3e0d321f1c3230a7ef15cd1e00b20bf3e2ce75d2 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 12:42:52 +0000 Subject: [PATCH 015/180] Moved almost all files from plugins.Library.io and .serial. --HG-- branch : eclipse-separation --- .../freenet/library}/io/ObjectBlueprint.java | 2 +- .../library}/io/ObjectStreamReader.java | 2 +- .../library}/io/ObjectStreamWriter.java | 2 +- .../library}/io/serial/FileArchiver.java | 8 +++----- .../library}/io/serial/LiveArchiver.java | 4 +--- .../src/freenet/library}/io/serial/Packer.java | 6 +----- .../library}/io/serial/ParallelSerialiser.java | 6 +----- .../library}/io/serial/package-info.java | 2 +- .../freenet/library}/io/serial/PackerTest.java | 18 +++++++++++++----- src/plugins/Library/Library.java | 4 ++-- src/plugins/Library/Main.java | 2 +- src/plugins/Library/SpiderIndexUploader.java | 2 +- .../Library/client/FreenetArchiver.java | 7 ++++--- .../index/ProtoIndexComponentSerialiser.java | 10 +++++----- .../Library/index/ProtoIndexSerialiser.java | 4 ++-- .../Library/index/TermEntryReaderWriter.java | 4 ++-- src/plugins/Library/io/YamlReaderWriter.java | 5 ++++- test/plugins/Library/index/BIndexTest.java | 2 +- test/plugins/Library/index/TermEntryTest.java | 4 ++-- 19 files changed, 47 
insertions(+), 47 deletions(-) rename {src/plugins/Library => shared/src/freenet/library}/io/ObjectBlueprint.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/io/ObjectStreamReader.java (95%) rename {src/plugins/Library => shared/src/freenet/library}/io/ObjectStreamWriter.java (95%) rename {src/plugins/Library => shared/src/freenet/library}/io/serial/FileArchiver.java (96%) rename {src/plugins/Library => shared/src/freenet/library}/io/serial/LiveArchiver.java (93%) rename {src/plugins/Library => shared/src/freenet/library}/io/serial/Packer.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/io/serial/ParallelSerialiser.java (97%) rename {src/plugins/Library => shared/src/freenet/library}/io/serial/package-info.java (92%) rename {test/plugins/Library => shared/test/freenet/library}/io/serial/PackerTest.java (86%) diff --git a/src/plugins/Library/io/ObjectBlueprint.java b/shared/src/freenet/library/io/ObjectBlueprint.java similarity index 99% rename from src/plugins/Library/io/ObjectBlueprint.java rename to shared/src/freenet/library/io/ObjectBlueprint.java index 36969847..e5bb37ef 100644 --- a/src/plugins/Library/io/ObjectBlueprint.java +++ b/shared/src/freenet/library/io/ObjectBlueprint.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.io; +package freenet.library.io; import java.util.Iterator; import java.util.Collection; diff --git a/src/plugins/Library/io/ObjectStreamReader.java b/shared/src/freenet/library/io/ObjectStreamReader.java similarity index 95% rename from src/plugins/Library/io/ObjectStreamReader.java rename to shared/src/freenet/library/io/ObjectStreamReader.java index 05b46786..fca8565b 100644 --- a/src/plugins/Library/io/ObjectStreamReader.java +++ b/shared/src/freenet/library/io/ObjectStreamReader.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.io; +package freenet.library.io; import java.io.InputStream; import java.io.IOException; diff --git a/src/plugins/Library/io/ObjectStreamWriter.java b/shared/src/freenet/library/io/ObjectStreamWriter.java similarity index 95% rename from src/plugins/Library/io/ObjectStreamWriter.java rename to shared/src/freenet/library/io/ObjectStreamWriter.java index 1843c740..5d271432 100644 --- a/src/plugins/Library/io/ObjectStreamWriter.java +++ b/shared/src/freenet/library/io/ObjectStreamWriter.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.io; +package freenet.library.io; import java.io.OutputStream; import java.io.IOException; diff --git a/src/plugins/Library/io/serial/FileArchiver.java b/shared/src/freenet/library/io/serial/FileArchiver.java similarity index 96% rename from src/plugins/Library/io/serial/FileArchiver.java rename to shared/src/freenet/library/io/serial/FileArchiver.java index 35aa5496..7b7a04d9 100644 --- a/src/plugins/Library/io/serial/FileArchiver.java +++ b/shared/src/freenet/library/io/serial/FileArchiver.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.io.serial; +package freenet.library.io.serial; import java.io.File; import java.io.FileInputStream; @@ -9,13 +9,11 @@ import java.io.IOException; import java.nio.channels.FileLock; -import freenet.library.io.serial.Archiver; -import freenet.library.io.serial.Serialiser.Task; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; -import plugins.Library.io.ObjectStreamReader; -import plugins.Library.io.ObjectStreamWriter; +import freenet.library.io.ObjectStreamReader; +import freenet.library.io.ObjectStreamWriter; /** ** Converts between a map of {@link String} to {@link Object}, and a file on diff --git a/src/plugins/Library/io/serial/LiveArchiver.java b/shared/src/freenet/library/io/serial/LiveArchiver.java similarity index 93% rename from src/plugins/Library/io/serial/LiveArchiver.java rename to shared/src/freenet/library/io/serial/LiveArchiver.java index 8df32175..2b5440f1 100644 --- a/src/plugins/Library/io/serial/LiveArchiver.java +++ b/shared/src/freenet/library/io/serial/LiveArchiver.java @@ -1,10 +1,8 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). 
See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.io.serial; +package freenet.library.io.serial; -import freenet.library.io.serial.Archiver; -import freenet.library.io.serial.Serialiser; import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.Progress; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/io/serial/Packer.java b/shared/src/freenet/library/io/serial/Packer.java similarity index 99% rename from src/plugins/Library/io/serial/Packer.java rename to shared/src/freenet/library/io/serial/Packer.java index dc7eaa1c..0115f16b 100644 --- a/src/plugins/Library/io/serial/Packer.java +++ b/shared/src/freenet/library/io/serial/Packer.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.io.serial; +package freenet.library.io.serial; import java.util.Collections; @@ -16,10 +16,6 @@ import java.util.HashSet; import java.util.TreeSet; -import freenet.library.io.serial.IterableSerialiser; -import freenet.library.io.serial.MapSerialiser; -import freenet.library.io.serial.ProgressTracker; -import freenet.library.io.serial.Serialiser; import freenet.library.io.serial.Serialiser.*; import freenet.library.util.IdentityComparator; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/io/serial/ParallelSerialiser.java b/shared/src/freenet/library/io/serial/ParallelSerialiser.java similarity index 97% rename from src/plugins/Library/io/serial/ParallelSerialiser.java rename to shared/src/freenet/library/io/serial/ParallelSerialiser.java index 93f88d35..3199611c 100644 --- a/src/plugins/Library/io/serial/ParallelSerialiser.java +++ b/shared/src/freenet/library/io/serial/ParallelSerialiser.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. 
It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.io.serial; +package freenet.library.io.serial; import static freenet.library.util.func.Tuples.X2; @@ -17,10 +17,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.ConcurrentMap; -import freenet.library.io.serial.IterableSerialiser; -import freenet.library.io.serial.ProgressTracker; -import freenet.library.io.serial.ScheduledSerialiser; -import freenet.library.io.serial.Serialiser; import freenet.library.io.serial.Serialiser.*; import freenet.library.util.TaskAbortExceptionConvertor; import freenet.library.util.concurrent.Executors; diff --git a/src/plugins/Library/io/serial/package-info.java b/shared/src/freenet/library/io/serial/package-info.java similarity index 92% rename from src/plugins/Library/io/serial/package-info.java rename to shared/src/freenet/library/io/serial/package-info.java index 7ee8846a..fa610b0f 100644 --- a/src/plugins/Library/io/serial/package-info.java +++ b/shared/src/freenet/library/io/serial/package-info.java @@ -8,4 +8,4 @@ ** ** @author infinity0 */ -package plugins.Library.io.serial; +package freenet.library.io.serial; diff --git a/test/plugins/Library/io/serial/PackerTest.java b/shared/test/freenet/library/io/serial/PackerTest.java similarity index 86% rename from test/plugins/Library/io/serial/PackerTest.java rename to shared/test/freenet/library/io/serial/PackerTest.java index 901c0e06..1375a732 100644 --- a/test/plugins/Library/io/serial/PackerTest.java +++ b/shared/test/freenet/library/io/serial/PackerTest.java @@ -1,20 +1,20 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.io.serial; +package freenet.library.io.serial; import junit.framework.TestCase; -import plugins.Library.util.Generators; -import plugins.Library.io.serial.Packer.Bin; - import java.util.Map; import java.util.List; import java.util.Iterator; import java.util.HashSet; import java.util.HashMap; +import java.util.UUID; import freenet.library.io.serial.IterableSerialiser; +import freenet.library.io.serial.Packer; +import freenet.library.io.serial.Packer.Bin; import freenet.library.io.serial.Serialiser.*; import freenet.library.util.SkeletonTreeMap; import freenet.library.util.exec.TaskAbortException; @@ -28,6 +28,14 @@ public class PackerTest extends TestCase { final static public int NODE_MAX = 64; + private static String rndStr() { + return UUID.randomUUID().toString(); + } + + private static String rndKey() { + return rndStr().substring(0,8); + } + final public static Packer srl = new Packer( new IterableSerialiser>() { @@ -62,7 +70,7 @@ protected Map> generateTasks(int[] sizes) { for (int i=0; i(hs, meta)); + tasks.put(rndKey(), new PushTask(hs, meta)); } return tasks; } diff --git a/src/plugins/Library/Library.java b/src/plugins/Library/Library.java index aa77e76c..02ec7936 100644 --- a/src/plugins/Library/Library.java +++ b/src/plugins/Library/Library.java @@ -22,8 +22,6 @@ import plugins.Library.index.ProtoIndexSerialiser; import plugins.Library.index.xml.URLUpdateHook; import plugins.Library.index.xml.XMLIndex; -import plugins.Library.io.ObjectStreamReader; -import plugins.Library.io.ObjectStreamWriter; import plugins.Library.search.InvalidSearchException; import freenet.client.FetchContext; @@ -44,6 +42,8 @@ import freenet.client.events.ExpectedMIMEEvent; import freenet.keys.FreenetURI; import freenet.keys.USK; +import freenet.library.io.ObjectStreamReader; +import freenet.library.io.ObjectStreamWriter; import freenet.library.io.serial.Serialiser.PullTask; import freenet.library.util.exec.TaskAbortException; import 
freenet.node.NodeClientCore; diff --git a/src/plugins/Library/Main.java b/src/plugins/Library/Main.java index 49dc8589..63f59df8 100644 --- a/src/plugins/Library/Main.java +++ b/src/plugins/Library/Main.java @@ -46,6 +46,7 @@ import freenet.keys.FreenetURI; import freenet.keys.InsertableClientSSK; import freenet.l10n.BaseL10n.LANGUAGE; +import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.Serialiser.PullTask; import freenet.library.io.serial.Serialiser.PushTask; import freenet.library.util.SkeletonBTreeMap; @@ -72,7 +73,6 @@ import java.security.MessageDigest; import plugins.Library.index.TermEntryReaderWriter; import plugins.Library.index.xml.LibrarianHandler; -import plugins.Library.io.serial.LiveArchiver; /** * Library class is the api for others to use search facilities, it is used by the interfaces diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index b5ee1749..bd10d4fd 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -27,9 +27,9 @@ import plugins.Library.index.ProtoIndexSerialiser; import plugins.Library.index.TermEntry; import plugins.Library.index.TermEntryReaderWriter; -import plugins.Library.io.serial.LiveArchiver; import freenet.client.InsertException; import freenet.keys.FreenetURI; +import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.Serialiser.PullTask; import freenet.library.io.serial.Serialiser.PushTask; import freenet.library.util.SkeletonBTreeMap; diff --git a/src/plugins/Library/client/FreenetArchiver.java b/src/plugins/Library/client/FreenetArchiver.java index fabd1744..49d06e68 100644 --- a/src/plugins/Library/client/FreenetArchiver.java +++ b/src/plugins/Library/client/FreenetArchiver.java @@ -10,10 +10,8 @@ import java.util.ArrayList; import java.util.HashSet; + import plugins.Library.Library; -import plugins.Library.io.ObjectStreamReader; -import 
plugins.Library.io.ObjectStreamWriter; -import plugins.Library.io.serial.LiveArchiver; import freenet.client.ClientMetadata; import freenet.client.FetchException; @@ -34,6 +32,9 @@ import freenet.crypt.SHA256; import freenet.keys.CHKBlock; import freenet.keys.FreenetURI; +import freenet.library.io.ObjectStreamReader; +import freenet.library.io.ObjectStreamWriter; +import freenet.library.io.serial.LiveArchiver; import freenet.library.util.exec.ProgressParts; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java index 3ad5b696..fa141910 100644 --- a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java @@ -5,21 +5,21 @@ import plugins.Library.Library; import plugins.Library.client.FreenetArchiver; -import plugins.Library.io.serial.LiveArchiver; -import plugins.Library.io.serial.ParallelSerialiser; -import plugins.Library.io.serial.Packer; -import plugins.Library.io.serial.Packer.Scale; // WORKAROUND javadoc bug #4464323 -import plugins.Library.io.serial.FileArchiver; import plugins.Library.io.YamlReaderWriter; import freenet.keys.FreenetURI; import freenet.library.io.DataFormatException; import freenet.library.io.serial.Archiver; +import freenet.library.io.serial.FileArchiver; import freenet.library.io.serial.IterableSerialiser; +import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.MapSerialiser; +import freenet.library.io.serial.Packer; +import freenet.library.io.serial.ParallelSerialiser; import freenet.library.io.serial.ProgressTracker; import freenet.library.io.serial.Serialiser; import freenet.library.io.serial.Translator; +import freenet.library.io.serial.Packer.Scale; import freenet.library.io.serial.Serialiser.*; import freenet.library.util.SkeletonBTreeMap; import 
freenet.library.util.SkeletonBTreeSet; diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java b/src/plugins/Library/index/ProtoIndexSerialiser.java index 664ba488..5e68b91e 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexSerialiser.java @@ -5,13 +5,13 @@ import plugins.Library.Library; import plugins.Library.client.FreenetArchiver; -import plugins.Library.io.serial.LiveArchiver; -import plugins.Library.io.serial.FileArchiver; import plugins.Library.io.YamlReaderWriter; import freenet.keys.FreenetURI; import freenet.library.io.DataFormatException; import freenet.library.io.serial.Archiver; +import freenet.library.io.serial.FileArchiver; +import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.Serialiser; import freenet.library.io.serial.Translator; import freenet.library.io.serial.Serialiser.*; diff --git a/src/plugins/Library/index/TermEntryReaderWriter.java b/src/plugins/Library/index/TermEntryReaderWriter.java index e19c9a24..e64da5ed 100644 --- a/src/plugins/Library/index/TermEntryReaderWriter.java +++ b/src/plugins/Library/index/TermEntryReaderWriter.java @@ -3,11 +3,11 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.index; -import plugins.Library.io.ObjectStreamReader; -import plugins.Library.io.ObjectStreamWriter; import freenet.keys.FreenetURI; import freenet.library.io.DataFormatException; +import freenet.library.io.ObjectStreamReader; +import freenet.library.io.ObjectStreamWriter; import java.util.Map; import java.util.HashMap; diff --git a/src/plugins/Library/io/YamlReaderWriter.java b/src/plugins/Library/io/YamlReaderWriter.java index 60fe8753..922bf724 100644 --- a/src/plugins/Library/io/YamlReaderWriter.java +++ b/src/plugins/Library/io/YamlReaderWriter.java @@ -31,13 +31,16 @@ import java.io.IOException; /* class definitions added to the extended Yaml processor */ -import plugins.Library.io.serial.Packer; import plugins.Library.index.TermEntry; import plugins.Library.index.TermPageEntry; import plugins.Library.index.TermIndexEntry; import plugins.Library.index.TermTermEntry; import freenet.keys.FreenetURI; import freenet.library.io.DataFormatException; +import freenet.library.io.ObjectBlueprint; +import freenet.library.io.ObjectStreamReader; +import freenet.library.io.ObjectStreamWriter; +import freenet.library.io.serial.Packer; /** diff --git a/test/plugins/Library/index/BIndexTest.java b/test/plugins/Library/index/BIndexTest.java index 2fe8bf0a..baf7bbb4 100644 --- a/test/plugins/Library/index/BIndexTest.java +++ b/test/plugins/Library/index/BIndexTest.java @@ -356,7 +356,7 @@ public void testProgress() throws TaskAbortException { srl.push(task); System.out.print("deflated in " + timeDiff() + " ms, root at " + task.meta + ", "); - plugins.Library.io.serial.FileArchiver.setTestMode(); + freenet.library.io.serial.FileArchiver.setTestMode(); System.out.println("Requesting entries for term " + sterm); Execution> rq1 = idx.getTermEntries(sterm); diff --git a/test/plugins/Library/index/TermEntryTest.java b/test/plugins/Library/index/TermEntryTest.java index d49e89cb..f1540ea0 100644 --- a/test/plugins/Library/index/TermEntryTest.java +++ 
b/test/plugins/Library/index/TermEntryTest.java @@ -5,12 +5,12 @@ import junit.framework.TestCase; -import plugins.Library.io.serial.FileArchiver; -import plugins.Library.io.serial.Packer; import plugins.Library.io.YamlReaderWriter; import freenet.keys.FreenetURI; +import freenet.library.io.serial.FileArchiver; +import freenet.library.io.serial.Packer; import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.TaskAbortException; From fa6053e79ce39c7759f92c9b2a59e7df9403d8df Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 28 Dec 2014 18:23:23 +0000 Subject: [PATCH 016/180] Refactoring the yaml-code to factor out the Freenet classes. --HG-- branch : eclipse-separation --- lib/snakeyaml-1.5-sources.jar | Bin 0 -> 131686 bytes shared/.classpath | 2 +- .../freenet/library/util/SortedArraySet.java | 2 +- .../Library/index/TermEntryReaderWriter.java | 9 +- src/plugins/Library/index/TermIndexEntry.java | 12 +-- src/plugins/Library/index/TermPageEntry.java | 96 +++++------------- src/plugins/Library/index/TermTermEntry.java | 2 - .../Library/index/xml/LibrarianHandler.java | 3 +- src/plugins/Library/index/xml/XMLIndex.java | 2 +- src/plugins/Library/io/YamlReaderWriter.java | 19 ++-- .../Library/ui/ResultNodeGenerator.java | 20 ++-- test/plugins/Library/index/TermEntryTest.java | 14 +-- test/plugins/Library/util/Generators.java | 2 +- 13 files changed, 63 insertions(+), 120 deletions(-) create mode 100644 lib/snakeyaml-1.5-sources.jar diff --git a/lib/snakeyaml-1.5-sources.jar b/lib/snakeyaml-1.5-sources.jar new file mode 100644 index 0000000000000000000000000000000000000000..aa6736db099d8983a0b0c8e172f0ffebd9d1737f GIT binary patch literal 131686 zcma&O1yo$wwly4rySux)ySuwvpm28y?(XjH7ThhkOK^fqaEAoRU+H__yPf{}_WNrL zsA6#T*n6LvOXiwuA0=5(Fqn70US<&nynlT7_b+JRyS%uXFuk;b1f$ZwFN1mq0A6<8 zKCW*F{I?zO2kp<7$qOq;ONgtgG002Y%1=(n$PX92P z@b5>P*xNa~IJy31Fr>-vQ{(-&!L|(PQQ=)>t?b4envZcQ^23^ z(O;+gFT(&%PQTrpzkY}RWtC0{PB%XrXiYshn 
zCY+1Uis~jq`Zyj=?d?q}_0P@ifZ+8@9pVlprGPM=42~H&{0L>}9aQ=Z!-+I5S6frS z+OvZU6T^%~A^~m|!ZfKZ*6MYL^(J1@j^?n7*w+Zr?Nzxf=!}J{Ei}wQR2(2e8sGpg zn@h4?2IUN@Cq&lBcOWMQMb1f!Ssr?gKPn1Of*G2?Pa2o~HC12MsFd7pWIX*Pj)-bv z-aNL6Q5~f?r2)Zixad^c<25wA)y|znyheCuB#>6;;R7fgr5Um=(pGW`5qiBC@f5aY zMp`5;L!!y7lch}Q?9OLB+#$@6)x_Ez>C;P`cn>)td#nMjv;G4nys#0317E(M&R(2h z&pS38*^?v7#B%0LnF_VTx7RKr_Jl@Up8mZfNmV97D6#K%%Jf#*k(BMkhuTz_#5PD8 zlF_MiOg^Xa6@G~*Hi8x*SuO{ohjCKFUUu_lSB}P47-zLhciehNJrn^CgCu)Q3L)u# zg`ZF{gnbBph;Z7y@(-5=Y$w!Y%#;XXcp8fo#RJT}lkRfLC*zgNYi|Xb2$3m^B1feT z;&St53~rPWR(etS5}8|!4V-3K$OB%qU$>>eZCxYSVPZLwUkw#*h(_2|7>JR5*+jn=Z!b#{ufie}bi{Xx>HTy@m?pGm` z7W@moZJQlv1l_n6lTxeTSn`1V{pf&a9!X50QaSJ;cHYj zj`(HwoEm~o37jc8nLT7ExATs%v0aa_YrE=N^Ae${Is*sD8tb)vGB#};Z}zQjA3e#o z47&}S#yZ9cfls`iZ@FHv!pPS7^2JlHFP^gY>zDkpAKb@Mz8REPCZBNy`sr=1u<1LM zOer+C1qpTb7_XxOVxyzVKgAUn4(+RaUgX#53hmwUwIpOHmw+Bw@_4RcZ6OilrDh93 zQTG|4|Hw+HhKOmL&{_I%M$hnzK^>9{fw&^p;}6~UGn=&@>I=@MElt&fp0~b@-qRbE z>*qOyg|MDY7 zNPIb1@87*k2YvUB{_p+BzoWFO3otg=irblr|BBjwxD(q%W&1TIw2-Y^+S(hiaEfxp z_2WuFvgMpbI+XrUku*a;NkgM`nI+qO?LFO4c6EPVEAe^Q0PqkynqvsV#a_@CrxLD8 z*dt+?t&lV3-1XYyyEfq5v??~nVNhECweLd?-{*CYoU0RDu`vYS&trpV@u+*wSIrd5 zASNKO-h5Z;}ddcx8-Pvgr z)U({Y_W~LV-`JITYZQUz+pA|6?2ylvNlC=5249WW8S96}Biw`KGBIdvYOdNvlt`D!||sAUq{9!Rv$$LV&$7C#p@e^RW8Ps)6{*baNd*}2&t1xlAOp~MR7 z1C%hFQxJk9;tf`NAU^9VCGE)#l0@LP?LA1=Z!3- zFDK8g`rg&-XVNL(Jt4omO}X%8gd~};Y}$8Va$CW}{8ZCZ?o$S>CR1gO8nv3XiigoE zjbDmxDzzSh$6SSKl9Qi8)5#3BF7i;z$jP>)Q`ujCH;#k44Mr>{3fTj`!-l7w81`*< zOE9Eiv-V;3JosE>OMzSdQ$}~ zvO8Vzr^aI0XwPvI7`zos@G z&TRbr!UxgHISqK^e_RFNnh>KO=+4$#*t%m>h10;$72?#d2xYU5uYB$Z>IGfrEZMa- zqmtLF$E(Jl4W(YdenxyRK96e;k|Q;7a={Yqb~R|QAn>5(Vb9gJqUkeu=M8;bYPD?O z-Xv*0OvbC$ zCp4f#CWd_Xj_w~VM%vcF#?r*nMa|X02JnYZ9#UIZSQkR_k=!MO(5!qPjCH-38YR_U zu3;JIKqaD1xb*N@+xW7h^AxM7n zNPut`JmmAU{loDM#xRMpmME2Tds76%_s9_{HVUOkF!Xfrfi@%N&9zY^fueam7%r^y z!9EHP+tGCa7p!JFVsmyV%lt(tBE7+EbEqdQ6JvkaaDahnb_7=9o!WAL7F6S~er$nju2A zWNORL%ONz78q>u;bDE$_fzzo=F_Y5Y$~X>+^MKJGm41N1R~Tqk=A)5U?1-hj@68q3 
zWd`}m6-;!u{mKP5P$1e4Yerbw%G!pzwGmC(xJ?rL4L)0*@~EFKUE%l~WjM+E2}^D| z%%2PoePBdnl^~p|weZ6{6&A^Z)FePu1{3R+JFbE^A(c*C>HsArzcgPc#o2bGD&oVg z152*+VD$5Yu61p4Z@aLQpcWO%N3}*iT+4ofS^gcAkU7(sDfr&z{pfdi%jvl3=5)OBNhf{t0BwBlYz0Tru*=Ik)4+nZ{$H<%L_>XUPsi)5|VQ;{zHV;h2Cf>`EXVV z1t=X$bAT}Y$1*gXkX>1osJHMy0IjfoZx|yWo@UurC2J}91ReYCD)cmKGwnP!ME`*g zi~SSfBUE*uzI>!RlCpg*E_T{QzibI{qU2v0i1H4X|S+!V-Pe;0r3zx zF~R-pXC4v$f-2SU4j>iPCyA^HBuyJ=c6lJ=^;n9ap&K|72hkXdcP+S>?^!`Dz0rpk z{eq3xTR@zHK`Ug@|yw6oHyS{Sp`#_X;S%kl9lksa*4cZz|&Dg@j89OSNyLfHfU9m8Ir zw%2A`_wEU+hU41j{i$S|Zr#_BERXKORz#$l-0c~U`>&8>BNl`&%CuGf7EY%WX44&D zjv~gC-ZCA-$(J57^I~7?@rcvB)dJCkV=8yo+|*I~CkK+dc0;i_?+EqXiX0?31a3Sjqg5ikeaDvB$3NVqksHxk}*CYP;Sqj;5V4B)TZktDtkKpW!6|C- z@X>#lN8T>a&AWZBy{g0yh-`!6F~!8Zd!E?j_kJGvYR9Nh-$|SPp`rkyJqP0myiGn9 zg+pxQfa2h=8l@17eQ$i)wuCNWm)C(kgx`;UNtpS{fI|Bq_A)hCHQMHG{r%ABd)S!4 zWZrF8X~8Raa1v!tzF)s7#nh8-^4U{eMi1$&%=@mR!gQH+=)D$msL z)$J2$Yy=~5Q+lz*iZpoEnFQcFT+3Nqcwxs!i){v;v1Wre-?jWj7N{J_+^w#oKI-JH z&`*=RAig)G|JnGC)K%5rQW5%y$pORaWY7P>O+HV4h!qco3ci$MnDK=qg)`;#UG96g za|GXtAMkuzEiT{Yifrp~>lsICJCMt!(B;+NvS0s_Stjo7hvES<%VMB4GykI{i`v`R z0Lz7z_ICddfdQtOAzR7G97V-bHHDOZG#gX&KZqQz>*xTS#-Ih6xfv-5Q;y|Vv!*l6Je6S7jUkohl18q zcco6>hpMSBd&`|0awiy>6D>(i$es(x)c9V4l#XAp>HMkX4F$}0{YN$ALkdHnQ+%co zZVjj7r*@L#>)9Z>Ijq^IR^&_=1zsa|VaHSyt1%rV9Ib+chia$U8Vrrm4kU`+2b^So^E<83oA&VJVEsqKG}gtMa=O(=~-)n7{jDdnEALs@A3xg!Aq4&FgW#BuxYbZ3sEhi zT1le|$8?|!yM943I<%2dhMutb`At;+qOh+heP%i=@PSt&r6=Jm`V}ruNvH9Kk3@Ct zO|!6h{*8aZJP~{_?#C3;3w%)0@XM*G%`XD8Jgt!}e$~kv`nW!J&h!=Rtfj6&CG#eg z4M<=>8l)?V`czIp_Aorgs2ZASWcL-C5>|%`RMIg3W=2}w9=XqD!wZB5s$uZMEVi3A zAg`Hl8(c+uOai3^7G;?k4^CnS}!}#k0)uwJX}s#`G8C!tZdPS7cbpbHp>K8UWt+>AG~y`kpn+>t=`%9P`! 
z=&t=f+Q=C(|D9M!PZeTV`Mo1cs68-Xs4CvzG5XQvre5Lk3rO4o_+*W)p<1K9>vu(C zI!E+~vKjWzc=&RHqm2|J^r-FgB4w;~MrfPs2YQehn|Mpt4yf6?(9~pfX>g0!JxWRN z1j1A-PAh(i=$l+X>sYLAd%i1RG^E0dnBtuwm4K|FiR!~{9<&SdW;GyKoN`-EVL_=Aogzo6*UKa_P?%eOQP^(!L?%u4TU4lB7nMFT5pA9T*uI zk>tshd!ODX$KHMTWqN?C*Z;JtN-^#378^y|;*40enFncF7?V%Zvff_HrCP{1yQw;?nJ{=R2_MQa!-Dk7i&rg>OY!B|w$hbG_*C%K^HR)J*~bdb69Du;<&yJs2}xB46ZWl^ zha7Xth{f-^@7jmSq7&ifZNS{6Q_TuUJ!LorHR5nRWR^}%+og}NtCyH`=`R=LJ9OsH zdC4ZspE4LOMTMNAPqt-?^dZTTcsU!cmH^?GWXUH~V{GZSiARf>)jAr)YA60WDw4DL zwe60o>}2&0Y=O%gET=zz%%;(zL7Luvy^fz#p0Ps49(y=e^boR~VuO;(n1AR+s!53@ zNdJmTC%|yRDY|Qqb;g!zC^;yV^>uHPi?`?Ky6%nJQ!Cmo?lLQV^9O--$(X(L$l8Q} zj7=+>@|wUH*nVJwJ~lj_U&AtjSYSIqr^LNrn?2Bs?IUCBb@Di-^otP7a|a$MadFx%kH-oC3wo>< zU711X8EYi)D4EaMC1!>1ZH0;SIf*ln`ij7t3I|u)$i6je1IZ=h~7x9d^ER zlczckV-|BH>~cl^`wQB1~QR6Ew9J6G!*WQlG>4C3M;p-LXn**Bbz`{>=h*8D4~=;R^qDOOy{s; z!IB}1vWV9`F#!(aWplfBT|ehbe>2%CQ{AAw1A)G(TsJb_MEpd`F~BHo_7+2B5Qo^! zCV?xF!9Ez+;wPv({Y|cKeEX(rOTD8Ux2wvDe3|7*SumJ$`8CQ-0QR$Zl2EBEq;P%isCIch_vTyhc&VHq__i<4277(j;7jM5<3VOKmFt_j zK3~1r4N#4E-^xr@N-jg`+w~!MnL8Z@=U_kk+`Ms6W%phmfzL@B_2sfNJ~FTvwn(%s z9P#tEtd>aaG4vT}mJ8ENt?!3m7}d&5zE*2`}Fu#pi0yus#QRrTQkGTy+(g@qZ)ms@a>6jZnP6PZh_ zqx@DDQ2VRU);|kuL_%3jjU{-m7$L*^LNhEID_vs-^1(|3_IJ z7J&`jm|IBR1J`gS$;Ee|a3qaDEn@iHPM!n`u7N9&6-G(~x-7joY^D%e@JI7z)Xl^u z6?9c5FI!#VV+$usx-oSVL>BL)(dkE!>`bL?N$SrPh+Bnh3T3hT%m!~-d~`d^9lddh z06s-yN5ti*eF}r&s9?n{Vwv1+^Xv^d5N_t$pc!^U*NG#q_b} z$(2kR9AzRjudw13&33M3#SkF@ruH$@Eb!;s1mX%9~ zu<^`{qXriF2T`bm(rty=RO20{a?Q}8JlGYaTE>@#64&KDeO}%6l%2W@>WnR~s?oBw3Z1igL$V9RQ*f~q7;}##Dk$1fQk?6P>#4Wx%%Ihfnl9rH3Zvoq=pIHM)mCdj$qy zGJI8U%UTAdFA98byJ$l3>mxV5LY#>~7fMn{QGu(eEL4WbnzqFt%CZv(A?`V{{iBus#Eoffd#Im%Y zWDIRy-LtDtxY~cpMm4h1_b~@LW!~1ePvE8Z_VVrDT2Lh~ePmIHGEeB~>e(PVPzxes zmB%NmR8g#Uo==gI#u?8~z_zTH3@jf`E;BZV>6OAQAk%|n#o`VSwE4c*PCHitL31WV z0~V;d#|IpZYV0I46Dj+7Wz= zFH$=#Ra&aHPjawiI%g!TR8Bdc$QzjHLLS|zKIor8s~Rbz)%05D_rnP?gkb%nlk3WH z{$M3s^k>w2S}zjIsg2uoIm>6mh$R`w$8tqF*crt8DB~yG;G)u(|A;|c5kN60?n3kJ 
z+_MMzy5$ncrCw9Js(*<=&f7pSh_WHLNvwsoA+uhF&Z^TT`K*XM1Di_{h}+zS_@`En zeUJXOm73Dok?e~@X39Y(xxw66hG6?lPe#vsgYLiN169h%hr~dXl><@6@Q+SW-q^vx z($4%ZG-}T7mn?KdTkG)~U621o*N@X-8{Ts?%2Q?jxa_2Gk!ij5xBYl=&3%;Vz;bQ$ z9#-952Tc-|R3LPH^=uQq`>QA!YYcvIrHc6g_pyvaG6buIX(-Mwy%D?2cqL;1aZ!Q_ z`j_5_@GrfQisy^a5KsI#P;cZ44$w=6dYWu#AtG~7|59Rq!Q@57jE);Wy2|G*fd(l3m$NT+d z05MkozCg)*F%j?Id_BHzaxS?LC3W>}L9k~-xImIQ4-XL>-=&%clynvP89P^5hxL(9 zrL^glQOvp|h<$-FFeEMN$5{LmFN9>wcS%1BAYb?R#n;82y4NL7Q~dm}FGbw8ysMK- zyU69M3?9TkIOoUCpC?s=8L&iJ%j;9j$Y#*zNJY*SPQtyC0U|ck-&TJdcwSh+llh^? zX1g$@TArGz9xsa>3^RWjxJnP*#n%ThWWFW1+3oh|{fmwQn{HY`0t8#-|LxNLO^yD7 zwtl-`r2Q71G|y%Ngtn|jp-5#!T^U~wyDTBj8tZDjUe-JfN$p^ZH%j`T>7eg|#r|uL zCo9-O1*EFhNe8Jm9)0uzwBF3oW$n{Ffk+CnTl}$%AoH~FGK)9>SRDdL8O1>v)2kSS zj>7p8VOOB*Jv(FZ_E|v1P{2ygeS@Hx3z)~i37RB^-~S}mcZ6mQt0G)k)sZYx0#(yA z@|GJg?Beg zQ5vyQ%*6l7oHPPfz~xsMjs*RmFpSr@7GWo?QxS=cm9wsa1VkH-?9J>7XKuI5{x~DZ zS*8*b3~G2H@<)UustoSL5sOFH+xnmphferC{U#Zs#*hPwo+!6Dio*p=s6GCVQM;Yk zM7+a~+s^W}?^CIBveNID$iwZ)_J7a;!|=FXsouZB@DFHdM{y-m_4&419E^1COcjlJ zn03D&Weo1h+YM8cBZCspIps22RE`6_VT$F|+8zY73#LHMKEJ;O%fn2^g?K_axN8rF z2Ynk?RPgUO?3vjAXh-<9PO|mro&u@WCnkmBLHD}l1dvD00eL~Cn1@1MTw>ITV#jr} zU8C0GM6)UxMZ@}%mP}sCaIbircGSL#qxCk8$9w%p=w&^gnLJp6rW^#q4W7X8ogs77 z-@?r*J(Qae2(=}k(9QafzU@Ed$iGBUSG75(EFL7EN6oXRF==YkAB$hvauV5cSme)n z`@3`aV6;qPgbNd)06edcowR)osA-T$YekLbGYn!!N32|7xSMwpsk)F+FK}P=^tycH zTiubW28<9ZYVS#Ru)HXp)i7W!)B_+kFo^BM6ylgs`ur@a7h%2zW7Zq>eT18uVaO<4 zre7?CEWvNWh!E9T8HEAyJ6Pe0hjG&9OhKcOJT6}U!AB}Piib!VHl0n>mJbTw4J;KA zZBS*_)i_THDlfX~Q*~`CWwBI3l*e2oW~~Ffo-@Vb!g{J;YQTd8buG)ct0Cv&%k@mj zno;mAKnvNVn9Hqi2MMMaR(E&Y0SrV1q$pU}xgDhTJODxq-LnwvJ%W#7Y=R+!S~IQX85>t|DC zCQR`j5*G~`EUH_itP~jTkkXhlnzO2u#_&QjOJrSTG4Te959a$ek-c^M*Yr<)`DkOg z;LX?Cn8b_yx-R&6>*A0w^ypdY;}!eTTKraIYI7RDlnv_%Rd}=)krSjR`Dg>KzIr+< zmFqy8E{3cdk6K7b34_DRcmBP8tLTqtu^*FV)Z$eb$CxO`WeHQo`_*=7e^T1c(_HIz({lrKb!E zHC>an`ADZ|axOiRIZ`eZlnmn*@f7q8iP@)$Ac7RRJFvP7>poV=I~eI-iCDhlg9=ja zg5mq_@*RtPMAATp8<%$|&DJs%Q+1rO=iAfj0m1mslK9PDFRLS8zpdFSbGr-V=Q}WN 
zhmbwL*e>i-hPfK?b}lsQ*=C9JkZc}4Z1J)rb*bstSJ;Cdbxu4%sj$OD3HSao^ zJ^`6TQ&H?fy1*CAtWP!H2L*@Dcu@sJy8}J@E)1tE$GWd@Rf&BcJ=Oipe*UF*2@G7V z{#WG-7f7&}{?Xq4olE@1*lbn*NLhW4G_!A#CD1DWDE^lMi~kg=QkwU30YO^K|H>w| z+}*@7Y)IMds_X4MfR;x60u)QEH25|K;ErN@EK(=@+XZE9jaaB7$(K+J*Yk@Ni-E zL~2hY94MTFW7l))SnI`N_dK_ibSI#usR{~Izhgj6A{1_}X|I2b*sDDyIRuxK(QM!n zk?)c+zXfpglFf(ZrxNjz?Uuwd!dQW2vvj zTDq`-b86h$T5ox}B}(P=g2!9S4N#2{O-VSUnOb7-B$m0{n%uVql$mxbq-MF~_FKU> zez_6YEUqHY=@I?(F*NVnjQ|+gNI60e+nTEoOS|(gI}3eM5UGewlndf!^i-%Bs|28n zlpvIoyMn==Wa4tFi@t}IB=wl0Z+u$_>(MLVBFcJ<4U3NT(uf+BYLV<61eLKa6&4y# zZuuEvQR8ygcb98)eO>6wO7<4ouVC`sl`XpI8~@js#yxL>pC@@qVME6cPdyQY%~>f8`!kz<@ygkKh!xu{3u6 z6a82DmsD^|E0O(oS%(o|rKB?oI`U?V7go!4@xoYetIOxs>}7Q_0K3|BlR6g@f?^EK z4l@dolg%ess2GMf96&x3+&BxbsAM5mfJ{xp1TLb~7@T?yKU&}_Ala4;YzFo{C8+B=QJ@5;W)=Q4H@&7CG2V}t90X!Azf{!? z9?VQ)&JAsNG4Erqt!~_XAb*8ZFJAuIR&bRoTAjVr9o8M$FJ<}7uB|XQ4!=HAjQmVW zMRqArt z*Ge|C^Y;8;i8#}5hbi{#fIsL4S!H{NbTMcvR%VYR2Kj_6f#?jZ#lH2Y>8!#@QMB59 zSkuKM?MaFs6HcsU-oaL+Vo%_tlN>$3PRWnXmr!@Kt*VqZva*yha2%BLyH_-LAl)L) zQcMkte&Eqa$;CJeenps}iZG>>{yrz@D zeq!|o_0i8QY};|~E8=O+%*8VvZN;d)@OtGh*B`5a=!hFK$grW*PMrpMoQpv6LB_CdO_+pqKxspnqRI-yaCFd#L}Kb^LZ-LLT5^VQ;GBWbXiQa`F75XQ)Pf%YK~$$@kIF z@k;h!F(pXA79K`Tc796h17_+15!Qo{ta(O^m=zg7UH7&3QXx^F+eyU;i_j*i>-lNZ zU67BP7q<^v@1%=`WtHoy5V?zA@aH#ppBK1*NO%pT6FK{_Sb2NX;PDS|A(3qKDf z;m1|o*n{S58dl);ug+I1Mk@yJl9+JH-`=w*2h1bDLlO{1dcvq#+z8DP(ZDD34N)t!|Rxxi`hKpr!5)F3H7i&$JAUIUn z(`Y^P3#mqa@yIS~p-O1*L84$wu3*DJ2b@^!ecz53Ti@2;XQKuvvx#r6P`BWE)8?8B z2QC6do03pwh*!@pZtM_bL&x|pnYgk37BU*BdpaFR*Wm*?oXIw^33?OZBC68FD8XX( zb5xN~PHY)X+3K=jCZW;*iX4Y~Wkv>H3l^dbLhP{M2ik;^9`#-1Q$rQ*qVM8mQ$t2V zFHMeD;x-|@A_dhy$1J=S)io_hNpIJxt%t~bf=-i(UK9ty;qY<3a}H0?j9}V4FNY*m z_=#$BIew(QK`kThxGYu_3mdlB69jAJTy_;tk}}Q1q^%?fR}q$O7jK=r7LXZF5k=SK z!h2!eE}V+lfRW@*lqsHF4u;Dl4XiuQqKxhD#(a@D7wxhUmi+oQtT;O*rEbp_da+6n z?RqD%F?YVp@$i^>)gu~X?+)LO=uAURYH~KeX@wmmel(?NNF~0w_`)9Q)`@NzKsvzL z(d3}iG}tw9nVX4(3(|%BWmQA4uvO9~-*+-R=y^|(sEx=eM9Pyey?9iK`xLld>#H`m 
z7oM7)CXi<7LJx_LY&^FH@fZFLhFd+#&TocNYVAKkDsa~Xx@y0_7ypQq_Rw2cSQ~|L z3{!*yml?ctygd=EDkl^axH|K0LudlMn=`8!pd zyjKq$GWFtbsB!&KJ(RC*g4JJIWE0C?7gE+TPVP)*4j;-Lc@i#(P#D(yAG;B~rE7T6 z(NZ1Q-gocl>j%BqOM}vzjVEvXKs;vRG=Se>zcZg5d+7K`(@D5$;}>MK!g4aq!BECA31(AXbDSz&F}Ucq6R$X0aWykG?BQu^-s|F zZQ&s%Pe(ASWg~g?mEm})IQGh2$rKCsyhMLKQ>|O(#MSbUVMy}}2t?xG;kj(TQ*hP5 z$)JPkDu*1Al)Oazegq0e!=WWop5`^=$Z)43QBF4;Uxfuf`<{4I?6Y4?8KU5af%_PzMieU65 zIM5Dhfa+}EhzKvl2hxc*(>G^BZil0-2Vr<&&J!xhnky%IMq**kbg~l`YS{hCmQCOk z^wWoINJ7><8l%kliU>z?{qQzn<63JOjzOtuz~_<*GE8WZLQM-aO)%`tlBU|KROn#@ ziWFJtK$-;weTKRGK^8$SJd8EQA(s0Z6hti_X>p0p=;EpR1!Lbeq>ZSQD}Ar6Z9YJm zA2D-GSlx47FtRKe+YRFI0?Xjf&r280eu&gSN#!TQldq?NzF2L{UgO7M`7kQ4lfb~1I&pKa#75tZj&V1{2z^%Ldex+FAlso; zw*g@;d@WVo5k!@?UFOR;JmIo-&TGZ=tK|ScJIgRfQc5bb7MVRur+JA-<%S_)=DAEU z_XF1I@xVYM2Yo6#eptPLPM#;PYZA$-!q+8#xz)9HK)h3tfttm9i>XJTlv2~X%MgZ# z7NyESA!86+{>Ki^%1p;xD`auIU@QL4D})xF{?yv6H6B41&a36q&9(DDMH(y{RUcKd z<^8h#q@FHSTqTzstC%nw=lBrVWzRPT-FT!A@l$qZ&N;E$pVg}_wXd+C6{+Wl7 zg<36INm|FN5Q5^0h!Z|MH)-3q9~E4XzJ*swNnI zAD(>S95Jl@NUd7vQeb~?37d_#K@gReHr-)0Cn~xHC<*&0`@DiDfJ{2&q5lM2eIS2C zx|p2wh@3ydt|;l%p3}iYM3^$#2+wuf8Y`x#N3xw@rbn^P%mLrPAT(*!1FioKC3Ioz zy9Y8)5!}pok%p| zFSKlt^1=ZWX-o8VfQX2ZF;fQVc)!Pi%rcy_OUDg&gh)_Cs|51^^<9M595vG>QgU05 zptH*c8Cxj3AsH|QQ6q;0NAC15;6Gvuvu{!S)1A#oD8gC$A^dQrew06OFiVQ3a=IHp z?O~j{s0sx6(M8q`jHVEAFrwVkE;%` zE$`6Hf8|8>!d1R-A{5&;+g@nNZP|xtTV(27Mw|gVE0jsl3*V%4fv8|Rp>N}`;Cy+G z3xYPG5kJfoO-dCwv%B;JY6Nz9^)Z^-tuM{hhNWFw(Q;SpO!KE%nsWphUxi09zn%WmWyyW^_MK~Cu=TR!wkiT{e1w_UotdpMDjiriB7v=I z%zV*#%lZ$>150F4OLFmd5edq2npZ_;KlU{#68$Cj7jaQAq4C%c?ii4E`&d5}L84qI zn?i#|6A@<-nFjk9G;om%i)nns39x&mLw$aU<|4AV6ml;}6T8dUSs$81uKz49^CoIoo>n4!++E_xy`i_u zyut_=z&+~4)Gros*y$$h&WSA%2T6LzKN|CT1O|2$nn>BtkrvAm^+beOD18=mWOQ}m zUzJn!@;K#hAkBT=_AW>Ksv}8qCN6`%pJtS%nCNWX267;Cq}}L^Ph=qpcMsGJ)`AS< zg z8-#aa5T+^WO*cMt1iKDiAuTqSx5C?~N2Oz;uN|ck5-~4_`5my$dPu%$pkrAFuL0==Um@(Lx)^kJMC#1az820fq*YMnRKhZh5*1k%HhVbo>& zp&hPRrdco+mE5kr3YwmVaku&~&2nLr&xb3zE^VY7hyxWZz 
zIPMGIi@c{iM=kd0F9?T?a+VjqPcaT{Z(5w}9UmJrFH@}7ONeL73j1h{!XU79BYzf)wd9|&=*lBVoZ?RK#>CAq#X-!pE?l^rFQfXg&Hg6 z>=&cz5USM^jt)?oxQElRvI4oiBr1R|h3`+PH!?~e7Ba&=SEGl|9NzC6F%+pQ(8@4y zh62~~n|YElLD#8}WD~xeHw1&h;y?={+75pOe;AA{R!H|wSo`+4BN-i@%k6U0ApSFz z25Mk0)XbRR`<&`Hwc?U4W=aZxMeTGvV8UsBuv$~UO>NFwoRlD7H6%T!PdGg%I@JIE zU2u9QV}ZuBRj_b!Wg=LqVwSTk6$(KRJKKEPzKvE6=f+w%G6!O1W63G1An;g-KmY}P z(2*8&$`bn~|512&iAah(hdj)O1!MLwVHTERy#^kDV<`p$ z59nCHSzh{Ce}Ud$Pl|tLoaEb-X`r0QPPitNP64exm8fv4Q%o@l!kA{sioYaLORi0E zQ}=-|mOb(;MP>?N=&gQ;BR_x-shSodw3WG>s9NWs0{#Nt;NVpPG+H?-p zrrS<@ngf*+z&NNn^`?tnKpMU?i&SWrqKG>cE8bap)M|x^<4XU%>w4&OI_J`C!`@V+ z%@r{x+A=ju$sM%XR%D%6QM1MtAa2ciu6(Ro9NMFRNcTQaa`A)8>eot~sPeVKB5D!2 z$1kH9-a4$yP~`yTxhbOud`Ly3!nQ)koUme?80HKwg*t*>*v@kCQw{Vb1@44|dHNu; zS2~*H9^#YQF>l@spUg?Skw)?71e#^}+9Z1sH|@ZkG$pDwY!@#UU2P*?N6~6$;h99-ATpM9*pty+xtL8&B?7=wQ;%p@b{a(4EE}rBEoI@ALUH8E7`%99H0tys;;vXr6YW z;cBT@0Cde;P5y=WBS`Ql09hWjT$G8g?`yJ6Us*kuOyw9R%E4;Ae5d#rV8Jd4l{LY| zQbnXAC9}3bxN(j4EAIyTh>fZ~rAP^NGiT95%i6_bYctE5ZC$Uu$j`Z`XNf9Rr8x7@Dv58%zubjm`$n{lOI6vRhRoN;G!tJX|dGoXQ_*M)ki zUJt{0GiZ#rz8>X?s{9dK(>YnRG)+a&Lk^MJ>P%(o$~+iJ9WI87?SB^|qimmR-6Sd1 z_%ScuBynr$zJO5&cyb)Kr%$(W=7!d3Ei>|FEmP8ym|D|SqMQXri)W%kBRci=`K}OP zwqfRWT(mb|Pv06dMbSEuK_;x}P(`Ahbu2+u*Scc3Sv|(pw4rY^c-P7lLzQaQld{t_ z_j8=0MFM{SGZf64C-9oV;Gm=Bu(!XW1G(Jam;1+Gy3R(`#e%I=2Oo^?6=Sv?3hsxKcnIK`&gro_{^KuyV`CT%=X(^yS^tOYhd} zr5qE1H&bYdTFoKwG&d7&|3{uz2hq4Vq)0sz*of-=2p0vf5mqz^=W_AbU)>dIbVZte zECG4t@M-T4-_Sx8n2zHhib6!2K7~d$6BUQ75iz#~fHERqaf<>1U`I_$WTwSJXC1uH zPVUgQmVq9b0R=aff)rZjdWR1KOsge&chiGLG$sHJ>mh z-(Xj?EroZMAJic8Qrz(44*PWY$LKP4t? 
zHffOiDp1zH!2p|YE)*<%KcrSW0n5t9oVs18hJ$uJ$EeeN2<$FDq}_Q)i0&p{UEk~= z4BuwoB#sz(1SJjY!ed#wK3g<3WBgR^9KWnIw?EOm4DO&($F^i2;wryLL&hP1c z?0)v!dmsINyZ(T+uDRx%W1ORoKI8EG1L0|R={KKYh6y!wV={DMYP1|oW-np~3khLH zJm2()gSQyT22-7E&@Obt$`QHNJpHJnxXh?OWSYs;kLbEuXj-&gZ0S3f`aL#TtX2Ew zg=0+y=DQbbj|0xgKdvxf;2-R6oG2AlGcR;0G`VrZS(6kc+B;mBQMY5(Df_?Y8>7Zo z2UU^QP(G8Z)04&O{L*y&|8VMvz#`EUa1xzG+Db7%=|MqQ(MT3>2GZuzONbpgH5UXD zXC=#$zsQ$sb5x3fO>V*i*fSIQOe9Jgfj#&$V z2*fv5j^Cd?KAfTaeG8Cd5KQ8nAHPR71Tb@j`Ot$|0vS0rQOGu*nngE@r5{U>Y1IW! zdw^7!MkO*}X&t!9ndBo=r)xSz=%)|?BCR#NC(+g4iemNMXI_9NyraB(XKqt#e$-E# z(9idf3zdjvl<8*U;rY9_DILIe6*-i@(Bwi1|F-L6qTFnbs?zN#5(gYg(xiRTvSRE# zrJDbyyDYU{dOryg3C;M4u{bPT*_&uWBGA~A&1ZHWGDToN!azeI#;`Y?XbkFYZ38Y7`wkftRC(IV~JOsgFAT3pd znM?a#1yke)kS+l#vRYC(5Gk9~81M8di~@llznJ58{o1e6_HDwU$`}2Sr{g zT|YfKt+LrSiG&|J{*+^!(=+Vvw7=9JbY0rAq+`^=yg9qvv5twIWZ3ISSv$ZDBY38SjRaN>^$icW6d+V=g+DZ*dJvllmQ=bl z99IIylxpUV4+dkf@-$$lLyM1M;qp)fD4{xgNo?)-30iDklC5dhTWsvku^=;?p$C#! z_hUvJ(x09aaqw$xyZtw-*$JFS;bagrXtv4q>@+Rll*9neUO+uXFfkVX;biRIZR3~z zSXvQ^(t5C(mI8^yJiQ9?ZZPQF)CK*m#=Nea)bAoilJ6s`_V4EuwVFK2!&*G5RVRlb zTy|Oo5TVMG_F@x>&&+ti%2WyX{Bsp>51^*0|sy+y=wE3MkgQsLv>N08y^ zU3cc2TaPbNn@O^CUtU_=PAnnY-rp~*PYLW*A1+3A)wAZv|4Q_(LRW+4bkVaqTUqM6HMy{4G{0mripM$TOt+49GR zk7B;fPfTdyI)`cgZjnCrD5_pLgC7u#(^nu|jdfBn(p~Y^Bp}O$t=w-brM&Ok0H7&8 z^ko@PAb2_rL;P#Q)D0bgrSS9|uwO(2mY0s@eE0{lZ2-xy?$_EGQ|tBG=9z=7cx=3I z?I|OR7R&`lnd2T<+m1a10tE`da`8nsi}I(;)$i7jP8Hb%JF?TJo<)e~zJ$axtwu^H zLb%(Zd)8{XkuSewVppzhJOUWtuD0PXui%k>!#RW$pMG*I0Dd+cMMF0jU@WL8JzKfg z(#8Nn612`pxSTJ0`rNmrLVe%{ba^CQF4_tUhkzNjQ^g@DHEu9RiLMNYuuvkPSA_Pb z!E3@hqfj#eSv|c!d|Az{<#Go@Lx*ZO4?c40i1)?1SL48X@`<#;V@zA8CZ;7W{3jKZ znxS6*b`+l>%C(RF)6zrhNYqFaCf2CNwNv>N5i(kocGaFio_Ig?rR`{X*ef0K(uwkl|3b2yaBSC+pl~gy5({Fv&=g^ZD6_l+fG` zu;?YcQwASIPQ~t@VM>rab(f!BbqMZ3b& zW87`_PFBpocv*KPJ0{4R$HxhAKiZ(TGjQV3*~Wv&b~yRdbMK+T&PVM~0oLGZv?H#G z*l6=-`Fr-AqM0`RaSb8;YEwt61>g?ySRbKws5mT+|3^}CG2J1wc*wi$O@&nlj%2Mp z#ELyDTvKpR&L|4bCEWJo$-8R%OJ1*CZPiu>meM$P>81b&A79!dNRtF4Q;6U7S_lNK 
z*-c`L#E~UhPKix8sWo_u4Xd-)+t3yxuUjnr5m(5|r)cF2QPf|)X66}i-^ z2YATO@Z2v}(LH4KE2{N;Cd~p_5^pKhIWxgUJo2+~!BEhwcFkAoH;G;0rp1zLkM+&w zt9LZ)LOA_~f3zVXH!#sm_5%vH+cVW+iaxyVCRJR^1D<44m7Dnar+pS7sG~b@Sa}Kv zu3IY-YTHu_7MCh9kI2ic2?|ly$RlWreW-fhTHNf{~+B&3+j&| zSGMXM#AOrDjoB>J^h8xGYraMAcM{OeoY$PXlIzTTQXc@E4K5bP3#T+7Rs_=`z;4Nf za=sl5`q1rMy~WRnv4)G)K!-;2#rT(_!kS{{&@PRmzOdU&KY3unT%1Amh%^;nmoJ1x zM*y{!ff~f8HuLYht7P5*fX&l8&j}Lr31Id2V;x_6`$|Yx!^Hl@IrAF159HeU@0Zfd zm!dBEh|WR7R*aT8jcYQQudmMCBlBvEhg2KJN%V@XxajTlIPd($^@r*Z#hd!K8Or)c zf+;B7k<}#7P1vh#2XNdPvKJu3&R{kk)&?P-vrIV@hC~!?II17u3xHmcDU-18USDjy z&P#(J{d`c{6z+$1oA0u&F8`MIh1>>PpXfttj4Muj&+fExuTLCruUl}-NbJ+}TTA=m zeU}cT%t}vM9CrULHlgbxx0r43L}XxN1VnIhZ@6>Sbtn01Z&}@hER`{Mw1;>uGsrxL zC)tRSUo!$8_@unZZKa#;slK`Nb^iM9JvZu$3P@4(3^$vcFvAR+WBXoDMglnFX*tt& zz&z&Jfh@H;JzY53{xj5L{oSqs5DE?udf13Nn{>_|PrJd-+k-Quv zvnXQ-$x7|E<|VwXP`(!Fq<%QWsEdz_FM2od>nmpIqOW3Gd{+#|%`Ft{hF#COaY@os zD2yqMj7fyEAzW|@0T7sHjGrnL9L%W>>z<5Xb?FhNQwJ#3BT3){`Fo^sR8c7LZ=$z3 z>xqdP%W(xE=N@8i`H*< zcr8hjgHipU{;~V%dR|?a>{u16pf>>L40!Ye>=onJWB z^d7!laVSeM1v7Wf&tH6_M@qjG>!`19dBFlxSAVTSzM##4(1K;}qsx>#zI_N6a|^pN z4gGFJ34hI)xT}NGN}-w4CT7;dBtxWH7W04mQDtChP0j+8t#p`UVkwY^EbD15_04!A z4nFApRg6*I4B?bF!ln)J26IV0!cz?}pJm;luCM;QvSbd`wFZqE`QpMg zk!S090-|QF2$~LYMUX17|7kQQhDmxQty!s&NF5*5ykjC?LN1G}@))e0NJ^!2{HOJ| zr4(S#f+YcV-1>f*@LQLcFUf3&j|hIbOgHUKx{8s7U23e6wn4*x^(d7)_r#B9S>5C} z{^0uYgG!E|e|@#-lr&LdLwJ2J4{o&^P&eD!ec?8#v@q$g43)AHxubKb@LaogEZ$p_ z>FoIHR9-ssC}wv_x2;egD3>|X{ce5A%4Tf4P!6uM2-f#WR}n*-k6CuMgNS<*c$8%X z4ct}XnmWzj%R{%d|YFcu*XdaQ0jhkR<1Rz^X=eR6XHE-i&i)KrCwl&;<6Ea&B%l(q1U0lH+1Li)NsU^)fa+B;^-E#;o8@bs_6iyTkVUI6n$mU8Fx4Sp~VF(=8_vyo=6Z{)_;@XfWZ!FZ!Mj@= zt^AqY=Siy;rS{<`>-B_Re^S)%H1ni!jV?#&R8kxW^|A2bCgYbZ5eqi$73MV^Z~1L} z^3-Dme-UZR(p{;C;9t_(YP4wZ^9V6wbJ-JDF07lw>V0TBK{m3Szc~@mwro_SVAv^B zFT%-og%fP@x4o^)r}ao(bNC%9)+R)cLzZ#CoTbi(rlhh^W+0+M^}`I|yR4dRy-N98 zhtkjrJ-#r;==WuWJSYqvJi`fnZO{f(2~geYXb8x6>TCU>pvL74HG~Iwchf+QNH~VZ>3QM;?G!5z{+0e%SdzPu5mK_0CaAYD52ntdK-EcMJ?1m?oG11vUr1)L-ccIuf+V(n> 
z7Nv6wv@$|%Xi2QugkB1~N=eFGLVK=yFEq*sM%z|y98Eht+p6fLow?T1HD78h4z$ct zu9r%NSV7Vr`IiGvGm+cv+54RYvTQ-Qw zAQhs(;usB4=7t8U(~owvTlA2a3#wC`Z@@3MG^%+2_cZfL30eh=y&}?TU0oi`3HE!M ziEFm8gsaD)e4?B|NRNQdv(i`r*93?=`ODI7rj~?VZk8+gP|R{YSfHW;{yaN<9&!)y zE6WFo!6cSP%7S8{j`bPbY+_B$a4DYIt$4xIwv0`nbK^RD5xLxrOQy0~~!df8=t`(;jK@)NaWH6JWJwnh{WCvJhKlFoifd`q zgrm>#<(}JIjcPayR)fXZvD_;qH`q})9#CaPf)b&dBvz$oz8ZA}Iz{wV=kVe?|I%Xy z4%(;Kf0Z$!Ds$|&5_d`*fs1H8407HGX!Grzh<=}+@p*5*{MIGQW57aSxWR1djbs=a zS71H_+4cSg241ezSd;Yx$p5_^9`18ZuTG?gvL+ej%@prc9~Z7y+}v+I^_WAgIB?&t7&dp&y4n(D`|Oh zdh6dx)ITaGpL`_3HVh!31pEKd!u#KrD7*ilL-Xsot#dS7zoM5B;*>j>{#JCuU$0K$ z>R6+2RVZ%aq@+Yd0+-cQElw@2vba9#xt`bwB7%xfzI0|Jz{>)I2K$(smpF)M$_=29nUN%i={w_r=lVazE$oUF(VJV!h@*Vt;Ovi-Tx(mv)(|rOerb3e(5>@y{nwc z7>lyn%;m}I>*2xP_JJI)5hPF?K?Rc<iWQ(kG>;P4YDZSV+`ZJUS`m|@nl8Z+YFE}*c zqvx02Ajq)Q)6<$6%Sd_g#_#3f^JvM3Z(z`8P`f&~DEYMha<)i}D#V;F>8*9#3L?ZR zVHwGtU`rwfACe_ks8%J8wmpDMN3Uo7B2p*rN_mH-F|b5pFbIBS2@ExbEQeCNmzefY zR?m=qasnOyim?k+Aj#V;BOyL!2mXoCSI4p~W|}JKcLz2zewX+ew0%1(chiIwGtV-O zM4G*mxB}wqv9*RO%)Z?-&f43((S;(OJbW&}sC!9;$q6llrFX!+-Q4MiA7!$2_t%C4 z5+$;Qx}>AItVjRWk=qlQQ6Kp82j6W^uxVrnth?$^#K0>&Q^N5ktv<~3e$TDZYicdO z9fosBP45RUY7}42HSY!5_YW@K5A3s4_&uqwgO$#9LVrKRYCzUo9e>(@c|AV1xIc?G^%b^hENXF_L_j~r5kBnZ~=Dv>S9!h0hH z<;lwIo~|I;(x>M_Dl8s?N+Bp-3>s(kz)SB!jxMe(<6PPe9uzPvV`(1>W&NNU)!ar_ zOyX=ppa9#akj;Ztm|VG~*2W$5`08joz#59H`PaQqV;^YD$IqzfYZ_VubHJa>qE&JVvTF^M0`zoRq4H&@a5wOAEPq0jO%3zf?f~sSm=@-e zGBcHLJ>rgYA@S#Ow#ZivXGN5LH2#R`*HCN2$3}f=S4I~4XcEVrt zWYI;KL#a%Rb*%QdJRx*}TK=ks5_QbXDhf54nbM(!KX7n9a2Qo~1RP~6LBWa+cHJY5 zb4lh*;-#0whHH3m>H{DMuyO1UqC6Bu*~j`v?v(Ccb>FhNItJ4r5=oG{Auw+krZ~Vm z_D3alj2ftpPOK6Qe6&-K7o$rh$zi5o5^Y!DS0i|Ka%B^3xM$@exDufV{7JVx9D`YF zMm^Ztn;WmxhLV{fAY1%{I)jhNDYoQy+4qmY4dByp=gyp&E>cz0T4hb%Qw1%_M6 zhc8F=l)}(lY*9q#%j9*!%7S*N1rJnIymDK45L3lu^s*u}K}$mRn;LoO(1<>}6TPA% zh&NYu+{{d+Sm7VpFuA`M*n}90XAtDU-s`Yr=t4Q90bIYL>|pdbl?ulJvpoyvz`SXe zq*6tzgEm;~CSaZ5H0P^u%;hN*C0N{?Cv%5ySwpD3;$iI}aYlTXbm$5BKJs;YiTh2$hli!kR0^uxiWiBXX*4wb*8t?w&tiualko>epH 
zViK+QLxmz;H4FLNNK^PL>}P>3pA{L=ezk#!c{z!dDd4cLn3Ie_HX`@5`z{%J|aNk%siG@2Z!a+qBbotDpW*T{d#SPa1~a zcyF@R(dplx*CtJ7vgr8Pk13!B1p>?7ao4^fvY zW=K6MiZ$kxr`g=B&bA<66z?8!iUA|SS8uIH~KUWf)< zAz`mOQLkUj42A1$MR@;Us7JLq<$$D;sf^*eC(Td|M)Y66(en@rCazz z5Y;rI?JOEtnH-#~Uz!cl^e9LISsAF8na&;*f7Wo&KCFR`nJmB-3NR6%f_=D6{<$`b z>!DA;o=YvPbKHSU^-}MctHwgq@L zCi>>8A$1)Uj8+!D`z+q@)+@8X2JBAsjOqs!Ku?-*VkF1+^6-efPz2N7Q*PzL!0UFy zw?y5kNuz#j2!hO63C6GD0hftK@wuWZa$34|##|DYR30a(YUsA+--*>cTvuFlr-)Sn`0 zpdWFI{;VWKeCXpAqvNLnu(GkAFZFiT%XAQPa`MJ41pYZ4RDs;(pB=YT_?sihVEfw$H%YTr3h`xh!I*cJ1ditH z;6S<#m!rcm2%U$`Ob}l~=h`68mbjj7xdzNB2&tOLr9Y&|WC>3e5A8^OlV)uGf?uje zr(lvlJH@PW$5}3N(lD0k{LIV)@tL36Y?fa^?p=i+E59$RdI_-g>(czwP|d=J7u0Q! z1I|BPpPYHkqnj7!Lnsz<3a*~F4cGfKYb7jNfW;x}3Wpz)mg3Yl@4%tptEdkAmGs>JW(Y2(*i@K5eZF}DthtOF zEC#Ma7sj5*DY&_SQrOLY^P`ohYN1CHT1}ahDG*F9bPqiE>H!RN{~$wA^qsmj1`9`E z7S0t9hz}hH4Vw@ZEguBP9Gy^REv--Hs*|_V7bi|k!NR$LrR3gjK=me_i!k=V=%HF# z%;Jd{_OZV9egT;WTSjCxacBF9GVVhGlI`LW5V*Wwrt59y9LwjaCoTGES2=CL#?PPM zz1h5_b*a(B-rwkIU$mG4BQ!S}=N(Vy&_sIugqFpoih6!;Gzc3!i3AuM2eO+m zXoK{8rarhN5#!h(NmB|NIqasC9xuMmUZW2*mVLuYhzyvb`Km3Yz(^w(u$nq>QdmZh zNNTe-DAq4*2(%eszaa!Fvm#I>bI5A%JGn3t!wsiMnGySXDwytkm@ERVqBR2=d#b** zL(ymW>KujXRWvwIuk~Uro|N5NiTb1g08mh9^xN`YDi7I}=6l$M`-%9EeM1pASKR`7 zYris3QcVXIYb50q`?p$rZdnl3KqxRcG;WQs-|?mTp}WaY%5eGCW5na^MGl5p<;n-U zshYTA-V~Wp2&iRgqc`H3)JR(bLczLq zXgBDbEa=pV+OU=WyB0#XYMPhL%JCplmz^HDo&~vudDg_#-!BhB>E7yQBgtFlCZH6F z*{I3sz*9@%7bz2Twl5sQH|UM$!AnV3<(`ZEl#qO$HDA3fb@@jN?=|V~Ek>YGmDaL9 zgq1*fVib%3wf*#Jpo|$i8=c=TPcz$X*_yaM2#{d%99+C|!%%g`z1TR+TyFiFj7lvpv* zGF*O9B{;mBowaqZHmkWLbJ;2D7%4PKICb)IO zZtS6VV(K#6h!6N!392iu5KL5zH|piB=5aBDX3H0HSG>=4!=G4vL(kA8O#1l?AVf$yh>y6?4-*@R#T3_SAmT5 z=v-zMA6w8`cs%gK+J>1k!UFq^!hpHNRn~W0F}Qk;#`TPEwu{Pa3A~BwV*svQ$m>GM zfo&Ys`C=_gfKFxDQ?51x<-{%VN9mJRhR$aW1-fw|-cxG8gJTuXG_`zD37Z_xe>QIE zitn_aV_Je#{~V5gM8dpw{2%~BaupiN5B3_mFku(ySp%2|s?}gT!O_Jtnn*MSMi6ql zBMqd<#^RRTR>+%mcQU~Y#}s^-wre%-`R7o%z8QJD-2N>>$Xkj{UzULQjF{j*ZZqQB zQFAE|nB2!}kudOR%2cLMcgYJ3CZv5?`ZU5=i<2QUlSJJ$!d#& 
zelxRACb{#uRfwT7z530RMw5W;XL(;xWh&@}f$)bt4RdoFQ@v35x~ZqGjM=#32mXUc z4NItc)Xvk%tEH}FGoOTu2MJA6FwKd+aT3N_=4d;DYGt5v_|E!?;&5J<`RpHFO%L`i zUzeE2h(VThGN*QttND=}LI zfhsD65%`qn4!)2im=!tm6FNFB|Lnq+EtrbHKfJIjp#0ob#bSBHU`X5SRgAi2k^Lli zjW%l>H)`547RxB?T8(W3ZTpETQLGtW#YW05*b5s32!&<_NK^h27G4Y-`L~NP4pAOK zZv*3wck`gYe|s@%hW{+_JriWCsq2K=?*TW21Z@79AQC6k2B>k5$mNT|;?G*8g*(Dk z)akHW+U0n^<3x9qmxIq|iH!?FRakZPU|6}nOD$tyH{v?qy-%X{s{{T8f$U;77X5k; z!(4vNw&iasbJ35QK_EDs;{AtM%KWX-T@EpU1Yvhg!O)#NR&GfhFalfzrjrXlM8#U{ z!x{-N%y8!(D#&5}1_E?Lp3hTPjLiO;#;xy{VVDIHlL^^}@kwAeB)D!qEf#U>8U6IvOlbOleOx?I{7%Y+_ZvP$e;=@FE+3O<0rZh4i!9ct>ptW?B3U zt*bA~n>?Vg!7-qU)1#v=hkO zGoaJg$%olSl>pxUk{ZOC-$H%1Sf|^_7rg@!B2hr}v1apfE6hFSwL&;(sESk?*~QM1 z(8FA5Eroz1gRXHyv|L|Vc=J`>Y**K z0)75nQT5nmPMSi4tddg6FH^hsWonT=Y4sS&@)sNw{GsbU)w@&Z;2O=-3OQ)oIeiy4 ziq+#Oubz#@59zNL)YJ+wvD7JovB(k8*NCUZ60*QEzHh$e!c2B{iRXs4Zs9eIvk&m4 z@?BE*gGn4N+$0)#E(e+FbgfRliDS*k?=2Ir1-pKt+@L?)(?ukIfA~8*znewB^;#jz zAU^OyVCAJEJEpDT=#|%XEGlzzQbSPw2pgX8PU;4g;QBe2m+XV$uyjqy5XUwbX+zx( zq+JGgiD(gg?_NDvOb{6R;w?}oBuFG8rykZ#y%#8uiK6eJqrlMMmZ#FAmwctIl+knNV7ImLhcdkd02;9UfR0<#|6#N#f9$(;tp~-5S@@`}dtlH?p z7QB+sj~H98%cPHYMxUB1Usu<-N}nru7HhA}V|>EAB&t12zf{Nlc%@UFbd0Gvt`Y_x z=hFL*x&V@e3JA4Lc3wNQ5ig(onaL)pnB+WA$;~1`=qoh{jTPmyF@ht;QkJP02xamP z1yka^j_0)6;V&KXoTvM&L%rxkMukl37mZ~P6}|pOaijuGYcrNZ-WwxLNGn&OGj)UQ z)=7V6@aauFQfiQ|oPK=+PTu%AjMd4vrDVIMHyGEY(lskG9+MbYj5VF6vK@Z%lDyp< zp`F5@(FEHiil?t@Ml_JeFQERy!DUK=ybs9#v#9+?!F+rl6H@xULUQ?DAqoAzS4g5R zPR@2VqSl5^PX9Y8@jrJ-|1lO%`DVBOr<-2 zM*fK(p=o7C5D0ilkYMpYgltPkM;3q7^ixYalWuaTIpplT93QU3VafE0i5&tIy)*oM zya$bvGCDJ*N;vo#3`6aRWP`sZa6SK+>9fPkk+^zi2*0ryu|~C4RD%S`6lL@*VNG=j z;2;9OG?Asi%NZ&~U{Xlf5?gEtrcroeA*TzL$*>r>^7ne=384ZzEq=F%EtZZJ|IAcs z3KX@ulb#j6JmAQj+&A$ZEA*Mkp_%!HVw9$a!s=fOtI^S*Tcv7;LwrLqa1U7L6r7E} z&A)MdXYlNQ@{2~l&A-d+_#RI*3AERsE8j>iE_ouE&>NdfvC)-;4av&us1EcE#kBFC zosmb-bg4`_1Ix2C+5;bMpz|-lzGAnJSl!-5k@O$2M!u{(2&pa5-oo&6>d@9HOuTco%TJkcI9 zvAD>r>B2!IEi*r4S(lJ6d#}6Xt?TH_kx#(%fp1#&y?V!bPSOW7@uxSJVJsM$ea+S3 z2h%jpYZj;jS%O79 
z+R2rVvkT-iXwQuMmAri%?ubfy9j>)3WDZFl-XH0(w;XXp@omA{%i6AaU5PS&7bfvp z#0webcHj{B0q-|eS#gV_%aCJim{5WC0~A2a25MIlcHBi>+R5Qa`pry{=pCtQH-o!3 ze9{Yr)t={Tq6cZsPO}~7+Cbp|(XQ7>f6a^6!f7);*OGIQ#|WaMS2K3;S{VYSEniX* z7AWuZ^j@n0V^Gh3y#y7YbiR~`KtNbZ|G#AXs)lC&w~Qap%4wbB&ddDUVlp`y++ez< z;MKNNtLAVZ@k8fM%h9ps+=&<^FZJ-&1M`C1UW(sGhO2 zsjL3>)!ZZq?|t55CweFwOCyJGbM@5|_OG{SW-fjm_vtI@7XmQ&O}Q~MqX|cb2L~(^ zA?sYW3ao&dd{C#E@W~5-hvgL#D6nNkg~m1}*U{f-xsz;n0m{aBU=&I+(P7hIpSqP~ z)M3(8!Hjl>fsd;yGLG#pi;HWjH4;8GyPfPTU!fB!fYPwf z!H0l->wXLl*+bYzE`;1peNH^&6ktfTFzO}vFD0;$`vPgRSzMDB`(5Fx)iVyrc$Vvz z`g4Qg78@f&W{z*iBIzcqR-z_Eo1f5=0=V$VER}uK-XH0!+%P zN+AD`iX5EJs2Z1PDyuWyNe*Y`EHWwSS1dSzbEKRWlYs4sTs@jBhW zl$>m{-W*kO~kz5}aIQrsP zyW#F1rvlL2;^NJtf`(8LhjU1}iT=ob-SB3#fuXnEQRp5lr(VWagkZ#))XBv&6jmSR`EV2%)GbCVa1dF%JlQ$}RB= z4|Sq^bpt`McyjL?_jDRwiEmfBAlAyR&5_KpSk9y}%L0)_N4SBJGv~26?YSnV4o`oQ z%Admc^Rh0ppOHYR_um(;FYbEmzE|%4bbG^L+nLw+TMIZr*U4zguoX05fGpC}(C}w|cDafw`sC2L($+2ICWA;dp&7>@bz{Rq221C>eAiYvg z1Gh{+TUH@0>KYBA(aO6^uHcYyGsfD>*d?Q3!_oe}Gn*ZUk~!1~8HY|y17(~+Va$q$ z`|aH`0VA%Er}FrI>WdUh4NhLy^`U^(lBVOqm`lM80)Yh1;kux*1+^D6_%98*Rr4Kd zVl03_I1!dWT^$pPre|py8x3@l(3f-KlmqICa#4P%c)eQENwa6TbE!mdK8EPBeE>$O zHV5XJW5q4frENN(4ITE#CA!N7i2x5dB?`_o4IP2{&^F^`8_3Y+pk0OL#7>}iDJJa_ zBE&TZm4|nc)cC{}P*5U@cHq?1qAbQznT^0SaI2`3%|Qg{YGaJiWdZw!Q|8R+jTDfr zXrLGZ2p{iF_fJ7w4xE;An)_Pl3hNfdj?JMnSB@ac!qg93@Gsc#TiA~f<$Y=>W^(Zx zgHWb{(4thjWO4?(;9TD$4+$ey6oWZ?oY6FBh{z$3@!`FSCVuI#L=I~48Ze)!vN}Cz z@8V?(eU$b<5A+9OW<~HYFxK06Y~;Txj}xALhS2$?4*M+fvL2ALj^Zs*+2CNxJWd_~ zS(+V=<|*xX+NHNd+L&ZfR`O@+FQXdy@I)~W=j5sJ*r9R3ZLJMWbO_zHx%wCbePw!7 z>*CT?v900pSy}Je1Z-unJD711wSTN+T%7+bnT#E0u4Jk#bNQQVvCp|OR$+Be!n2wy z6FQCNO#k8te_&V=33(_tf9A?My6c2!&kwrVp>MK5di6j%Rj8oE=1h8I*|i=6X$-F- zLofLDMQywGIe{sd5QysqNXt8Ls?cgcDoYCa=FLQa2}^^`qMI%{R+Dg+$Niwq1TD+Q zFrcd>dr^UfO}rG3gZB}LHSH@-FTffyB9^9Bl;{B&G7pfncT}s8Xy9bZ$hU0@45R^Z z65bWE4+v1c4&MP8sIHsI9gANmG)7=kP(y1MQ$&#(=q%wdH>9`y`NY`!_gS`=Ul7=5 zj_)!hD5!UZ<|J$JpaM>#}{lk#|8l_~yFoQ*pUc37K+jKQtpX2G`#3jPM7t}3guHvb4; 
zpOetv500t24E)@d)<{Dq5`hJB{(x`wOLd$E3xk;xnN|Y|NDJIsYOQ3v6PfT@bQ0JD zmM|+T#0hPnC!$ibo0Fv!jY9loGCsT}=C?@{m&;3|mntRhh}CW(gtPH}Jl}kMMaCM{ z;BSCyzm_=%w*OJlC1)~`JTX5KueS(9kB$lYAjdCZdV9-+pQOUf5DPgphC(g!q>-IJ z7gVL3ZhS=H+=Oh7MJ?;lBK<>{)@5td0|Q~qJ%n5xi)fVtMHX5tndlSaJbdeK0J%2w zX~H3#AZTNZ(ucj1so@5rRt(`Jc{JmZy8pZvvFgh-?~^`xk+Ups!BP&4vqKf>7%VDZ zP^*W{(Att$@%js;qv>lM5BbjVMI(ES5WBNkKhYFUO=U1EiV2LY&c8;7zZCB3JkM<{v??mn64p(?bx%ryRz%%voz#-_(v5Asl5FoH4^%DnmsWGnRN;RUMc; zioya7pU^urQuSa|fK{Yxx?oG-r<2^)hptC0xmEk+iJGgaXJ$q~qT^#VulYrv=TzI- zviP0pL@ZCUDh}QAzxZt)bN1*b5x!2E@vC>GxyuL&RTBEK9cIONesUWLgW=vBh8e`1 zS5Ica<*3~s-ynW{&_6-^YG~#0YEz(=Vo0s9^-edjc@?S(7t57GiekaOoS@Q4hNgU4 zG+p1r7jrR-lM|D1O^>hPPEa2_U&cZJiwA*+b6Tw`6C;dDRL)OHA#3YT=4`CQA;4hK9#_%>e(4F;!$e&qf>1fA0EL%jTR? zCC*fNO0=B{qRF9&TmyNjSrU_OX(8sP$G6O+Gx{_tga^V4{KWR}1C>ua zP*6sDXXOLVH4AfL8C_f6vASCeEX<_O#0jE{TGJRM32X2>JgAjewafuVekxV%y$}pl zI=sYRuqQr&?(AXV3ZdAsa1eNPbCuc-m9?cnt3*CEaMNwlYO<0>;dT02XZcjaDP7-% zkr4?Uut1aup$|W=)#0>icj_4?VE`{#8fm)tU*SSaRp_(L+T=(jGpxj@w{0df!{aBU4`Dt-mmu9s4vn5omA|a8Hjia@J|}}O)U5j$n#A#xAedXSe-KOd^6f!# z^uv1gUd}lIenH}59uK1)x2mOP2;?-QB$&b$of=o2B^$kwRhs=R=MBX%i^H`<1o+G; zFFYd7&KX83?!!mu8j*F-W_P|HIF;gT-%%gS4Gsvf+Me0-OMrfAUlW>;i3ptyg{}47 z){V-LDlc5iGx3UrhNVy-5~fb*1>>>v(KM=h;u8-gb|xwXmcD3!U>v>#6djZ#dE??Q z()~v;&C$J!uTpuO4rVWRNtBYbQ*__ zkfUVPK@Wo%HW0ua&?6S?jrYnJ6*X8;n=PUCmypoqk^qfx&_Ak!r;IYen7AiiypxQu zQd};e;!jNx#mgfWX3tC?#l(I@yJD251(y+BcAek#zVQ2?`VTh$xki{fpcPpUQ%PO- znoe^hLn*w6WxXD%93Mu4?#s!NJ#)D&rCV9D0_7|Z5>M6+jpAW8XTO7z;aWI`xEe{m(ltSwen-Dq3Sz zo`<@kTUxEHcXmyEo>#+bUiJCBVOA}N%PtI$mVruN?T*l5Bx)>~RL8 z{?bsh6`ZqJQ|s}-sAjDOvL0I}h_J`7Sdl$R8iJ~p+3KbqDY7wLRp=OgrVn2HEkHzCB_VK*77R|&u>|sK!`}7z24lJ#Yd+Y32 zm3b$JU(di%?r%!aUtj;9NXk#KIY#sE;gi`nllp&vHyYd7*xNaoIx>p>$H#xQz{IF3 zf79l$yr*kl7D@wUL{3A8U@Wvw#hT_6is5cgcnnl4BXU*|h%;ZV-?KAez!0MrhH9pd zJMP(e_~3SsC@L(QM>sLwK7bTr<9hIBm$3&}=D)GBGLKSRkU7w?a{!3u38kP-q5<^% zhdWx(mzXsu@?A2%&~DI&50z`c1(mkC=&1I0%!SQAF&E8LNH-D2rovJ?6;&{OWRe!a zo6g7XsWT3I?g-5d-!T^)YHT_^5Fm~EO#wSgXCmvX_{Lgn}sAdhG 
zPD4-Lzl>;Rh_{p0YboK*$Zk#N&zz#U)7aK*HH@C!(CX_pZuAxd8e%=#zdPL8yz(S3 z8lpMk~}vKMJ-KyCt+yHT%A{g}w@l+7Rnkih*wmxpAcB+aOuF^Nl`WHp2e zs4PiPs&2jtIu2kMc1&JQoeu;`&HHEVxp;7WaueF{;~GZvT(-(jO4Vpd4rH`Z*iIHY zCpO%H2!zShPUlEJCSnjfwV%^7WoEXMx^`DlJGXeRS_B}c|2?MdpP6a7;bWBccOJ4i z!T(6B{=bgxzohnyTJqmRqJM%dt(`}dB1AkKWvnQxEooyYTC$l_OdNy1$1ZZnbWqw$ zY_HdP-tizkCe+frF$fsq3>bEVPxKfMH={|U?jYuTn@}y9GBloIFsOHtu>E8>T!D+Lj7D0)s4mvAV(m zNx*1&qx&W2hmWv)-mJ(5$cq|e`u?Nd7&&s}LhPGEts!{k4X}+1&Ga}@FzsZ)$D;{{ z96%HdnZWA;X+cF!ll{nF4wO!%gNVOv(?u0d$qF}Bsms#U;G!imI;a8uUxd9?Sf1^Y zts9)+?(QEB!QI{6-Q8V+2X}XOcXto&4#6$BLs(z_xmV9#eRcOffg_Hnr{1bjRpV|D zLpg~CO#|nBHb#>P3~;)2WcGG`@Pi~2Us)+UwP8lsJ;RxU{kknz zatxL?hBYmGrJPS9kS0?vNh+%=U#jnEg#U|@@F9KeR6O|ufc*lmrU7jn7;^p- zft6Oq)aEUC^PWf>gZW^}{ax~>pu`UW;*TI|vh ziCM-`+23-BQ7t2@1`G<`=)pl0IX%(RzKaAF>fm~6fz-JegV&y{1wijj@u1+E{*(zT z553;KrRX7SEl6#+7p?Hy(7Uu>dLg)$9+ek7>{)wA(-86xDM&wSMhhp+`)yo!!K#&e zn~a^WZ3wKywVL#FhkNA0dbk6`j3DXD>Yaq&&PdWvYQ7}GyPG7sA|u`vs7PT;ynJrOUuBo^}v3`rfA!C$G|5*crUuz0n=TJofuHeMC5jwzi@j2p3K zGgEy9qbOi}0iqS=3FAkYz!P)+vn34IAZPeKGtqF=s{odU#^wPg2934DaZaU7P0BUN zC3#|MxqL(|#zQmm$a~B+>o>`2v|*M>yTTS`JpFAPWksawv-Qi1@w@>4H=ESq;$*Xj zc#7n1rmI%TMIUt$O}ncrW}N z)%(OyU>R2~t@pn8ri+xvoc?OdvD!a<0SPx#?oI2Yw(G<`NwT8#6d%ad&+hKUlQ-W< zyiImel(y9soPb+gR|fFShU>Ym47p&j|0W1Z7Yi?AKDHyXondVZ|1Cvepc27FdAcQAl)Sd6xE!Ya%)Tl(o_bu@t4@#c#vDY#ZWvC9$ci;P=6SCd zG8;*Kk=tJ%yGaJW3I2dBj6v%bw2n}L32GdgqRv$WCum0?9OHRdYrIY!MU%;iqfnf) z_ZEd`8s0Bz_G8-!OzC#>7W++wy=2tI+r;X;{iutN$~ryqHg$~m&Sk!N*vARm8fAZ& zp;G>P8?h<_clagFwx77EfFTvbJf5*R$tatfJF>+UFfIhEy!LZ!pqXl#hwT;m@(5hA znj*%{0R8PS^7eRtATN8xd(pZ=MPB{Zl7Z(Ap?`+uF(BkFV2D$diQ> z6f+}txoX)osN=DirGt%?vZ;Tdip%xQKY3m+FpMtwS@5|Qb(kd8S3_42_r$(03454E zE!&+_o!7I~m3Y#5HiMF1<>0fgcG%med>Whj^S=J4q2}MaxX)v*5dd(YIf(M*3*~=5 z)Ku|wFcmR%Hg>Z7_dWHLx~}uO7@F@>)aM|+9w`k`%JuBXjf)z5>2XK1-ysbO3?%z%)Vm4KC6y$=_u z&kwkuv6Z%fa;*w-qZRd9~ zyl&{;;S67*f&@`fepQDE4H71G006a+@_3%~sz8(k0~)qj2tQsKsZdX9aF11F&YdG) zM~bHE4VbK+D*oIckAEcROI&Y@(GKQ>52+t;PlWH2l9V)sFc^`0g(n*V&1(9gkaL%4 
zi}?u&9~$33lq-|(mCr<%=^zckc;!;)ICGWe#aH2lGR3 z=Z}wR$aT`)n261l+?!}HpphMHQH)r#kgz33T#xtcX=I3dK9|54z3XVal zMA8htl7LB?R!gk$72$mS{P1qgQ=wGPX0RjT#lWCfnruAF(3qW1b07xg0l46>*Es#}R~Qt;k3?mW)HIk=zrB+4LHoZc4-Btz(L2MAYM z#$~}Xykqhy_2)?R2Da)-+1nVV-gjb!vMvWimfoR@LHVw}{v*Sa)|)M2nx$O$N}u85 zfT$|x4V%5&N?k0_8sr;j1L_YsyA6i&)Wc2C%WRD%>Dvn|wo~RQmo?5CGESrX;e>!k zonIzu1P^$D+QC+0?huU)>@?)QjODr+Ya{f*3aITektASmD?`oT zy#qCC#{-A2q^oh779diDy@bdF!#Nlf$k1NbICnCXTQo1oyvT%1mCZ|-HP3;3fJwK= z2G<^f1kC#!X1jiUBSo8V-Z)jx%6>b&jAJ06OM?$~ogTGx-gdBq9(`NeGcc zC&78uxiN^ICA?UU@|CO6gzGN+fX=(FmDnEQ{*f#0K2C^AhB6yaB3Q>!U${lJ+2%FD zCH&_o=-a3xRk?Of1uv-ml|t5oFnn=LkM*si*Yw>}vS-HXhDO7gH~G5+vm*P=tV0P;Z{L!{ z{sok5XgBx#U*~bf#>y}@r#>2huUgg|AZk&{Ct)ySmZw_O{vRf}uCDbj$a^(v@&p=k zV?lG4WklNBNen%GZ-V_)S! zfQ|u3tQG}YDN2}@!%T}rC^&ao&zx$4j^4~Vk3|D5EL8F=ntg#3AMZK4iz?OJ!Md9B z@6CZ=<#%-_c@#QBsduK#{KRv5vM;m0_98~K~w=8*nKNh;ix;wHhY1Y-p9>DwLuT38%e!00(m+Ps|->i^E0EldNVr2-Lf)Gz{&ZDKEF zm~0hty$Aq_>pf6{)x`)k>D#$~Fku?QPP~}xwSTA}yRW)FVG76000HZEGf=*SWhHd( zyb5=`i1s+@ylp4j=ER*AL4e{CX%rSZ?b_^GD`DF&mi<&)nakso^+_F~@z@^XDDRIt zl+;s*Kb3ardy=^H>p)>7nNX3-7FlBX6rTX$OQHbm3m} z)A~5Ui8DK$?H6&;Q?tauPFlv1%2u0@pp4T4XoTkcXA<0k!08Z4%JHYC;nL|b7QWIN z*j`9faC)Up(zIz-^Xc3jlbUpp;N`Pqb3zz^?*PKr4I#Q~;`#`YnQ6%6QzuaTNH|Sd zHpytxp!zP9GIV`o@Cr}!QAsWK^WDeDCIqXszAmy+O1gyoH2Y&*vrf$sXmGz5FzdYkk> zx`AErOiCaWMukwBMm8!f@=kc!lx9*Rm1;Fv=YlK)PVh)p97`r>6J)avSCO2EshRWV zrzId003|9DKf4c1A3|x@pv8#v-OHc;^M_zvoN~?w8;ZRZa4RNzf7xjM=_B>$4f8B? 
zAB$m*C`0rK96?|v5E%iLM=a7I$~bA;XfgYZ^fs*5GC$WL3taI>k1fA2!55b2O0heL zJPk2emb+A{D5WL|#(rcn3ltfq0YWj4oB|?|LJPKBNI#B&6JiN_OKv=AT!*e>j)L<= z%3Z#4M#>!uW};hVIhLdimTW>7E}*xlZ7#}Y%&*MpqyUQS#y^r*KC|9ZKaP^OLzJ_~ zs7Cy01Jhidy~5u)iLDOwQI$&nfJGP`>rLDc$waQx?bG}2x8-udaa!JF^m3G(C@O14f` z?-}A7=JCM>;4G(Cy!P$5Rtp+6Cf@Bj%oC26$ON)amZzi&*PtqnF&s;B94|Idl95Cg z)*j;zycqi&Z;bFfDa+^$t%QLJs?aXhtW1z*+$+FW-Z(Ee~qVcnLxm>F+4U$ zOktY9usZ{F{HOV@A zuSAT6MELYa$`@bcv3My1m4bZq3Napn;`twp%`KRnndXRCf1s$IDJGAs<}O)=s5w}Z zMvZV-B7S5JK_*ANGY&B^LjPI*5)#l7P&$X7J}Z+JcD)9{fwv`APjmmYVBsR(rjE)N zo!V?QcB@udK!RGss*jw=Bo*X+p+l_x#kF#g z$-~`UX!y|7?%$cEL55FGV_En{oi!sqfnDJ06246g75CHnAnQu_5sx@WpXg-<_h-Kj zwlcS0kf>*CQWWNKYF|^k6AeF0xBc{j6qEP8G{sVSyCQy7es5DYgxNk;9SFgRgOzX5 z=`jEJ-S_@VnlXy(xHtcWa?IE^x_}X)fS;tC^--#6&Bs!>Jg*)E=cW=rHLOw2AC2ow zk%xGoyh*pyF`~cN7CTPLB}UlFYxN_b?S_(HPp5~>b(PZNnb|xUi(OHaCtI$6JMNj- zTH1@5<21*Wp^<))g9=4ES_w?4G2I2`XgX~oRcoU1kWO;x#JyY%grWA~V{Lf+yfkRL zcjsYR-pfbG^8%1AmU?x#wzMh!3qEr_V{K>_hsh@~allfT8erdHzwHGLWjotEqC58z1WH?&C=MJoTfJ9sey8&kkXlN6zvg+w+UrJ${siCp?I z*^fldA&cN7?fw0aO7*#v_4^mvMqkl|)_JW}u;si|X*cC&=TD_3?l?|`#Y`xT{ z$9RiBmI}CS_q#8N6K$s_4068{d^Hj-p`Msm@l;);z&1b^D{*UKG!>9e9$V!Dn)*X4 zS+KF>ahj#HC-29W(*D7E8&Wj7feNPPD-$JDI!vVUyWVG#`Z%ZwA&}@o0pXLd+7WnW zz8fSjsux@$KfxHKb%g)yk!BPN#ly#xW0MF+aEiBI^JE0@TZs1nAGZKUJj`*_)M#B^v7S?#-&V`y%`Cb1*>N8|EX*A!kM?Tq0nch-E5tY-}XX$dHr$g|{dc z@lC{ad-oZuU*K944qaw6Yq9qweyHB$PtUAEGSqmO{k}z49_n-2ZD|8&QE_ZI-mp8w z7#^zdec)Y?q#)mI4uuOPo3D(}sq`yUy}hhuo&l5C0rZQ8bh=Qi`WTe#5ulHuWZ}Jd zXl%YS%ifNJe26+|rE%H&&qcaQkW;NrKtsxR3`b{)2_XRSetDpHBEG`o+&CS<)?9L| z3+Jd22=x%5l1O5=Ey8NN_z71;$tvO;G_~@yr^*+3gChc=mcZ3($AO~9=d?c2erU>D z#wn&F)?#{a)C0i&O)!&gOw@!$;~GfSRrkf^m1f0tGNym%K8Gsh-z@{(dB?;c0X+2)14&&; z!)uTiys0K&)GZ%3t3k_pXq^nmU%!hwVn#iKl+x>(4^W1eEp{eE5u}?Y$w~$x2?MAI zVRnFyyWbDP6`7t*J9lTDGrGuZqYXaX8-5s$FAYTFW~HR(XAi^uAwa1wk~dvv!B=Oc zxk4W{K(WjzGu6M%I>T@(hcn&sp{{Hm&Z53D6F)N-XDUd@B(wAy)|{BCVlj#M0jo$e zW>z|g)adFuvIRcdie6_lK{!XogDqeCTmsn4V2dYxEuz|vBBBKc{pn_yJXg310^$ov 
zF>BqT@YsYIv(^Y|b|MoM4jXlc-Dj2os@9Ne^i@-{ggRl$6ut6S_k6qO{C7)5d(V>} zXBa|J!5kI5%V&CA9PLvzi%WPtW0;0Hg2t8B;kFY{xE_|8bqx_68pRM$zV9N?#H^Sy za3P8-<>te~iQR7v`D%yyO<=EzG?xkj8f#SrxiN8aJ@8A6iLPefuxzj!c#0>q>AR}@ zq={^q%nL-Dncwt6$xoNU5l$Fj1FBAqr-pL+_8 z1U7aGPRbwsO9X#9Ondd{$nr?h@grAWXsfA$8G@!z2=#F(ZDrLDs&) zgrUZt66X$Ut8w)%f{H{za8X-2K_-`?2D*=IawZeTEN|SqvP-`bnvXiakib;(ljC$tpwB+Xpuw^v3I73rX&S3~G<@JY9-MZM^8#CVgf4WeiW zYKxJQQUS#>RS)lpR}O-Ko!^>QC0E1AugK#i1i$#kvzkHIY;P)=e8v(;NqaW=)SRGs zg~vg!Xm`UBDh_oHap+N?iTdmOg)sSk|t#-E0q3Ex}_X7T34YE_FIA=5`nd?)&$;YIb3Wp*))FXDQR?S zvtagPC8aLNgQ{12aRo}i_~0w8NnEh!4EwLFWYVlkLFnU$cb4yoj#2dk!($|Q$CtjV z0>m*3tl1p&6I@y9$11!r&Z@?VJBCkC-Oux(B5kz@9vCO;vFu=?tu9CEG3;R!5tb-E z3VE{F!8(;qhBIk|!Q<9l_Zr@rC2+TzVj0N3Up_N@HoI=}(>8I|ij)@_x?{ z$3lv9N%>N_sR7l*QlsqeF}NVs<9`vkmSSb?a zZF|BtPickGYb*^&wF8PKb=`EiNjWhYR4Gr#2}O&mN@ZQ{#Oa`}G9(24^x% zj*m@IHOA--eb{7#lZHOJX%OyyPN-3JNx01t+nTn=<}FrjRZ3`)CTAR8i*6n;+pHz*f( zN`Pu3JtX8e*Mxj@yXiUv3)VLQ1?ZOE>wAwrlat_59sq{=>xJ97^7*`yv!mNRgJ1KQf^=;A-b+ z5f_S{$7e6twCW$__;+tzCawzSXGp~aDoye))r2~QDcq}4f_-#_lGI%_j2pRQUMF3s z%@!$eDu$M`EdW$P%J2+3r)bGP`(-r8FF4?Tku9B3v$v$wh`a{t(CM~4s((vH74d|L zNzX21;WqkKw1K&HR(F*MQ$5~ggEMg8f@(z2z^k}9USUwO zAn4K$ts(JKHqgL67Lnb$Ka_0%Qs7cq#(~t53UrW&DSuzx_m`t*r&0W&7^*s3Ip@SgYTrNQbJNCt4OJ|h^z{zvfo=m1B!x<2H^bslD6@rco=QH?y|+jyP2N6Y6qu`ooDmtr5MWQ8|g@p4xpYpKF$gWlFCQifZHwv{-;rHMH9NdnEa z8#&~5HMzd6di5(u@5EC*wU;r@bM4||O#3OQY^TBttLmeEdBQR`VF7F&zj9dWQ=r+G z4=|Jx0t>L*`pYq zx!%=bi9TgD5p56-Q9&!?CGH{fBS3xGH(>5}FwSoyM$S&a@Ly_XHR+CZ*kY8~G zDNe$+BKtl5B^p7r1{+$<3e!{4o^JYjK)+r10ls~D^~5h@!e{}2Ls!onoR44no$yvG*Fi1k#K6(o+!SP4B#Vh5w=K>#Qc{vokUhP`?vi$q0(k#1_`>ak6$6R_%zjQO^71#4<=LmJSOXZVHKR@s> z^%;iokZ^Zlu5E4R+7W}poRaVDP`Mm+_r^XcwQL`~COs)m!WRuYd_dmWUsB>)iJe%{ z8DBohiO=Z4KP9#d=RlV`Siy>30KJmqHNL~@^|Z9MvxfWY@QD)FCGPiMXVOCDS!peI zI?G$2%;eTwiAjB~34FgRn9mDB_LJ+352tb0+>z9?=; zW-cQHNTcy|%&r6sjN;#>s2m~$ZNZqOd+VOlTbHJ(wr9rjO~%IDT;EL85%%8^NmO+u z+!p@^mNfDmD$AFqV>4ypeCa{6Ha-zQi^6>S(Jbu7Xb32iG4n54E)$bReS2Ov4KPmse3iv;g=Rb01i1M7>KO-BOn%g8XfQaw< 
zGA*w3H+VX7RSEUOxxNq&kc~!Js*K00P6`>@R;fv0xSqcgo0_i_sE8DGNfirSJQ`T$ zm=3I&-R(uat$*QHP~laF;SLBLN7yhyoGc{b@Q>e-s8i;cRZ6bmoA;FZlKF$`!J% zfHlFUH~cEqtH3;4?c9ez-}1>kE@9can3PHmP2Gf({8J#78P4K;&OtVG0U?b<5nEc) zYFH}{N25I6N{wr-8VJ8~f)T@LF~z}`1;Vd*Y%d~z{*fR%o&ib1L1lzN{gHs|1CvOV z&Xo{m@nq`U8x-cy3BRw~BxTSTa!An=?lwnJ(mI1Q;{A*>%&sgT6*b*2UYre;J5uugTwy-l$7eJrTcj&Qq1Jo1hUakao*Us^U1>0aY0boy6jl9}uVhHL)` z!S+MKUg@#jf`V*uRUC!b3V{1DVnpuKN0Z6;h|mHYL)0#hfes@$5iKtNYL}n378UNp zzxjz32N|KeHbuHgeo24Z{{M}+DNUvlZ(zJ3!+iNd@!!Xsl%boUkg1{FzXpc?1l^Px z@UUDAsT=p&KkXT1O@WHEw8=7JHYSp>zIZ^z=(&v~Y;ov-zOgfj7VYCIThmX&&52eE zHIUEE%zV4+DP{5E;Nbd;-0mg|xAfuqAQ^Y)N@=OR-pvLB()s2u1SRFnXtK})=?Mky zpZQCiTG!UWZHn0b_wfK9(ep725B%%_5tNwM*}0ny1!HrqvL%G3n^~>{l7qZF7?7)P zHau~mKNn?1rOctVZ_j7T7*InvO+O#P%I(dGEY)W!koGNI=ZS z*B@4$C2UHvxJ?Qz=&MF@zZ#awP4tulm_1ASp52Wy~n*175(0Pqkto z24TvRO|Pv=l4KU7cBe3#S3Mry6+?WKFDJ_Cc{sR^SQw(#I0#nqCMU(THz&8GdeGD% z{4$cm!DC=B;`Lm@MoofB-eKu`MpwZVY+k7P0LhxaTJWF}zw`Axx*_Tl)t*ndSQlIx z(qOJt?OSG1HlmnOisD_8<}5Xu&Yl5wsa0^5l1!45VR5~gFy}F~b_=T&p`eZ`u}YzS zX4W@4Cx*~9rahGaF&KW&mpLBr^W0<}GUpA#pOTYPgzTmB4iD@@(-f2{JN6R@wH!4(?3}H zW#AZ~X>ae~?)J?o9=7RXq27BJDeiKur{u5gtEPL&+_#u%UyO-C9mPi;Hj@YloPe}k z$K0^h_y4+F)lxXu-US?*P69t0WdHs1wX-)db!LzQ{**NN=kb~<3I2ccQbI^R@=y|N z&gN4|M?4h^BCwPNB5^6LNfELH@^UJnxn6hCPQk!iqWI0VwKYwO$wQw|^5yj?H8%0s z$TO6c%B57AKR%)rR??&}Tc;~uvHB1jNDj-yYONZs#f7XyhI^gK2IL#$tU}H`LL$G` zafD%{?tqS#dAk?|&l7Z$Qp6)bt)ZM$lk5|tQ*WiK!Kflbp2p_Src)r0&6uO;di8ts zV~5Iz#Y#Yb()(zMUros-zzrfr5^*iJNUo!EYosANb!4^ciK3F zHq_80m-;&{T^C|W;W&CI@~NAbUJ`M5-Gwp)TrbXh(oAmqH~h|(`~6A!eF_yOvPnRV z)*%x^6B#0<)aKLot!-4fW25+mZv$=hcir}p`x@WIkxz$s2LHZz%8n=zTMX=6X1y|y z1us{kQ+;!K|l(PGJ?|1LA zP`L@8ai=~7H5(6s=*oy(I3^5tv9;4XGuC421+BybzEjVX)%%f;UJXih-34GK;8{_G z$JG!=Ffjx_V_y^{cWWdYX?l;dBFX5>i_M(uWah*YHDX}`Zo&xiGgSpMdx)6 zB)?KsM%KqDiB{KJhM9Du&E|z;d zM8))pnS&WDsup3k$<&dYh0uiNhSP{~bo&Ka@|uDsNDL)lwKa)$pWvjSlmtr&_vA8(tG#>Bubi=d=IZRsq+7Y$KvQ#fXw>pc zt39qH|9C|;oAiG6?s;bfCmSw9^kaaR7hoZ;3~x`R7v(9eTcRV^Hlk={E}UDeLE#S0 
z<||NSK~Azjm~W^|fj)v$9t(0Acque@b}XFxZsKQvhr(q!lrK?*f_UOCb!;&j1cJOU z($DUQCV6NQPb(3<@B*7`5ydtxZb(37ySTmMo??~(4fI>m6YoE5xOus7q9Z&wQFi8k zYx_}h`D8*?;&&g!W)f6z*)PAqT+3#{ViH31q0|8)!R{!TC8-id-Fj~l0s4*pdh`?F zIVM?*AVj!ruVb8$KS4dyK8c%rs!E%kJZ7fB^WbZt%pO%lM1gJF;p`Pbsf+R$0_919 zZ4-T*9js?nB{RFiG~`64TXUU0g{S_HkoMQ2W* zTyts0O)N@#$ZF#Eb87`v#rqqN_U<`WswzXtIe9JWu%qQqkhn6(MU1!BOZ1e|(ln87 zvDhHgf+oq6HNe_1*CAj&Bi_S)^msTsDQU@eBWm>1eB2q#kDBJmM%6VmQ5e2*$(Jvg z4au{$Al1?&##d|8BWDmnw=?ps8?IxbEG}`Cv0r-OA(W_$;pZt_IHBwX-JUp{z4p5N zq_A4qzmbnW02c{I}!ve=76;zFOFG4`CkQ zdR7y#9t-~G#B_}eTufl&!#AW5V`;5PA3XN-CMZ{I>|sn9X8yYA@wSih8}J55tih>*ib zXhNjVDo_<+=?MNr1%F32stGVhieQoFm=p?5IM1G15V?_w8l?zbwGeX6#VV~SRqKmh~@Eu1{fg$^&?jQ+aPi?>%53?3p zMo|S$9AHa6pM??X0%$*4bAyutdgr6arv0FydnuWP%4kL_zvcU`VvNmk_XQ&WO#RLP zIZ8IpKJuY-DV5bHlb`2j>iMwjmrAHMJvx5+<;bOkoIfEix5##wld;AEN>R7Fu{s$r zKPcF(j7*AQkr1hd3QCHc;5>f{Q`2o=*hJQ$dnaM5f^$wJ(JZKnf|H_B>;YC}Hu87Q zn8V+Z$KOqU#m(vs&kreMdkcA|hC?rzu#gxDSv^lb{-j}%@8VV}#N}^5DMp*`G`|fu z@VneWjIhdJUX_e8ms>dPYVcS%OyGC?bbjFI?RBO<|16)Ow>>S7TMUh|Q`VN9>RMGO zB$mNCb(goLL{1S&XIV-Ujc}7mFtE6Gdp;i1piT9eVHs%FWcxUV=vw!oydYPAIHw*R zCk%L^1zfwg&6!&{)kO02LyAdh$V_#NuE39JE%Hx|CF2sKka@=gL>fjde0Wx|qT2FU z64y7QZ7&`hoJAI=`&3bx)yO3I zbK39aB2buV4J8|MA!b-6aJ_957<5YYY~;Z=u(oXtW=_ey2waidZ!@y0GcI)&7&FQ` ze`yN4+%fQap&J&q2BKDUS0m5(FSe?8xF~riwx;MwgV`-(Np9@q|A9|Oq*=dD5MM1d zYhWN*EvUKL^rH{&h|>(ZneT`&hCqPLchHBg4YDwk?_eORgM#R)g~jUVm-9k9!l0qq zNH0~$3~iG=_oCz1Ft|fV>2kR%lh2^`M&P8iWAFg4sJ76sig5B4=ylD?=0Lw{p&$Cl z6*gVDo5lt^=yAJQ-D8SD*}nR4+9h|lKnRnOb>^YJk$ zM_7B6*6>|+fTR;-YU5fdtd~!yKoN}g4^seQaO13aPTp*q5S@~Wfg7~oFp5|EPc*-R zc^#Oxdo#xR@AcBrGc~lLG}cs_utGRxtSC?`aW|kK`lwZK4j|#Qm4V;liC8u-I=Etn z8}V>QiCaUFv*rRK3T&Xk&9W>y-9}XOeR?f%VqF7y%bj{p^4I<0jw7k`bmA=BPc+fW zSgGDsokij8wSiZjQmZLN7*!zU-w$B`=0)gRR6r6`X`ZPWPRZ71e@+m-28c(BZ4f=kHP84scA@V_^n^oV8PuHfhu3D(@IvY^V+ z%GU|FW*zcZuMHvZ$ZK7#JSI&m4Gq^#2HqELLM3IjVx%Y($uF3o$q5tQMpp8Sy}hOS ztd_lIb;@a5QClLFErT1DY(6s|gqIW^UU)l?SJt^(ZgeS1@mql{rllR~8upfXeJw1Y 
zDOeIPVxd!}iUvl4$m3*$@DVD)nC}E@lPUl|1IRtO2~bNTP2$YeCWwS(`klKR6j`H; zZ|Rm;y2u+?7K2Ik+?dMgd|sp8M=%@3);Oa@#@I-0jgOellYi8=^^E1F(fftD5t zoEc%HcFeSuvx`zbGEQ+k5WFVOb%~$K9%TCW*>a&N-Nvpv;R}~BQTB!ZWl_|4NTu2q z7`({9qmuvOpyXfQ&%Yu2pD6Ll{Yt{W5v7Btyl!FHB# zsA!B016Trd?xO5UUhG|NeqWBaeRZQo2-`XIPI%pKl_8Y-_kF;`MOfRAqxXy82%GMmY!Qyxp_uMDS`h-;I6R!dTqAako<4y_CJzrh#K(v30Ud@`?JIR5IEL! zzO4nYEm?F&&*$G{ zju5X+**;l7V|MaBLa1wXde4jopLFtgts!BRh|XiM6dC)}OwdW-Ii=)$>%XL!;Hk zm)J)%v{ZHIDc?mg3X<{-oXMa*AIhwm>D+!gLD!3v1(}YE?(^P&8K<}8l(zyVNvTU4 zov>GIx9z2%bbCf|G@EcWs{9HjSyz2WGRKmY0HY;!Aux0^KhYpsxnilHVqJdC9>dLk z2W#A$M+aL|3fhw-iSG$IWn~MIs&6P>XcO(=stX>zEaFRX!_!S24v3rA0E9hMBt!+7 z?}^dXi@|$_7Vq9OZPIHp3*_z%?~&Vv@9x5>+b$udF%yyCOb}j>G2wLo3aIqJog-%T zs?xObfYk~QHkUBEx}nh%mYX207a$cYy&=zFadvIz;@E zoyZf!xyesiNUO$v6C(7I`J{8NFmv1%^~NV2{6kBfznLp`I>;@2Gz@2oRLaR~Z}A=jZvfcFczWBly<#vpH4MORKz=;QSYB>b1MI zuV*z{)2SI%9K1`?cy_2iuk7u^F4J6>aJ)B6A@JBg9AlS7cSAg0R0_D6;iZau8d>-Y zOqW8R@U?Q<0`yr7pU@XQ-E*z_JC#0bmM#7iUN3r|ef%df?%zH&bRYr`8>lXw02RIcK*UN~Is3tT} z)86j5izT6YTBGR_*d%d?H{86)idthXK3- z>?6cpMkorDQFjFZwI!`nH{To(a+9c05Mlc)B6mR$*6bEzaU?sUDeGgM4{@|+Til2o zhca2@Q2wMR($nhhVM7wAg zC3NRHuX8QKI727P?D)(Pd`lq5FgX@qf;FE=qX?-K%=22@whgoZpeg&)`&4O&6h`|Ju!i%k zJsCT>Bti@gI%K<0Q3ZL?g4(kEU&7La6xZHsW$xK8k-1;9bRnV|suoUp@mE-4 zphP7piRL~G7_mV0!#E1xB;hY5q;>2?sr%N}jSw2Mta~-HWpkJq)#DNyIpf|R{n;Ba zB&%;!3_o6Nq5V+BJ7^{hA5w)@q?{41lop&8?}DIem(TzHZSSQ#dJ{IRZu2WCv-pG7 zO0tKiiA;GS*@|8gyk^&UsRu+Zf}GrJQ54uw-x5j{V~qP?`j6)Yf94rTqps(#vfcvb zSQdS_g57z~8uD*1?o@V7^zkJLW|FDiKR+=~F+;oj6u;YA4EaFtClajQCTOQUdhWgA zs3@lxBh?1a(NWnD6Q5(!p&xc$_R=igTkZzE$&2miyu1z!8YG>(NNhNad7fOvS2Kj# z6nc8pN0QO}@L$F<<75Bk~pTp3*6-b@&f*f@nrzQN{VJ5VNHd@~g z7@rAH$G}p3V2q1~$2Cenj#O+M(ED&1kc%~L-!i8?GJixQb_aa%z^hq{u30I#ZXL8C z{0-$KPTCjAW6s_muz7c&|L>yU@3fasMMk0mSjLV3pV$9d6sQ=QJO4|xP1;e;6hIo? 
z$-ePl-2>D!Ir>;N^Fxx>M}5^GTIV6CBUax-xcWbgy<>RZ>$W}I*l4T`lg4P0hVRp^wa;FAowN4;{q$Vd^LhNn9COS$#|>|IcIR{HZzhZfoj^d) zoO9{zTAwl6+xv!#)1#EkIanX%$IlSBS5OcW;`9vKB!+r{pBU6p4U)0VE-Pmym4`%5 zL5B_6ZUoiYNJ`iuQq+oK$Hn8%|4FY=bm^*Lc4ZVAx(S2;P6E-w$I-WV~g+B6$3rQ%8^N#B&{ zai_H_4#Q*#GQ9ay3rA}N^tFdmQgChpQI4wj8Qf&l1D*(_QUp8^x-``sj!jed(X5q4 zv$$qfyp<((BP zF4qf@s>|7VSSwqwlqy#q;KCvK&z@w;+8~$*&WiM2-}ORQ=xdW@i=D!tbZ-O8k<|wo z0;M(!kd#moOQEYyb5s)s8)iqp^ujStx?W0HDoEcylx{RPqA+`Q7z*<&`EMB9ijSZ$ zOM`o`N@YmqPXU3g`8e@4UKx&`YRwZzY1}!k%IHR&LK9G6>jn|wN#uZ5srtj?` zpWbyp_H})3LjCzcrFRTh>GR@t++5VHDFIcBWlh2L|NNc5&@? zeIq(f&jxOODB5=?Q&tyi@*z4$@oBh)r_Ss&PjaSJrVtf2l}=SI+-mRVA4a^9?Tp*` zSAZmkk}jAzXERn$6!-d|XfiJf^T%o+zF1GEXy0TXDC0R8)3zZi*`_R>oqE(6GccjI zMyku4awb@4RNv&b`n-v}i-HT6_s00>8+0v2ygjmc>fAgjm{AbF6kZ$mF%spVB4C01ULcXz%j%2qBR9@q? zmF)XdbpDJ__(k(?#E7Y7!fQu*Qct7yH(ZzX3U<2XBVm-KE{B^=O;@JeNZLLPAL%XN zpnZ(Biq~*fjYxX05F3ie}~lRZ+X`6IyQfRB;qO zQ25KKzQv~jN31&$rykzR@m%hw)#H;`xEqUO+Fv`<5XMoZQX)KT&#BR!+&{S12H9@z zcEJGL23)R^F%>?BR%hh?IS#yQfj%9Tc&Binty^CSWci`bfnwQmQ00&cN>Iy973rYC z$s~tRtP_c<)!*yFGQn3p8@P;|y7k1@m;FE0t^?k`mBxZA!Rnb#7uuj>BU|5MIaMhV zhHA7P`x(c{8(#VBwb@azgdMVx^bNUeM3jH6Fj28+%4LBZ5^$TL_V^Wtnlssrg-igh zLJA{$ySwKyI4eQv8kx~3gqkL|6@@BTEglLB)zqGQ1N#!!Chx(Us{P z4L1tHk)ndAoJfnv_UD2{zC@c+oef1p!Z-@)=X4_B(n+X9lKylG?K z?Pdi@hwIM3DVr9>{eX_eAUq4DSffV|_JlI41hR)1 zMihm)^D&5do)VkP6BvDylVM0R&6dXcHHJH zQrVTMqXkEnmQ$<)$EDw}97XJj*br$x+uFBqgz7WnK6U~#B$N?3xq$}8q2(yk-uQ<^ zwPWVEg{L&eBH|3=HWKY#?`@4Y|uI5NSP#*cP>*d}CHK616RVMSJl zq$s9p^$r@n=(}-eMqV#h(_w7&UG`sxd?14k@jF(ekcJma#FRf-Cub{a5HTRYAy1kM zrB(}Ey*3s}MoHd{lZY1QmoV;2Ow$QMIa-pPKq?Mv+Hz5-&ufFImb-x1g9bxjB?=Q9 z90h5#Rgxg6Wq{Wq;3jZRhzf^1QVLdlz%jse5n`?0$$R7P)lHjIgNzpun1bWh!aIs9=)a54jM_)~88ykK3sl+IkHLD700(A5BHe@mRgFPL%)_Hv zZVDNgQXIWRN5}kRsE$>+$pqo+v$-SZBiqf?`fZ`))Ym;BG1$Iq=2{N4bYn&;rZZR> zgHQv1$H%x4h(~Pu4uw(NuwuHwHhRps!246kJuK4@RB>AQS+dHOBN#~M*;;( zeS0I-hX!VD|b~K)dGv@_gguy^~QVjM5CIdssn`jjYiR@{}zg!id#vxW(w2GHA zj^-)M?mU_>wdH7S#02k+-&V{}u&FJsla)ly#|Q7LhM{(*tR7+9TgM 
z;VE`4IKx{3dB?GSPaCv{tD0d_6OKPhSr(=W4Mq(^FP)UcIqsu(+LktsCCF#$h2KJ( z)%t=)DRCI;p#!E-J#o1GY;%Qqr*ZYf{1~Dfrj3;+@1E-@3A@#2$ytQMRT)RQPj=V} zA`BQ+L?bw&DiOd6UOlygPYFJwY_j+=3VNJAI~Me{GPFz&u8m=cljpEPYm}N?C3KVv zva)-h>Y@(FOMk;OJvTXkT_AV!ly3eU*V(2nyCvS(K{(b0ugs!|<{*reYF3%W!R4y$ zhi!_M(VJOiDP}FtVA*2H-f;Hg+CJVDud8_tj@!6n7n5jkr3*r*qP6k6OYwSnurt%N zW4E8Y7w`8C`^z;2e-^6;v9%@vb5?q?8{$?5q!wD6|Z()ju4Q~7X;Xie%INPlE z1fUIsg8T4+{=Zf4U;Jon^uKHPq_U+g7CXu-F8TY83y)GP68>b>?PAq z*jP5X=d;3LJ=6)D8ngmX24x{Tb<~ZL{p4T=cx~oMB6$OsBCh2iX_$+K>T%)I_y-GI zhf)F}; zh2dhcM8mEK6pF!-#-tVP)1B=Ck3&;OrIc{;rcKvNz7KHeI|jCPh!U5Qg~|nSv5aFU z43r%}xs5Q--*2dxvncOf`F`^f;4g~QMeKcVY>dum~)evKU4 zsoT$TVaW&<%H$i)%@&PK7vB5M=G7n40iBJgfeP}xU>m_IigCPYGx>gabl(R=i!baf z`e-V5A~mo13sQ%+yIu{9Fi(ZTw-}`->64!JReFITi5iJS2GwIT-ilp;KC$s|va@qq zH+ow(>NLw_lvK;4lfE`dGUYE8o9RMJm$8yW^%#t;Uq(kI5+Mh-8{C$?);2uUbkam; zd+v37$|`G-u+-eOz;?I5G^c0zu`q^IUfd5hmkE-jj72KQe~yF+{v_+4o38rvyqmTR zd@m@zj2q8WpO$(GfdO1IA(^M1;Eh$SxL7((*Fqp9+8JHe;za9Mg{ zDxN2kII?58qZyJ>e_DxphSdJdL+aSgn1Rz&VhMixh#`4$TdO;%y!4vIbW!7ss#3NN;5iJ--3JttU{NnI7oCG%y|OlG`s1@182^Ose$W#L#P3n}W&TH_vTzGfB*+JlCG&fIPXqPMo zoNBhTTo_unx$9 z9&2-rt%mlKec5IZDZxK*_{^ZO2}=;HAxlIeKwq>~*;*{U=&FOi@lXg2^{m9U_1}5uQ_k3NfgHLY{yS_Y-Np1`g%gSqkkxsi>Y$il<{w;YRqZmT zwlXkp$4fW@lSV5%-|DNM>~-&NMfOVV`C^u-+q!?IT}Zn-|7_d1II0v0c;iQ>=}L_c zQKc9JQS5*%3c-wt@LD`+nzH>U00yTPB^p)0`t;V6UrA{jeO6=2J$bgBs(M@i|L z+j6oJrd=X@wNZSegCyV!N+o=i{OjcTkFnkK-PS=5!mEPK6nk2VMv$Me!mC(!Awq!_ zunMgg`kV&~YvkIJ+t!ufGWQbX-(@FaOO8CN%TL4LDZL49Fu}E&kQR+gmzwWp3!W$z-vb zdQ`!y*K#g=Y$L($u#+C49rqou(bWMt4o(~4{i!M!c@(*=%d-QzPNzE(h56d~aQkTo zO^^IQmA+}r)_uN`$dK)>T zG>r7~00RrF2_Wh=Pjjj^MFS{@+6Om^Plx9p>UY}X_33Q|Y19m|A*0!SD<_JG50@my z7$b&V4$r)y5h4VyA72;uLnubU51Z7!WPgPCq0U3c*&BbDuDRv-JW~(%m7}f^ZdJRc z>LNnTkw$dZdxVfn!9D8RP_l-ALKX2my_e5*CTBi!igBCOfEY_>p&_~rD`oWVlS7xJ zQ!)|s?zHYi>!g*)kBt7ISy^42#hFalyIu^0V6}o6En>wft>T8(zV$v5F`q!SHFX*^ z$e5It2hi#(eqf=#EO^ZPBhLOg*AFp=Eq9zRh5lUw5|d^T%*#LX&U`6@kCq)?lTCpp z!E912B!{y7JP)>#vbr1htkw@e7Qjb^)mKoY;Fa$V`hz@uDZSq}sAKyOSQpXTf;iT4 
zPpYt|?Cup&%gk0~xn+n|riQ%~(OFxvkG$w~eQeI{s}b9jy9=!=qY~p>OBdO!zmqh0 z-Jo8`XF8hS)@6qKy#K|2_~nj`8w3S31KcrVz&gV}B)0uW5&cht%v$l%_8T4It4`T_ zKlwATHI`XPrPE@eabnplnHZ>w@^|zEoL&n9)24Y8>E|t1fNSA|1|7Dw(f%-XG_|^V z0+q?Hn@l-aU*xAU7hGY}?8+a54Jll12Ubh9UjgUTEBJdMhZwMslMJapiuAOG&+{Vi z%M(U*$}C9|cW80~ZEXfD zdecA<=h#n{EiWRj8}TZpSNzdtc`o%vvP5y&xhM0Oul#F5b%3)e#t(=BH#`2mjE1xk zc}uy+_T6x+Y1JMHg=D~sYm@453P9s1NG65KI+TEw9M2gCeT(9m>t8E5M@j5v8!Dtf zw-9m<>49`G1W`P$xFZWzT&ZGa@4UG(`V8690t?KULiM@)-rStfr3-2HQA zkEb92o-lzYWzr8mfmUmZ;#2scP>9cH4%8AUsaIgKBUvL9$pl6>s7d&Wa`_Z3O~V!9 z2H)KrbnN;g*E~!olfGwW%eCuLp)zx+)qX~3(4#%xL}8G~pJn4F@KQ*72LqU9V6r>? zcH1U_<*ku*>GS%0@r({^_EORx*jdRG;2_MeLWpK5P<@Pp&os`VCUf-I_v583lrj%8 z=(B}guTb5WQ#MXF4~#FL<`*ZAHl>>%?A<&@kms-E>-4~I2}a||AN0cPMIQKRU=tN? zhvGn%tD;{T?j;@}!rtWJd@71_14^2jn~TrqP_qmyBi1iIcp!jC$^<`HG06$aKGtTD zm{Eoq+*_$XUM_7k(?=!jlH;askg+(t&T-6qoly#}5+YVJXy38fl9`4;m?A&|XH@cw zdw6UjFqcpH*51S7S08`&kT#^^G2E?vwC#5=HpBH5TZqH#Hmw*hn;WJTe3g3I7N;%q z!R6%PFO$Y!pTHo!|3EDO{`3F~?jP3C{`>@#ZOjb;TRne~lUXX(fSg#wx2futCqZy7 ze7dyaGVT);9I;f67Q-|5IVloHG0GN=Hbogyy7T>R*M~cQqrJkiA;W=<&CU90o_5Li z#}=?*yQ+k&xQfQ0rY*l6y{kIWckkdhXxIzAJi|@`PL|GCo^lkB5hY*nGc5f?$Q+*V zESeWTk|d4Z8En1`xajc8f+@`Ql@nNlT96`YBl38JGyyU~(2fWCT71+;4w=v7i7_=v zF6S5I9d2iPUOR%cmH3YQ5J*pGOK|)vz)ZZBmI?FTKJ6RseL(jO>OTtBna*`5rburA z@in;-f~*vMOke>yjOJw92k=4yxt_H|rEx3%Echzm=F=A7oy>C*jo$^z1=~@Wsv$#1i@>B(N9%EEOrUAIIB>Rmdwg+(Yc8m$n0;|)QSUiG&D5RJ zovt!XaeQJ4ntkDZVae#e07f!&Lf9k?Cd5yTUmfU?Z|1Y&gLH-`m!3(>X-mOS(HEnbacqU?MjYSFchr z4ZXe}&K`t(3Hn6Ulj9zcuz59%cGshejJs>%`7=6x=QJtalky8O0&LYMa|XJa1^UyG z!lrqPUScQ>ijVy9254rw5MmT;L8pz;!qClRAW8%xAJfq$Lh&Ny4}knOJ_EtY=sS_> z)6geFFA`P&ub3A|^@tz2fAioWYG~*gVi8-;CMRz9KIo=#evu z15*l2F(_cOXq!yjr^~d9ciME^{e=Z2aL_!&%{r1`=C^tYg*gdD^*3^#kgp~fLy*%L;VY_VSFGB%nPj@ zB}`JEhGroa1g_Me3#q&eImNQtPKQmx)#ui3~}?ALR#&!{V^@%jWHXekzq~ zWgAU?^wyYo%Z&Yb+qWb?K24vP(@oOH_TfO_yo_+-WDk^!O?`$*{tXBLK@!18j4!L7 z&rs%F!|I+~Xs;JLAMcPA9UTG(33f_8J60zUJ9132qpu@pKHjt>F2yC`MkWr?`~Hrh 
zy@flms7Yxfd{HL5lr|>RE1(IsHawkZ;CdlHWqpg7Rr}*;w|c_S=iLtRwToDdQ@816?|Oy+H5mE`vHt@m*54o>ttWtbKW+9?F)Uj zC*1H=_nuoissE!j4*FJD!AbR2b+8R{d3mqm7)Z-o*4?|QfdzhgAGwsbAHU)iGt?nT zE94rELe~s=?_%S~tX$~N*mcZ4e_SZ8erKvXtn06cGkU<#KJ=aaI-2OvXwxcO}&dMv#pXmSTE^6(fX1@X~ zgiOE)9OwUb7uEHx|4epNw{~1>`S_Nd_7d{RCto2*=2Fq4OHx*RJzFCusW#K(=Hq7q zD0HN4pj?#3`S(pgSddtFqS2ADJL95}f#!-=&GVT7I&I2yHn$h>^mZeKi^kU_w0tAb ztxmV=lB3719R1IVldg2Ct>0pEk-I)Zr39DAjU`&vSi&dx6F(A0M(*TjVdoKfqIdg- zg3|QO3?7f^pD97HulXsVN%Vgd!cYs31bI)q>hq6{E$tWqO@Ioitg&R3_YM0r67nG* zEjf=%FbeoxgN4{{^_$((*GlTH?v8bvW_m}FVTo!QMp`Qx+v4hK+oR>_*(;6ntce!- z+@H2ZGspJ^5MTMmk(&5C7@NR~{3@A-=_^KVD$!I%7BM0)hh`JXF!9XXrtTpq3nm!z()32Vn{ry*)l|Y67lG!tsL16Te5Nu;6KLB%txv(SG5-t zy+wUT={81lHl809gf{5fnM;36_tyG^oJaw<eAhL4lE^AG_Cn z6~Xw(pcLAC0L+NXBjw%N3oGiAOtnL=wTI;Y44Bh@F{k>v3huaSP;N~OjU{>7JoNz~ z-U!z58bk*Aedyy0iiTC{;0toR@m28z{V2XOZ`Bi#4N5CW8nn(Q2_+fdO^^A+8+PFM zgY};$L;f*lYWvoys5~V}@9gDgMF>e>#cr(WNU);(-fqI>i|C_b;D{pl>3II=+_9{NsORI$en)C z+0$>78gv+CF7^^5LS8QTLCcFORnO(7;m?Vcn3Tv)T}hLCRF6Oz z2v*CM%q&-=P42G{q5Gu{qK)gLFMVdHE%D*7msWX4)EizfW4|1bGE65zc@;QhvO~o8 zE(DTLOII)-BeCHS<3s$h+T}pJwZ(9rw}=%x&f4Vr&*n^R-_4%G@hnQ$Gt;X)%9QCZoTp7^S3*c>MIWLD(PN9<)6 zmhMQcM)D?H_U?K%NT6S8P5v_0NT>T2!pLQu{DW8l8DSR9lQ(1S<>Mo+q@$+AYGy6( ztN>$vs0bs140wZBKKBrU_Z9I)bIs!0v06DR^`hS0u*td@gNZp7d(RMGP1Iqgk^26= z(8^vW8IQC-+(kIM^{oBc2L7<;$EFJIl&$yaw$ry*9omt)iF4kEV!buhhDZua*#B%G{RrEM6__ka-)K;@E*0S6Hwt>78z96jcUlcB~) z);M5e?`wBNk@l60j6OeJ@b)i1Zar7V`g#$gqDB{p3&t~&sX_#-u>#5lNXotm85$w? 
zvf&d5=jOr>Sb$y>J^N77ch-LpIm4xjo$bw18)XUIaExQ4NqPh6?0vN09pzMcVZkMC ziWq@ZqF(65AHmD0My0XOK4nc-TS!_Ja>y+$O=ljohDXZyY6r%fM8hVt?Y>N9z^d0^LM9MNJ z-2^wg20(N%m&WA1qs+*+$)hz_m&tx;yBILv@|QeovWbv2vn3YO(i5{F%b1Jt_GxUk zC77ARP4hX;M1~jssIs}AUpJjB)Nu^&M{d}Jt=AG0Ca^@UoiA{6UAK1|rmTV8)#*3p zO{NGk7G$ot#TA>qE1r210LfH#)TK_a^Tu0`!N$ZPnX5)o7x@{?+Y(kSpbtxmkqC}> zRGRYU`a^o$*QUn9@%^9RKbHR(44padIi8`Qa!3CD-6P-$->#4-rLQ7a68%6Mq{+ZZ zc^%BLq8p*Yt7Z@pgTFdo06IccS8S;?`iD|`v#7d3u+Um#Png40;fZi!yZm|%#l=fl z$enqpw3i`fI+_Dx@YT8s1W_bA0tXZdZpRbiCnc~Q=XUWQ^RQrzrKz^MpU*EX-5ppu zIN*wpw_1b^!U%-alG7CJmY;`;h^wdrpxVh)D{3J4P8jNUTb)%OxGM8n)*&m2DtS{@ zTM*PaKb?uQ!L%K{Ou5j+#6rExoNdiih=L^AEMPkH^5VN9v$izTIEuW~FLcHxT)$q! zYE`y$3a5$6GhY@^tjne~`UF~m8rn57M_HNdt#-AnGxa76o$l@g@U;l@9;hF~Oh)9X zVCP~SVvE6r%mqVlx|$CJ{75c*}m>Mb%KX@yzKr@+0gq<=}Qq{X=4E} zl>C3hRtPy;+Zj9j9z6iw6j3~7LG&^Wey^EK(DxVoV&OLz{GszCs4yT8HYU6izKt!J zauh1h@!dl;es5%8)Y|k(4j~}xEMyAz(>X_HfB!2t;AuIs) zK<{iZEde4E0~?&mTk05k)l|MTQZoK-{5u%5`N}${9fZX22ppL{L&!+M5(a1ZDXZur zGX0_CSQxk>0f|1;LN39cW(*iOt`N0}@W^^D5jDG41N8E_K$cO*Bq1p#LLZTjU|tel zvU+XcHdW}A5%W5>wQ7MA{_sJW_Y6m0-JAP+e~`8DeBO3mG0e_Wz`vf3m!$Bh164t+ zOO=MS9YVt=swoqyGuMx$C0gdnap4bBqD>XbAl;FWm$_7q203NEuCRg5*TEbQQ{l7*dg5G)+-nE;rNOxJmWnz`)_?W29yA zxhvo)og(*V521$&apAxaNtSVeqQ=tj=`r`<;47H(m4E^=m^sQoYK?qjm zE^XpW&%x}ASew;3-5(2tSek(@diyRHNx5?jr)D!L6!DfN2{nS^>m$1fUqq60KNm=- ztf|O|og$h>f;QB(rSH(*);7m_5PYowqgc zJ+uza-9OP7+VyiJi%_RB7}We%>jzc8g~g<%f)QRSy!ucD^dg&a!ICTz|MS@M1HdC- z_<0&2!(GA)V9Q74190$I+7Lp{uGniei|uAK*#cJ^DywM~V<;30%eE$C_Fl4_s2Cf8 zuCH6KR=UeEIxDEfUniU@`*T%F@J<^UufAz1`ex-6j* z2TwB8BW%Bl!lEoAy*O0-y!Cy&91t>LFz?%;_bs)n~-2lb%)uf!Nm&7(J?sV=YBp~7%l~> zB5~@v$c4QhJ8*t$fb+YZBl^xR?G~1W`e~6IB zZLO;f$CrQ0=W$C~ZQ};G!?%D94Cep4KXS6QG`4Z16|u5){a4O|qLaRZlhUvM zfAgNTg8c83%_G%JH;H_davp0*)uhHUz?z?;n5wde!#2K3kBO;bc=6jqmY+nK3a=jC z+slUQmNr|)7rFp!ZC9d*fFN;>porUBS<{!FuyJFMoMKKzIhaSelxg`4sr&^DL^TkB zrD)`~dy_cc1ej&0TA)XC_8)vYZc1Q8XHUQ>X8q>SumUCs;PwH#_$ersFbe$T6>W(E zMUa&ZAw1I~NNmw1ae|kXdM(=!%%VTZ5d%L7w>#G~y&(l#uYu>I9zv 
zkKRNI$Hd?&g$;e!$q_+9@3`b!prSzaNY)5PWAY<|{5fSo+D2?%L~*F+WTVLsID9jw z8uc>V`f^VaN68CTV=3T=M~q0y;~5dpmc|)B?DpVwTN5yB)*iLg?v~bL0>hf|!@47a z)iU}5UxT+d#?6Zp!f2Qcp=)8ELN9HuHB8-B+y7BUpbA2~cu47x;b2Z_HWWB-W?C$q z(jTiVe#m*Tre|U{u`XU^0`@1TdvDyR=|9khj%uD|2n9*Hu}STw_}xqE z^J3Bl&J~M&?bCu^vob$y(M90iaO}5j+Odk6xTUl2%imuZjO8Szxqy2 z6{9G>UUGuO&DnVO@chD|*U{D8LV#k+th8+1x_YWHshz#|+esvE4uJx@K0;a555_&5 zI~|i!*u$A}9H=vR0t=y`H7CZ9Ein-bF!Wyb37npss+R;w(b-cpi`jrVG>uMWLBporf7x8x&k;1a#G35e=zE`r8Vc47Z_*iI3Vbqe9LJ;YEiGiM`jmR5u4GKw) zbZs?R_)bi%Ov(FAjh9@UV*67uKxgT4+oI$HWe8jOOfpB&`{X+BYCKgtq&lh@Z{m55R|x7jM@c(~)@nO=V~? z(A(#6f>Y-=y!%G!sORfS>It`j9NGCchbWdMvp=TS0> zVK-8}fShMM{LMDndOro-1;8*1a6Iu3$v1z};vdIJ|K`G|L}^998F29BPgG7GVy2a} z>~A;pEqo=}0)5*b`sHg`qe+?ohm}L~JKe6toZC$Lr3kf!ukQDY9KF50AK`xbgn^zhEOQt@`39GASIJ$D(WM5aFY%ddQ5$Q z1y8q~No<{r6q)M${h9F#{Uitvwy~1AM0<`vB|)_KK1VPdNfALZvr?*-jPI`D#Aw5} z(nYRgf+aR+-hc8-ga5`uX*g!fFSpz{jZ;IbjJ97{0biAWJ8tgzMQlcUChK3*GAtdY z9tpr=sK1<}N`O)iLy!^UoWSz0`w4opRwiaev-w}sGW4X?IjaD%tOvkp5=jq?VWw7w*^Vbe2XG0@QnY~pSrT(anXVHFBu-x*FkMvl z36Z4P!r*&h#K&gD84>bMXdyhMyG9OCO*uCFLp#%w5~jSHOKAl*m4pmCQbQ!V_XNh+;OskFuGrS%^DS zqiY1Kf)uVdwxZljI^oi+Y_~waHtcQEPZPu#{VcAt7lLL6njjDJe3K)mY0PWjdD@Cl zN{Lm!L!&5Kchc*Mx>H#FvtlxdR8#gsHG@*DN#<ZvXGPX=a$!<=YQSu(HCSH3XkjhUL)7c`YP(R?3rbHF6qt7+ zU*_W#p~S8cAog=_V|ub59=G6}MdNbU7&wD^-D;`%nr8DWds~3nD`@xbkKe4#^{9|g z@td_xIsu3C5KyhSL}^%xY2%5Kio%4@fdT;5u7b1xcumUy)@Bq{9Fx${f7X3pJU{}l zwx`oC)($I&jYJYXlU2dPY`)@BDIWC(=l?9-+nGz)-={BE4HwakVuwG`{cI?CS~cA6 z(gcVE+S*Wb>b|KCeq}&W^@>U&x!1g|{cenal)>$qSRxySG^bFb?OJ4W**dS~Y5=yH zl`v+ap_JxF;QoV)RdMfBR#z(1fsV$dg#C`*vBhf)mX2n=rab|9o6}@ga5GqG8T$fk z6jm0Ql#6XW3pgavo`s#5qyg@lsq^Vl%69W_3xi@)%Vf*|Fs=c>_z%%Xf6}&!zLm4_ zzghc#D_CiOf=&CaU^V}Z!1P}TEHC=@;%og|H~fh}=f5D3H|zR{@m~nk|345&>*yxL zpg`FIIi7CkzD9!@DAFxX`RPZPu%F%I*sCSO7;^jG?IOe*nF!yEzdqB$VC zVVW>xQ?u8u7l?W5MiC#9Gu-7=&)N&~?*f+Q4*|=MP%Z?J4XT7;Fq^LU7HCP~+>+G7 zFzI|KfiETi*%03VxFx0IXs!gvhBSa|=&Hrz@<a<+nFRg&wn7Fd$wc6*qLzqB}H6Y}a zh2vOA8C)cc@Se0HCQ{Q4mvY|d9pWztw1ti6M*4-o$G;#jXT$Lq0z+OYP?rA*fid}3 
zc1L%$0cgr5dM&T_T&!Y-Bf2XC=AedP>aOX6K1Y1bvh1)!##s0E4#1wnRPG~dJwEmN z%i;j#O1<-OhX=*%`G2stuJs#xc>k$fA5{uScmUXY0$}eS_SpZ#o{+7fv$e5}(|=>Z z3Y2_67xkxhE&Ho>mHw?=6`X%**Prc|UF}yd>j92`XjjctV^h0UHP5!~Y12?8nH9lu z%hx_Oh|UyDiU9To3Q^#8(NL_oYnF|P9oG0bjb7s#-!zcN`6QnvCzMW`JTz`H>pbQW zoH;%MN>Io`A zvo;#UfNefSOcla~z=!Ke?f6aYn&Xq%gz^~-{Fix&UBIg>p~1q&2Ez)JvwkFq!~|RF z=R&STl}ZOVku+AMke{S!qKI29&pD@~ky>vQKeUseJjz4cfA4Q5i<)mG{1VN|yCjS4 z7LAKnV1x|NuHJze&JNhNyUu;HuA%YNFOdmr?D(8RU#=PnRDs45r=%w!P89}Rk)XLH z7~o9mVlh*S$5av_i@f<2v_3M4H8NFI1cMz$7sod?^uc%YEd0TOcK@SYApqJH52N|+ z7Yn{2_zR``L-0cc2kqtRV59jE>&dEl{wPUidG&QYKilUI?tZeqv%9Pw0${KrZ%70A zx$;pBp-wdHJeqTCzC;L2op_~6GpmuAB141UWzVTD0ZrLPFY?Vj8@u%6MD%gO9DL8f zHN78^(Qj?~kzH!M;q3Gb)NOzYVq`7#$bC{9KW)!^DN!`|Sz$MoVal{*Ah3xzyP(1o ziM3q)r|2&0<=?>ABEi4H1pwnY0E`U(f2*>vjnO|cA^?q+s!2srGxHESRskVC!n=Q02rSy`bGx=peCFb<0l@_L{2bhOzUaUUw%RZT6#?}r!rkY zOOL=HY)luRVJxQi04Yzbuo;{}{=)x@5n)Z}96tEC-3+Tn=PxoR=7SYcFZ^P}l~G`z zErqXCfSOEGApoe!Omi3QTp)m&%mNa!squ>uQMX)<05uu$d};IBcTEfyY2;8DyIgXF z2waQ45VtNhk_Z?yZcy$D;3%NJ?nWXSw>AG#H6@BBszyXOGvGMnn~X&nC&4+1B-!5M zENjp|YI1WUpeBodR`rJ0TMh)~mB5$reEdjk&)`f%d;N6u{R0_30g!@mZ;`YLF z1W9u(Raj`L?CAo@bI@zAPt`OjWOplv0ekud0V;{YdWmd$u60j6EtJ!S#_4CKh(vQJ zT+keJRA&%BmTDd?fjs&)Tz$;U7hT+zr$lg#9@{sBVi4Y>yLCf$iz{ju2D~YGlC#cq z3_1^@Z|=zD`7|J#PP=9%30>1qbxN(y1M89i#F*RN>5-jLN^p<-GExTqn-~RLO~IRE z*l~l6EnHljz`69RI7Vt^o!WPKVkfS+tosD}vqooK$oH(AWTeb2cf~oLNQ(>pzI}B9 zU2e++z~V9hi~o?P^d~V&8h`l*0G9uz!{wx+wBT>QOr^g75b#v&F981h9{@(cAkgSl zRU{jE*U(7}+c9T(x9$I;Ly;AMe92#Q=oL#&5rDZsAqecj7fKO#!_pzO{hJPz>4M@w z9{Uq`v)RK{mgU0H5}V|(jo{7oXkkD-rEBZ(Q48C?LX#_n&xm;(-@yK&L-;@F5JJgw zEeC{ug}*@)!m@%^e}zdJpQG2Wn2XB26bf(mr$HJOjXEITp%9u09P)i3zGYh*Cf`85 zY^8QVfGHPBNM^)Vg>x28oKl$wsNkXLV_-B%!chGB;(dTAX_(eec3*8|=oi34vDZ5u zi36hpaa_Y0ITa;vT%xDW4eJ8{09vM2C%ayg=urQ>2gyG`yVyI}00p%2T#p)Ye~LoD z)5ri(=&>QmfLiQ8t4Je#&^YqZ;nlye<#TDQgt?X~9LxlcG=Zdf(7E@kVu~b+yVdRN zbBy(t7uuqt1_W|yfYA=nxAUkgrybTQ&oE?^? 
zgfR>+i#7$r5bwoX)q?i(i)t1I-KcouGA^{WoA#n#o)XFMsX^DAW6p=l4Bx5BC-{6k zZn;=R1bd*E9G-gA_?NOsW;WO-&JE%H_0!V1ak7A9Q`I59T&~)xp0SO>PCF8F;jRbZ zSf?;di5vl#kR&_J_ATsWVm$xLNYYF__)9V7|6Pp7exZ+@A^@zRobQif ze9O`~v4j4XVw~XPazN(3Awcae_f5bumc7s~>nq|F_3)6tQtWA4S)m-e0O`5+2xBd1 zv>~Vc@dR`*47}N66DPQTO@dkT{Su3AU(IP7AFMx!w6R*095T;a{yWrqJ36COA-xeNiqXwIoF^BL>0vh#Z4-?ug?Sl^auL~ z{SnA`wM5OaWpT!*xZQfacKD?npO8ziTvG)7Kwhs*cy>dsM(*+oAY1K=VQb9+sWZ%k z*uru^t&uueA26r^gx1E@%oh;N>(RI}9L*`Mh9l)IRR3}`!ON0=JDPp7N_EL9D?7`) ze>$2t07vsKzY4fu04OA35n@RD%fAI-K`uytpS|2KL5NmEy6*MYV$Az&efPLcJN(Ll zGXB?MY(4bb(e&>?vRbw-y6u-h#U_oDZYYHY**lLpYE@6qmx*O8L5V~)+AxRj&obQb z9*X;ABxL}MB#1)`-;Nk^o24`EQQ#QlVlJ}vD5UdP1{#y$-?cdVbmGLJLGnG|_J6_f zB8qWL1OP(_z&!E4TkOAK_zx|zqO>g_o1^oVQY15T#E-0`NxU*Pro1#v8?z)X#uDQS zdZnmd5;FW|ixWo8VZlz%b*0&MvjeklUJ*fNM}?Jp)WB9hIlE`@=g$;IZ?7OR<3Mhr zwk6s~#~Fy<7JCK2V)qCv{YXwbKZ^5)|EI<7_`!$cMwaH2aJ6QsFan@QhWp(kLj&n5 zTK%fWRT&b1dh9fJ5yFKGsK-snA)D%edW?=0ePamdP{LfMH@_WfVz_9dhw}gGkt;p| z>ha^fciMVR?eWRnAL?-JU+S>vU+NHyAm>kYIEWKDW&KMXlKfVO5YzAgb!hm9Is~@g z?Ux47;rid`@B!-Ul3lw7ph0nK+&_j88mE24#gwd zcTY6Jk}{?N&>?Ci86xy=Iy8&5_!k`#Tbc)b;(1m5O^1#rZY#&R=+3AZ#1c3zDk+ie zzt!O%J#s;Ay#5Yb>2mO(F4PM0;nr(eku>kNHYcb0*p+K{6fWM&V2e(W0sVrSC4dcW zPx^?Yf-wGH*A7;4bAxA8FTW5D?>=u;o{-pVd`kP*#S9fHxk_j1j%y*SDf6<6Z`N8 z7BcjqA!&a{A7Vcakdnuo$W_5aS^*JI3QKI>F$V6Aja-1P@+XhP>A}l>xt@@3XbVGwhT`KE5lzjRu;=s2o(7ym}062g){83J;Oi2MogpZ1`4CIIqJ{OxvB|ZxQp+PP5)uILk)xh40k|&V)Q=yrl&5q^)|ak zb-@9N4pEIpzh@(0@N;c3+UyaUtoitn_L9W+4CAPGIa~{nM8m;KfBIDtaZ7yVR42ZS zF3BU!!JML6I6C$eooEo1KS)VpugyYD5AdA3x&)=w!)bS$R{o4(Moe@_6ugE!nxFh` z=*H-G!abAh;W4Jw7<{opayJwwBefNT@0I%3%fRq~&(&`;bLHn(RasHLMnCPpaGBxV z4L>NYYFPGb^h3qD+Bx7p{F^9!8xXCQz49`CkA7g1tTPqv=kmv3@0z=_k3=jW-^ld# zodRn8zj8|FwlfB;4NHCKXJGc=%-&+6JoW5s_daCN4^QtPQI)tlv^HyC)N`Uo~#kpCtxwd1` z26#a8P*T*hU(&tYDgLK;b4x}6tp*512LP@~_CLe%Url1@hkxw?E6r$CnN)3h6ly4q zLQh+Y8d@bO4oDebv5uHcP*XEZe_JD_9wcH8)#`jGK}Sz6m6^Oii5I(&`9!Os)($rgDskV;~tT@cEpYkq0mQTkRY;P zHrR~`GTFKa^2Zd0*EE0K?@c>r3}5M|Eg`OXo4-y76h*lRpL# 
z9aXy?YXi5C3q6fJsiu@Cj%cQXY$y4#TZmWsEyR8luoM%OB&khd7I9#{MYSq+t|;hU z#uFID`0Dyk?%XDHYi%CUVA13pu1a`6;jdpjoUffBRKC2Kj#lc*Sd|`lfbq|d7rM_f zlmZ3N9}a2S5mjN4dl-&=_b1ghi`j}BRZ?#;K-av@fma=P_5cpdyErD;2dL`75Eo0TDHe7o#39DdT*mCvVGBqh z7{aU(G|M=lQ+{4P_!*IhYREjxFoNi}Lvxm4rJQb7Z`2c&N~c(nx0}L-`2++y&`+c} zQT<=`fTBOqBtSTOo5MZwTs?V@2YmtY&r_<5fwEeT{+V~W6v(B28Yk!I4&J%dU;bql z|8`g*|2hd{^TDXx7`JCtv;`VoPqb~>Gt9%4FVf6p=s}!?zdSB)2_HVLkj9RzT3}7ejJ}m|zkAn2+G-`a&Y`y7) z5=s$*3lAJ*{~8xbqX`)|1=BBH+T~m~TnLwNTg&@XZ+nePL_|mjD9qZQG+C8!&6h|D zv2odiQxmXIVtEd*pGSAYwrmdA50W)d2$P3119@5{@sH06IL(%jZjs68)i_Rq1k@{mm1PD_5yozJ<@9!7`Ovk62O zqcMZcORIq?07;=KU?rtLlyU48=K-PrQe*@}FW1wc?4z8PWzHK}(2grq$qcN3B#gy< z!4uwNVIpkT`?$O~$h`u=$=U5fpeN4M78td7!;1$7I7|>kBdkz)d<`1-TLM>l-UFg=Sc25n+4B``=?tav#F5ZDvT)Kx(zB z@dmwPD;SHb#tnLF6F}0!i)^}*mVt3@XCw@4L2Xl6?kZj=jPAH?EEfx9ty-3$SNfyh zLVj8wFOo1F#y3VWc2^vCqw>(y!66{vfMo3nPU0`UB-*t=C=SDbsZSoK9gosNx6T(3 zGd2W*CvZCTqP8;YSOnA6XsjQ0pU%(BYgSP(X}9W<^u}iC`yY2whbt~QfhBi0mVUQOdROZB&^3qu+92iZDlFvOtVNDx**;v>k~BQ z&>w4#qkj5UzWj`qp={i{Q@3gZS3q-iSP~?#f5JT%=%RP7-xjIT%&*43o+|V zNbyYPqL8?H%H{0F@QU*b;;*_gnN0bjC!k9k0z4o7x2GX&V`=F8ui9vvDC`Vaq#kgiV=3WeTMX7N88NQ3#$GNj)Uq zHN1-n!viqAw_)BE7mS$793*&vqRcWxF#TO-59%S!z*3k-%0+okr35TD#cg%|P}gU_ z=79fZ8YTsHP3g+2ff3Bm+u|NDxX=Y5th>-@5$G%bG2UGTbw!fXedggTI`>&#kwy|= zYq*m~;OcP@$%M)DB`7;92U8$n6Z_XK2`>N?!_I zg6#A)BXp+h@EFJ)PWRQvFi7wTX&xdBC#4Q0%Qdqne;^SZy1|fklvE-?pKexVhfC#2 z*TDd2c%4sGj2cu}4x_mHy~58iR8H$uHQjDzw_&bdf`Jn)YX`}s3WoiW>J2|m>H@f& z3u9pGp1Ho`xOy9{4&HGsJClp@k0FTnYacjcK^^DrG%dE;(G{aG+C;yl_~iLKCk+t~ zgAbx#b+9@Q4nk|Gl?K|~ns1ybOFPlh-Dv|3#*LfPUcq43p}VA-?Jv&X0;)uiSptu!;D=-fwO~(_bbg1$c)Yx}b6^q)xH%v2-RL zK;q?U&H;mEE9}1RaMa~6YV&x`w_1Y{klC@{eJ@_amnbj`C2a?LOuctcf2~3M`*P~{ z0`e=n7HJI-zde8f%>OO_0TxmID|!KU2Njl?(0tD|R(4BG$qzl2)FX#9i^YT8NWepo z5bd1Owd7Am-Ehhx{MQ>2Mapu=5F<|Uvc1PIrL`h5a z6Cs36$&w7A(y-qd%txl0?K?PGEtT`Q7yy2#b92?gSa(`!!gI33>c+;WvTQg43H+*F zk8f@KW8ybQkzt@;MmL08g20d;bx4;*ka%!68S|T?DBl8fb?)C~P?D?3rT?5dp5`R2 
zIM4zV4h}GSjUqD`difuK!twEI@aP@NuqUQa=|859$)oc~gd@r}me$HB0!s^jFQT+NX4G#~n#@n-O!*Xa=l*TTm;NWj>gW*s z=0FDlb>fe9aN)v&tE$wcXh2VgSjA9W=_!R+10k=UZGGkS!=S=}{c;I`%Ka>-<0Z}B zPV~6(F%26tDx#aFXvxnO`FnXF=*wL4)yA}Y*3YQWDs`35V zjYzoqFXZqPcbqm{^u{H%WkQBG+TXH_Gx$7q#^nhm;@ED(E?Td9<|F#0_E4Pg2yFvs5Nbwy1v&Z_a5dGsN{x|>7RYl%D1JG=}OGF$~ z5P2C(6uO~4YU1*4<)$+hae7#qu+@h@LvpCzI*bhpW0sx*_#Ym}(m7GHvb-E%npzK~ zF}z{xZomK-#Xn7K;;9BdfQc#CYc30iKVo8Zn@O_0HwOs<=8fM)48>5xPSP|T(4wY{y=0gAb+ zE)fJkF(>&Z@vfLJKbt!F59Ad~D*lGw+-U(W_U$|T7K#4{{D$^V`0clg{SLpqyV#Wt zgzxa%k3ZqJ_qRBwG76=$TMbDhktkSU`<$ z_B`#%itgV$S)Gpn8c`-dBO2YhL){Co{F;KFg%6CFIs$wpK+&e>C{1` z!~9c4e?Td8Y;_=uyF6RIGlYzrdhty<*>Z(h_HJIJcn5nZe4b&HufW!~ciez~V-=P| ze_$2Y-K}VsnIR%^339<-=}ZO+OKZ`|MM|?j1F#Bs75d*;MN59E34KL%?_aSBab0i# zR)O#vtMK}lik|rn_Bg(SJ^IjGNEq4w6fHfIHsfBYTbCa+wmV_^=ah^qesjT)i}`(S ze{jJ;-vBCl#rP6VvTnK0zFKYEyQAIyj#enBZme&7oq-*IsXeO?*c{)q6WlgPO|e{@ zS%kiTufrp%GEOw0k6^jvdHAGc8-hFf3p+I!QSZ#jpU+tLkBHTdl20{K9g-qkSp#za}yu-0Ja2#U4 zP9&4HA(^qF@H;m$vj3SI*R;Qo_{CQ*1e2*6Q{1y`|6hpu%0CeGe;7LdH=_O)O!T>E zLvQl4;U?2>{Tzl1>8$JNFNBY(i~qhp{Ie-l*$%ej7ef(rvc`MA0KgYt>r@fv$g z9KB>S{n6WlPd#}@z3t;9BWHoT1)cWzPMG}-tUz1SgMH`I&j3OAzvYlW`1Jpt4*y{) z0w}XTOvS&Y!-RjO!#@b0xj#(BKM0@ZjS0#BMfd4Q|F*R8 z0hTs5WIpV>srWkvwY;}}bBAwPZ#8X7(A28+8%4Voq@>89~`7w>?=o`k8@~K zINOqMS9rKX&ohAqq{BZcGuwYphhieRlF~)pav1M1_}f%u{C87v<^PYV82lGgkxt-Q zRW{x4dz+iv6;UtY3x`+p{g-V&{K(zEq{Bb7^7nM8VFfMTq4GFjX@W^#w({NM`P9=c zwWwP4VAK-)H%`&(uy4?m?MU^`_w1iQJ;b))5ZN2Yrn9#xvJ7^ywE87k00>6`UJ8nO z`paKB>B6box=-f?{}F^zYd4@=fFRude+J<@b1QE5KN8_PSMev<^V`|}YY=```CAZb z9u-l3Jfk12Wo_1>6VSbQM;-sD$)@jJ;(H?00wlusE^){sUGL8>kwx>*n%w5}pP1Rm z|H91vttS5`W`=m_lw42_0DHjs%inAA`g=`Y_+69T{-Y+3cmJy%Nk2|MVWj-EpDv-AQ zo{(53oN(@C3-t{d&roMlk9RHRhI2=7d&d6MQ!h8-H?9yMIXVaAN$9*%x#*j~on)AF`*CvZ7a(s==?TxbN2V0`&)Yj+3%{<2;j-`zXv3Q?Tjt#o&NQ(2gp+I z0}`RX2PD98YygMSfP!INj;P7-@eH_V?gpm%QdW}jwhbT7br%(mCm)cc?`D?mvy(O+ zJirDWDn`Og8XK7uVmD51ZmyxRSIu;p@9s8N0bp7oI-pI%46 
zvfeGj@n5P~ynze;SFuIoc~j_UdBBDAY7mQ}IGV$Ffb)KI0Ngu*;fD@PGSTdGD3J}~X;wqS?gCNC@e(F|brm)2m4a1J+rg6?#(EZ$OV^?D z?mJsiN#@u)0wFf66rsst#b7lFMnbv+AbSqd3X*Bk1Ey%TVMWXnWsId6XxqLlB7Ej0 zgy90fwtq}VggT-rW(+&mKQR02e0P4hKpMxGvW2^bY|fgn%M)kIzC7X$sl$B09ScjRY{HBU2!P<6ag9tZJS( zBBEuutnqfc3j$&a2WzmH!euhiV@pJs!TD2Vy4)a$EZz%xi_{&=uHwAoEyn_YK zpTR)Bx%J|Sls;YkJhm>)lcmZ`Dl9jFxJ9C9*e5I2B+f{Z z$(nkMx+wSRwgVbTs^Yn|h4;BHun|9?I`9VqYIFLnb>;RzerKe&Tz^X(WKX8qlg8F7 zFF=;UL8N{WmD2^3bd-Df@r@y^y%#ry!LjjW10ARN)!TahiZL}-h1Z)6IFL)-4y#DS1P0p0hLxp-&HK4G?UV=|$0?L7gjVwc449B) zlYDdfZuS5+vNIXv7!oLnYGE^HrHoO*MD=cLo8&O{rTH{08 zvUhle1=z?>pJ=h$r(-$1-%=^8@l*O$0~(4irN@9Tc-RIgd!TOkE$Y>thMg*#KV4OB4EkL3*y89NR}jAC1y9MaU^RA zx)4?wQ>;tyxS=+euXW@-DmF?*J7^|U3p!sfXjqsVolrXW(k!2~N7&1)0RPx-Uwbj+ zjm_AKM5V$Lf-%(HzGDrO)OeiE8hBXeu|$VFWfpo9*M>3NG&W=yQ;#!3h38UL5biiB zyN3vBK7Sv<)N>_*q=b_50e>Ccr2-0%r$f$ED|EdyZAL>HBel(>*ZTxhIUF5<<>k#J z)F1JBRuR!6cxykGQIllIaAl7PZ0ICAkA2jV{LZnzkueAWOcgWU5z8{Ck@Lrc>v141 zF6aAuQa7_Y7kaWeIhi=?ba(OCU1u7(a1@C}Kl13n3(ktEWa(UHmWGB6YS|oQ-*7AC z3TL#vV4s^_LU+F&>-2TJI0?CqE!VywL&@z)a;8A4Sh1Pt4k`9V%lU4 zAAc$clAgx~X_I}}Fq-RHAND6ij=Mom1)Rva%=LuoKX70%UrCn zX}C~Y$7oDtAF_6DuwS}L8(BDC&hl+o2GZRCyRp*9rSb6^I0qmHHTVU;>*1fK9S`tf zTbl;kwjV$@AH{ksTF2znf%J){Riv*9%Ah34&pl49I-D-SHfU$tG%n1eR5ah+3Tqof zx-1Ra_@z7mc0=IsW?R+u3yht@mgC*=g;f5+`S|fNZ^OPXT|83TYDQdN5 zAzvBQL9|dq#?sdE=Zoswh}GbauBom@>NY%2O0*VF4(0M=9T|+VI;!37MjE9sN5uV? 
z#1@&PTHaSzLt+AK2)LPbJE#()K0=q~_?0EOsC!X-#InRjrH?%<#jou~DBBX83@5mN zfu2>%&VUVh$2`k!W$q)Tral_H0GAs}$Px>n22e+)ixf_Ic*tI!K5ycS?OFJ&*h5Q* zFOLqJaSQ8B;4rfe>n1K&*;pQT4*q+Z2zYLGvD_&}lec4ym5)1fZirM(4O7}aQXU?+xX#=;c6vn0_fu3xjaPYTc?e5#Rg$^NAfL$RZ4{UqF>tS#4Rg9YZG6W~ z%CZbu;4i_j2kBtN2;7t!5P3=91%T7z>}cF55EF*|s9-)(Of%oX#aPBr#>GOacfD-y zOO+uYwhIU1VaKR4m4zkj%iTzrbQ3uiw2n`r_HCP%f$KsbRr>W*3isByuvQg&o}Ec_ zG5I6FuJO=bX_`s8qfj&@3P>7>%Al8n)V>>qqRmXluhCStgzFCN%GiGu0j=9JQs6_!ISJn6Wza_Z!8feUTyXi_qu(dX0W4+659+2UW8(JSqnDjDxa6`}BWGQb}GRByT_Xy|8vvTJ{obi59tG@Ti zfL6SW_$ks&s@4bf?nu}ap(Li-nx&_}@!s<+>=L<4jUxG&qE~w*xkwj7vW8$-YSe<1 zv!a8eoTNl|`%fd#m>$hI!v#{}?yFd0=H0C(3sKu_a};>i|7if=XN`;=ms-58~pK>PsEgkUvB-?v)(~2mY{r@ zca?s53}M3D6*KOGJFgDuio&6D-UAA+vCO)_(|3!H{CJ?wca-Pt{05?s<4fl^du+VR z2CU8$MwwVV@K0wv>JLn(U^Dxzwu5d&^^0Duf{q#jSaaR@sH}tq%VHwKvgME{{ z(iQ!E=La|U{Z>ysuai~n7U24Z83nlPmW1?)8A(|1s-kWCKYz4!Uu8(XL8QwO?Tf_Pc_8HmN(DaFWF zZ6S72_~Is?(ZXhLtfyEj*M%50UBDI- zG~bD;<{zrsg-U?K^K~|Kr)>3Xr6R3maj`7{nAN4WB2)Emoz|jrZZSo~7N&VpV&CSmh4$t|;Smu5T=g~A2^Jugp;T3kH;e5@5&?XXN-)4iO zMSTcdf+FAK;|eC#U?NBv7{o-#Q-xOp@O^Y_5YVDnLt+sDad_6SA|M(cbYUpCMQCj{ zJt3058?GQ?Sy^Kzvo0yksNU5grQ(aVl>_#eAia-)j=~}&jrZ0vvTu@P>C-2ctD^AR zv?c=vk#Puda)&P(LDdtf#v92ROz5fL=@z7{)w!qMmpuA_NSHR7;^52CM9|J303 zPLLYc$52{G4RHiaB1dn^z7L%QuJotdmoLMwi2ois(Ew1^fT<_OZTWVs=^2gil=};Q zJv{}WQschhxCS=_sUnBfzS$-QVIOj0)ebUMQp~R#Ch~jmBgjq-lJvvKvn8~KE2TIn zsXVDFYCSB+#cK~vVw>Sh?ao}Y2RX(cu(5{Sey%%GkRL@&g%?oUZc;AS<<#WCVa&$u zKZ1H!EFnu)JA$Hz`G(oo%iu^nT*VY zvTuDp@M6xo-DWdkUC))VSxK3 z&m#_Lc;M?zDki;!bg=df#x~c-V^J>x4UfzrCb--Gny`o<6%nawibLyl*B>OwTPpbt zqd+sSkV1Q?zRYJ=G#I|qE_6RvXe(#izZ4;N$0A$UFYezR$cO)W z^6+mVE)8gbeuw#~udVj7RPDk2BYKZFMI)*)QJeQT9P}_x-`CABAXt-HXVDZW?Enbv zD;Xy!%HEgL;<6|4mm*GMmc@OYv1^m)S>4}%A9Qs)?k+an*F^1kvsTGA`%U*;C3Md( zpPmrQFDByFh;S&oW8Bdcw-uETqRztdiDZInIO7I{w@kkTa;83#J`lc5U;9BPutk%e zJNmC48w6s!%^nE|BXM4?q(5=$p_#Ao!2?O8dhKtLT?9?!8>Dy|?!;^z+Mqm5QC(E_ zo+K=7Dm_7MYaoycjQCKUiH2hTa;?O;`4vqXGhv!C#<4J8SylBcK)e!{7}Wqxn^t*x 
zW`lSi8&)2`Rt4aEz$}l*ySw%bLGT6IE`MPYv0qI9%|-a1_DAgNlL7mT7>PacK-V#~ z9(Y>%Wa&Oj2N(<(rjc(h^37OKD1~H|g9ljo?KD70u8k{?cr$Y?aUT)KyYmhTo_F!z zt-&K8LV6GK_LADaO)jskd$%~Z*L6YqcU(bucX?I0-|5}815rVof@PaS-vLd+eNO96 zm2yV|!-3gn7NyPv&%fZq6JE%RYVk+*z`QIbipY^%qmwE!jLc612{Hy_g)TPSGr#$9 zgvn8Vhadk4vk8>@%=zjvtw{PtW6&kxhXy{Jc-{#*yG3^GcP;q%;)$W~6ijw$?-od< zcw#U3e7KA2uYR#h_406Lro7E4p#Tv`ag=Y0bv7E6{0??~%ghzb9M z&UK2jQ8a56okir)yN7{OydZ2Vf*12ELO;Dcu9obg7{2v84V%d8&#n^Pdb1Y&U&HNb z=FZHp53xckK1~BR0man@eEIA_5$5(OecO*zQv4A5bk+NpU=nbcVJBu71?g(L4-Q-& zbzUxfTzu-U1ZN$6U3zDD2u*SBe%Nj#{xCc}lw$R=`gcw1o!Dpi`|P{8diVWjuXo3D z`J3zO2=|SjJh|^)$2K+$%+D#W*{hzd+}|?uX*wFMjzT{`<&}gX=XgAn(~-P+@fWADp?IR3c&FogDJijoof8Ku)-R-~Jpxmv>KkYsX@IUjcgD&CBQ8M8~y?bQ37%aih z-H}p0{CP`W z+S4sA#dx>A5s8>XsszD;<6MF)QRlfA^D-*wk@MOfBVyBWerl2>C~Z7J7^iA~e#+q2 zha=oKi&*&LA|Q$(51*9=v1rPtAZ|FE1U+{&5wuZ;YFFXjA?W##ZY9}mK8dR#tcs`m z6e0Kn$%l#iTy*yu0QA&0OtBm6+){WJs!@ZjLXz9bxgKV=L41ZpGb#Eow#5|G#4{y~ zo|YZB2c>1VBf1)aXoXE!kFPW)B?$X7KC(%jQgd6tQEv>Yr;ea}`UQ03H5bCp z@I;T834A)w0iq335D_WJp92SSAM?wmy+)!X+Qe&e!Q!u(+x_%6y>*XY`(3|o1>cT8 z5kfLH>LcjC++2^pd3HRT*}rX^f07)u_xHSy(|Fc_FYWC4-@6~j7t25bBW+(5VgB8 z;z;)6LDGLgx5C=A32qJ$>q8P%DZm1Zqn2Awwr&IO-rG89*k*k$^s?7uHvOo z-b&)xx9!}Q4^sTl1;q!3Vr@R9=@prcf-vctmCYTTXmWvNQte4=IVa6S)AP&R?fiAv z)Lfu^&;a6-pNs&#%43}@Asdep<&>^Fs)-&GydgdAaeU2_DhprMWhD0j{~{s!jw=uo zH=Rdf-!<&s-m?d?NwL}$sRsC|0+Y}nli24q_=0tdEV3LK;cn(%aXM-Q$|GUDkwD>d zF?a)k**rAGUZ*QQUNo4p>XL_#jD9G2O-LQ#sDj3^JxNLYh|RE6z`s7{t}ReDt#rfl zxIMKCESWq=p{KquD-q?qO%@Z1sXo9;LRW{g?$w9|HD)&a1 zm%m)RR@czD77TAn3Q8Xh8iD02PVsG!dUc$67V$9$-AF{z&4|W@GG@yCiR5kG&7H>b zY9exwYdzQ28G^XLW;{5#1^0wwnk$GLe|LA}$u~V@-4T*d_RUN(MDOQyJWA1SD)7LCS6ajoD zszoK|$Ej25(~SHN;CUsVoSa2sIrvBHBC z=UZ2#(G6HA2$W*Sw6JFVoY5KqDe40_d0hiuv0g>HC$hGm85q@Q(r=g0TZpQ_qoO-~ zRAv$aW8OM4c=Vrv<7g8D1`CMT`^*S8fGZNqEBos+e$_{aX2lb@2i#1a6AL?5LEbfuEYecwZRd5K<6*OjHs74?u6j^d z66-^M9hE2Szd4)LxonGM4sIWU1;#AF3HJ=Py25VlN2r zeH^U@SpxS!3s4D&P}A_ypyW8X 
zs_o^S0WG+!s2RPM!)wmr&rV+yJ53H|LKO(H!SKws&Mo|i{h8U~iu6TeABX+Dc!u_D}ILi-A1vGF~x@G#j31UK!7#{v}xXcb_4F*bJ ztBL(JPZY({ovyuZJ_nS@OFS>_kIVP$RbcyQOs->oDK5Y{i~J>m>B%A{^?Y6htj(Zr zWJ1}s9E=sO64%3G=fs+fKDIl0E_QOMt+b6>!P$mBt+m1nW!;ukx-@`N}b)p6m4 zZxkK58Xq-@p*DEL5+6{aVT)3(`7t}kf|s7fp0HPRqWD1?_PEx988bB4eK+H=end@7 zrvmnNvfGN_2ejZTn){(xlr>lxSL@~YN#Tle1rQY)6chn#oTyo7df`RqBw8{{^(PFP z6X^CX;L}WQM&ioN+gjDn%uVr6Hu@Hnrik`xT*S>D{WMDiAp;l3=nT&IFlZkF)ucGT zoQ@MG8pWIQ$+S#c$`v|7Qu%AxRP&7Ma2Tb8@j7NhCw%;OK7`VB|_Uxx3W1m`uCb*_D+<9ghqzas|+p znC1$FZ7*H4(?ML*CO}h<8rI5fI$YF8+%gkq`*$XVaL}aSCIo|B7rtM3Vu$U3IptIA z(^?XRUMF=j$T!-g5Rc$XqaUaks6hApdY)GD(`@F;BV@c$9Sm2EH{plyR)Watq13%N z6zzaekNk2!+T{Qwc*l4o{MvzaMb@XZ2z~S!b)7k(;DqK`68sga$NWpT)>ga#t;8?B ztY|}dO^CHBs9BUL0ZYv7F?Xa__Tp0<%qKba0;AiorVn|T1J`vp6&(deA1W-1aLVqk z*|rC;!@fCQP!Xenibn>6(y1C8U)#f{tw&{p!q~E<@nI02o|&Q6OeY9~*Ggl5gId2; z7Pb~On**$Ju1ctG#MH@4(Wo$Wbbt~@?-AFImRW22V1J8UZ86_ihSUCShVLyDw`pOg zYD>TsJ9*8L84^+aDat}*)yejDD0u2xYyT3DUR-)CC3J$6ZG^zH!Y#@V=cNbHh89da zinZntb#*AkX&FiMgjcthy}s!5VfVSVFe~53IQ1aQn?VpOy9j)gbP!g|v z@Hb^=z925yTk2v$Q%KF?;+c>~&l;mi=6>cmp&)khuNdZRSv2Wf%>hu74Xb47N!2>{ zqIw$W*jyhjeBY)OrwF%F{helxBk!(CT*SmPE4lH+H^EGR;R~~*qA8UY%MBQ_pG}mE z;XvuqxaTt6y zO?@&)#h}G2qAN1tQ@o&CFzLBdZeW5+73TUtDeUrDw-!_aZqoF-LSgQ9cq+0XFndw` zd3kGS&eyt-_K-#L@m#yiOkJl@bgxzyNc>E-sK)@}ni40!Nps%V=C&hwzLcFWTTJ@4 zL3D9gpMjdt^ce)|TQ7`8cfpTkr_`u)jNu3v%i^No8#Xl^crq^C1IYu(Xtg7WOwU`_ z&bz!LkKuhCvvUv1Qdzh9S+_9vMMCKf51IIPyvOXcfwLiGV{vSOWq}4i4ZmffHhXB1 z8hN5c+StMf^nUB{1esU5w8|4$;oLa`D=l=oSUkXVP35Uzf{E+?fIVDTg@RxQ<77~8 zyo2M>R%U&=@Kn8Z7MvdENv`oJ$|K4CQ*$-DW1Z<)P3K~UB+6*-GBaK3B{qdpC3v0J}SDbqC~3i zh=AoNOa!kX=1uX5R6LRgRWgThN^0X+$9xNSw!kUvyT`=XLbIQ0s$x&iK9-cf)9HGF z%_I6{I-9;u5Y3m`rH?bQsyX#%v((&v;cl`sWj|8W*g$K!K4h7N5t?g0^t-y0sxoPE z6iW#P3Z&gNl1CB`g0ZP9)R+$&>z~GmFnyf$Rl(TG1!ewXuXSY-1>?akGNSdVpLYNI zZn;NQ_-X~>ICzzmO;mslvQNcC2Sdeh3c=}8OlIx0i{)x&P`>X>AJV(uNBbZ_ge`mgp;g&ymmu7 z7NS$L6t}rmC=jo`6YJw?3ShFr^2)~{m>!MC5Qt)m9f&4(UOptvl-wB^3644$##qOB 
zJ9q@&!)t_ad*Cy6Q$t8Tp<)VU=w(g%rp$rpScjoi{B!TK3!_jno9y1VYds#qKu_{9 zp`AAQZgRwHr4yPc>1Ah0WylvMws?GAx@Pkw>rHX6sF~E8_|gm1i#{ZRy)`A^JZo}j zOWCxH$kt?9FoPqcgGZUJEr(xAvEuo-5_ngx-<`gMe|p+%Z;POH$x_+lkc6Wma`pE%5Vjn+jk5wnl%XPeNG z-gXTE$nmla@=?#@rl+#eROF7>p*wOw{F>GEZsAhc*^Cl!q6j=! z!@aq_9+V<2%1#1O4{yf-#GQ+L9u-Uq);V9P2;uGh0>CR}`My_)tKn9S*5kW`e1j1b zoD)_@>9j<&8n(Z>fU<6mEjgTcD;6(TKdH2@#?I$Vxl&TzNmR{})|i?7O&}_99q#6~ z3gcKNR_8Q>&$h!n-;^pMj0WT8fOXBa+KzS;m278HgHP{mH!o?gnXL5)meoal{ZhVRKEXIS{$#Gobc0$@*sdGl6H-@mI^Mt^F z&*#?9!0tWcf?T!i?p%^{VbI{2G5%asS55JAj}^)K<29n5){-EDPUUu9&1US}zS6K& zL6AQ-TUu0kxz{cKO8{SX5aE&w-c6$u1jaWXbu&h55C%)&_UuJ-xIF7DEbiA7uItVv zn4x@Ic;5;gj(d#w;d?${vMyzbX8VBBB<3+y$&|P~5#y39>q*GV&?8aOY~&V^p)V1S zlask>XRXftHY63yMP%kMqm|vR&?n5`M$4NywCs1DSjaG$yL2`&kX6*5Z|I^WO=OWR za+xP-?!Ia~^^NIu&G^P!khD6H-Bo1m3pDj~$V1{EJ(FY4!L-N3Ca;{jn`EYR>C{FV zDI^CdV(XMhY=ByBA&;6W4l5gqAp7a^ZNNmcZO(6wDyqoEW0@SB6*F*?={9(1y>Rp= zCUjjbe^!(*+VDnFij|5G6k^$wnszLr`YtCc;84co+j`IVt*-`mxTZ0OW|?WAc<)U2 zYYq*Qw^;F;61S-~b3>mRXsOU+>IoTVa$b*Fdldt>9%2`9#3fHNr*iiM>0!^7sodsr zH5Hkv%u_-9Lkm5r6*w#wFfxO*Q38H8MQ)}9>MaQ8gM_!MZfI>KmZkk1jPK!Lum)+Z z5N%$+z9Y_QMjgJ>*AKM2G(_nJItRD2__9JjLsVor=J@0aqL|kLRW4mIBQbth`N)u^ zD|n}mQgLD5^88ZHQ;pUT0@>E-Z`0H|x;k%f^{Llzd44V1+J_ z;=ycb$;c2@_TgdnXU`0qq|V9VUVy#i$dB`+W=Q_1?3VdkIvAY&wqmn@Dmo2oBTzSi@`6g-w9Kc|X36JeI$c&IHif4;LXldfx7ZC=4j}6`sy=;L2vgZHts$LWYp?qi&P0> z#FZvT<~fn|*EH}U{EYVIP_4#hpg=xqI($J*Z1;Te(|?J*OnBg#U=V|BQ_5~L%$Ylo zCaAQrep~T8Fc9{0hHZlnq%7s)!LpgTWkkmWNxo@f`iAuUEwoU1kQrgTmsh~gK3nW` zMN0y3@szU?td%}s1_hf{OhV9&cs9GuWiQS(#;8Z|JbQ#C^%ha~+r}6W?A}TRGP|A( zt)ylhPNk#^$eAWgCeU~nex^)g`t@mre2U{*5t4m*fj(}Y{#*$cM~WVz|EY!aCFj(6 z6}-~v4E3I4ND#iVjmku+qHU{J7ZUhYH%)%%jI#nog?OBNCV?y&xb+0dfEhY7k86pt zi`alP!k`EPLfA%rrl2un^oO;f+&E8`JT9$5wi=lMJU7>?BA6l3we!+1j5j-T@PrqH zt;3s*tE@CH(E5TG#dVYq+r+PRL2thl&KX&};E%{&pMmP6`D=uH;t1cu-h9;DzVGT{ zois^7$=-7h5v1G*y^u|yu^tDkPn?j*8$NqB-?1wOlPE5E4Evyjr^B*yT=D!$usUVC z6GdUI9cG~7pwopG>bxT*zsvywt)2jJgZwm4Z_xfxNP!pL{#J^hW7v6LZSNa;yKbXb 
zmB9K*$J@p3ogyMZ<*(P10^jMUS9?NjCjx`kW%?c8AGgY75Sp?=nUE*Y7 z7YG_$DOUXJbilD#wTp|c1#U(R2W2za=OBI*pVwoijosUnXz2J!o&7!zw>Y87>n$CI zlbt)_K$%@If+1;>-zEhYmRx17nrziVI*oy>Z#xbx!m;LF?Z7{B*Xq!ed;}%`sgZbPFt8g=wu+OB}>E|UYbhq!pG;V-eUTSK(PhC$ z5?0%_<(Dtd>EXE!b#;lkuU`SLmsBU1%@w)DQ_9k~2n2{ValE+24ESVR+t`;nwG)YM zuDGC@ELWV7W{_ep21!3g!Og{y@)7o?WmU{fJH;;26m&tOE3vgInT3Wjs9q(3R1?Km zNJgbm8ceE9(ZNsh3f;y=RZ99Q9;skuIL^elg-p~@%PJX0gVmLOYO#d=CdK*+iTFAv??-h5bK~V8@<0I^qf&xft8WTMGeVpRg-fEDAyd;Gn z*s@bRQ4JFU$ZfD}Pb2#HVIZf)s5-i?jjw|6jghnPxjeX1E9e1D?&*>U_ZzyM>;<#A z%d%O5+skEUk*q9y%q`LNe8|lWjMOy7iCs@K5jH8B~JZB+CH2#G{^vK!p zk{f+G7Kof8Nr2U7#fp=5u>k|5)qN@V8_tdS+}N^Vnf6p-Ewl?hOSLixmgi?NS$xD| zIon}A##a-!!))*avA(E5p*Go#Vm0YST@g9GMicb~gx3=Tig1SZL!NXbM6~l1^Mn@m z!j%7yu5${~ElL_}*|u%lzGd6CZQHhO+rDMnHg4IsG}YbzOwV*oKYb_Si#SiYckZ1h zbFK8QPOHhnLXVrSx1Omnq0US6#?6t%@B4v0<~*?9oM{$4@g0oNAYY9qF2JPKv3wMc zII^F+2SY8|celK0;>{^+2yj=uA){)0=;f}6rp{}%yKL*fMiV$n0y#O*F{R{~ODPL` zirq12h3^8_HO(?UI)V3nkbw*2Wb6N)BEMI1WyC!;kf}D%T&2zbp?D4cZGH(^9jv4c z0Y-uKY9bLS364E1I-z{~$>0-V6z2F1Z*fal%elDmTyRq#?L4(F??K7HMzThd3 zw9kot4@T|LQxa|gKMSS&W#>$IKn3qHsr@`L`#3;}dIOHkCJrYJw|0iA85{Tk+8F9b zhr}8UaBT-uB04DAz>Kq`jYg00m&z`NeZdh5M25eHuzc@5(n^wm}*B0XFK2%XUk*hT%`yTSUDWhZwhN(-f%$ zpo)Lt4=T1xIe*o=qs&qvw^P;r=n6Y2bSps|EgQ}#;7{+Zv49HQ76G2gOH9c{3#RjL z%MMk)_oC$6I5ioy5j9AnPD_1LRDb1yUS*(WRX z}PM%-pgi1A1jw+dccTisY~>)ESEH`C8?*L3Nm$j@824)^3?s5r~$H*PHK` zPDbL)^QjJR{8(nCLgn1N(bKGZt<+&Bp4~q_?JlE%-jhLvP%mC?F)aMrC5yYH8sYgU z;nJ$`K4zOFD%d2!T7rS(fN-SZz~(0tWM1;^S?3%vPWT&E!T-dt<%L;$T|zf{J_Jx$pCN`6?zLj98^yuHl6_(J+Yewo z!v*al3h{QpIdvNY{0zYXpyjZg6~y!)bVku*pe&b^N10+}K0aGUc&oWYiIC{a4AO9$ zr1ou)SCe@zet)ExX-F;bEg(_V1y1MzWTfc;Cd8=plHJ>SI zTn2ex;&({d-mes+rm5JbPLe9p>#ZQOkeNfnxZ8gDhQEJ_AJC_ILQ4QYR z1F?@I%4Yv{%eoRty>aL2E}I^okC45xe& zLkdeE_RUZ&HC%#>z$wa@w)|~OT4QZ z?T*QE$N|0y^amLSY&7brO9l0W%Cxei9yv4?mKx%f>eF}0u8INPvmAi zoNea)xoCf5(b6uv$JHn>Cqr7CJXs9hLPyinL_!=Tw=(3mKh#kcHPU9MqJ!UqDsG0%OfpI$ zOVeqI9UGy->^Js(l#B2yV~GQViHa*j6n6;hrq{dy)SGPa 
znISi_lrI+lJfbw5RF=}IYL^AYHD(|2MFCXKG6WFzU7@h>Kg!p~ss^IW5|=w;dV?v( z3{uX;o^VKEI{wrI^00XJTlQPOf^brigINolUqKmf?6hHZu-KiA^mRLB6fKJxb#GSfm{sP|^&n-2?{QqwlbfF+ z=4sA;;z?m+Uw(A~%Cb3O_uW}x7_c9%B1reZJkwJn!rDwY&#r|O+I7fP+II9v;>O(G zH!>s7d~>_P#=KsRv*h?RCe_hNWc(X4$`nk4J|R=3B30-}IudV1TH8d1Su6!CLXBWF zw#Ea z$BlQ`eC@e2zzr^rv*(K#&9d1dNq?@|=rC1Ftw3kuGNt~7q8(ZUD?UG{$tKR5BN_s% zz)n&U&y8I(U74-pYylR?v2uU}0nQ*^A)igxWZl})%GT9!SC-_PtzE4XrnZj-7H&71 z#IZqWPwyIx~D|RD0${-bn{Q@S<5sC|Mzf36|QH1Yw1J z2gk}<;8=v+!nk9dZDi-FAPYNgmU`@q8OOav)N?vgJa^A@mYl2~FEvkH2pSR@u_EpiHO=>AN4NAi;IJ8&N#DD{pQ zmMQx^!K+^~xKi43BNI+d6dE@ZgEE=M?JW;GEQ*WhV{)RaBs*3+Hdoy`DOcK=S8Ws!N$*pGDzn}Fl=lvdK->JWE_9eO&eZKd- z(#%R=BcPwUnv7!kj&%|=nkD?Ol=(coe&(d`$Wa>Hw zotl-zl$GY=rv>_2m-^NO_pV`{jG8D#JIH@xmSWWWrQrfg;U89E*;dwTo7cAU%EH?T zvh}v_qoeqI3+wCTiw<(axBKHy|Msp~=|$~EvZO6o5!%c(y?)#bayg8*Yx7GJW7R*) zgq5jzlD#eyl4vJ0M=L*gJb2shef!Go`#+|2N!0zC{(;>H{24d@FYFgb6DK=s*Pm%G z#s7IwcKI3l`e)0tjpDA|1_Od`-O#uF1hO+rKA%k$6>X7plQ6+isi_SXx>RbbD|Dn-=vXsv!H#z?1#!UE~ijw|wvbjj3k^8y;rQ zE2GF!U7!Z4S%5ltC_EeArUbrkj_XHII{NF(Q(!5za7Siu?E-7X5Lyy>2`)pJOMT*p zp6bJGeiycX__8uOAw}Gz|I_*Em)6|UZSqo(NLxT?etQ7&)462}cgk^El-H=#J z^}hF*h-JWdi?WcA5|tS%+V(R3whQd;E+m+29B+!~Ez`(y37$IY{N?kPA#Bu`O^WQn zIeWkvtmyoRE~jh}GOt&V$s-W|{HPXwEXat^n{mZVzi0s-HEJ7#d59QUs)zX)=ERZG zNw#c4^T(~TldpmU1+&rO{N;(OMFS*9x|;ZNo%$AmRt%Z}jeB2Ly6ZozQCDkFI>7vm zowbMxeLFkfp&zz^^nr$rxJu&FGL-gOMqmgH*qAJ-9IqD0JiW2wVe`uM)rdiFbS#Ph z4%Ht$Ix!=W%zipS%M5~x3<6ZHdrOM{BZDjdOYYOt)2jF77mEqfcB2d*q654N7uoyh zNwg)IBJQN%8h1vkI6Y{$RMz8VwZ#LCW%fQh>6hWCNe#_&8v1yBOrzJ_1>EX_JplfF z#A|6XSf(_*>i6%#5+8LVF7Uw_Pr9E741)fzUHAWvLIX1sYZ(J0a|>G&I!Ege)la(( z4vepkl)wDl6Ogn4f*aR-&5e{t;h#+&gwwi zzImsg_m^??zOGO%XG%~nZup%&zUYRj^7=Rwc%7FVu;_AX4 z6R#i*6gh}7Ru(q<>*v|RyJNwD@kuM0Q;Dh*fAzMmqTDZ=xN(JP=p9~FuZRSep6X*J?gCy@I|Xd6%^p}_(BUm1q-{NN`$ zbfOrr!jKY5R>#BLk596t;Dn;)cb`mVVtgt-kL+|i=>|SN%LN7iwUMF2cdG7F#GD=|G)#H1= zZLkKVlT_q%(u%Ubd|pFWg0RQSX{<8mgd4qQytK9H0<g`v%|3s-Z_ zHR5h!-UrLQ5|w{lU*;_0crmfJlW6L_*iDPwfPiZb{xNuIv%jj2hJgf`DceSo_^r(&w 
z_m~Nvxs1sx!8yo4*N`lF_i_~s{Lg_PtP7yk6)f8mSsWu$r4o2#iP6`sH;I1Kj6Y%| zgh-#vVizm91X}gOwo%Sr>DM-{>NZWtMIk`#I6 z75F#;<>83TWi!q!q>cHeS!-n+nR>9@pAJ#~z*Ua6 zdpa;!GI#5zL&IURpjG&Y$p(*D*Kg*Nn+Ec+l>q4SfUw;$2IYxQHPhY5j>40wGX|sf1Ac z-(&C`w{>)JD6c?U_E?~N$aG;Db!3@xvcp7JZJaT;f{Y+$;RySd%3vU+js+Id$&s;| zHS5- zD3@*9{{aufp}i{3gz_jaG2J#-k^ z1+}d*O)QIt+C`(!v=cQS5OM%SY|pkYw5j;}l~G~g^z?6~1oU5@=sOfS3~3Vl95%<@ z*b_~Z`^${)v$ookwr7*NJ3_DA<~4AiwMBU%oq#$eAK!HiQur~zE&qa18j$Z@xDeT1 z^hJj*jEX?e_)R$9vllOWO&V5rMj9wX7^|YYDhF4VQwL9}z8L~kgNr>qeif$ub9uQ{ z_8jM-!1OZob?X)Eq)K5sv{ zMZ1^x=`L<+zY^S?4sr@uhdCZ8YK0jz4)7KZ6A%z0pxqx-|Zi z_ShG;GKc7w`kZdN)__Y`^Ek{I8G%tAOf|IVVz_7Ut1 zUgNPk*i{XETfg33!h1b;Wue$zPH<1UHpI9x3ucmJDsGN`La!Pge+!lfrM3WCPEI1i zgiE5>!{-AyUbD@Oi|WD1faDA6=IIgsO~G*~Bna%qFrIM`wWQ@umy1ta%rR^mJkk7j zNQ?$jnQ(-liU`!&;w)M+1$^FU=RVOwc?n0KQ5eug4!By*uv_5!%)uH*yA&G%^!c04BTvhP}1rm*Tytu7&w4F+(d@sRC(c0ykvLX^yon_hs4K zi>1933>5%O3PYOQQJ*-_lcxT<(EhRoQ>2>xJ)RUeRynItmBv~5uHSTtF(E_R)=QW@eu zUeg@CWI13jlw*ew%zR-N^#v8#n~e1!58|}V;8Bnz-M~KCRYICTk)ROX243~iP}Oe| zNXOnJ&zI_k$54JYm?WZu7|O|T!uW^g$;9ktIv3Kbi-Yi?=acu+#D;P|BYrxDz3V-s zHDt98)MWkl`Ck)xV$}#mbW+h&76HcY8x#w*w&jEja+IlYCe_P=8vem^ixW7j0__Bv z0h!{VN6ie^8b-B|Og+?Srfy3<7RS!yR&@4Q)}0Y74XkBHVF|LZit*Y4-nwdtexWF= zA&-!}f|v}<1`S#Le#)r2%00t~H}5!j9q(opT2#gT`Hku*Szqf{_r6=iPMbk=Z-iP@ zdq&kHqYqyoy;}!+d6Q}p!J@2j58KvZu^h8sA#|j@FS5SV7<#|SvT(5&Hu^Wzn7(r@mW$6QPA7lHIG!5 zMk_|1zk^yJpUBWiwn7Te=N*BydiDYa;-BZRf2F9dM_Wbe}2D# zI;rBrd}4!DIej4KnS~sqiux7^f9^g`wqao7x+RtbFxg3ddzd*l3z47;v2zFQKiy%0 zfpMbKeM4u@_@#2_y4ID4^)}bB-N4WK{fGKOjj>y9^C$1Z6aH7$qmzlFg@Lt&=RY7L z|L5_axp!J)J86>z!LP4R=xE#`#tE2y&dLtL?L7Y6hartxI&5FCAw_tpHJU~bEB$?U zJ;hk;OA;p^*dyG?i{GznLx(a6oqlEv+L5#|=jbwqNSA|5FDK9Mfz;bas?>sHJ(fn8 zDAylO!p?ncAzP$aoR}30v{ok*P0hm)2A-ND=MgJBz7@EXjM%9uki3HNCOEqYm69nwfKw;{gH9iq16GO!j?BZ{0%={DS0nv7?({6t?r*w8T7 zUL?hL=^h$YW(MFSjsn~t?x*M(#8ZNw@it6S#`qmH9@WHLFDowMTO`M_N0h!h>x}1r z7PY}W%vZ5jE1oLdi^^0``PC`U1LvFnc9Lq!Yll`HbqYo_ch{~Jji$hNefy_V9H7R_ z#@E-uKOWFGa$MiE7F;4vz4Bfkikc{I{|0bqm52uEl~t-pNdqlXk~P^PwgT1hN!^$c 
zTL8Y?G+vQB-Go!N(GqbXePUp#egtzV6FHhnL$*m$LNrOK**w&MOA)bwrya>*N=Ukw zyPaRwZU9%5C)*Mawun2EHFPQ1Dg7@K(`b1CTOpd$e_PDB@ zXseSk_jIU^(Io%s(y5<{tc z3c3m?iC?Zk&;*HJVWb9;7AgdF)vv7L=B%*^z%+A8;i>#A0qi+wWwUt$q_HWk0Uj0W z=_C3+!=hH79$CP*2dqZrHeRP5;SI6Qnaf~#c0D6;Iti6`;_lha``bHREEqs+LTqCm zmANuU)@Axr*rvWV-;f4>g%X1>G_8&g0+1Ca8H2!P_~=!Lz}?WTAO4D*aOr&mEQPOq zD$J!uark0q2xgw)$R;Q3AftB)231oY2+|mJ1tw51<*02R?^n2tYpi(wQL7-Esx~` zheb$?S?oF)I>aDx_=R-oSZ?N1hl=ZtR4ci_-*ahLI z?oV@1roeac%j`fn02{g+q$u4vJ2a-U5Rg$0)}T(3k)vTRO#qg3F-}$o?Kar$rN0{V zv{TY{%=uvsc&v@1M`%mF1;}CmL(R(yY{#ynBH0diAtoVB4`R!I5AmMpWrKM72_+Mc zji38_g8Qp=9Peimucd#RiEGY%9SdJozkA(o0=FUq^c5|=PitPTP7TM^6xzN@Km9u5 zjq&c8T8&hZ3lU?Z=vI%ndd%rnE&tJGgf*X;*8Bl3AtwJ+CC0`-hl_8Bp6KOx%Kd zs?%)kI^;_{zr;T1m>uPkx9M{@vxGn$o#CDB<@@JL+C(eHhSN{Ujk_7;I zeYc-$9hiY0C6iV@8qCr zdF{^0n-?S_iONr{otIT?Y@&!t2r6g;)(T*ij-y|<(G~&od)%4XjZ1hTRKg~wjE?+$2rZsFX&ATFOvJvJ1v zPI&anS*^0{zV013&6Of?9KgA|Sfg8l12}7}>w0Xgz)kdCzj=s;-$TS~BGAP*Fb9Sy zO;=vbQgOo@S;GGw{bQI}w&YV#{iiK}(=d#gyeDS3HCijoz+JJdYCeTR(Fw}cS0Cxk zJ4wCglpx41D^@qKl-{$pLgyMI?U}{4iH`rOW8dBx?9a&EV#1oMNnc=Zzs<`izhKUM zT^&^F9iBYc)!KO%rMoxb)-xT5*wd}F?I)x&O@dt*Y2)egHzVVvHxEI>_hA7xJXNUd zmVnnqyNyQd%*K-n=!N&8*kr*6_-13SmLK0a`#N}#&Bj&?B6f_UW z?eUtSK;_Hp#b3AqEBLjuV?ASin4i;O55(4Fog()N7UqC3dUl4a%sE4d04W4kxG^4l zP~$9cl!Ap!J^}Zc8$900?WyVks(5B}Y#w;~I1_ia^9}h+qGc>?j z%p=xch9ULCPojxpI+Y%RU}b-Am7P7%h_#s|bo-~lE^%j}?T4R)ZWGtTcuc-qce!Cy zqc2xEHOpQ8jJ|*6jX5nR@dHKGJ^BYfNykAX)D5;iQFz!|Owma-NV%WsU;HF$+x9Wh zd=pE{(zyi^ZR}yU6wZl1B1D{i(D_U00PNue(b}&_!UimjB=Gwun-{*o@B^N>H0=94 zfy-F7YLi{|$hE1D&!~aNXuV;RU5EsvqAs@;$}525M{F|E8)rmA#E=z#NY0Ura_`ft zBdQHCg~;H`K{x}ER0dkEepDwB|5BY?m3@1!3-$hDq4Hd6R^0>SI$+8Y%rKf^D^4SB;% zvjN93(vVe;tMAD7+aKM`$9~|<_}k;*u`?4dzMif&&6xV+VuX9y2SFhxVQ*ljWCPs$ z;PV`+i}`IMgH<0t_v{S+zpF<7@a_PdlRoGC#N-#me+8ThyV%(O58yOLMdu%YQy=*F zZ~IenPWzG{z-gkXQxXbt=K8N>x*x!)j)t|lA~9vvM*MfLl%yjGw}cc18tIOg`^+an zQBu+&F-L%u3R`9!G_)XB$V*une_v6bo<1FO3@nLdQR~bc_UoM1~}1{BCws-d}`wAfM!YQ{KH z-bw29fd>c$?)*$w8)3?I#k`ORPn_L8}BU^sQo=O;e0?t!z8AXZy>yR-}tY$J-k+ 
z>!`S)bY_zt;bAGLKZobrgJA&TduCBZt(DlgcyF(_A52$*h z(x5%xK^}&Iy<^eUm<6D4Y=UDU}Q-oN^PnI%?Oj3~E}- zNh1Q0o>2!KBVtM9EzDAo!NEslETQ!_ z(L!@X-BE&3$cyPyOO0YEy_uroedHQ~`MK(vxNq~#?ldoJp5GdLoldvV^CR7t_K2d( zCBuC`dwk*I7Y(g&Hz%>zz*^3Le5d_M+FqiL?ack$mcO6DM8SH1PpYk*9P#SPtbHSn z`?Aa~9~XJ&iCYVqy%V*l_%m!)&D^XL<|hNCx9Muv2fcVhCDi&F|$=XHho*>oy0o zD8`KoWp9*j81xL~qzd^iQO?y6)1MCvze-;%Q??5#sYsUY3v1cV1@!R9?N!JZd$Kq= z|F>cV$^$vzf2>x#>&v#5LRumI+y|&Nrk}kkYghdyovdQGcOuil9 zS(c&jX~*6`g!51?zelc)95}D8oO3HM55bMgg=u>nHeDt~P(cc|%l>s?e*zrz4=yGW zso|zOZ)cl_ftj8sx#ke>?;>BAGDQ$El3YAf78=6isX_M^u8cppv~_{X6)`K<5D<>R zbx8xS#1=AMaZRl~3Q#feo>+=3*U7u`@9{~({N3;WcVUy#CtQ>NB%IWr#r4~(eJE#rBBqPKaSUfXy!5IUN6NnO@4&Etjtowv2UqAjBh~{EY6?R zy<^@;5(SBovMFl*$bpGjW#QJ0zVdlE{`Y6`dI${?ND|0`-EvAE7#}F$9Zs(<{|3V= zdg9WwJ@5dkVx3Z*?D{dAr+WVdns@w>5FBrV#K#;8y=q&P1wuifUO{vggM z;m+K~W@#<2V|?QvsciNovV{ex!quF15u>ku8>}|I)NE6~jR=HvkEchOeig@rG^5B~ zUPCHj_p^fmjprF@hqcdXbmj43y6%bg>l%GI%(04Xdn(p4VsXj4@7fmOM-5zg^gs^R zuvaQ-Cy>}t)AuAvB&Hk%cDeehrqnY-^l?rd7E?Iw4lIQdyGm4z^p}kA|EIw%IafnM z=qKRjegf{l@FD*r;QsS$bMph%L-?H0b2w1aEb%W?ky^pWvSNZ|C03G9J1znSZvbLz zlA+A}+jj$FZP|JUX!L%)?R5k9{jn9`)Sl#0=I+42ATg_t2J!&yNlSo|F$jmGF>V&w zRA7}`IF~L+%%rRj2VpR+s5z2F=`B{$fx_?rl)eVw*6lc3OCwB4kO8&KIKN94ITc0&Say_>^wHma-9~J6+MqH{> z<;Tt|EjYQ=EL{*X-p&?zgY9&%ZWmenVFS=H0@7{(q;N1;#t@NM0qPOis>c?mxz!n=VW$)5E<~6HOsZq_ zk95MKz)~$hal}j|7@04s;$YKN8KG}#AgTLbyj@G1Brq`f8go z?P~k&dDXP3YeS04Nq^RK0SaTB#ruS|keFan6|j@nc~l~pV8LMk%#X{h(Q+_56(6t? zn@pxrk0dG4YN4E+a`=6VLAwt|chcm6&czr2Q|=Apbiam5dCm4IBk+4Xiz! 
zES&yXmKnmx`wp7ea`9^-ycN z)@;Z^oDJvb%uLat;NU)a*@LWBmb1>Dpur{n;^5_cbCzzKMVQDK!7@y#N&`FVf8i0d zum>y2E&$^egMdz*D~fTyg>KoG9IWsI0*BR~3k3@^s}-%09MgbR)S!S>M6@M12KtAC zfW{s&Vgh;~LmGj~_VhZxHTot}dV|Q)5_2EZ1m!WsmMCDI_0DvS{bGU&&RiPx`!A=YqO$s=}XS5(Hx6ZYfAw8z;V zVekyToWCW*5-TF~*%su;?td|NKLkv4a7O4w>zo%5_AsKFNrf8;$3?2pOk@Hj<2i<` zNYWT*3YPB4PbGnZTH<7v@q$&z?%)bM-D0|YZY}O83iOTpqsvkbAI+K1lE{I-nH;RNOsb3UAZrKP~_z1S2YUZB zS`&;iO-SXbC*g66d5EIg^SsUNgwU;Bvd58zmK* z&cQ7mvY?W?6AvWjiaVXUAaPhMKRq1kLxUY-O{}&GcT?$85n)LwC=Yot_b^PFI;U4(%$z;ouJNB z@D?Yy&MQB@>TAtUBd_(}CdqY1!tG+2Q&kD>6BFgm7AXb+pX~rxTA05h59r8 zU#zZx_DcYKTdE5ZLWu&;Qn+%`uVc0Z{Fp4kemBTKiu?6kEMC8eBy%8afwCAM-$ zmjWat1!RC|0Hjqd-|smYKKgm?bBPDVBb*Kt`fjeKuCBW8p6=gYJ$t_OS#-5cuOEdF zo=H!8^Y*Xqz+Z1h43ec9&bT-w506L_2J=DgNTePtu&LuL6M+XrP@+-xK;MUYe+Tr6 zXyO)0+*vTdcxmR2^Exl(5QkhkVNUj4;)_Ml|pBpZ`+859nU! z|N1(P8y(1+zg#W)efW8&aC`4k|9)?8_wD|DO!)D{gMs_S`#Slxugf3qZ!{Tepo6iSwyiQ&mn~=`9_pEWOfH4E;W<3?}3tCmZp#jATmHSapDl$lL;KC1i~D3 z>VSy$I8g|V?dcA`_JOw#Pz1iZ@69%rbn7GBM^vsYyuRr50?@)j9q1RFwof1n{!Twi z0s`<-@pc7!i+)*NmnOo4?TvqePCpoOILW6B5w)apP9VD@rsSvcvWcHM&*a$69|<+O zX%r9lk|vQUovMLGI;@{;vj5bi!Q}y8{X8&R_;B`nPbFMiO0tzxy{j$zms<}B+KC)B zlxW(VCm+TD$K>d1m*D=kGg}lHglD$&rWzO3V70lX4Sf#H>1~yWRH0;*Yi25NfVu#D zi$K__dC0T@S4?IqZ)R{4gOxTGiv|c*3n*ZrBsElQBn{N5jb;JV^oF=U@7o>+PaTR4 z786S?z{L@7bXwTdf=z(r)&RayslU8Lp!o^lFVH^{&M{E%F6)@#Hl87VIcEFbc&2jo zhO%Q|a=F&vVxx7VMd;XKqeZKu1IbZ4K6jqp?P7oP`pU4p=-vFh8ToilT;kL3aq9%= zAwRCq^dGriF^;;?_fE4VPzZ^36xZ+I`D5YTMxplJkEP%;)Jmb~MeB60@V_$BZWM}M zXaD-VMXYXkQn-hHe^P=!G@yc^YdE)VAB(y;$Y_fNd7}2k;rDiay`K#Wx`loET^h9i zQZ>yr)xSe}%|9N_;Yl?`SYez--c&_ z!IRRRW(pjK!D(ZXB)CM1f)ZpwL^Mc-rC!Jk0uA;kK_C&VjPF<^Z<{PNz%PMP%@N*z z-bipzpR&?^>^~xOKsyKAfY&@RDDKJW5f?z@GtKDO1%O0YC{>6f!~h6+rY;*v3L;Nu zD79)B#qJTj=gP5Erit1DLM>0!J!uGV;)_t%0Kca9JtW`1%ry+5bwqV#KRRrFr#MKH zHOO~L2);(8&l0s0h?$%2=0UDzIr@IAqw%E1c`9z~cR zB&OvFwhMQgG+2YO=gr{2DYK{nV&Ifo3QB*sq~ZV0pJL(&lTy?tyxNFjr+q+x0ug+5 zM}p1?Zm;csxU{&aPc*@U*uq9~X+SeP_9FNOSYEk37TuhY?aY 
z`J&=e0^sk!#t7EO1MUZK?&?NT%hS!2L9lf0z?}O86GNH-nvx~3@59p6(nYcam(~n2 z`=#XL_AV*l_3Sy(U_MhO)q!9x(TTEv8;SuBv*#CP9h4@Cj3GqmR{XEWL%(34y4-(Ox%+u$d5@cv0JPl0GtMaf^oidlf|0i$sPx)_{_If* zNwNk^9sMlGu`{>cdxM4pldkW$zv@d zN{ib$aVT4RYb2~|fPxZLOq=ur244wqk1tzCP9+-}VpawyrrZufAH68M8E{vJJ zYGv6B9s$^rSysnFvo9WuQ7vo<)D+ApOkeC3VI!5>qs`oxR~us`GzJ5ySrzsu?6dlW ziugWbqBr^P_TMJbM#Jz;DV|NyIQYLXNTQ^s#6;0sZrafiNm>vq84HD&ohXnSV1X;c z&Jp$j3DiS~Xf6=IMmKSHkBCZ!H#5oOFE10!xD8k3QU+8`Q%=Sux2dKqAP9ztL0QcN z>c#qySOmm4Dm)HIaI@W>DY;=IYYD1*(zkTdJlUr`2ouEk*u zww|b62!0QMT~!;ZJIiorZ%+&~VOUfc2?eBHX@+-5AdV-w(Qwn9=(WrQxdEeeon#&}&-aIM-0cPI*sIWIJ@~WML>cb1va@Bj)y$fB z2m;Bmv%!|58EK5y=$`={qH*KOzeS~;6ef;-M~-bJ9f86R(S#7z77rg91ECxvvfm2@ z&%a+`CvegE%?+n_oI;1!wlkYkpzXUdFIy%?kk;d(zbzQB7daFqQCY(ESW~~Z9hSo2 zv)a1vjr?v#K&d~T$U`w`Y-gB+#qKX_ojQw2H5GPZBGN3ZAChGr+^SUB)E%&i<4}u} zgR0=YD{=qh|LBo@98yX-<++1LqKH8;0wAGSH+cpsH!*}0?4kj3YJuo1xF>Tuhybu^ zF8_YW5VA9h^F;e3!W**YIaps_I+ZLqa8G7v_A+0ipbZsDhkVvm&F$T^L1!7$YEU#A z?>g*;)e^F(j>nAi4z{@hg(@unGB-1lsO<QiTbL_xQGGJWTlx)WL`QNGKYe*HA zrG_9}-ZojUm4dk4akcwdX(AZHPaLBjcux@kH@}6?Nzhu7>Z+R|`#fPn%T$7DSX3qi zo`NScTFaMsf0?|;wFvlu^q9ig<=t-(YsOkY;0~$bQeTsGlI_0V#Zoq|k zg*Et0Ioa|q0ODrgJru&0ObL$b4BsQCNgn%{IU>sJD2dZJXGlRZyu=!5m4)#zED;H* z*jQUPE2xBr9to#VunUw(@UQ2l>n~igJiX~thjOJedv&}p^*6kPP4k0Td!U;}B|T8Y zL8*bJB3CoO%bze;2Ptm;7N;<9lDTsLY%s;6b3?LQscETsh~a9LJo9b~2*qsj0*3V) zO<-SW0%S}*gF2JC<2FA@7*}B~Sm2dIr(-A93-oCt(kZvFmFZq9rj+7Z^DdgVl1I4P z=Zf)kmD|L%6;)QBN0*^&NA`9(m&o8lq;`5gnMmsGnKIsnL-E;?UupA)+1DdQT!|*} zQ{W`0)LSkNo5Te=;V`PsztLn-GclcDy`(NfMAzVoxX+f!e(c4TKIu|U`{^dmiwy)wS82{uEIVIP$K)!HI!)otFg0;>niC2Fx}lLAq`T3 zv~+h$N_VFqAR#T?jdXX1ba$tOAOa#GAs`6vWp{POMSRDf{_h9xGiT16nK={pOjNZj zUZBGR*@So(`#ov)IX}hPflN$j>#o*sN(<1{tL__GAV}=w_ZAF%gAuG*3wJPc>O}hD z-?WN*)DwvfxR?&JQA(Mz_-5oj5LN5$L{?ccq^_fW4x}=t$cr;uUWcMd($w8Yqr(tSfKUt zJ0%!i-kTfwUQT(nDqwJ_+BI->ZM`4Auz=K)r>)NwrW|B9e7W~}DBx>_rYmxz2WImd zju$B(-sSMP71CO#D2Jx2Y^QLM9bvDMq+o%-wSDRHZE!3;l#4?xf1@uwUAW)X98p87 zOruy{Uo~E8H?M?0F}}pcH>o~8c28FEj7_Lg)6;^^Xp?23mIRA?%KE@@m$%FKSgRRx 
zAbC<)&7}9ax>}DD>Kt1MIew>0bNs?*ClC6WkY~}dGRM+|T?yWz?DJ<%Y}M{FRi)Fz zi|><>$Z@9kpU@kpPvDrT)H>H_5$hakf?Po%ZbN`xdep6Y7!ivWEFR-x;$dUU|uL&OU^IlQ5l_x% zwetQcQW$JB6?y~V^ZVsNa^>5l9|qJ=*VN3vFbj^fOD$IjnxkUl$GUS5Wy$t`fN)ZHkfalBO%;VgOjMit7dpt$pr{I(=8=ho3gcp*(_K}bU3ZpB? zv0)MPh(whrGu&_kF;4N|v9k`bT9m7N9Mf~PP4&V&`pA;qJ$$9@#7x_{&Iz63{vq8G z$i<6OycPkpjRg?x@nqWj&7W!|*2_Q9L7$pX>#Ct6=B<%gS_MFZP2aOaFc-jLNEYG>E|sLF9Vrk2diS=3}-^y*kqfLuT1u{@jD9I_!&6RDT1V6)EgsB~M zFn5K}h{Y({Y3i*~!fTY|b`v0ArYcoh5@{!Y$vaBLbCDtEu787y0g{tTx8*wDWU6q% zvin@5b;fvqTY>*fSyPJMnU=zVdkWe0IFS6_3?Di?s@5<8|C)(>1WKsPrh!;vKInp6c$BIrZp#?hjJf+!ROeL`<>_1-?;A|$ z=DY(EnZh*xtJmy7*k`7vU-GpmWU82xtQ}E}8TtCGYi!@fiZ^Sntc$$f-3*3Ax`egt zqy6l~+W>w%^dDO*~GQR`-sdXq(+7rnnU}8 zaO++s@kU346+IKq3R7QZ1{>M^=4leeVvkjiKnUeBFTGYc52PRtLT}%`RG7=m`~9G# z=t$czc*mz)0sROv5v)_3wv5R9mLdsMZC6hBhCYgN$k2DF6(y^OV3VzW9wLA6*iAa$ zkUW{xv=5n=UzE$JCz{CidAQ^Y=a8C|4t|w^un1X~8g8ykRsDn}Y-5^y?W4uijEQ4Y zm?M(Nd>*#8eR&9n1#0`E-5hw)7X?(Yv#amLnVdZ3PkQ5bAv50i`qJS`Wg)kBfjx#x zK3q2)u0ommq}WCCc}31FI*y$B4RfI(TmYKo_P9Lz>ICPQ2^IQm2{WhGgTOV94*M24 zSXNzinM+pazKoZZEW{s8H8msCOrQ<>CYlIBIaAH0d16{XdB~tpd>_K`tg@C(pirQM z2qfg%JpU9)g-Ol5JK(AiE0IU4kV8CkPFl;QQZ3(Y)4VR&E=yk!lU$%;5}cO=e36i28qKA;cIrGtMCt_C z)mqb6q~jWNz67|hvNh@;VOS?1*sr^};XN*juC!*i*&ADZ0u{uA+EkB3q`;P}^5Dxm zmfj{x-Z^a);=1Cu8pHy7ii01hj*AAwC-0{vMJEL|V$k&4bQ(Y@Qx`-CV%PY+hK)($ z(%ekl3WvVL?AwY3Kgc3By`PCf~1I(F@G7xjh-n)I3SmIU^~ z8wv9A&B|@IOD^54ta0Hcm3OI3-@-bcMRo7?F|t$3R7CkK%J$JeH@I?o@Wc!0+vWa& zl;@hv;RjA(GyIP{&=*$sU34Qdj^kf^MwRSBR5OK(c|V9eMXANq;lw%QS|aytAk_nPH8^KI!LoNDxMHJ2DDRYa4qMJm$KGZ z-?oy@Q^L3?%_$d`ueLTyKCSopZax_?TTkSF{aCsirD{B1p~Ll% z4!$fjd2|S)cg2O#nS|Xe@!|^0aPt|NpiJfG0oVxb^YoEUjAu*OX0M8e)emUf+&HDu z&&cFi!CTDKTm@%{lB_Q`KfM@wRSAR|^K?L9nYA1W^D}S5g7UN&;JC~=Sq`0fo6hwh zuS`7r886rC9ryE#v-t)p)_X`P*{Qr9&9xsApt_;aQRM16;KER|WEEa-3)Omd#hj!z zh;6CHH7X;*#A&=W6I0qLFhyz-u{g;fO&kR+kxsF1LtHBLQe6G`fakHF$Fj(x_(~2@ zovv(6Ko0`wd{wTCW;0Y(-fL=y2|9six(gx965df$EMad>bVVRxw{# zpj=Nz^4sPMv^F*|s^f{8o)wkOdLL3y+ixDeg6lKVo%SoKipuH3*`GE3@J6edZy4A6 
z%y$$4lov%K2<1SCL=tTuLbHhQXOR);q)(m)t#J8ZEb> z^GVq^@m|tY#uHzJH8=7{7NaV!^*cQ-+qQ+pE{oEzRvv;H?j0d|pBue&A8=B$o2_+h zgzKJXJ@O-yDT>KZ4xV4coHEC;r(u1K*X_pTRZ)sEnQN5##?31N3yb!37#}&K80x~QBdlXCI-ow~1E zI5|~v!tLZD6BGNCo;FFMJCsNq#0B=(wEN{gr*<(>EVFF(w9$MQDj$|seXNr-YWHcw zUUSi^m-N^Yx$ubt_d>mveTGyUs-pu8FLGyXpJFmrMs)EO*8$!^-`>1~*B z!Nyjm?=Q&E_F=f@L@Jp|PGWf)c19v)J20Vg9knvt3H2W)btZniOjQ}8q16xB-oH4o zO3zuWG+qRott~w=_uu@yInvK5R-Wqnl!M`*u#3Rm>Vrx}tR|)9oa9KC89S-CM)&f8 z>P~gTOT_B>&q1t^xv8ws^Ir>^-{b_zs?iWAVyRi8`(jR{csWL}>=>s*s>diBI?{BZ z!`2lP(2otDu1I#2V{C_0o^dT%6#9rHw-LZ930B#5lEFxiL39d2$CgQTrzDgiV)(>5v!ew)%h z8T^UQ8qL}xKGlh@?&o#~o+gs~J~={R0|Sfip9@>G9e>dnD0I+E3bxt<-#lkHRo zSs4pl9rPMn#nHCe2Z?_YJj+P?Oh_IuBYj&U~JcuGN@U zpxO~Yuo%455aQK%YdsxHU-ZgQbkJz53Z1n{@WDB9@hFUOqWw!{$oQrB;_mqcq1d3H z`|nZF4aqpzmC1Fg`w=Vgt;z}ureZ%9rUbJ_l;SZg$dtIm+Ni2MlC2MlA4((0`!xQ= z&9pzMKv!6QtoCYheT(HwV<&L{TynoZBRpkWGjuNH6L*FkIU-hUcQ5konzPR`a>zDg znC+oE^W*JBiJM|ObMKya(L88Z$f}U%U_>o)NO88anAoqgkT!C)uYUT)NVtH%qq&C0 zp4vtmC_d=bTNosyjKbeQG~H+E!&fmh^`SYLtPKNI|Z z$C%9g;4vO2>mg*?HUf=Et`pLS50p=vhhyu^mt`J3@ABP0b<0=7_Z?hN%CLOi8Iye& z;$Z5Nw5MbvWZXXP>gf8Cu5>*m5PWENoafvbdN<){#A=A={G*!tW$>pb4DWfz;Ok+_ z8-yQznWAESC{Y-X@6^53Z|^Op0BpN;Ys)7z`0mtTmFL<){J zu+Pnxpf8MN!%OgL7~HljRXb-V+cg4 zpuGJmUq=U13kH5WJ6#vxSfIZ7Uq=VrqvU0QGci5O5vNGGk0F)DLvt-HqAJEs$K+@u z7&|kQG$&qmezC|UkviYyYRn#GD$D2`{Fj#4#_YNd7_zJJW=1 z-w8f1x0CT82x%yJCCo@js~GYzWHyw3yw4$4T6Xr7en(>3%JKXMO~IW-uxbv?vjvWW=X&)vK&Z9m5{iUv7X|rG_(#F^g4#UdKqm zhw67s^hncQw@J}_N>Rz3V#L_u)92=t`eDnhyU;p(&}v1LlZyz}eqN}FZ2eDp?ih?qA`@6!Xx`ku! 
zKx@y3Gwa^jG9+@=D7@8;uMRbPjN=deNd_0XR_{Uk06UY&5%$}7l<6Jsb0pyhb@3?u z1d6=t+AC~(?SlB~@cv~3hfLeTpeSVcy^7C;#);VNaCuh7?)Qa-!=&-+)rtl?eXI;Q z9pT-t&WPM_!d!Rbw$>KVmyeAIYXSpq*e2Xr$8{)`%rtUw!W6H;?Af+DYr8qhCKG#gS99ok)cz>3hgm89$c~^WPM2-cL z0nX+Nn;b#cco($6pT@j zGRvLun|V*uhKyK^UY1bvl=#bVlxRDSFt~mjpn*zFaE~Mj(;vMsCuPam8SxUvQEPPz zW;1vwhc~NgAB`&Er}%zVqoOVmX3g%QQw-68}=`h zbQKtls}4XT(f}RonJ;hULt&OWB7AkGd#))@<&o}j@zL?r(>)U-c;~HHAvJ-DyDlFb z(K9`>v`QbHb{M1)+o1$e^|udSst8n|==DD;Q?CW*_K`=I;Xg=N;IKdrO;Ik0^W#ek zcdnDKw{4outcdGF1B?FT

IW>s`oeQr)7hmNzcCYt?mm2@>;ih>`B)*L; zh_~9(^5XI$Sw62svmzTCn^`f#WKqnnrFk?j+gsLtNqoa+iA#k;DHV>YRq$8Is-Eh- z>$6fenR!@C#jl$RhTMeh#r$_dKuR8ids8dMW1^U|R&+`)-_z^|8R0<69DqF)N}Ih-58Cf5LXdN_gG5bmV|!ulwLOwprI;!}hPq z#>y>;B3r|8vEwb{!yv3XwAwui>&2^lhBNA^l|;@>N%Ql&20v|^E)XIYA>}-z%3Z{+ zsCX3kL{l%p*UjQ%fvVPP%3Ot-0@qPL_k9{U|7oTarI>gsrj;Vg);e%he57&^jxvm1 z5}phtm`Y&zgILNh*5**h)b71WpaaZ{@K@SG969B-@7ave8=MF7-1Oip6tWQtbhjO7 zN%7vsjXL9mgu+h-z&s>#j$-r5#GFY;I4K>BMl1j|Ha1?t>`N+HLG)>pNK58x1Y2gD z%3(Eq5@f+`JmFAM9J5ixZSJUQGL<35Z9eflO|eyGv7i?m&1Cxd; z$yx#2+EaO+XK^Tr(a?$Km< zBUs>*bH`Ln!%+5UX}B9yRR^A)w)dm1L>4EQu)a#i_j4O;54}gJNH6`Rf;tGx$8{D? zoX%@R_&L{dr5r+Kh`N?dg(b(hK*$tR)9`w6M= z3*;iQIz>8(L@!Oz@K8y3)A<`!3NZGugl(bN*BBmHj*ifMFS%tFeJs zejJ|lvYh2L`=xMClafΠi-t=C0|0>Heeyw-CePw*&8=rZ>g(X_(s;!F}azI}E7L z%@P3h?8U)xoYO=BHv$oGBOct|i`yC606}LAt*$>a zQ274*mos6hsC(^9d|T4C{n(756zRROaiLD(N!=E>l%rhA;a`*l z3MK7O9zg_ldFQ4x`|bf3cH<3a;7UO--ji6%luVyZz`}d$^$1j5b}*g0NLiY11ab~U zLQoe^gLV>u5?& z(9jGB#Fdc|a za%Gpzwar+Zsx4fUrA0jXa*x-Whh1ho1mn;biea;30Gr{3Yv|Pre|Z9lW-svyDHSpk zpAn%}{Vu`Y)o{sCc%;l|$WvWWsQo}m@<^PgUm05Od!-V7JU$5tk&bMOv?P{cF$l^; zP1m6yMY_;vRYWB9BUa0a(6MdReQ?w+WiWKw-HUBbmSE<0fWWG7@yMPWSR-NQfiIqT z((&&3n=c8VJCt^hgY9%#x$%k8Qe|b*?(Zw?=o8px^BuH2BpKV;f>I6Y1$!jigsLG( z>NP~(!<2$@P>rG9c?gFr0e6)kA~Kx06U^jHMSVmO)aVm9-|hk*Mt)uC`8dS`6Q%A%`)+! 
z*3aCi;N6-i&aOGLFX#!I1@0M*(ahT>ahX%+qWaDY=L;Bq3Wbg*HG0ir7oWTG`{1ft zw220*0xlw=E$sc;%<e-bzYgJ+rDnF@DAT)n*!P2J4I+ww|;{LCVwq!VY@NH9*U7>w%~J z+}k%?c&R$|cJD(qx-jFsg;%-a@)VmAv+>lOn&>LfbEI_TS>YZ0z)SgXbyRnq zhC#<817qabuAb7;Rf9vp_Zj>136B9$JzcOdKFc-{m6+PQ%3nxqn(xgB;Be{;q zTAqK+6C5)$pqJIMlIZ{D1$|V% z(4pt7F@+u$DE(<06E2|=2|HM>3MPTT;EYaT7WF6Gh8{{}avoQqa1TAT)=KtQ8OB0x zRq%|XNzvzZGp&go-o@+@TdW)T@6aM0bLoqzQb`Fw_< z&<=11NOuywj|CuJ!KJoWO9yO9k`H2lw=($nEr1p(_`Ih#clc8FPMh?(?b$=~aEbDE zzP8Wa^3AW;9^pXhPVzU2M+U6jGw{`X{87M_>01ylqNmxgbTlIo)oZH=bFE@fHny-M z)AdlBmfVq(c)U%)^*8rdaj4}!+*diFp7PO@hrwiB z+$)V5d2U#2TeYkEK#JE1oR|bUGv4b&Xi&eX6V=IUHhKMX!5q^hGX!{_Ty|N$GB$~Z zrLCl%8Nrlm^Jlz(M07`bBSYI^$5+|katQluw*x}5i^q~a79OhGkK4SH63gCxmvP#v z+Z>H}M1Sxg^x#>pA=}a;&m4CH$y9662Zk$~_$8!md&`z!au7qM)o#V5X+@>U^429P zG3xYFMAJ14l~3z0V_QPVn;(d=wcda0++Y`#;eC!_R@eYhoEz8UMdA5G+WW-zoZsqj z{$WKx2*V`1N@A!ap|72WK_a@Mvsli9mmV|{yAxVq6sal7@~>z!r_(~+>NHkRg^KFT zVw=@;gTpu|9el=Y>gFWOEX!veN)$YXlo^noVD1o@_Y{cj{D6k6{KnF_(Plxq8q&fZ z^Q{3}_TtCZZz@x>rm*L-#U`%1dtg&k?fzX8{c=NYv?ui?JaoJ^&gk4RYi&;+a*r*X zV7_9uI4o3=kT+M-z)_bvG^ZqAkhed3S9_$Vfi#IjfL_gscX!3>91>&!p4?pIDVjdo|YM<2EjtFcB; zWUv*bA^5mU-Az`vxD@%t$uzNK_)vmSx0Wtzh9>3Rmg0WXN7t6*Oj!e6PuFi1Me}E) zj<((5X-*F-18Yax%Q~m6O9c*IYsL(ZKiZj{Zi7vAfSC}i)SJ;3XPev`Zjjb6WPE|E zJU@`GUOzTx)i@7jI5kHb$xk>+NIR;DyF=5V*;*M@6`f(9w62nIc<51tVtbyuJIXmZ zht7$YX5j!wIB!y%y~9n_!B<&@c|f0{mzoocJJ$xvvcN;-$#=mFWuvo>v|fiE8?g9^ZcfWQIkO8>_Q ztf*`BpAmRS{p#5~JKEKoyxyg3@?W$%uBd^l!e#Bx9P^5fJ1_IpfIze?fG9&~=XT8U6(WDjo7 z1=%cAXH*Ro%49#}S$U^jn>a|OLoqaGI#RuMF?|uO(@YJs_fN38xDk5G-NooW$&E?J zd<^VV^?^+ohlCa2kBkR}3F`|u4=x*_Pl9a+U-g?v7%^R8X5P>vg*3&{@P2C=ZbLJvn6+e9HSxHP$ac=k3YvWie-t)Y~&=UAq3?jvV0IB_$96aFb(sCeh4 zlrzM{J$jfF(1&qIUZt`dWWg|mq6qUs0fQT;*xWcXVg#X`$s$s#&fHk2L=<|`j?}X8 z9yQ=pVNhA)Hew6;;jjEAoXpVH-Cg%fo^cWH;eUIP$z@VYFuomSRO&i+ugVqphEEu8 zk?TOR(N*m}0tU4A?1q|;JxB&YOh0!vzkfL^TBck?kn!O9{oVFY-nIfg!NDESSdfg= z%=%-|J#|e?az|vLBe|M#B+HmN6v9*5%^;4;_c5s6J{`@s?g4#cd_b+&8wTkutV6`0 z4bsPm_8$2w-wrh<`nh-5!YF6#&fK(+&9apcs#a%CEXA?fiu_uhBu)I*H!eKclC8nX 
z$#-o$hmV{Be7D@ipZVjuBA-To!-5iVLZy0F=`IMlNz+R+@Oql~NY&!myB8I27Cb}O zLKd@5;qfY4TCCb(sudgw`M++#MXJh8)`?!iCI@a$;1EcDiKwk*nte-69u*KWOfyet z3{MTJWjnm*^3BM0rfyehcxriqlP4~R1M$91G|0W&@!2X}aoo=I`_tNUsIyI;*AHjYaValSt`2$h(nk8hI`$l%P2JT0BmfhJO5YE0&!e;?`5r(iH6LkO=E zE`2^9#=`iWVQ^>~`%9bCv3n0mEEfwt5U1r>1gDA^&6YispcTsz!LU#jRr{LeL`3LY z%egyVdGAT+2$5&jE*~EWh`A}j)~0rIR!3lu2u7Y@7;DRi`(iN)A~pC>@!p*+WxnM8 zPi8pRFqtBR+RU)(kQ%IwBEx*n%8-+KO7aXu-&35_K;t`h^28$B39c{tgn`3kBK8&$ z(`!g*bY^;Eeeh@K!!^NQRh)AZiY_BfVucue)k=qi_R!Lo2;RgLJl({ntbMwG);qcA znW)9wgx`)kLOn?tH=Sqf#Gw2TcTT}D>P+|kv3a^Mw6tQ>uxd~if;(3_?qxr6J|nB9 zU)gzCO4(T4!Q?Wh~Z`$R@QZY2I~H{I&whW4!8{=vH) zgc_gxgVgnbK|_#a**dNW1(I_|%5ZaLhA`-23YK6Hb`*9*+*p)UgW=Xw?1B2+YEK#C zhb#Cl#Z+0-;5aC8_nhN96=;qSwPePMW3$CMWZZeQy`vgJ<;HGNjC%&miX=Z$G{z9yXuDjr^ z#e=B`a+sv{QYpdMCKZei#bo^!v{fwXhB)-l?DWT`2I3Wq3o^MPGZIo`*R%wrZeTKX7w#IF=7u z%)@)rKa!ao0}JY&k7hhN0@-OO1AVB?XIMD+u@eTkuMfiYHKT@)X}dAZ#CCs&8B)-0 zZzPB^K3JGBCYz*bGi?dnTGlMIo(M!zcQ`bQI?lW@c&kA_Rubf9N)YI7d%Q{acD=~+ z?1{FZOA=74$vK&O8t4Kx?NDmZmzv92D zF|6c8|JcI}&sb_(unf0564TTjTt7NrhT4kVvvUud{`s5G>63dwD#BfruAiWgS?Ksk zctN)xkwXqc3|!uxXBN$hys;EZB!fu)w z4>Ll2E%62v@dKl3ixs4E@CwDt-mp4I8($gMfCU?E8sbZ@H`u<+@$R98YZRG&(EVx5 zm8MVk%G{q>$^kR=wWm7Q$;b&E1YP{dIEKDotcl@M+#zi9ndo;s8sTj0W!#35Q`kqe z^M)cj%Lbq_=5ks_8LlO?sSuA99r9+<%^ay;a5`i`L_3nVz;A-nQ%#;lyTgKud z1n;N>BM5(i-mlF2^?~h#f8{!tlB|aDBbBeVP}(^oD(g<8MHmtR82zEjPb5*6L9|C01)0>4y3=rN zS8?xSlt3hvXwA2oOwxWbsf@LmZ1%n9weApLO=c1wPOEiAxW!&m(55Lgx^kI3Ai4aa zhzcyI1&C4`qR7DWCRq8z+Y#wqL7}C{aQQN2Z({eEKqPF)eUtYhThLoCkf{F(iLcOM zxvz#|u8c&JL#mO~O3lHX=KX;W37$AC$>)PJjqW*WjL{c&lZ-q|GH8*=`JfhLX>tqv zF}4qeGcOKOrnSTu!puH~E&CV5s=O|Urda&i^!_D(jwJOHaP9N5Ue$`HQyRFFl&w|* zi>Mt4<(9B;g<+rI@fPc7VwT}DfKY#Zng*lIRva(8SwCc{w-%_r<>G29C0*XB!u)Vl zNraS-+(IlZe>gDo1z$P4VE(OX-{T(tt!F333d8dW#b+ieR~nui^7GEAl*mEm~HVt8K6RD-97vI93>~EwQ#CApQ^a*6GqJDTciOj91)~2Um#1tiDiJP#_ z;_Wfj>vPd>&?YRh;{DUSdqPCBA44rc^_n) z1WVnizLRIrOYy-s(oMO*Z>T7!%+3Q$HN?n(MI-!7PiScf>#z$V!7Y)&qv_mG{PEE4 
zo^kEK*_8L&#i)VCg_Fbei(RZ2T2XU5#s`NNU%vS;mf`g3db~$_0%B%YMe(Y73oDpa zNfka*A)+#%)CEFv;1T&EV+CrMlu;RJ~CMVo| zjVK#rXX=uNmbCA1m`?LSUZsqup!1L#8WBzlDpPy_pnk*OL~)) z1m^abb=JfB8R;1;*c#z3Zl%irrVe2s8XSt9w)GQF%&v+$V)zE5%af~Q;z1RIB|gRy zzkU9q6E+Xj57}@P%;FSe`i+UhH0Z%hrmcq#dZ&`C2NspR_-mf@2Y6?wRb^;E2PYoHPrEqo%XBQ=A$pwPuI;i_1MAca8%QG zt8PA84L(Sv!NV~_>u9uo?1p#2oxSwP2r=)a%K2AJCDGw`e$+e+IBbFDIgFLn7G!WN zaagF@QLU5^hn^Y>^V`JX=c%QHZN!_I=Zmxzl156M@#BY-OLE9N=z)1bHDsHzY(B@P zrBBLY^l}%M`MN!iVL5Owvw6ZA(PC-2(zUlAGewLV&?nFs=JaiQQF>HvJ_sF=1WQUK z)9c8bsEkQ|Iy+KYCgF+mK1dAoINsY-E($~Hi7sSRGQ|b$r(G<}nV8r~NCe3HmGNzU zc8v;^^6W5>Pv2%_8$<OQ{2Rbrp&dszan< z4tvu_eo6DIE0C{aldrhBR(NDYVeEX`uy*3i>ii>(ea9V{rY&)JL#u+I2^HnS!8Yc- zY+23c!Nw?#>U#0p|BdOaC@ma31Vr-+g!y8 z84wJ(YT5l!u;3>s#2TP}U6mbN>Ze=dV=hwx!eA`%C%A%6dPN*e@#>Pril!Fa`e+5C zdy?-UWLT`jY{-SYKiBH{s++`&fW}E8i!(NZ;X@q1U_U0QPD#`p%INxrVBBMmHmTiH zOlO>sA#AN{&WaEui)l$M`9$hcy(>y~J$an8YG7ep4{Ae9iqbq0qbR9D9W(HuJ{dE& zI#yXgYZULUB^y1mH) z#>YM8i5PLB2o9=|q3szjSBQ0>2W38I;7rvLOL{!BYdx7RwWP4hv84je3-!4ZT9_V# z8@vKq=*jA{;7$QHL1qhb=f|q$0f|uJDPc&q4WA^HPnV)fp5NOTMJWPl#Wta8N^mF0 zYe?nP*2qTEg{@(4)x3nNDT9A!8xym|wy)o^`fB~#>F7PP&N2~V_W*ENZ@&*bbSw^p z=zaLH{3X7Jkz#0yB2I{i*``M;2SKmW#ou_s;x09On+NAk-^FVW`6#Gd8+OE_I%{}E zy1Be|um7d4H&ip1cX??H5B$;dJ%+99N*%E^_ExejGgx!Dk?0pR`7SFBua&Y5GfGG_ z@aw@v1XXb4t@QlIv30$_aUfZhznOQW`D#VzF{1D}fLue^HF&_)@QkrJdw6VkhFA(8AR8SLR|_>+Yd2ihyror4pCzt#zEOJOL1)ycq(mLtF%9cS*^k~e?T^EoofWUiGirmu`ZB_1(i-kJ=p5( zo>A1H4C4eUS3GW-Ndvm?^c(>~*sQ-^5L0SsHR%arH)63AT>a zL4!}{ca3maii?dBd0LDPzlG-~;iPk|LE@U^ zxu)W5GOFy9el)8{MPt#hv;o7}OpjFAC}=)8c2&JBX5z#<8?3C(yU-|I%G_JgZA7&h zF|n{>*p5=&d(n3I9z}N%zEb{Cg){fZ5d6|tW+TEwv`V@9u2($O57ZY$i$5>b7|OWE znTxc#nUE_fdF+_l+OWrGO=;EdzIn$@R))QqLQ7edP3R+C*;>F0M-awzb|||V!azO* zhlKJ`dW(Yg)4R__ixS&Rs%tr%qw`2UMku}@=?WOcLhXZXnNl-8haKMKIl2PDOuD>4s`3G3+`zN7*?C@VdN((FT zKMsC1QkK{CDYPH2N%PByiwG+!(MyZ`O8|m6 znG17%f8+WW*meFVU1wYeJX_lt|CHx{-?%C2{6BBF=vrFbcx@7_ z|IX;I*X*ry%?nYeu=U@;EN!fTFt<0v`jvE3 zSwz73g6nGiMcX#IcE56dRTl1dfk3V$dU89*3OHJJQ|G^ma}yJKJI2t??iXeMYC|_m 
z)pa|@&QKRP>2^aqcXPho&T+6dH?+FDSok-z!wSUgyG6_P`npzDzX)`F)zFW3frpzF zZ*@E9$HhZGx94ArcJBxFFMGV51l0QXs}z1_{aR}`i}(I^*7xvOcY$zj2t^3=Ik)KP zufwuGQ?94oKVAeLZZ|XZk&5d08Ue zMHIKRu`ty)b@=z1$6wX(O}&2C8z653WSZ|-;5W-_063lcds+Vu8u)vOBiG*h5db>K z0@zG9xG|V_aRsd{EP&42)Y|G!6<<`m36%!$>_Giruj+hHSzaK3&Hi3&A!~g{OQ7EF zga_O0Ny!56YkR*bdm_-0{T{ApXzOTbrEdtF8Mkxzy=q9M(cTmR_;lb1@J&2B;a&Le zvi||!R-SKa3gEA=0lhw5{5@rPJtV#hF9Qq)zZd#qq6rfhpk7DIzNzq4(z~dCHE#H@M^&A`g3L82{()P z;~)aS(?CnV-tYaMvb@NE#`JsmzgGSS%|JjZm0g=6K2Vw0VP(IkEU##w)B1e||7hk9 z^io(z&uiTj0!5(yjs||SyqJK@eoyDOFx9oc-Bzyskhb^AuRQ_eG0>6T)Jh`rt=Jn* zo3x>WiM4^8oiz}4-NEHQ9qe@m;?dR45I_cdi-W(fRIR4E4t?1W7SrFF~-b}y~Mn`!{ z;H) z>pcIb7Oz!tW+Gk~0%%zd&@%fEMO<4Sk@6oi_z8Y9kn*qEallYT#|JbM4%o`gx&%}G z|8u#u0EMrgf_wxDFa{KW^+o|qH2#R@*8^r2y7~@33i6*Ut}WgDH40HZ76?d$&9B{V z_R~LR@dIuBUl-`QhBppFd~*T$9Rd0IZpdHg_-DAIy@R!-poOlz{oh{JpK>bzr+Yz( z3<829_p3dXIsY-Y>yt8fWpHgz--N~)jDg}90e$7oUV$p$HyQjhArrLz`{DPNbgiy@ z-Q!p4fSY6txav21%jKZ|kN=-4z83pxgR`3&sDTc&zduL&UJ+Sdo{_g=m0WBLg$(WW z?M(mb6mEse&?OpJ0lF{)ba8V;-Hp2y`h80I*Ua+wdbk!iBg`SL7wB)k0Q{SJ$V^~x3t5?oZ8G9>oNR27S6~LkXXF77P>Q?0U>4@B4qTb4^$sDV10+c!ch)Q-t%*EPU znSamSubnn=gZ~h(^|@=N5_UZyef=NLexd#j;&&(ZntUr4e%o1)8!)RXp!7GZKjO_D z+oYzAfp0}3~>2=q%5zo&O6B0 zyxT{|>yG(V4}y#nAnFqY5D?NI$ZNBD+4BeJty=YK4Y zw&;(Qq{IqQqLfNBi^E4|rczh3_{^7q=k zW_K}pca{TOCP2G4+g9f0U2J75Q+=Qn+#$8*DrV|8KoQA6rTx1WcHIn8cW))D0n5#| zmcMOY$t4@GfJvanC~lb6)R$XH-#gWxE@=AI3Y=r+8h8Mm9s-td(?M%Kzm@sV(xHfj z^|SB&;9pZHrRycdTg6sAfzaXvvN!^=xH&JDxx6cjpXSBChc91kL#zVu6rlM6^T{76 z%WDb*^tO4S3_xL&q{@Lru06PHwrUw}V ze;58Iy#UJvzvpkk6p)%J9B@r(?t=f*ZXE3H zXp1-#g>l1*#Y$0f6(>}ccHIaj4-e{dWY1jAF|S} z&2Ipx(woE07pA+={8suV)^@)a*%+0l*%z?AZopdt=FUG-mRBvyt=v1dZ4xVSg}{NL z^8oGnW)pGdyp{ER7f;a?m?0VhJ?X8(c*jxLxi?Vs>xnq9-t{A8dA0N0js1I*@NWOB zMbwTs6`&*irvXOt{Xg{ohZTR0io-9Bw{SJmf)ZXbS54{%=s3yU`=5ZiWtVCxziT1e~ao0wV| z(%V`5?CM=_%0i>|_g;VdOppDmUFbdg1Na}I|D)t<&-vHu?Y|pFme>4?Td_aw&?^4e zti2We>z%m&qCx!sfArmk_v`(p|K- z!Q#(#cdg`K@6P*|yc+iZknh&PuQ#FnOSg->lm53CedAuVpC!L;gTLNF^)Fi?=1%rM 
zI{A-bTtk12PxUV}r21CqZPBWJmh@W3zs4N;7m8H(d+4tbh<>I18Uy2B>g1bSsXy=X z|2S%Ys}_C@X7DfCsP&KN+d>=sEc>;ZfBjbT-wZZ;{+PjSUvB=K!EIk2{F{O6*dH_a cp`0Jb7a#$D69mK#_~Qf2j|Ufk#X^w(0nQTU4*&oF literal 0 HcmV?d00001 diff --git a/shared/.classpath b/shared/.classpath index b8e179ae..977652bf 100644 --- a/shared/.classpath +++ b/shared/.classpath @@ -3,7 +3,7 @@ - + diff --git a/shared/src/freenet/library/util/SortedArraySet.java b/shared/src/freenet/library/util/SortedArraySet.java index 853c33ea..f15ab5ab 100644 --- a/shared/src/freenet/library/util/SortedArraySet.java +++ b/shared/src/freenet/library/util/SortedArraySet.java @@ -3,7 +3,7 @@ * http://www.gnu.org/ for further details of the GPL. */ package freenet.library.util; -import freenet.support.Fields; // JDK6: use this instead of Arrays.binarySearch +// import freenet.support.Fields; // JDK6: use this instead of Arrays.binarySearch import java.util.Comparator; import java.util.Iterator; diff --git a/src/plugins/Library/index/TermEntryReaderWriter.java b/src/plugins/Library/index/TermEntryReaderWriter.java index e64da5ed..7506352e 100644 --- a/src/plugins/Library/index/TermEntryReaderWriter.java +++ b/src/plugins/Library/index/TermEntryReaderWriter.java @@ -4,7 +4,6 @@ package plugins.Library.index; -import freenet.keys.FreenetURI; import freenet.library.io.DataFormatException; import freenet.library.io.ObjectStreamReader; import freenet.library.io.ObjectStreamWriter; @@ -53,9 +52,9 @@ public TermEntry readObject(DataInputStream dis) throws IOException { case TERM: return new TermTermEntry(subj, rel, dis.readUTF()); case INDEX: - return new TermIndexEntry(subj, rel, FreenetURI.readFullBinaryKeyWithLength(dis)); + return new TermIndexEntry(subj, rel, dis.readUTF()); case PAGE: - FreenetURI page = FreenetURI.readFullBinaryKeyWithLength(dis); + String page = dis.readUTF(); int size = dis.readInt(); String title = null; if (size < 0) { @@ -89,11 +88,11 @@ public void writeObject(TermEntry en, DataOutputStream dos) throws 
IOException { dos.writeUTF(((TermTermEntry)en).term); return; case INDEX: - ((TermIndexEntry)en).index.writeFullBinaryKeyWithLength(dos); + dos.writeUTF(((TermIndexEntry)en).index); return; case PAGE: TermPageEntry enn = (TermPageEntry)en; - enn.page.writeFullBinaryKeyWithLength(dos); + dos.writeUTF(enn.page); int size = enn.hasPositions() ? enn.positionsSize() : 0; if(enn.title == null) dos.writeInt(size); diff --git a/src/plugins/Library/index/TermIndexEntry.java b/src/plugins/Library/index/TermIndexEntry.java index a239c9d9..792e452a 100644 --- a/src/plugins/Library/index/TermIndexEntry.java +++ b/src/plugins/Library/index/TermIndexEntry.java @@ -3,10 +3,6 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.index; -import plugins.Library.index.TermEntry.EntryType; - -import freenet.keys.FreenetURI; - /** ** A {@link TermEntry} that associates a subject term with another index. ** @@ -17,14 +13,14 @@ public class TermIndexEntry extends TermEntry { /** ** Index target of this entry. 
*/ - final public FreenetURI index; + final public String index; - public TermIndexEntry(String s, float r, FreenetURI i) { + public TermIndexEntry(String s, float r, String i) { super(s, r); if (i == null) { throw new IllegalArgumentException("can't have a null index"); } - index = i.intern(); + index = i; } /*======================================================================== @@ -40,7 +36,7 @@ abstract public class TermEntry int a = super.compareTo(o); if (a != 0) { return a; } // OPT NORM make a more efficient way of comparing these - return index.toString().compareTo(((TermIndexEntry)o).index.toString()); + return index.compareTo(((TermIndexEntry)o).index); } @Override public boolean equals(Object o) { diff --git a/src/plugins/Library/index/TermPageEntry.java b/src/plugins/Library/index/TermPageEntry.java index 31604ffe..f884650c 100644 --- a/src/plugins/Library/index/TermPageEntry.java +++ b/src/plugins/Library/index/TermPageEntry.java @@ -3,18 +3,13 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.index; -import plugins.Library.index.TermEntry.EntryType; - -import freenet.keys.FreenetURI; -import freenet.support.SortedIntSet; - import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.SortedSet; +import java.util.TreeSet; /** ** A {@link TermEntry} that associates a subject term with a final target @@ -27,11 +22,12 @@ public class TermPageEntry extends TermEntry { /** ** URI of the target */ - final public FreenetURI page; + final public String page; - /** Positions where the term occurs. May be null if we don't have that data. - * Specified as SortedSet for ObjectBlueprint but will really always be a SortedIntSet. */ - final public Set positions; + /** + * Positions where the term occurs. May be null if we don't have that data. 
+ */ + final private SortedSet positions; /** ** Map from positions in the text to a fragment of text around where it occurs. @@ -54,7 +50,7 @@ public class TermPageEntry extends TermEntry { ** @param p Map of positions (where the term appears) to context (fragment ** surrounding it). */ - public TermPageEntry(String s, float r, FreenetURI u, Map p) { + public TermPageEntry(String s, float r, String u, Map p) { this(s, r, u, (String)null, p); } @@ -69,48 +65,34 @@ public TermPageEntry(String s, float r, FreenetURI u, Map p) { ** @param p Map of positions (where the term appears) to context (fragment ** surrounding it). */ - public TermPageEntry(String s, float r, FreenetURI u, String t, Map p) { + public TermPageEntry(String s, float r, String u, String t, Map p) { super(s, r); if (u == null) { throw new IllegalArgumentException("can't have a null page"); } - page = u.intern(); // OPT LOW make the translator use the same URI object as from the URI table? + page = u; title = t == null ? null : t.intern(); if(p == null) { posFragments = null; positions = null; } else { posFragments = Collections.unmodifiableMap(p); - int[] pos = new int[p.size()]; - int x = 0; - for(Integer i : p.keySet()) - pos[x++] = i; - Arrays.sort(pos); - positions = new SortedIntSet(pos); + positions = new TreeSet(p.keySet()); } } /** ** For serialisation. */ - public TermPageEntry(String s, float r, FreenetURI u, String t, Set pos, Map frags) { + public TermPageEntry(String s, float r, String u, String t, Set pos, Map frags) { super(s, r); if (u == null) { throw new IllegalArgumentException("can't have a null page"); } - page = u.intern(); // OPT LOW make the translator use the same URI object as from the URI table? 
+ page = u; title = t; if(pos != null) { - if(pos instanceof SortedIntSet) - this.positions = (SortedIntSet) pos; - else { - Integer[] p = pos.toArray(new Integer[pos.size()]); - int[] pp = new int[p.length]; - for(int i=0;i(pos); } else positions = null; if(frags != null) { @@ -177,19 +159,9 @@ public Map positionsMap() { if(positions == null) return null; if(posFragments != null) return posFragments; HashMap ret = new HashMap(positions.size()); - if(positions instanceof SortedIntSet) { - int[] array = ((SortedIntSet)positions).toArrayRaw(); - for(int x : array) - ret.put(x, null); - return ret; - } else { - Integer[] array = positions.toArray(new Integer[positions.size()]); - if(!(positions instanceof SortedSet)) - Arrays.sort(array); - for(int x : array) - ret.put(x, null); - return ret; - } + for(int x : positions) + ret.put(x, null); + return ret; } public boolean hasPosition(int i) { @@ -197,35 +169,21 @@ public boolean hasPosition(int i) { } public ArrayList positions() { - if(positions instanceof SortedIntSet) { - int[] array = ((SortedIntSet)positions).toArrayRaw(); - ArrayList pos = new ArrayList(array.length); - for(int x : array) - pos.add(x); - return pos; - } else { - Integer[] array = positions.toArray(new Integer[positions.size()]); - if(!(positions instanceof SortedSet)) - Arrays.sort(array); - ArrayList ret = new ArrayList(positions.size()); - for(int i=0;i ret = new ArrayList(positions.size()); + for(int i=0;i Date: Sun, 28 Dec 2014 18:40:03 +0000 Subject: [PATCH 017/180] Moved the reader/writer and entries. 
--HG-- branch : eclipse-separation --- .../src/freenet/library}/index/TermEntry.java | 2 +- .../library}/index/TermEntryReaderWriter.java | 2 +- .../library}/index/TermIndexEntry.java | 2 +- .../freenet/library}/index/TermPageEntry.java | 2 +- .../freenet/library}/index/TermTermEntry.java | 2 +- .../freenet/library}/io/YamlReaderWriter.java | 14 +++++-------- .../freenet/library}/index/TermEntryTest.java | 20 +++++++++---------- .../library}/io/serial/YamlMapTest.java | 2 +- src/plugins/Library/Index.java | 2 +- src/plugins/Library/Main.java | 6 +++--- src/plugins/Library/SpiderIndexUploader.java | 4 ++-- src/plugins/Library/VirtualIndex.java | 2 +- src/plugins/Library/WriteableIndex.java | 2 +- src/plugins/Library/index/ProtoIndex.java | 2 ++ .../index/ProtoIndexComponentSerialiser.java | 3 ++- .../Library/index/ProtoIndexSerialiser.java | 3 ++- .../Library/index/xml/FindRequest.java | 4 ++-- .../Library/index/xml/LibrarianHandler.java | 2 +- src/plugins/Library/index/xml/XMLIndex.java | 4 ++-- src/plugins/Library/search/ResultSet.java | 8 ++++---- src/plugins/Library/search/Search.java | 2 +- .../Library/search/inter/Interdex.java | 6 +++--- .../Library/ui/RelevanceComparator.java | 2 +- .../Library/ui/ResultNodeGenerator.java | 8 ++++---- .../Library/ui/TermPageGroupEntry.java | 5 +++-- test/plugins/Library/Tester.java | 2 ++ test/plugins/Library/index/BIndexTest.java | 1 + test/plugins/Library/util/Generators.java | 1 + 28 files changed, 59 insertions(+), 56 deletions(-) rename {src/plugins/Library => shared/src/freenet/library}/index/TermEntry.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/index/TermEntryReaderWriter.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/index/TermIndexEntry.java (98%) rename {src/plugins/Library => shared/src/freenet/library}/index/TermPageEntry.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/index/TermTermEntry.java (97%) rename {src/plugins/Library => 
shared/src/freenet/library}/io/YamlReaderWriter.java (95%) rename {test/plugins/Library => shared/test/freenet/library}/index/TermEntryTest.java (87%) rename {test/plugins/Library => shared/test/freenet/library}/io/serial/YamlMapTest.java (99%) diff --git a/src/plugins/Library/index/TermEntry.java b/shared/src/freenet/library/index/TermEntry.java similarity index 99% rename from src/plugins/Library/index/TermEntry.java rename to shared/src/freenet/library/index/TermEntry.java index e5832b5a..0a9ef220 100644 --- a/src/plugins/Library/index/TermEntry.java +++ b/shared/src/freenet/library/index/TermEntry.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.index; +package freenet.library.index; /** ** Represents data associated with a given subject {@link String} term. diff --git a/src/plugins/Library/index/TermEntryReaderWriter.java b/shared/src/freenet/library/index/TermEntryReaderWriter.java similarity index 99% rename from src/plugins/Library/index/TermEntryReaderWriter.java rename to shared/src/freenet/library/index/TermEntryReaderWriter.java index 7506352e..ba0c5c5b 100644 --- a/src/plugins/Library/index/TermEntryReaderWriter.java +++ b/shared/src/freenet/library/index/TermEntryReaderWriter.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.index; +package freenet.library.index; import freenet.library.io.DataFormatException; diff --git a/src/plugins/Library/index/TermIndexEntry.java b/shared/src/freenet/library/index/TermIndexEntry.java similarity index 98% rename from src/plugins/Library/index/TermIndexEntry.java rename to shared/src/freenet/library/index/TermIndexEntry.java index 792e452a..907879a0 100644 --- a/src/plugins/Library/index/TermIndexEntry.java +++ b/shared/src/freenet/library/index/TermIndexEntry.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.index; +package freenet.library.index; /** ** A {@link TermEntry} that associates a subject term with another index. diff --git a/src/plugins/Library/index/TermPageEntry.java b/shared/src/freenet/library/index/TermPageEntry.java similarity index 99% rename from src/plugins/Library/index/TermPageEntry.java rename to shared/src/freenet/library/index/TermPageEntry.java index f884650c..323f559d 100644 --- a/src/plugins/Library/index/TermPageEntry.java +++ b/shared/src/freenet/library/index/TermPageEntry.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.index; +package freenet.library.index; import java.util.ArrayList; import java.util.Collections; diff --git a/src/plugins/Library/index/TermTermEntry.java b/shared/src/freenet/library/index/TermTermEntry.java similarity index 97% rename from src/plugins/Library/index/TermTermEntry.java rename to shared/src/freenet/library/index/TermTermEntry.java index a627fcbf..af119f2f 100644 --- a/src/plugins/Library/index/TermTermEntry.java +++ b/shared/src/freenet/library/index/TermTermEntry.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.index; +package freenet.library.index; /** ** A {@link TermEntry} that associates a subject term with a related term. diff --git a/src/plugins/Library/io/YamlReaderWriter.java b/shared/src/freenet/library/io/YamlReaderWriter.java similarity index 95% rename from src/plugins/Library/io/YamlReaderWriter.java rename to shared/src/freenet/library/io/YamlReaderWriter.java index 64e8e6a3..d5537b37 100644 --- a/src/plugins/Library/io/YamlReaderWriter.java +++ b/shared/src/freenet/library/io/YamlReaderWriter.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.io; +package freenet.library.io; import org.yaml.snakeyaml.Yaml; @@ -29,14 +29,10 @@ import java.io.IOException; /* class definitions added to the extended Yaml processor */ -import plugins.Library.index.TermEntry; -import plugins.Library.index.TermPageEntry; -import plugins.Library.index.TermIndexEntry; -import plugins.Library.index.TermTermEntry; -import freenet.library.io.DataFormatException; -import freenet.library.io.ObjectBlueprint; -import freenet.library.io.ObjectStreamReader; -import freenet.library.io.ObjectStreamWriter; +import freenet.library.index.TermEntry; +import freenet.library.index.TermIndexEntry; +import freenet.library.index.TermPageEntry; +import freenet.library.index.TermTermEntry; import freenet.library.io.serial.Packer; diff --git a/test/plugins/Library/index/TermEntryTest.java b/shared/test/freenet/library/index/TermEntryTest.java similarity index 87% rename from test/plugins/Library/index/TermEntryTest.java rename to shared/test/freenet/library/index/TermEntryTest.java index b0df5295..c16de44d 100644 --- a/test/plugins/Library/index/TermEntryTest.java +++ b/shared/test/freenet/library/index/TermEntryTest.java @@ -1,14 +1,16 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.index; +package freenet.library.index; import junit.framework.TestCase; - -import plugins.Library.io.YamlReaderWriter; - -import freenet.keys.FreenetURI; +import freenet.library.index.TermEntry; +import freenet.library.index.TermEntryReaderWriter; +import freenet.library.index.TermIndexEntry; +import freenet.library.index.TermPageEntry; +import freenet.library.index.TermTermEntry; +import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.FileArchiver; import freenet.library.io.serial.Packer; import freenet.library.io.serial.Serialiser.*; @@ -50,11 +52,7 @@ public void testBasic() throws TaskAbortException { l.add(y); l.add(z); map.put("test", l); - try { - map.put("test2", new Packer.BinInfo(new FreenetURI("http://127.0.0.1:8888/CHK@WtWIvOZXLVZkmDrY5929RxOZ-woRpRoMgE8rdZaQ0VU,rxH~D9VvOOuA7bCnVuzq~eux77i9RR3lsdwVHUgXoOY,AAIC--8/Library.jar"), 123)); - } catch (java.net.MalformedURLException e) { - assert(false); - } + map.put("test2", new Packer.BinInfo("CHK@WtWIvOZXLVZkmDrY5929RxOZ-woRpRoMgE8rdZaQ0VU,rxH~D9VvOOuA7bCnVuzq~eux77i9RR3lsdwVHUgXoOY,AAIC--8/Library.jar", 123)); ym.push(new PushTask>(map)); PullTask> pt = new PullTask>(""); @@ -76,7 +74,7 @@ public void testBasic() throws TaskAbortException { assertTrue(m.get("test2") instanceof Packer.BinInfo); Packer.BinInfo inf = (Packer.BinInfo)m.get("test2"); - assertTrue(inf.getID() instanceof FreenetURI); + assertTrue(inf.getID() instanceof String); } public void testBinaryReadWrite() throws IOException, TaskAbortException { diff --git a/test/plugins/Library/io/serial/YamlMapTest.java b/shared/test/freenet/library/io/serial/YamlMapTest.java similarity index 99% rename from test/plugins/Library/io/serial/YamlMapTest.java rename to shared/test/freenet/library/io/serial/YamlMapTest.java index 414e8a21..7c194304 100644 --- a/test/plugins/Library/io/serial/YamlMapTest.java +++ b/shared/test/freenet/library/io/serial/YamlMapTest.java @@ -1,7 +1,7 @@ /* This code is part of 
Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.io.serial; +package freenet.library.io.serial; import junit.framework.TestCase; diff --git a/src/plugins/Library/Index.java b/src/plugins/Library/Index.java index e76ffe6a..5e66a515 100644 --- a/src/plugins/Library/Index.java +++ b/src/plugins/Library/Index.java @@ -3,10 +3,10 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library; -import plugins.Library.index.TermEntry; import plugins.Library.index.URIEntry; import freenet.keys.FreenetURI; +import freenet.library.index.TermEntry; import freenet.library.util.exec.Execution; import java.util.Set; diff --git a/src/plugins/Library/Main.java b/src/plugins/Library/Main.java index 63f59df8..f5701d0f 100644 --- a/src/plugins/Library/Main.java +++ b/src/plugins/Library/Main.java @@ -29,8 +29,6 @@ import plugins.Library.index.ProtoIndex; import plugins.Library.index.ProtoIndexComponentSerialiser; import plugins.Library.index.ProtoIndexSerialiser; -import plugins.Library.index.TermEntry; -import plugins.Library.index.TermPageEntry; import plugins.Library.search.Search; import plugins.Library.ui.WebInterface; @@ -46,6 +44,9 @@ import freenet.keys.FreenetURI; import freenet.keys.InsertableClientSSK; import freenet.l10n.BaseL10n.LANGUAGE; +import freenet.library.index.TermEntry; +import freenet.library.index.TermEntryReaderWriter; +import freenet.library.index.TermPageEntry; import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.Serialiser.PullTask; import freenet.library.io.serial.Serialiser.PushTask; @@ -71,7 +72,6 @@ import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.security.MessageDigest; -import plugins.Library.index.TermEntryReaderWriter; import plugins.Library.index.xml.LibrarianHandler; /** diff --git 
a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index bd10d4fd..2ef82b2b 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -25,10 +25,10 @@ import plugins.Library.index.ProtoIndex; import plugins.Library.index.ProtoIndexComponentSerialiser; import plugins.Library.index.ProtoIndexSerialiser; -import plugins.Library.index.TermEntry; -import plugins.Library.index.TermEntryReaderWriter; import freenet.client.InsertException; import freenet.keys.FreenetURI; +import freenet.library.index.TermEntry; +import freenet.library.index.TermEntryReaderWriter; import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.Serialiser.PullTask; import freenet.library.io.serial.Serialiser.PushTask; diff --git a/src/plugins/Library/VirtualIndex.java b/src/plugins/Library/VirtualIndex.java index 9648fef4..5c2fa1ff 100644 --- a/src/plugins/Library/VirtualIndex.java +++ b/src/plugins/Library/VirtualIndex.java @@ -3,8 +3,8 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library; +import freenet.library.index.TermEntry; import freenet.library.util.exec.Execution; -import plugins.Library.index.TermEntry; import plugins.Library.index.URIEntry; /** diff --git a/src/plugins/Library/WriteableIndex.java b/src/plugins/Library/WriteableIndex.java index 158656f1..de3e7f48 100644 --- a/src/plugins/Library/WriteableIndex.java +++ b/src/plugins/Library/WriteableIndex.java @@ -3,10 +3,10 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library; -import plugins.Library.index.TermEntry; import plugins.Library.index.URIEntry; import freenet.keys.FreenetURI; +import freenet.library.index.TermEntry; import freenet.library.util.exec.Execution; import java.util.Collection; diff --git a/src/plugins/Library/index/ProtoIndex.java b/src/plugins/Library/index/ProtoIndex.java index 5ada5104..66a8ffe9 100644 --- a/src/plugins/Library/index/ProtoIndex.java +++ b/src/plugins/Library/index/ProtoIndex.java @@ -6,6 +6,8 @@ import plugins.Library.Index; import freenet.keys.FreenetURI; +import freenet.library.index.TermEntry; +import freenet.library.index.TermPageEntry; import freenet.library.io.serial.ProgressTracker; import freenet.library.io.serial.Serialiser; import freenet.library.util.DataNotLoadedException; diff --git a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java index fa141910..99be285b 100644 --- a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java @@ -5,10 +5,11 @@ import plugins.Library.Library; import plugins.Library.client.FreenetArchiver; -import plugins.Library.io.YamlReaderWriter; import freenet.keys.FreenetURI; +import freenet.library.index.TermEntry; import freenet.library.io.DataFormatException; +import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.Archiver; import freenet.library.io.serial.FileArchiver; import freenet.library.io.serial.IterableSerialiser; diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java b/src/plugins/Library/index/ProtoIndexSerialiser.java index 5e68b91e..d65dbcb1 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexSerialiser.java @@ -5,10 +5,11 @@ import plugins.Library.Library; import plugins.Library.client.FreenetArchiver; -import plugins.Library.io.YamlReaderWriter; import freenet.keys.FreenetURI; +import 
freenet.library.index.TermEntry; import freenet.library.io.DataFormatException; +import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.Archiver; import freenet.library.io.serial.FileArchiver; import freenet.library.io.serial.LiveArchiver; diff --git a/src/plugins/Library/index/xml/FindRequest.java b/src/plugins/Library/index/xml/FindRequest.java index 563946ce..a9b13dc4 100644 --- a/src/plugins/Library/index/xml/FindRequest.java +++ b/src/plugins/Library/index/xml/FindRequest.java @@ -10,6 +10,8 @@ import freenet.client.events.SendingToNetworkEvent; import freenet.client.events.SplitfileCompatibilityModeEvent; import freenet.client.events.SplitfileProgressEvent; +import freenet.library.index.TermEntry; +import freenet.library.index.TermPageEntry; import freenet.library.util.exec.AbstractExecution; import freenet.library.util.exec.ChainedProgress; import freenet.library.util.exec.Execution; @@ -22,8 +24,6 @@ import java.util.ArrayList; import java.util.Set; -import plugins.Library.index.TermPageEntry; -import plugins.Library.index.TermEntry; /** diff --git a/src/plugins/Library/index/xml/LibrarianHandler.java b/src/plugins/Library/index/xml/LibrarianHandler.java index 33fd6ca6..1ad680ec 100644 --- a/src/plugins/Library/index/xml/LibrarianHandler.java +++ b/src/plugins/Library/index/xml/LibrarianHandler.java @@ -3,10 +3,10 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.index.xml; -import plugins.Library.index.TermPageEntry; import freenet.support.Logger; import freenet.keys.FreenetURI; +import freenet.library.index.TermPageEntry; import org.xml.sax.Attributes; import org.xml.sax.Locator; diff --git a/src/plugins/Library/index/xml/XMLIndex.java b/src/plugins/Library/index/xml/XMLIndex.java index a31f8fb1..2b5e1acf 100644 --- a/src/plugins/Library/index/xml/XMLIndex.java +++ b/src/plugins/Library/index/xml/XMLIndex.java @@ -5,8 +5,6 @@ import plugins.Library.Library; import plugins.Library.Index; -import plugins.Library.index.TermEntry; -import plugins.Library.index.TermPageEntry; import plugins.Library.index.URIEntry; import plugins.Library.search.InvalidSearchException; @@ -26,6 +24,8 @@ import freenet.node.RequestStarter; import freenet.node.RequestClient; import freenet.keys.FreenetURI; +import freenet.library.index.TermEntry; +import freenet.library.index.TermPageEntry; import freenet.library.util.exec.Execution; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/search/ResultSet.java b/src/plugins/Library/search/ResultSet.java index 2a2916f0..ddd0a176 100644 --- a/src/plugins/Library/search/ResultSet.java +++ b/src/plugins/Library/search/ResultSet.java @@ -3,11 +3,11 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.search; -import plugins.Library.index.TermEntry; -import plugins.Library.index.TermIndexEntry; -import plugins.Library.index.TermTermEntry; -import plugins.Library.index.TermPageEntry; +import freenet.library.index.TermEntry; +import freenet.library.index.TermIndexEntry; +import freenet.library.index.TermPageEntry; +import freenet.library.index.TermTermEntry; import freenet.library.util.exec.Execution; import freenet.library.util.exec.TaskAbortException; import freenet.support.Logger; diff --git a/src/plugins/Library/search/Search.java b/src/plugins/Library/search/Search.java index 9edd9c51..6928db2b 100644 --- a/src/plugins/Library/search/Search.java +++ b/src/plugins/Library/search/Search.java @@ -15,9 +15,9 @@ import java.util.regex.Matcher; import plugins.Library.Library; -import plugins.Library.index.TermEntry; import plugins.Library.search.ResultSet.ResultOperation; import plugins.Library.ui.ResultNodeGenerator; +import freenet.library.index.TermEntry; import freenet.library.util.exec.AbstractExecution; import freenet.library.util.exec.CompositeProgress; import freenet.library.util.exec.Execution; diff --git a/src/plugins/Library/search/inter/Interdex.java b/src/plugins/Library/search/inter/Interdex.java index 86226621..63585655 100644 --- a/src/plugins/Library/search/inter/Interdex.java +++ b/src/plugins/Library/search/inter/Interdex.java @@ -3,12 +3,12 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.search.inter; -import plugins.Library.index.TermEntry; -import plugins.Library.index.TermIndexEntry; -import plugins.Library.index.TermTermEntry; import plugins.Library.Index; import freenet.keys.FreenetURI; +import freenet.library.index.TermEntry; +import freenet.library.index.TermIndexEntry; +import freenet.library.index.TermTermEntry; import java.util.Set; import java.util.Map; diff --git a/src/plugins/Library/ui/RelevanceComparator.java b/src/plugins/Library/ui/RelevanceComparator.java index 84ab9fc0..5f923e2b 100644 --- a/src/plugins/Library/ui/RelevanceComparator.java +++ b/src/plugins/Library/ui/RelevanceComparator.java @@ -4,8 +4,8 @@ package plugins.Library.ui; +import freenet.library.index.TermEntry; import freenet.library.util.IdentityComparator; -import plugins.Library.index.TermEntry; /** * Compares the relevance of two TermEntrys, extends IdentityComparator so that two unique entries will not return a comparison of 0 diff --git a/src/plugins/Library/ui/ResultNodeGenerator.java b/src/plugins/Library/ui/ResultNodeGenerator.java index 865d39dc..7acc8895 100644 --- a/src/plugins/Library/ui/ResultNodeGenerator.java +++ b/src/plugins/Library/ui/ResultNodeGenerator.java @@ -3,12 +3,12 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.ui; -import plugins.Library.index.TermEntry; -import plugins.Library.index.TermIndexEntry; -import plugins.Library.index.TermPageEntry; -import plugins.Library.index.TermTermEntry; import freenet.keys.FreenetURI; +import freenet.library.index.TermEntry; +import freenet.library.index.TermIndexEntry; +import freenet.library.index.TermPageEntry; +import freenet.library.index.TermTermEntry; import freenet.support.HTMLNode; import freenet.support.Logger; diff --git a/src/plugins/Library/ui/TermPageGroupEntry.java b/src/plugins/Library/ui/TermPageGroupEntry.java index fbc87ba7..a4fa5204 100644 --- a/src/plugins/Library/ui/TermPageGroupEntry.java +++ b/src/plugins/Library/ui/TermPageGroupEntry.java @@ -9,8 +9,9 @@ import java.util.TreeMap; import java.util.TreeSet; -import plugins.Library.index.TermEntry; -import plugins.Library.index.TermPageEntry; +import freenet.library.index.TermEntry; +import freenet.library.index.TermPageEntry; + /** * TODO make this fit the TermEntry contract diff --git a/test/plugins/Library/Tester.java b/test/plugins/Library/Tester.java index 18f633bf..3de0781c 100644 --- a/test/plugins/Library/Tester.java +++ b/test/plugins/Library/Tester.java @@ -11,6 +11,8 @@ import plugins.Library.*; import freenet.keys.FreenetURI; +import freenet.library.index.TermEntry; +import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.Serialiser.*; import freenet.library.util.SkeletonBTreeSet; import freenet.library.util.TaskAbortExceptionConvertor; diff --git a/test/plugins/Library/index/BIndexTest.java b/test/plugins/Library/index/BIndexTest.java index baf7bbb4..fc7b62b3 100644 --- a/test/plugins/Library/index/BIndexTest.java +++ b/test/plugins/Library/index/BIndexTest.java @@ -11,6 +11,7 @@ import plugins.Library.index.*; import freenet.keys.FreenetURI; +import freenet.library.index.TermEntry; import freenet.library.io.serial.Serialiser.*; import freenet.library.util.SkeletonBTreeMap; import 
freenet.library.util.SkeletonBTreeSet; diff --git a/test/plugins/Library/util/Generators.java b/test/plugins/Library/util/Generators.java index 0b27817d..d1bc5d84 100644 --- a/test/plugins/Library/util/Generators.java +++ b/test/plugins/Library/util/Generators.java @@ -8,6 +8,7 @@ import plugins.Library.index.*; import freenet.keys.FreenetURI; +import freenet.library.index.TermPageEntry; import java.util.UUID; import java.util.Random; From 76954de35baa41180ba544fc9a4c4ae69db5df22 Mon Sep 17 00:00:00 2001 From: anonymous Date: Fri, 2 Jan 2015 20:30:30 +0000 Subject: [PATCH 018/180] Re-enabled compilation using ant. --HG-- branch : eclipse-separation --- .hgignore | 2 + build.xml | 142 +++++++++++------- .../freenet/library/index/TermPageEntry.java | 8 +- test/plugins/Library/Tester.java | 2 - test/plugins/Library/index/BIndexTest.java | 1 - 5 files changed, 91 insertions(+), 64 deletions(-) diff --git a/.hgignore b/.hgignore index 8905237c..82422341 100644 --- a/.hgignore +++ b/.hgignore @@ -1,3 +1,5 @@ shared/bin build lib/snakeyaml-1.5.jar +dist/Library.jar +shared/TermEntryTest/test.yml diff --git a/build.xml b/build.xml index f555e14f..04c40bb0 100644 --- a/build.xml +++ b/build.xml @@ -6,9 +6,19 @@ - - - + + + + + + + + + + + + + @@ -48,8 +58,9 @@ - - + + + @@ -83,25 +94,6 @@ - - @@ -117,13 +109,34 @@ - + + + + + + + + + + + + + @@ -131,7 +144,7 @@ - + @@ -145,69 +158,83 @@ - - + + + + + + - + - + + - - - - - - - - - + + - - - - - - - - - - - + + + - + + - - + + + + + + + + + + + + + + + + + + + + + - + + - + @@ -218,7 +245,8 @@ - + + @@ -247,8 +275,6 @@ - - diff --git a/shared/src/freenet/library/index/TermPageEntry.java b/shared/src/freenet/library/index/TermPageEntry.java index 323f559d..e8e364c2 100644 --- a/shared/src/freenet/library/index/TermPageEntry.java +++ b/shared/src/freenet/library/index/TermPageEntry.java @@ -25,9 +25,11 @@ public class TermPageEntry extends TermEntry { final public String page; /** - * Positions where the term occurs. 
May be null if we don't have that data. + * Positions where the term occurs. May be null if we don't have that data. + * + * Retrieved from the YamlReaderWriter so must be public. */ - final private SortedSet positions; + final public SortedSet positions; /** ** Map from positions in the text to a fragment of text around where it occurs. @@ -84,7 +86,7 @@ public TermPageEntry(String s, float r, String u, String t, Map /** ** For serialisation. */ - public TermPageEntry(String s, float r, String u, String t, Set pos, Map frags) { + public TermPageEntry(String s, float r, String u, String t, SortedSet pos, Map frags) { super(s, r); if (u == null) { throw new IllegalArgumentException("can't have a null page"); diff --git a/test/plugins/Library/Tester.java b/test/plugins/Library/Tester.java index 3de0781c..59b4a30e 100644 --- a/test/plugins/Library/Tester.java +++ b/test/plugins/Library/Tester.java @@ -5,8 +5,6 @@ import plugins.Library.client.*; import plugins.Library.index.*; -import plugins.Library.io.*; -import plugins.Library.io.serial.*; import plugins.Library.util.*; import plugins.Library.*; diff --git a/test/plugins/Library/index/BIndexTest.java b/test/plugins/Library/index/BIndexTest.java index fc7b62b3..d63bb2d3 100644 --- a/test/plugins/Library/index/BIndexTest.java +++ b/test/plugins/Library/index/BIndexTest.java @@ -7,7 +7,6 @@ import static plugins.Library.util.Generators.rand; import plugins.Library.util.*; -import plugins.Library.io.serial.*; import plugins.Library.index.*; import freenet.keys.FreenetURI; From 4cb7b8e2ebf29292817555a6e6fa10e1d6362af5 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 3 Jan 2015 03:37:10 +0000 Subject: [PATCH 019/180] Fixes of first problem found when running on old index. 
--HG-- branch : eclipse-separation --- src/plugins/Library/client/FreenetArchiver.java | 11 ++++++++++- .../Library/index/ProtoIndexComponentSerialiser.java | 6 ++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/src/plugins/Library/client/FreenetArchiver.java b/src/plugins/Library/client/FreenetArchiver.java index 49d06e68..710d5663 100644 --- a/src/plugins/Library/client/FreenetArchiver.java +++ b/src/plugins/Library/client/FreenetArchiver.java @@ -7,6 +7,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.net.MalformedURLException; import java.util.ArrayList; import java.util.HashSet; @@ -147,7 +148,15 @@ public FreenetArchiver(NodeC byte[] initialMetadata; String cacheKey; - if(task.meta instanceof FreenetURI) { + if(task.meta instanceof String) { + try { + task.meta = new FreenetURI((String) task.meta); + } catch (MalformedURLException e) { + // This wasn't an URI after all. + } + } + + if (task.meta instanceof FreenetURI) { u = (FreenetURI) task.meta; initialMetadata = null; cacheKey = u.toString(false, true); diff --git a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java index 99be285b..06c042ea 100644 --- a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java @@ -381,8 +381,14 @@ public BTreeNodeSerialiser(String n, LiveArchiver, SimplePro ghost.setMeta(serialisable.meta); task.data = trans.rev(serialisable.data); p.exitingSerialiser(); } catch (RuntimeException e) { + System.out.println("RuntimeException"); + System.out.println(e); + e.printStackTrace(); p.abort(new TaskAbortException("Could not pull B-tree node", e)); } catch (DataFormatException e) { + System.out.println("DataFormatException"); + System.out.println(e); + e.printStackTrace(); p.abort(new TaskAbortException("Could not pull B-tree node", e)); } } From 
147c28e6e41895a2ac8ff3206d85b30f3acdf69f Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 3 Jan 2015 03:43:44 +0000 Subject: [PATCH 020/180] Refactored to simplify code and include object reading time in the logged time. --HG-- branch : eclipse-separation --- .../Library/client/FreenetArchiver.java | 27 ++++++++++--------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/src/plugins/Library/client/FreenetArchiver.java b/src/plugins/Library/client/FreenetArchiver.java index 710d5663..06fa0736 100644 --- a/src/plugins/Library/client/FreenetArchiver.java +++ b/src/plugins/Library/client/FreenetArchiver.java @@ -202,23 +202,24 @@ public FreenetArchiver(NodeC FetchResult res; // bookkeeping. detects bugs in the SplitfileProgressEvent handler + ProgressParts prog_old = null; if (progress != null) { - ProgressParts prog_old = progress.getParts(); - if(initialMetadata != null) - res = hlsc.fetchFromMetadata(new SimpleReadOnlyArrayBucket(initialMetadata)); - else - res = hlsc.fetch(u); - ProgressParts prog_new = progress.getParts(); + prog_old = progress.getParts(); + } + + if(initialMetadata != null) + res = hlsc.fetchFromMetadata(new SimpleReadOnlyArrayBucket(initialMetadata)); + else + res = hlsc.fetch(u); + + ProgressParts prog_new; + if (progress != null) { + prog_new = progress.getParts(); if (prog_old.known - prog_old.done != prog_new.known - prog_new.done) { Logger.error(this, "Inconsistency when tracking split file progress (pulling): "+prog_old.known+" of "+prog_old.done+" -> "+prog_new.known+" of "+prog_new.done); System.err.println("Inconsistency when tracking split file progress (pulling): "+prog_old.known+" of "+prog_old.done+" -> "+prog_new.known+" of "+prog_new.done); } progress.addPartKnown(0, true); - } else { - if(initialMetadata != null) - res = hlsc.fetchFromMetadata(new SimpleReadOnlyArrayBucket(initialMetadata)); - else - res = hlsc.fetch(u); } tempB = res.asBucket(); @@ -229,11 +230,11 @@ public FreenetArchiver(NodeC 
progress.addPartDone(); } } - long endTime = System.currentTimeMillis(); - System.out.println("Fetched block for FreenetArchiver in "+(endTime-startTime)+"ms."); is = tempB.getInputStream(); task.data = (T)reader.readObject(is); is.close(); + long endTime = System.currentTimeMillis(); + System.out.println("Fetched block for FreenetArchiver in "+(endTime-startTime)+"ms."); } catch (FetchException e) { if(e.mode == FetchExceptionMode.PERMANENT_REDIRECT && e.newURI != null) { From 83f2efd61c414737defc248995b4d6fa4b194291 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 3 Jan 2015 12:25:50 +0000 Subject: [PATCH 021/180] Added more stack traces in the log to be able to spot complex problems. --HG-- branch : eclipse-separation --- src/plugins/Library/client/FreenetArchiver.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/plugins/Library/client/FreenetArchiver.java b/src/plugins/Library/client/FreenetArchiver.java index 06fa0736..8fe17415 100644 --- a/src/plugins/Library/client/FreenetArchiver.java +++ b/src/plugins/Library/client/FreenetArchiver.java @@ -241,12 +241,18 @@ public FreenetArchiver(NodeC u = e.newURI; continue; } + System.out.println("FetchException:"); + e.printStackTrace(); throw new TaskAbortException("Failed to fetch content", e, true); } catch (IOException e) { + System.out.println("IOException:"); + e.printStackTrace(); throw new TaskAbortException("Failed to read content from local tempbucket", e, true); } catch (RuntimeException e) { + System.out.println("RuntimeException:"); + e.printStackTrace(); throw new TaskAbortException("Failed to complete task: ", e); } From e035a8e64edaebb94b45ca17c95c0d9d8bc8d872 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 3 Jan 2015 12:26:35 +0000 Subject: [PATCH 022/180] Reverted the type changes in the TermPageEntry to allow old indexes to be read. 
--HG-- branch : eclipse-separation --- shared/src/freenet/library/index/TermPageEntry.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/shared/src/freenet/library/index/TermPageEntry.java b/shared/src/freenet/library/index/TermPageEntry.java index e8e364c2..a090aaa1 100644 --- a/shared/src/freenet/library/index/TermPageEntry.java +++ b/shared/src/freenet/library/index/TermPageEntry.java @@ -4,6 +4,7 @@ package freenet.library.index; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -27,9 +28,10 @@ public class TermPageEntry extends TermEntry { /** * Positions where the term occurs. May be null if we don't have that data. * - * Retrieved from the YamlReaderWriter so must be public. + * Retrieved from the YamlReaderWriter so must be public and + * have the Set type to be able to read the old indexes. */ - final public SortedSet positions; + final public Set positions; /** ** Map from positions in the text to a fragment of text around where it occurs. @@ -86,7 +88,7 @@ public TermPageEntry(String s, float r, String u, String t, Map /** ** For serialisation. */ - public TermPageEntry(String s, float r, String u, String t, SortedSet pos, Map frags) { + public TermPageEntry(String s, float r, String u, String t, Set pos, Map frags) { super(s, r); if (u == null) { throw new IllegalArgumentException("can't have a null page"); @@ -172,6 +174,7 @@ public boolean hasPosition(int i) { public ArrayList positions() { Integer[] array = positions.toArray(new Integer[positions.size()]); + Arrays.sort(array); ArrayList ret = new ArrayList(positions.size()); for(int i=0;i Date: Sat, 3 Jan 2015 17:35:02 +0000 Subject: [PATCH 023/180] Some eclipse settings for uploader. 
--HG-- branch : eclipse-separation --- .hgignore | 3 ++- uploader/.classpath | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.hgignore b/.hgignore index 82422341..91bc4210 100644 --- a/.hgignore +++ b/.hgignore @@ -1,5 +1,6 @@ -shared/bin build +shared/bin +uploader/bin lib/snakeyaml-1.5.jar dist/Library.jar shared/TermEntryTest/test.yml diff --git a/uploader/.classpath b/uploader/.classpath index 62f50c7b..19dc2af3 100644 --- a/uploader/.classpath +++ b/uploader/.classpath @@ -4,5 +4,6 @@ + From 64610ff7a154f6f0352bea454225ca39ab95acae Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 3 Jan 2015 17:57:20 +0000 Subject: [PATCH 024/180] Some small simple refactorings to prepare moving ProtoIndex to shared part. --HG-- branch : eclipse-separation --- src/plugins/Library/Index.java | 4 -- src/plugins/Library/VirtualIndex.java | 20 --------- src/plugins/Library/WriteableIndex.java | 32 -------------- src/plugins/Library/index/ProtoIndex.java | 43 +++++++------------ .../index/ProtoIndexComponentSerialiser.java | 21 +++------ .../Library/index/ProtoIndexSerialiser.java | 21 +++------ src/plugins/Library/index/URIEntry.java | 12 +++--- 7 files changed, 32 insertions(+), 121 deletions(-) delete mode 100644 src/plugins/Library/VirtualIndex.java delete mode 100644 src/plugins/Library/WriteableIndex.java diff --git a/src/plugins/Library/Index.java b/src/plugins/Library/Index.java index 5e66a515..53f481a8 100644 --- a/src/plugins/Library/Index.java +++ b/src/plugins/Library/Index.java @@ -5,7 +5,6 @@ import plugins.Library.index.URIEntry; -import freenet.keys.FreenetURI; import freenet.library.index.TermEntry; import freenet.library.util.exec.Execution; @@ -24,7 +23,4 @@ public interface Index { ** DOCUMENT */ public Execution> getTermEntries(String term); - - public Execution getURIEntry(FreenetURI uri); - } diff --git a/src/plugins/Library/VirtualIndex.java b/src/plugins/Library/VirtualIndex.java deleted file mode 100644 index 5c2fa1ff..00000000 --- 
a/src/plugins/Library/VirtualIndex.java +++ /dev/null @@ -1,20 +0,0 @@ -/* This code is part of Freenet. It is distributed under the GNU General - * Public License, version 2 (or at your option any later version). See - * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library; - -import freenet.library.index.TermEntry; -import freenet.library.util.exec.Execution; -import plugins.Library.index.URIEntry; - -/** -** Represents a virtual index that gets its data from another plugin. -** -** @author infinity0 -*/ -public interface VirtualIndex extends Index { - - - - -} diff --git a/src/plugins/Library/WriteableIndex.java b/src/plugins/Library/WriteableIndex.java deleted file mode 100644 index de3e7f48..00000000 --- a/src/plugins/Library/WriteableIndex.java +++ /dev/null @@ -1,32 +0,0 @@ -/* This code is part of Freenet. It is distributed under the GNU General - * Public License, version 2 (or at your option any later version). See - * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library; - -import plugins.Library.index.URIEntry; - -import freenet.keys.FreenetURI; -import freenet.library.index.TermEntry; -import freenet.library.util.exec.Execution; - -import java.util.Collection; -import java.util.Set; - -/** -** Represents a writable Index. -** -** TODO -** -** @author infinity0 -*/ -public interface WriteableIndex extends Index { - - public Execution putTermEntries(Collection entries); - - public Execution remTermEntries(Collection entries); - - public Execution putURIEntries(Collection entries); - - public Execution remURIEntries(Collection entries); - -} diff --git a/src/plugins/Library/index/ProtoIndex.java b/src/plugins/Library/index/ProtoIndex.java index 66a8ffe9..7542ac93 100644 --- a/src/plugins/Library/index/ProtoIndex.java +++ b/src/plugins/Library/index/ProtoIndex.java @@ -3,8 +3,17 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.index; -import plugins.Library.Index; +import java.util.AbstractSet; +import java.util.Collection; +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Executor; +import plugins.Library.Index; import freenet.keys.FreenetURI; import freenet.library.index.TermEntry; import freenet.library.index.TermPageEntry; @@ -14,7 +23,6 @@ import freenet.library.util.Skeleton; import freenet.library.util.SkeletonBTreeMap; import freenet.library.util.SkeletonBTreeSet; -import freenet.library.util.SkeletonTreeMap; import freenet.library.util.concurrent.Executors; import freenet.library.util.exec.AbstractExecution; import freenet.library.util.exec.ChainedProgress; @@ -22,21 +30,6 @@ import freenet.library.util.exec.Progress; import freenet.library.util.exec.ProgressParts; import freenet.library.util.exec.TaskAbortException; -import freenet.support.Logger; - -import java.util.AbstractSet; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.Set; -import java.util.Map; -import java.util.SortedSet; -import java.util.HashMap; -import java.util.TreeSet; -import java.util.LinkedHashMap; -import java.util.Date; - -import java.util.concurrent.Executor; /** ** Prototype B-tree based index. 
DOCUMENT @@ -160,14 +153,6 @@ public Execution> getTermEntries(String term) { return request; } - - - - public Execution getURIEntry(FreenetURI uri) { - throw new UnsupportedOperationException("not implemented"); - } - - public class getTermEntriesHandler extends AbstractExecution> implements Runnable, ChainedProgress { // TODO NORM have a Runnable field instead of extending Runnable // basically, redesign this entire class and series of classes @@ -234,10 +219,12 @@ protected getTermEntriesHandler(String t) { long specific = root.size(); // Number of pages in this entry multiplier = Math.log(((double)total) / ((double)specific)); if(multiplier < 0) { - Logger.error(this, "Negative multiplier!: "+multiplier+" total = "+total+" specific = "+root.size()); + // Logger.error(this, "Negative multiplier!: "+multiplier+" total = "+total+" specific = "+root.size()); + System.out.println("Negative multiplier!: "+multiplier+" total = "+total+" specific = "+root.size()); multiplier = 1.0; - } else - Logger.normal(this, "Correcting results: "+multiplier); + } else { + // Logger.normal(this, "Correcting results: "+multiplier); + } } Set entries = wrapper(root, multiplier); diff --git a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java index 06c042ea..9ccf6a6f 100644 --- a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java @@ -3,9 +3,13 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.index; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.TreeMap; + import plugins.Library.Library; import plugins.Library.client.FreenetArchiver; - import freenet.keys.FreenetURI; import freenet.library.index.TermEntry; import freenet.library.io.DataFormatException; @@ -16,12 +20,11 @@ import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.MapSerialiser; import freenet.library.io.serial.Packer; +import freenet.library.io.serial.Packer.Scale; import freenet.library.io.serial.ParallelSerialiser; import freenet.library.io.serial.ProgressTracker; import freenet.library.io.serial.Serialiser; import freenet.library.io.serial.Translator; -import freenet.library.io.serial.Packer.Scale; -import freenet.library.io.serial.Serialiser.*; import freenet.library.util.SkeletonBTreeMap; import freenet.library.util.SkeletonBTreeSet; import freenet.library.util.SkeletonTreeMap; @@ -33,18 +36,6 @@ import freenet.library.util.exec.TaskInProgressException; import freenet.node.RequestStarter; -import java.io.File; -import java.util.Collection; -import java.util.Set; -import java.util.Map; -import java.util.SortedMap; -import java.util.SortedSet; -import java.util.LinkedHashMap; -import java.util.HashMap; -import java.util.TreeSet; -import java.util.TreeMap; -import java.util.Date; - /** ** Serialiser for the components of a ProtoIndex. ** diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java b/src/plugins/Library/index/ProtoIndexSerialiser.java index d65dbcb1..c9a9bdb0 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexSerialiser.java @@ -3,9 +3,13 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.index; +import java.io.File; +import java.util.Date; +import java.util.LinkedHashMap; +import java.util.Map; + import plugins.Library.Library; import plugins.Library.client.FreenetArchiver; - import freenet.keys.FreenetURI; import freenet.library.index.TermEntry; import freenet.library.io.DataFormatException; @@ -15,24 +19,11 @@ import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.Serialiser; import freenet.library.io.serial.Translator; -import freenet.library.io.serial.Serialiser.*; import freenet.library.util.SkeletonBTreeMap; import freenet.library.util.SkeletonBTreeSet; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; -import java.util.Collection; -import java.util.Set; -import java.util.Map; -import java.util.SortedMap; -import java.util.SortedSet; -import java.util.LinkedHashMap; -import java.util.HashMap; -import java.util.TreeSet; -import java.util.TreeMap; -import java.util.Date; -import java.io.File; - /** ** Serialiser for ProtoIndex ** @@ -173,7 +164,7 @@ public IndexTranslator(LiveArchiver, SimpleProgress> subsrl) throw new IllegalArgumentException("Data structure is not bare. Try calling deflate() first."); } Map map = new LinkedHashMap(); - map.put("serialVersionUID", idx.serialVersionUID); + map.put("serialVersionUID", ProtoIndex.serialVersionUID); map.put("serialFormatUID", idx.serialFormatUID); map.put("insID", idx.insID); map.put("name", idx.name); diff --git a/src/plugins/Library/index/URIEntry.java b/src/plugins/Library/index/URIEntry.java index 554a901d..a0f7a487 100644 --- a/src/plugins/Library/index/URIEntry.java +++ b/src/plugins/Library/index/URIEntry.java @@ -7,10 +7,8 @@ import java.util.Set; import java.util.HashSet; -import freenet.keys.FreenetURI; - /** -** Data associated with a {@link FreenetURI}. DOCUMENT expand this... +** Data associated with a FreenetURI. DOCUMENT expand this... 
** ** FIXME NORM code equals() ** @@ -21,7 +19,7 @@ public class URIEntry { /** ** Subject URI of this entry. */ - protected FreenetURI subject; + protected String subject; /** ** Quality rating. Must be in the closed interval [0,1]. @@ -39,17 +37,17 @@ public class URIEntry { */ protected Set terms; - public URIEntry(FreenetURI u) { + public URIEntry(String u) { subject = u; date_checked = new Date(); terms = new HashSet(); } - public FreenetURI getSubject() { + public String getSubject() { return subject; } - public void setSubject(FreenetURI u) { + public void setSubject(String u) { subject = u; } From bd7f64a8c65e3cdc785df68357af3430fceeabed Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 3 Jan 2015 18:39:29 +0000 Subject: [PATCH 025/180] Changed FreenetURI to String in index/Proto*.java. --HG-- branch : eclipse-separation --- src/plugins/Library/Index.java | 2 -- src/plugins/Library/Library.java | 1 + src/plugins/Library/SpiderIndexUploader.java | 14 ++------ src/plugins/Library/index/ProtoIndex.java | 16 +++++----- .../index/ProtoIndexComponentSerialiser.java | 32 +++++++++---------- .../Library/index/ProtoIndexSerialiser.java | 20 ++++++------ src/plugins/Library/index/URIKey.java | 22 +++---------- test/plugins/Library/Tester.java | 17 +++------- test/plugins/Library/index/BIndexTest.java | 6 +--- 9 files changed, 48 insertions(+), 82 deletions(-) diff --git a/src/plugins/Library/Index.java b/src/plugins/Library/Index.java index 53f481a8..1af3a391 100644 --- a/src/plugins/Library/Index.java +++ b/src/plugins/Library/Index.java @@ -3,8 +3,6 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library; -import plugins.Library.index.URIEntry; - import freenet.library.index.TermEntry; import freenet.library.util.exec.Execution; diff --git a/src/plugins/Library/Library.java b/src/plugins/Library/Library.java index 02ec7936..866fa5b1 100644 --- a/src/plugins/Library/Library.java +++ b/src/plugins/Library/Library.java @@ -548,6 +548,7 @@ public final Index getIndex(String indexuri, String origIndexName) throws Invali if(uri.isUSK()) edition = uri.getEdition(); indextype = getIndexType(uri); + indexkey = uri.toASCIIString(); } else { throw new AssertionError(); } diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index 2ef82b2b..d886a77c 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -527,11 +527,7 @@ private boolean makeDiskDirSerialiser() { (LiveArchiver,SimpleProgress>)(srlDisk.getChildSerialiser()); leafsrlDisk = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_FILE_LOCAL, archiver); if(lastDiskIndexName == null) { - try { - idxDisk = new ProtoIndex(new FreenetURI("CHK@"), "test", null, null, 0L); - } catch (java.net.MalformedURLException e) { - throw new AssertionError(e); - } + idxDisk = new ProtoIndex("CHK@", "test", null, null, 0L); // FIXME more hacks: It's essential that we use the same FileArchiver instance here. leafsrlDisk.setSerialiserFor(idxDisk); } else { @@ -768,16 +764,12 @@ private void updateOverallMetadata(ProtoIndex diskToMerge) { * on Freenet, essentially. 
*/ private void makeFreenetSerialisers() { if(srl == null) { - srl = ProtoIndexSerialiser.forIndex(lastUploadURI, RequestStarter.BULK_SPLITFILE_PRIORITY_CLASS); + srl = ProtoIndexSerialiser.forIndex(lastUploadURI.toASCIIString(), RequestStarter.BULK_SPLITFILE_PRIORITY_CLASS); LiveArchiver,SimpleProgress> archiver = (LiveArchiver,SimpleProgress>)(srl.getChildSerialiser()); leafsrl = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_DEFAULT, archiver); if(lastUploadURI == null) { - try { - idxFreenet = new ProtoIndex(new FreenetURI("CHK@"), "test", null, null, 0L); - } catch (java.net.MalformedURLException e) { - throw new AssertionError(e); - } + idxFreenet = new ProtoIndex("CHK@", "test", null, null, 0L); // FIXME more hacks: It's essential that we use the same FreenetArchiver instance here. leafsrl.setSerialiserFor(idxFreenet); } else { diff --git a/src/plugins/Library/index/ProtoIndex.java b/src/plugins/Library/index/ProtoIndex.java index 7542ac93..41008762 100644 --- a/src/plugins/Library/index/ProtoIndex.java +++ b/src/plugins/Library/index/ProtoIndex.java @@ -14,7 +14,7 @@ import java.util.concurrent.Executor; import plugins.Library.Index; -import freenet.keys.FreenetURI; + import freenet.library.index.TermEntry; import freenet.library.index.TermPageEntry; import freenet.library.io.serial.ProgressTracker; @@ -59,12 +59,12 @@ final public class ProtoIndex implements Index { /** ** Request ID for this index */ - protected FreenetURI reqID; + protected String reqID; /** ** Insert ID for this index */ - protected FreenetURI insID; // TODO HIGH maybe move this to WriteableProtoIndex? + protected String insID; // TODO HIGH maybe move this to WriteableProtoIndex? /** ** Name for this index. 
@@ -94,19 +94,19 @@ final public class ProtoIndex implements Index { final public /* DEBUG protected*/ SkeletonBTreeMap> ttab; - final protected SkeletonBTreeMap> utab; + final protected SkeletonBTreeMap> utab; - public ProtoIndex(FreenetURI id, String n, String owner, String ownerEmail, long pages) { + public ProtoIndex(String id, String n, String owner, String ownerEmail, long pages) { this(id, n, owner, ownerEmail, pages, new Date(), new HashMap(), - new SkeletonBTreeMap>(BTREE_NODE_MIN), + new SkeletonBTreeMap>(BTREE_NODE_MIN), new SkeletonBTreeMap>(BTREE_NODE_MIN)/*, //filtab = new SkeletonPrefixTreeMap(new Token(), TKTAB_MAX)*/ ); } - protected ProtoIndex(FreenetURI id, String n, String owner, String ownerEmail, long pages, Date m, Map x, - SkeletonBTreeMap> u, + protected ProtoIndex(String id, String n, String owner, String ownerEmail, long pages, Date m, Map x, + SkeletonBTreeMap> u, SkeletonBTreeMap> t/*, SkeletonMap f*/ ) { diff --git a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java index 9ccf6a6f..54be3993 100644 --- a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java @@ -10,7 +10,7 @@ import plugins.Library.Library; import plugins.Library.client.FreenetArchiver; -import freenet.keys.FreenetURI; + import freenet.library.index.TermEntry; import freenet.library.io.DataFormatException; import freenet.library.io.YamlReaderWriter; @@ -83,8 +83,8 @@ public class ProtoIndexComponentSerialiser { /** ** Translator for the local entries of a node of the ''uri table''. 
*/ - final protected static Translator>, Map> - utab_keys_mtr = new TreeMapTranslator>(utab_keys_ktr); + final protected static Translator>, Map> + utab_keys_mtr = new TreeMapTranslator>(utab_keys_ktr); /** ** Serialiser for the ''targets'' of the values stored in a node of the @@ -98,7 +98,7 @@ public class ProtoIndexComponentSerialiser { ** ''B-tree'' for a ''urikey''. In this case, the values are the actual ** targets and are stored inside the node, so we use a dummy. */ - final protected static MapSerialiser uri_dummy = new DummySerialiser(); + final protected static MapSerialiser uri_dummy = new DummySerialiser(); /** ** {@link Scale} for the root node of the ''B-tree'' that holds @@ -115,9 +115,9 @@ public class ProtoIndexComponentSerialiser { ** {@link Scale} for the root node of the ''B-tree'' that holds ** ''uri-entry mappings'' for a ''urikey''. */ - final protected static Packer.Scale> - uri_data_scale = new Packer.Scale>() { - @Override public int weigh(SkeletonBTreeMap element) { + final protected static Packer.Scale> + uri_data_scale = new Packer.Scale>() { + @Override public int weigh(SkeletonBTreeMap element) { return element.sizeRoot(); } }; @@ -180,7 +180,7 @@ public LiveArchiver, SimpleProgress> getLeafSerialiser() { ** ''B-tree'' that holds ''uri-entry mappings'' for the ''urikey'' mapping ** to the value. 
*/ - final protected BTreePacker, EntryGroupSerialiser>> + final protected BTreePacker, EntryGroupSerialiser>> utab_data; /** @@ -226,12 +226,12 @@ protected ProtoIndexComponentSerialiser(int fmtid, LiveArchiver, EntryGroupSerialiser>>( - new EntryGroupSerialiser>( + utab_data = new BTreePacker, EntryGroupSerialiser>>( + new EntryGroupSerialiser>( leaf_arx, null, - new SkeletonBTreeMap.TreeTranslator(null, null) { - @Override public SkeletonBTreeMap rev(Map tree) throws DataFormatException { + new SkeletonBTreeMap.TreeTranslator(null, null) { + @Override public SkeletonBTreeMap rev(Map tree) throws DataFormatException { return setSerialiserFor(super.rev(tree)); } } @@ -255,7 +255,7 @@ public ProtoIndex setSerialiserFor(ProtoIndex index) { index.ttab.setSerialiser(ttab_keys, ttab_data); // set serialisers on the utab - BTreeNodeSerialiser> utab_keys = new BTreeNodeSerialiser>( + BTreeNodeSerialiser> utab_keys = new BTreeNodeSerialiser>( "uri listings", leaf_arx, index.utab.makeNodeTranslator(utab_keys_ktr, utab_keys_mtr) @@ -271,11 +271,11 @@ public ProtoIndex setSerialiserFor(ProtoIndex index) { ** Set the serialiser for the ''B-tree'' that holds the ''uri-entry ** mappings'' for a ''urikey''. 
*/ - public SkeletonBTreeMap setSerialiserFor(SkeletonBTreeMap entries) { - BTreeNodeSerialiser uri_keys = new BTreeNodeSerialiser( + public SkeletonBTreeMap setSerialiserFor(SkeletonBTreeMap entries) { + BTreeNodeSerialiser uri_keys = new BTreeNodeSerialiser( "uri entries", leaf_arx, - entries.makeNodeTranslator(null, null) // no translator needed as FreenetURI and URIEntry are both directly serialisable by YamlReaderWriter + entries.makeNodeTranslator(null, null) // no translator needed as String and URIEntry are both directly serialisable by YamlReaderWriter ); entries.setSerialiser(uri_keys, uri_dummy); return entries; diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java b/src/plugins/Library/index/ProtoIndexSerialiser.java index c9a9bdb0..d8ca3a86 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexSerialiser.java @@ -10,7 +10,7 @@ import plugins.Library.Library; import plugins.Library.client.FreenetArchiver; -import freenet.keys.FreenetURI; + import freenet.library.index.TermEntry; import freenet.library.io.DataFormatException; import freenet.library.io.YamlReaderWriter; @@ -60,8 +60,8 @@ public ProtoIndexSerialiser(LiveArchiver, SimpleProgress> s) // srl_cls = new HashMap, ProtoIndexSerialiser>(); public static ProtoIndexSerialiser forIndex(Object o, short priorityClass) { - if (o instanceof FreenetURI) { - return forIndex((FreenetURI)o, priorityClass); + if (o instanceof String) { + return forIndex((String)o, priorityClass); } else if (o instanceof File) { return forIndex((File)o); } else { @@ -69,7 +69,7 @@ public static ProtoIndexSerialiser forIndex(Object o, short priorityClass) { } } - public static ProtoIndexSerialiser forIndex(FreenetURI uri, short priorityClass) { + public static ProtoIndexSerialiser forIndex(String uri, short priorityClass) { // ProtoIndexSerialiser srl = srl_cls.get(FreenetURI.class); // if (srl == null) { // // java's type-inference isn't that smart, see @@ -107,7 
+107,7 @@ public static ProtoIndexSerialiser forIndex(File prefix) { PullTask> serialisable = new PullTask>(task.meta); subsrl.pull(serialisable); task.meta = serialisable.meta; - if (task.meta instanceof FreenetURI) { // if not FreenetURI, skip this silently so we can test on local files + if (task.meta instanceof String) { // if not FreenetURI, skip this silently so we can test on local files serialisable.data.put("reqID", task.meta); } try { @@ -137,9 +137,9 @@ public static class IndexTranslator /** ** URI-table translator */ - Translator>, Map> utrans = new - SkeletonBTreeMap.TreeTranslator>(null, new - ProtoIndexComponentSerialiser.TreeMapTranslator>(null)); + Translator>, Map> utrans = new + SkeletonBTreeMap.TreeTranslator>(null, new + ProtoIndexComponentSerialiser.TreeMapTranslator>(null)); private LiveArchiver, SimpleProgress> subsrl; @@ -185,7 +185,7 @@ public IndexTranslator(LiveArchiver, SimpleProgress> subsrl) try { // FIXME yet more hacks related to the lack of proper asynchronous FreenetArchiver... 
ProtoIndexComponentSerialiser cmpsrl = ProtoIndexComponentSerialiser.get((Integer)map.get("serialFormatUID"), subsrl); - FreenetURI reqID = (FreenetURI)map.get("reqID"); + String reqID = (String)map.get("reqID"); String name = (String)map.get("name"); String ownerName = (String)map.get("ownerName"); String ownerEmail = (String)map.get("ownerEmail"); @@ -198,7 +198,7 @@ public IndexTranslator(LiveArchiver, SimpleProgress> subsrl) totalPages = (Integer)o; Date modified = (Date)map.get("modified"); Map extra = (Map)map.get("extra"); - SkeletonBTreeMap> utab = utrans.rev((Map)map.get("utab")); + SkeletonBTreeMap> utab = utrans.rev((Map)map.get("utab")); SkeletonBTreeMap> ttab = ttrans.rev((Map)map.get("ttab")); return cmpsrl.setSerialiserFor(new ProtoIndex(reqID, name, ownerName, ownerEmail, totalPages, modified, extra, utab, ttab)); diff --git a/src/plugins/Library/index/URIKey.java b/src/plugins/Library/index/URIKey.java index a217f888..5af291bf 100644 --- a/src/plugins/Library/index/URIKey.java +++ b/src/plugins/Library/index/URIKey.java @@ -5,13 +5,9 @@ import freenet.library.util.BytePrefixKey; -import freenet.keys.FreenetURI; -import freenet.keys.BaseClientKey; -import freenet.keys.ClientKey; - /** ** A {@link BytePrefixKey} backed by the 32-byte routing key for the {@link -** freenet.keys.Key NodeKey} constructed from a {@link FreenetURI}. +** freenet.keys.Key NodeKey} constructed from a FreenetURI. ** ** @author infinity0 */ @@ -21,22 +17,14 @@ public URIKey() { super(0x20); } + /** + * Could be the routingkey. + * @param h + */ public URIKey(byte[] h) { super(0x20, h); } - public URIKey(FreenetURI u) throws java.net.MalformedURLException { - super(0x20, getNodeRoutingKey(u)); - } - - public static byte[] getNodeRoutingKey(FreenetURI u) throws java.net.MalformedURLException { - try { - return ((ClientKey)BaseClientKey.getBaseKey(u.isUSK()? 
u.sskForUSK(): u)).getNodeKey().getRoutingKey(); - } catch (ClassCastException e) { - throw new UnsupportedOperationException("Could not get the node routing key for FreenetURI " + u + ". Only CHK/SSK/USK/KSKs are supported."); - } - } - /*======================================================================== public interface BytePrefixKey ========================================================================*/ diff --git a/test/plugins/Library/Tester.java b/test/plugins/Library/Tester.java index 59b4a30e..4a57bc1e 100644 --- a/test/plugins/Library/Tester.java +++ b/test/plugins/Library/Tester.java @@ -148,16 +148,12 @@ public static String testPushIndex() { if (push_index_thread == null) { push_index_start = new Date(); push_index_thread = new Thread() { - ProtoIndexSerialiser srl = ProtoIndexSerialiser.forIndex(push_index_endURI, RequestStarter.INTERACTIVE_PRIORITY_CLASS); + ProtoIndexSerialiser srl = ProtoIndexSerialiser.forIndex(push_index_endURI.toASCIIString(), RequestStarter.INTERACTIVE_PRIORITY_CLASS); ProtoIndex idx; Random rand = new Random(); @Override public void run() { - try { - idx = new ProtoIndex(new FreenetURI("CHK@"), "test", null, null, 0); - } catch (java.net.MalformedURLException e) { - throw new AssertionError(e); - } + idx = new ProtoIndex("CHK@", "test", null, null, 0); ProtoIndexComponentSerialiser.get().setSerialiserFor(idx); for (String key: push_index_words) { @@ -232,17 +228,12 @@ public static String testPushAndMergeIndex() { FreenetArchiver.setCacheDir(cacheDir); push_index_thread = new Thread() { - ProtoIndexSerialiser srl = ProtoIndexSerialiser.forIndex(push_index_endURI, RequestStarter.INTERACTIVE_PRIORITY_CLASS); + ProtoIndexSerialiser srl = ProtoIndexSerialiser.forIndex(push_index_endURI.toASCIIString(), RequestStarter.INTERACTIVE_PRIORITY_CLASS); ProtoIndex idx; Random rand = new Random(); @Override public void run() { - - try { - idx = new ProtoIndex(new FreenetURI("CHK@"), "test", null, null, 0); - } catch 
(java.net.MalformedURLException e) { - throw new AssertionError(e); - } + idx = new ProtoIndex("CHK@", "test", null, null, 0); ProtoIndexComponentSerialiser.get().setSerialiserFor(idx); try { diff --git a/test/plugins/Library/index/BIndexTest.java b/test/plugins/Library/index/BIndexTest.java index d63bb2d3..b50cfd15 100644 --- a/test/plugins/Library/index/BIndexTest.java +++ b/test/plugins/Library/index/BIndexTest.java @@ -87,11 +87,7 @@ public BIndexTest() { )); protected void newTestSkeleton() { - try { - idx = new ProtoIndex(new FreenetURI("CHK@yeah"), "test", null, null, 0); - } catch (java.net.MalformedURLException e) { - assertTrue(false); - } + idx = new ProtoIndex("CHK@yeah", "test", null, null, 0); csrl.setSerialiserFor(idx); timeDiff(); } From 8c62513a86087604621cdc8920f1e3566866cdf4 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 3 Jan 2015 19:02:41 +0000 Subject: [PATCH 026/180] Moved Index.java interface and URIEntry, URIKey to the shared. --HG-- branch : eclipse-separation --- .../Library => shared/src/freenet/library/index}/Index.java | 3 +-- .../src/freenet/library}/index/URIEntry.java | 2 +- .../Library => shared/src/freenet/library}/index/URIKey.java | 2 +- src/plugins/Library/Library.java | 1 + src/plugins/Library/index/ProtoIndex.java | 5 ++++- src/plugins/Library/index/ProtoIndexComponentSerialiser.java | 2 ++ src/plugins/Library/index/ProtoIndexSerialiser.java | 2 ++ src/plugins/Library/index/xml/XMLIndex.java | 4 ++-- src/plugins/Library/search/inter/IndexQuery.java | 3 ++- src/plugins/Library/search/inter/Interdex.java | 2 +- 10 files changed, 17 insertions(+), 9 deletions(-) rename {src/plugins/Library => shared/src/freenet/library/index}/Index.java (88%) rename {src/plugins/Library => shared/src/freenet/library}/index/URIEntry.java (97%) rename {src/plugins/Library => shared/src/freenet/library}/index/URIKey.java (96%) diff --git a/src/plugins/Library/Index.java b/shared/src/freenet/library/index/Index.java similarity index 88% rename 
from src/plugins/Library/Index.java rename to shared/src/freenet/library/index/Index.java index 1af3a391..7b211adf 100644 --- a/src/plugins/Library/Index.java +++ b/shared/src/freenet/library/index/Index.java @@ -1,9 +1,8 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library; +package freenet.library.index; -import freenet.library.index.TermEntry; import freenet.library.util.exec.Execution; import java.util.Set; diff --git a/src/plugins/Library/index/URIEntry.java b/shared/src/freenet/library/index/URIEntry.java similarity index 97% rename from src/plugins/Library/index/URIEntry.java rename to shared/src/freenet/library/index/URIEntry.java index a0f7a487..abd87f07 100644 --- a/src/plugins/Library/index/URIEntry.java +++ b/shared/src/freenet/library/index/URIEntry.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. */ -package plugins.Library.index; +package freenet.library.index; import java.util.Date; import java.util.Set; diff --git a/src/plugins/Library/index/URIKey.java b/shared/src/freenet/library/index/URIKey.java similarity index 96% rename from src/plugins/Library/index/URIKey.java rename to shared/src/freenet/library/index/URIKey.java index 5af291bf..be8ad2ca 100644 --- a/src/plugins/Library/index/URIKey.java +++ b/shared/src/freenet/library/index/URIKey.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.index; +package freenet.library.index; import freenet.library.util.BytePrefixKey; diff --git a/src/plugins/Library/Library.java b/src/plugins/Library/Library.java index 866fa5b1..2c589961 100644 --- a/src/plugins/Library/Library.java +++ b/src/plugins/Library/Library.java @@ -42,6 +42,7 @@ import freenet.client.events.ExpectedMIMEEvent; import freenet.keys.FreenetURI; import freenet.keys.USK; +import freenet.library.index.Index; import freenet.library.io.ObjectStreamReader; import freenet.library.io.ObjectStreamWriter; import freenet.library.io.serial.Serialiser.PullTask; diff --git a/src/plugins/Library/index/ProtoIndex.java b/src/plugins/Library/index/ProtoIndex.java index 41008762..8cd4cc79 100644 --- a/src/plugins/Library/index/ProtoIndex.java +++ b/src/plugins/Library/index/ProtoIndex.java @@ -13,10 +13,13 @@ import java.util.Set; import java.util.concurrent.Executor; -import plugins.Library.Index; + +import freenet.library.index.Index; import freenet.library.index.TermEntry; import freenet.library.index.TermPageEntry; +import freenet.library.index.URIEntry; +import freenet.library.index.URIKey; import freenet.library.io.serial.ProgressTracker; import freenet.library.io.serial.Serialiser; import freenet.library.util.DataNotLoadedException; diff --git a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java index 54be3993..81837cb1 100644 --- a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java @@ -12,6 +12,8 @@ import plugins.Library.client.FreenetArchiver; import freenet.library.index.TermEntry; +import freenet.library.index.URIEntry; +import freenet.library.index.URIKey; import freenet.library.io.DataFormatException; import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.Archiver; diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java 
b/src/plugins/Library/index/ProtoIndexSerialiser.java index d8ca3a86..b8be863d 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexSerialiser.java @@ -12,6 +12,8 @@ import plugins.Library.client.FreenetArchiver; import freenet.library.index.TermEntry; +import freenet.library.index.URIEntry; +import freenet.library.index.URIKey; import freenet.library.io.DataFormatException; import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.Archiver; diff --git a/src/plugins/Library/index/xml/XMLIndex.java b/src/plugins/Library/index/xml/XMLIndex.java index 2b5e1acf..fe59788d 100644 --- a/src/plugins/Library/index/xml/XMLIndex.java +++ b/src/plugins/Library/index/xml/XMLIndex.java @@ -4,8 +4,6 @@ package plugins.Library.index.xml; import plugins.Library.Library; -import plugins.Library.Index; -import plugins.Library.index.URIEntry; import plugins.Library.search.InvalidSearchException; import freenet.support.Fields; @@ -24,8 +22,10 @@ import freenet.node.RequestStarter; import freenet.node.RequestClient; import freenet.keys.FreenetURI; +import freenet.library.index.Index; import freenet.library.index.TermEntry; import freenet.library.index.TermPageEntry; +import freenet.library.index.URIEntry; import freenet.library.util.exec.Execution; import freenet.library.util.exec.TaskAbortException; diff --git a/src/plugins/Library/search/inter/IndexQuery.java b/src/plugins/Library/search/inter/IndexQuery.java index 08c819ec..cb23d21a 100644 --- a/src/plugins/Library/search/inter/IndexQuery.java +++ b/src/plugins/Library/search/inter/IndexQuery.java @@ -3,12 +3,13 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.search.inter; -import plugins.Library.Index; import java.util.Collections; import java.util.Set; import java.util.HashSet; +import freenet.library.index.Index; + /** ** DOCUMENT ** diff --git a/src/plugins/Library/search/inter/Interdex.java b/src/plugins/Library/search/inter/Interdex.java index 63585655..89ad97fa 100644 --- a/src/plugins/Library/search/inter/Interdex.java +++ b/src/plugins/Library/search/inter/Interdex.java @@ -3,9 +3,9 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.search.inter; -import plugins.Library.Index; import freenet.keys.FreenetURI; +import freenet.library.index.Index; import freenet.library.index.TermEntry; import freenet.library.index.TermIndexEntry; import freenet.library.index.TermTermEntry; From ab72b941a72b0c5f7c3db7d2e36aa46cb629c128 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 3 Jan 2015 23:11:35 +0000 Subject: [PATCH 027/180] Created a register for the factory for LiveArchiver. Moved Priority. 
--HG-- branch : eclipse-separation --- .../src/freenet/library/ArchiverFactory.java | 13 ++++++ .../src/freenet/library/FactoryRegister.java | 14 +++++++ shared/src/freenet/library/Priority.java | 6 +++ src/plugins/Library/Library.java | 42 ++++++++++++++++--- src/plugins/Library/SpiderIndexUploader.java | 3 +- .../Library/client/FreenetArchiver.java | 2 +- src/plugins/Library/index/ProtoIndex.java | 2 - .../index/ProtoIndexComponentSerialiser.java | 14 +++---- .../Library/index/ProtoIndexSerialiser.java | 13 +++--- test/plugins/Library/Tester.java | 8 ++-- 10 files changed, 89 insertions(+), 28 deletions(-) create mode 100644 shared/src/freenet/library/ArchiverFactory.java create mode 100644 shared/src/freenet/library/FactoryRegister.java create mode 100644 shared/src/freenet/library/Priority.java diff --git a/shared/src/freenet/library/ArchiverFactory.java b/shared/src/freenet/library/ArchiverFactory.java new file mode 100644 index 00000000..952d7fdc --- /dev/null +++ b/shared/src/freenet/library/ArchiverFactory.java @@ -0,0 +1,13 @@ +package freenet.library; + +import freenet.library.io.ObjectStreamReader; +import freenet.library.io.ObjectStreamWriter; +import freenet.library.io.serial.LiveArchiver; +import freenet.library.util.exec.SimpleProgress; + +public interface ArchiverFactory { + LiveArchiver + newArchiver(S rw, String mime, int size, Priority priorityLevel); + LiveArchiver + newArchiver(S rw, String mime, int size, LiveArchiver archiver); +} diff --git a/shared/src/freenet/library/FactoryRegister.java b/shared/src/freenet/library/FactoryRegister.java new file mode 100644 index 00000000..fc6c4ab4 --- /dev/null +++ b/shared/src/freenet/library/FactoryRegister.java @@ -0,0 +1,14 @@ +package freenet.library; + +public class FactoryRegister { + private static ArchiverFactory archiver = null; + + public static void register(ArchiverFactory factory) { + archiver = factory; + } + + public static ArchiverFactory getArchiverFactory() { + assert archiver != null; 
+ return archiver; + } +} diff --git a/shared/src/freenet/library/Priority.java b/shared/src/freenet/library/Priority.java new file mode 100644 index 00000000..b1d2809f --- /dev/null +++ b/shared/src/freenet/library/Priority.java @@ -0,0 +1,6 @@ +package freenet.library; + +public enum Priority { + Interactive, + Bulk; +} diff --git a/src/plugins/Library/Library.java b/src/plugins/Library/Library.java index 2c589961..3ed16155 100644 --- a/src/plugins/Library/Library.java +++ b/src/plugins/Library/Library.java @@ -42,10 +42,15 @@ import freenet.client.events.ExpectedMIMEEvent; import freenet.keys.FreenetURI; import freenet.keys.USK; +import freenet.library.ArchiverFactory; +import freenet.library.FactoryRegister; import freenet.library.index.Index; import freenet.library.io.ObjectStreamReader; import freenet.library.io.ObjectStreamWriter; +import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.Serialiser.PullTask; +import freenet.library.Priority; +import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; import freenet.node.NodeClientCore; import freenet.node.RequestClient; @@ -61,7 +66,7 @@ * Library class is the api for others to use search facilities, it is used by the interfaces * @author MikeB */ -final public class Library implements URLUpdateHook { +final public class Library implements URLUpdateHook, ArchiverFactory { public static final String BOOKMARK_PREFIX = "bookmark:"; public static final String DEFAULT_INDEX_SITE = BOOKMARK_PREFIX + "liberty-of-information" + " " + BOOKMARK_PREFIX + "free-market-free-people" + " " + @@ -122,8 +127,9 @@ public boolean realTimeFlag() { * Method to setup Library class so it has access to PluginRespirator, and load bookmarks * TODO pull bookmarks from disk */ - private Library(PluginRespirator pr){ + private Library(PluginRespirator pr) { this.pr = pr; + FactoryRegister.register(this); PluginStore ps; if(pr!=null) { this.exec = pr.getNode().executor; @@ 
-557,7 +563,7 @@ public final Index getIndex(String indexuri, String origIndexName) throws Invali if (indextype == ProtoIndex.class) { // TODO HIGH this *must* be non-blocking as it fetches the whole index root PullTask task = new PullTask(indexkey); - ProtoIndexSerialiser.forIndex(indexkey, RequestStarter.INTERACTIVE_PRIORITY_CLASS).pull(task); + ProtoIndexSerialiser.forIndex(indexkey, Priority.Interactive).pull(task); index = task.data; } else if (indextype == XMLIndex.class) { @@ -588,7 +594,7 @@ public final Index getIndex(String indexuri, String origIndexName) throws Invali ** @throws IllegalStateException if the singleton has not been initialised ** or if it does not have a respirator. */ - public static FreenetArchiver + public static LiveArchiver makeArchiver(ObjectStreamReader r, ObjectStreamWriter w, String mime, int size, short priorityClass) { if (lib == null || lib.pr == null) { throw new IllegalStateException("Cannot archive to freenet without a fully live Library plugin connected to a freenet node."); @@ -596,7 +602,7 @@ public final Index getIndex(String indexuri, String origIndexName) throws Invali return new FreenetArchiver(lib.pr.getNode().clientCore, r, w, mime, size, priorityClass); } } - + /** ** Create a {@link FreenetArchiver} connected to the core of the ** singleton's {@link PluginRespirator}. @@ -604,11 +610,35 @@ public final Index getIndex(String indexuri, String origIndexName) throws Invali ** @throws IllegalStateException if the singleton has not been initialised ** or if it does not have a respirator. 
*/ - public static FreenetArchiver + public static LiveArchiver makeArchiver(S rw, String mime, int size, short priorityClass) { return Library.makeArchiver(rw, rw, mime, size, priorityClass); } + public LiveArchiver + newArchiver(S rw, String mime, int size, Priority priorityLevel) { + short priorityClass = 0; + switch (priorityLevel) { + case Interactive: + priorityClass = RequestStarter.INTERACTIVE_PRIORITY_CLASS; + break; + case Bulk: + priorityClass = RequestStarter.BULK_SPLITFILE_PRIORITY_CLASS; + break; + } + return makeArchiver(rw, mime, size, priorityClass); + } + + public LiveArchiver + newArchiver(S rw, String mime, int size, LiveArchiver archiver) { + short priorityClass = RequestStarter.BULK_SPLITFILE_PRIORITY_CLASS; + if (archiver != null && + archiver instanceof FreenetArchiver) + priorityClass = ((FreenetArchiver) archiver).priorityClass; + + return makeArchiver(rw, mime, size, priorityClass); + } + public static String convertToHex(byte[] data) { StringBuilder buf = new StringBuilder(); for (int i = 0; i < data.length; i++) { diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index d886a77c..cc719c6f 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -27,6 +27,7 @@ import plugins.Library.index.ProtoIndexSerialiser; import freenet.client.InsertException; import freenet.keys.FreenetURI; +import freenet.library.Priority; import freenet.library.index.TermEntry; import freenet.library.index.TermEntryReaderWriter; import freenet.library.io.serial.LiveArchiver; @@ -764,7 +765,7 @@ private void updateOverallMetadata(ProtoIndex diskToMerge) { * on Freenet, essentially. 
*/ private void makeFreenetSerialisers() { if(srl == null) { - srl = ProtoIndexSerialiser.forIndex(lastUploadURI.toASCIIString(), RequestStarter.BULK_SPLITFILE_PRIORITY_CLASS); + srl = ProtoIndexSerialiser.forIndex(lastUploadURI.toASCIIString(), Priority.Bulk); LiveArchiver,SimpleProgress> archiver = (LiveArchiver,SimpleProgress>)(srl.getChildSerialiser()); leafsrl = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_DEFAULT, archiver); diff --git a/src/plugins/Library/client/FreenetArchiver.java b/src/plugins/Library/client/FreenetArchiver.java index 8fe17415..91124f43 100644 --- a/src/plugins/Library/client/FreenetArchiver.java +++ b/src/plugins/Library/client/FreenetArchiver.java @@ -124,7 +124,7 @@ public FreenetArchiver(NodeClientCore c, ObjectStreamReader r, ObjectStreamWrite public FreenetArchiver(NodeClientCore c, S rw, String mime, int size, short priority) { this(c, rw, rw, mime, size, priority); } - + /** ** {@inheritDoc} ** diff --git a/src/plugins/Library/index/ProtoIndex.java b/src/plugins/Library/index/ProtoIndex.java index 8cd4cc79..d08cbb14 100644 --- a/src/plugins/Library/index/ProtoIndex.java +++ b/src/plugins/Library/index/ProtoIndex.java @@ -13,8 +13,6 @@ import java.util.Set; import java.util.concurrent.Executor; - - import freenet.library.index.Index; import freenet.library.index.TermEntry; import freenet.library.index.TermPageEntry; diff --git a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java index 81837cb1..8abc58c7 100644 --- a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexComponentSerialiser.java @@ -8,9 +8,7 @@ import java.util.Map; import java.util.TreeMap; -import plugins.Library.Library; -import plugins.Library.client.FreenetArchiver; - +import freenet.library.FactoryRegister; import freenet.library.index.TermEntry; import freenet.library.index.URIEntry; import 
freenet.library.index.URIKey; @@ -36,7 +34,6 @@ import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.exec.TaskInProgressException; -import freenet.node.RequestStarter; /** ** Serialiser for the components of a ProtoIndex. @@ -199,10 +196,11 @@ protected ProtoIndexComponentSerialiser(int fmtid, LiveArchiver>(yamlrw, true, YamlReaderWriter.FILE_EXTENSION, "", "", null); diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java b/src/plugins/Library/index/ProtoIndexSerialiser.java index b8be863d..d2b60c56 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexSerialiser.java @@ -8,9 +8,7 @@ import java.util.LinkedHashMap; import java.util.Map; -import plugins.Library.Library; -import plugins.Library.client.FreenetArchiver; - +import freenet.library.FactoryRegister; import freenet.library.index.TermEntry; import freenet.library.index.URIEntry; import freenet.library.index.URIKey; @@ -21,6 +19,7 @@ import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.Serialiser; import freenet.library.io.serial.Translator; +import freenet.library.Priority; import freenet.library.util.SkeletonBTreeMap; import freenet.library.util.SkeletonBTreeSet; import freenet.library.util.exec.SimpleProgress; @@ -61,9 +60,9 @@ public ProtoIndexSerialiser(LiveArchiver, SimpleProgress> s) // final protected static HashMap, ProtoIndexSerialiser> // srl_cls = new HashMap, ProtoIndexSerialiser>(); - public static ProtoIndexSerialiser forIndex(Object o, short priorityClass) { + public static ProtoIndexSerialiser forIndex(Object o, Priority priorityLevel) { if (o instanceof String) { - return forIndex((String)o, priorityClass); + return forIndex((String)o, priorityLevel); } else if (o instanceof File) { return forIndex((File)o); } else { @@ -71,7 +70,7 @@ public static ProtoIndexSerialiser forIndex(Object o, short priorityClass) { } } - public 
static ProtoIndexSerialiser forIndex(String uri, short priorityClass) { + public static ProtoIndexSerialiser forIndex(String uri, Priority priorityLevel) { // ProtoIndexSerialiser srl = srl_cls.get(FreenetURI.class); // if (srl == null) { // // java's type-inference isn't that smart, see @@ -82,7 +81,7 @@ public static ProtoIndexSerialiser forIndex(String uri, short priorityClass) { // One serialiser per application. See comments above re srl_cls. // java's type-inference isn't that smart, see - FreenetArchiver> arx = Library.makeArchiver(ProtoIndexComponentSerialiser.yamlrw, MIME_TYPE, 0x80 * ProtoIndex.BTREE_NODE_MIN, priorityClass); + LiveArchiver, SimpleProgress> arx = FactoryRegister.getArchiverFactory().newArchiver(ProtoIndexComponentSerialiser.yamlrw, MIME_TYPE, 0x80 * ProtoIndex.BTREE_NODE_MIN, priorityLevel); return new ProtoIndexSerialiser(arx); } diff --git a/test/plugins/Library/Tester.java b/test/plugins/Library/Tester.java index 4a57bc1e..fff12757 100644 --- a/test/plugins/Library/Tester.java +++ b/test/plugins/Library/Tester.java @@ -9,8 +9,10 @@ import plugins.Library.*; import freenet.keys.FreenetURI; +import freenet.library.Priority; import freenet.library.index.TermEntry; import freenet.library.io.YamlReaderWriter; +import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.Serialiser.*; import freenet.library.util.SkeletonBTreeSet; import freenet.library.util.TaskAbortExceptionConvertor; @@ -80,7 +82,7 @@ public static String testPushProgress() { if (push_progress_thread == null) { push_progress_thread = new Thread() { YamlReaderWriter yamlrw = new YamlReaderWriter(); - FreenetArchiver> arx = Library.makeArchiver(yamlrw, "text/yaml", 0x10000, RequestStarter.INTERACTIVE_PRIORITY_CLASS); + LiveArchiver, SimpleProgress> arx = Library.makeArchiver(yamlrw, "text/yaml", 0x10000, RequestStarter.INTERACTIVE_PRIORITY_CLASS); @Override public void run() { push_progress_start = new Date(); @@ -148,7 +150,7 @@ public static String 
testPushIndex() { if (push_index_thread == null) { push_index_start = new Date(); push_index_thread = new Thread() { - ProtoIndexSerialiser srl = ProtoIndexSerialiser.forIndex(push_index_endURI.toASCIIString(), RequestStarter.INTERACTIVE_PRIORITY_CLASS); + ProtoIndexSerialiser srl = ProtoIndexSerialiser.forIndex(push_index_endURI.toASCIIString(), Priority.Interactive); ProtoIndex idx; Random rand = new Random(); @@ -228,7 +230,7 @@ public static String testPushAndMergeIndex() { FreenetArchiver.setCacheDir(cacheDir); push_index_thread = new Thread() { - ProtoIndexSerialiser srl = ProtoIndexSerialiser.forIndex(push_index_endURI.toASCIIString(), RequestStarter.INTERACTIVE_PRIORITY_CLASS); + ProtoIndexSerialiser srl = ProtoIndexSerialiser.forIndex(push_index_endURI.toASCIIString(), Priority.Interactive); ProtoIndex idx; Random rand = new Random(); From a11bb25f3f90ac8060f6fade88f86574358be2fe Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 3 Jan 2015 23:16:46 +0000 Subject: [PATCH 028/180] Moved the ProtoIndex to shared. 
--HG-- branch : eclipse-separation --- .../src/freenet/library}/index/ProtoIndex.java | 7 +------ .../library}/index/ProtoIndexComponentSerialiser.java | 5 +---- .../src/freenet/library}/index/ProtoIndexSerialiser.java | 5 +---- .../src/freenet/library}/index/package-info.java | 2 +- src/plugins/Library/Library.java | 4 ++-- src/plugins/Library/Main.java | 6 +++--- src/plugins/Library/SpiderIndexUploader.java | 6 +++--- test/plugins/Library/Tester.java | 3 +++ test/plugins/Library/index/BIndexTest.java | 3 +++ 9 files changed, 18 insertions(+), 23 deletions(-) rename {src/plugins/Library => shared/src/freenet/library}/index/ProtoIndex.java (97%) rename {src/plugins/Library => shared/src/freenet/library}/index/ProtoIndexComponentSerialiser.java (99%) rename {src/plugins/Library => shared/src/freenet/library}/index/ProtoIndexSerialiser.java (98%) rename {src/plugins/Library => shared/src/freenet/library}/index/package-info.java (92%) diff --git a/src/plugins/Library/index/ProtoIndex.java b/shared/src/freenet/library/index/ProtoIndex.java similarity index 97% rename from src/plugins/Library/index/ProtoIndex.java rename to shared/src/freenet/library/index/ProtoIndex.java index d08cbb14..ed35dad8 100644 --- a/src/plugins/Library/index/ProtoIndex.java +++ b/shared/src/freenet/library/index/ProtoIndex.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.index; +package freenet.library.index; import java.util.AbstractSet; import java.util.Collection; @@ -13,11 +13,6 @@ import java.util.Set; import java.util.concurrent.Executor; -import freenet.library.index.Index; -import freenet.library.index.TermEntry; -import freenet.library.index.TermPageEntry; -import freenet.library.index.URIEntry; -import freenet.library.index.URIKey; import freenet.library.io.serial.ProgressTracker; import freenet.library.io.serial.Serialiser; import freenet.library.util.DataNotLoadedException; diff --git a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java b/shared/src/freenet/library/index/ProtoIndexComponentSerialiser.java similarity index 99% rename from src/plugins/Library/index/ProtoIndexComponentSerialiser.java rename to shared/src/freenet/library/index/ProtoIndexComponentSerialiser.java index 8abc58c7..4c4786f2 100644 --- a/src/plugins/Library/index/ProtoIndexComponentSerialiser.java +++ b/shared/src/freenet/library/index/ProtoIndexComponentSerialiser.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.index; +package freenet.library.index; import java.util.Collection; import java.util.HashMap; @@ -9,9 +9,6 @@ import java.util.TreeMap; import freenet.library.FactoryRegister; -import freenet.library.index.TermEntry; -import freenet.library.index.URIEntry; -import freenet.library.index.URIKey; import freenet.library.io.DataFormatException; import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.Archiver; diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java b/shared/src/freenet/library/index/ProtoIndexSerialiser.java similarity index 98% rename from src/plugins/Library/index/ProtoIndexSerialiser.java rename to shared/src/freenet/library/index/ProtoIndexSerialiser.java index d2b60c56..41f1bab6 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ b/shared/src/freenet/library/index/ProtoIndexSerialiser.java @@ -1,7 +1,7 @@ /* This code is part of Freenet. It is distributed under the GNU General * Public License, version 2 (or at your option any later version). See * http://www.gnu.org/ for further details of the GPL. 
*/ -package plugins.Library.index; +package freenet.library.index; import java.io.File; import java.util.Date; @@ -9,9 +9,6 @@ import java.util.Map; import freenet.library.FactoryRegister; -import freenet.library.index.TermEntry; -import freenet.library.index.URIEntry; -import freenet.library.index.URIKey; import freenet.library.io.DataFormatException; import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.Archiver; diff --git a/src/plugins/Library/index/package-info.java b/shared/src/freenet/library/index/package-info.java similarity index 92% rename from src/plugins/Library/index/package-info.java rename to shared/src/freenet/library/index/package-info.java index 4afd4344..f59ec414 100644 --- a/src/plugins/Library/index/package-info.java +++ b/shared/src/freenet/library/index/package-info.java @@ -7,4 +7,4 @@ ** ** @author infinity0 */ -package plugins.Library.index; +package freenet.library.index; diff --git a/src/plugins/Library/Library.java b/src/plugins/Library/Library.java index 3ed16155..6cd79439 100644 --- a/src/plugins/Library/Library.java +++ b/src/plugins/Library/Library.java @@ -18,8 +18,6 @@ import java.util.Set; import plugins.Library.client.FreenetArchiver; -import plugins.Library.index.ProtoIndex; -import plugins.Library.index.ProtoIndexSerialiser; import plugins.Library.index.xml.URLUpdateHook; import plugins.Library.index.xml.XMLIndex; import plugins.Library.search.InvalidSearchException; @@ -45,6 +43,8 @@ import freenet.library.ArchiverFactory; import freenet.library.FactoryRegister; import freenet.library.index.Index; +import freenet.library.index.ProtoIndex; +import freenet.library.index.ProtoIndexSerialiser; import freenet.library.io.ObjectStreamReader; import freenet.library.io.ObjectStreamWriter; import freenet.library.io.serial.LiveArchiver; diff --git a/src/plugins/Library/Main.java b/src/plugins/Library/Main.java index f5701d0f..9b4b9776 100644 --- a/src/plugins/Library/Main.java +++ b/src/plugins/Library/Main.java 
@@ -26,9 +26,6 @@ import java.util.logging.Level; import plugins.Library.client.FreenetArchiver; -import plugins.Library.index.ProtoIndex; -import plugins.Library.index.ProtoIndexComponentSerialiser; -import plugins.Library.index.ProtoIndexSerialiser; import plugins.Library.search.Search; import plugins.Library.ui.WebInterface; @@ -44,6 +41,9 @@ import freenet.keys.FreenetURI; import freenet.keys.InsertableClientSSK; import freenet.l10n.BaseL10n.LANGUAGE; +import freenet.library.index.ProtoIndex; +import freenet.library.index.ProtoIndexComponentSerialiser; +import freenet.library.index.ProtoIndexSerialiser; import freenet.library.index.TermEntry; import freenet.library.index.TermEntryReaderWriter; import freenet.library.index.TermPageEntry; diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index cc719c6f..e7a51349 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -22,12 +22,12 @@ import java.util.logging.Level; import plugins.Library.client.FreenetArchiver; -import plugins.Library.index.ProtoIndex; -import plugins.Library.index.ProtoIndexComponentSerialiser; -import plugins.Library.index.ProtoIndexSerialiser; import freenet.client.InsertException; import freenet.keys.FreenetURI; import freenet.library.Priority; +import freenet.library.index.ProtoIndex; +import freenet.library.index.ProtoIndexComponentSerialiser; +import freenet.library.index.ProtoIndexSerialiser; import freenet.library.index.TermEntry; import freenet.library.index.TermEntryReaderWriter; import freenet.library.io.serial.LiveArchiver; diff --git a/test/plugins/Library/Tester.java b/test/plugins/Library/Tester.java index fff12757..6be7cfc5 100644 --- a/test/plugins/Library/Tester.java +++ b/test/plugins/Library/Tester.java @@ -10,6 +10,9 @@ import freenet.keys.FreenetURI; import freenet.library.Priority; +import freenet.library.index.ProtoIndex; +import 
freenet.library.index.ProtoIndexComponentSerialiser; +import freenet.library.index.ProtoIndexSerialiser; import freenet.library.index.TermEntry; import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.LiveArchiver; diff --git a/test/plugins/Library/index/BIndexTest.java b/test/plugins/Library/index/BIndexTest.java index b50cfd15..dc7f0091 100644 --- a/test/plugins/Library/index/BIndexTest.java +++ b/test/plugins/Library/index/BIndexTest.java @@ -10,6 +10,9 @@ import plugins.Library.index.*; import freenet.keys.FreenetURI; +import freenet.library.index.ProtoIndex; +import freenet.library.index.ProtoIndexComponentSerialiser; +import freenet.library.index.ProtoIndexSerialiser; import freenet.library.index.TermEntry; import freenet.library.io.serial.Serialiser.*; import freenet.library.util.SkeletonBTreeMap; From 65f9ae2df67d85cc4f9f14d485b1029d18a5ff1d Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 4 Jan 2015 00:16:49 +0000 Subject: [PATCH 029/180] First framework of the uploader compiling. 
--HG-- branch : eclipse-separation --- build.xml | 66 ++- .../index/ProtoIndexComponentSerialiser.java | 10 + .../library/io/serial/FileArchiver.java | 16 +- .../library/io/serial/LiveArchiver.java | 1 + .../library/uploader/DirectoryUploader.java | 542 ++++++++++++++++++ .../src/freenet/library/uploader/Merger.java | 145 +++++ .../library/uploader/UploaderLibrary.java | 107 ++++ .../library/uploader/UploaderPaths.java | 43 ++ 8 files changed, 895 insertions(+), 35 deletions(-) create mode 100644 uploader/src/freenet/library/uploader/DirectoryUploader.java create mode 100644 uploader/src/freenet/library/uploader/Merger.java create mode 100644 uploader/src/freenet/library/uploader/UploaderLibrary.java create mode 100644 uploader/src/freenet/library/uploader/UploaderPaths.java diff --git a/build.xml b/build.xml index 04c40bb0..29771340 100644 --- a/build.xml +++ b/build.xml @@ -4,6 +4,8 @@ + + @@ -31,14 +33,25 @@ - - - + + + + + + + + + + + + + + - + @@ -97,9 +110,9 @@ - - - + + + @@ -111,31 +124,23 @@ - + - - + - - + @@ -143,12 +148,12 @@ - - + - + - + + @@ -165,8 +170,8 @@ - - + + @@ -176,8 +181,7 @@ - - + @@ -194,7 +198,7 @@ - + @@ -213,7 +217,7 @@ - + @@ -256,7 +260,7 @@ - + diff --git a/shared/src/freenet/library/index/ProtoIndexComponentSerialiser.java b/shared/src/freenet/library/index/ProtoIndexComponentSerialiser.java index 4c4786f2..7eeaddbe 100644 --- a/shared/src/freenet/library/index/ProtoIndexComponentSerialiser.java +++ b/shared/src/freenet/library/index/ProtoIndexComponentSerialiser.java @@ -395,6 +395,11 @@ public BTreeNodeSerialiser(String n, LiveArchiver, SimplePro } } + @Override + public void waitForAsyncInserts() throws TaskAbortException { + subsrl.waitForAsyncInserts(); + } + } @@ -519,6 +524,11 @@ public EntryGroupSerialiser(LiveArchiver, SimpleProgress> s, } } + @Override + public void waitForAsyncInserts() throws TaskAbortException { + subsrl.waitForAsyncInserts(); + } + } diff --git a/shared/src/freenet/library/io/serial/FileArchiver.java 
b/shared/src/freenet/library/io/serial/FileArchiver.java index 7b7a04d9..8b985b18 100644 --- a/shared/src/freenet/library/io/serial/FileArchiver.java +++ b/shared/src/freenet/library/io/serial/FileArchiver.java @@ -132,7 +132,8 @@ protected File getFile(Object meta) { public interface LiveArchiver ========================================================================*/ - /*@Override**/ public void pull(PullTask t) throws TaskAbortException { + @Override + public void pull(PullTask t) throws TaskAbortException { File file = getFile(t.meta); try { FileInputStream is = new FileInputStream(file); @@ -153,7 +154,8 @@ public interface LiveArchiver } } - /*@Override**/ public void push(PushTask t) throws TaskAbortException { + @Override + public void push(PushTask t) throws TaskAbortException { if (random) { t.meta = java.util.UUID.randomUUID().toString(); } File file = getFile(t.meta); try { @@ -175,7 +177,8 @@ public interface LiveArchiver } } - /*@Override**/ public void pullLive(PullTask t, SimpleProgress p) throws TaskAbortException { + @Override + public void pullLive(PullTask t, SimpleProgress p) throws TaskAbortException { try { pull(t); if (testmode) { randomWait(p); } @@ -185,7 +188,8 @@ public interface LiveArchiver } } - /*@Override**/ public void pushLive(PushTask t, SimpleProgress p) throws TaskAbortException { + @Override + public void pushLive(PushTask t, SimpleProgress p) throws TaskAbortException { try { push(t); if (testmode) { randomWait(p); } @@ -195,4 +199,8 @@ public interface LiveArchiver } } + @Override + public void waitForAsyncInserts() { + } + } diff --git a/shared/src/freenet/library/io/serial/LiveArchiver.java b/shared/src/freenet/library/io/serial/LiveArchiver.java index 2b5440f1..1398c952 100644 --- a/shared/src/freenet/library/io/serial/LiveArchiver.java +++ b/shared/src/freenet/library/io/serial/LiveArchiver.java @@ -43,4 +43,5 @@ public interface LiveArchiver extends Archiver { */ public void pushLive(PushTask task, P p) throws 
TaskAbortException; + public void waitForAsyncInserts() throws TaskAbortException; } diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java new file mode 100644 index 00000000..13a9fbf9 --- /dev/null +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -0,0 +1,542 @@ +package freenet.library.uploader; + +import java.io.BufferedReader; +import java.io.EOFException; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.FileWriter; +import java.io.FilenameFilter; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStreamWriter; +import java.net.MalformedURLException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.logging.Level; + +import freenet.library.Priority; +import freenet.library.index.ProtoIndex; +import freenet.library.index.ProtoIndexComponentSerialiser; +import freenet.library.index.ProtoIndexSerialiser; +import freenet.library.index.TermEntry; +import freenet.library.io.serial.LiveArchiver; +import freenet.library.io.serial.Serialiser.PullTask; +import freenet.library.io.serial.Serialiser.PushTask; +import freenet.library.util.SkeletonBTreeMap; +import freenet.library.util.SkeletonBTreeSet; +import freenet.library.util.TaskAbortExceptionConvertor; +import freenet.library.util.exec.SimpleProgress; +import freenet.library.util.exec.TaskAbortException; +import freenet.library.util.func.Closure; + +import net.pterodactylus.fcp.ClientPut; +import net.pterodactylus.fcp.FcpAdapter; +import net.pterodactylus.fcp.FcpConnection; +import net.pterodactylus.fcp.FcpMessage; +import net.pterodactylus.fcp.PutFailed; +import net.pterodactylus.fcp.PutSuccessful; + +class DirectoryUploader implements 
Runnable { + + FcpConnection connection; + File directory; + + DirectoryUploader(FcpConnection c, File d) { + connection = c; + directory = d; + } + + public void run() { + // doit. + } + + private String lastUploadURI; + private Object freenetMergeSync = new Object(); + private boolean freenetMergeRunning = false; + private boolean diskMergeRunning = false; + + static final int MAX_HANDLING_COUNT = 5; + // When pushing is broken, allow max handling to reach this level before stalling forever to prevent running out of disk space. + private int PUSH_BROKEN_MAX_HANDLING_COUNT = 10; + // Don't use too much disk space, take into account fact that Spider slows down over time. + + private boolean pushBroken; + + /** The temporary on-disk index. We merge stuff into this until it exceeds a threshold size, then + * we create a new diskIdx and merge the old one into the idxFreenet. */ + ProtoIndex idxDisk; + + /** idxDisk gets merged into idxFreenet this long after the last merge completed. */ + static final long MAX_TIME = 24*60*60*1000L; + + /** idxDisk gets merged into idxFreenet after this many incoming updates from Spider. */ + static final int MAX_UPDATES = 16; + + /** idxDisk gets merged into idxFreenet after it has grown to this many terms. + * Note that the entire main tree of terms (not the sub-trees with the positions and urls in) must + * fit into memory during the merge process. */ + static final int MAX_TERMS = 100*1000; + + /** idxDisk gets merged into idxFreenet after it has grown to this many terms. + * Note that the entire main tree of terms (not the sub-trees with the positions and urls in) must + * fit into memory during the merge process. */ + static final int MAX_TERMS_NOT_UPLOADED = 10*1000; + + /** Maximum size of a single entry, in TermPageEntry count, on disk. If we exceed this we force an + * insert-to-freenet and move on to a new disk index. The problem is that the merge to Freenet has + * to keep the whole of each entry in RAM. 
This is only true for the data being merged in - the + * on-disk index - and not for the data on Freenet, which is pulled on demand. SCALABILITY */ + static final int MAX_DISK_ENTRY_SIZE = 10000; + + /** Like pushNumber, the number of the current disk dir, used to create idxDiskDir. */ + private int dirNumber; + static final String DISK_DIR_PREFIX = "library-temp-index-"; + /** Directory the current idxDisk is saved in. */ + File idxDiskDir; + private int mergedToDisk; + + ProtoIndexSerialiser srl = null; + String lastDiskIndexName; + /** The uploaded index on Freenet. This never changes, it just gets updated. */ + ProtoIndex idxFreenet; + + // private final SpiderIndexURIs spiderIndexURIs; + + long pushNumber; + static final String LAST_URL_FILENAME = "library.index.lastpushed.chk"; + static final String PRIV_URI_FILENAME = "library.index.privkey"; + static final String PUB_URI_FILENAME = "library.index.pubkey"; + static final String EDITION_FILENAME = "library.index.next-edition"; + + static final String LAST_DISK_FILENAME = "library.index.lastpushed.disk"; + + static final String BASE_FILENAME_PUSH_DATA = "library.index.data."; + + + // This is a member variable because it is huge, and having huge stuff in local variables seems to upset the default garbage collector. + // It doesn't need to be synchronized because it's always used from mergeToDisk, which never runs in parallel. 
+ private Map> newtrees; + // Ditto + private SortedSet terms; + + ProtoIndexSerialiser srlDisk = null; + private ProtoIndexComponentSerialiser leafsrlDisk; + + private long lastMergedToFreenet = -1; + + + private boolean writeStringTo(File filename, String uri) { + FileOutputStream fos = null; + try { + fos = new FileOutputStream(filename); + OutputStreamWriter osw = new OutputStreamWriter(fos, "UTF-8"); + osw.write(uri); + osw.close(); + fos = null; + return true; + } catch (IOException e) { + System.out.println("Failed to write to "+filename+" : "+uri+" : "+e); + return false; + } finally { + try { + if (fos != null) { + fos.close(); + } + } catch (IOException e) { + e.printStackTrace(); + } + } + } + + private String readStringFrom(File file) { + String ret; + FileInputStream fis = null; + try { + fis = new FileInputStream(file); + BufferedReader br = new BufferedReader(new InputStreamReader(fis, "UTF-8")); + ret = br.readLine(); + fis.close(); + fis = null; + return ret; + } catch (IOException e) { + // Ignore + return null; + } finally { + try { + if (fis != null) { + fis.close(); + } + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + } + + + /** Create a new on-disk index from terms and newtrees. + * @return True if the size of any one item in the index is so large that we must upload + * immediately to Freenet. + * @throws TaskAbortException If something broke catastrophically. */ + private boolean createDiskIndex() throws TaskAbortException { + boolean tooBig = false; + // created a new index, fill it with data. + // DON'T MERGE, merge with a lot of data will deadlock. + // FIXME throw in update() if it will deadlock. 
+ for(String key : terms) { + SkeletonBTreeSet tree = makeEntryTree(leafsrlDisk); + SortedSet toMerge = newtrees.get(key); + tree.addAll(toMerge); + if(toMerge.size() > MAX_DISK_ENTRY_SIZE) + tooBig = true; + toMerge = null; + tree.deflate(); + assert(tree.isBare()); + idxDisk.ttab.put(key, tree); + } + idxDisk.ttab.deflate(); + return tooBig; + } + + + /** Create a directory for an on-disk index. + * @return False if something broke and we can't continue. */ + private boolean createDiskDir() { + dirNumber++; + idxDiskDir = new File(DISK_DIR_PREFIX + Integer.toString(dirNumber)); + System.out.println("Created new disk dir for merging: "+idxDiskDir); + if(!(idxDiskDir.mkdir() || idxDiskDir.isDirectory())) { + System.err.println("Unable to create new disk dir: "+idxDiskDir); + synchronized(this) { + pushBroken = true; + return false; + } + } + return true; + } + + /** Set up the serialisers for an on-disk index. + * @return False if something broke and we can't continue. */ + private boolean makeDiskDirSerialiser() { + if(srlDisk == null) { + srlDisk = ProtoIndexSerialiser.forIndex(idxDiskDir); + LiveArchiver,SimpleProgress> archiver = + (LiveArchiver,SimpleProgress>)(srlDisk.getChildSerialiser()); + leafsrlDisk = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_FILE_LOCAL, archiver); + if(lastDiskIndexName == null) { + idxDisk = new ProtoIndex("CHK@", "test", null, null, 0L); + // FIXME more hacks: It's essential that we use the same FileArchiver instance here. 
+ leafsrlDisk.setSerialiserFor(idxDisk); + } else { + try { + PullTask pull = new PullTask(lastDiskIndexName); + System.out.println("Pulling previous index "+lastDiskIndexName+" from disk so can update it."); + srlDisk.pull(pull); + System.out.println("Pulled previous index "+lastDiskIndexName+" from disk - updating..."); + idxDisk = pull.data; + if(idxDisk.getSerialiser().getLeafSerialiser() != archiver) + throw new IllegalStateException("Different serialiser: "+idxFreenet.getSerialiser()+" should be "+leafsrl); + } catch (TaskAbortException e) { + System.err.println("Failed to download previous index for spider update: "+e); + e.printStackTrace(); + synchronized(freenetMergeSync) { + pushBroken = true; + } + return false; + } + } + } + return true; + } + + static final String INDEX_DOCNAME = "index.yml"; + + private ProtoIndexComponentSerialiser leafsrl; + + /** Merge a disk dir to an on-Freenet index. Usually called on startup, i.e. we haven't just + * created the on-disk index so we need to setup the ProtoIndex etc. */ + protected void mergeToFreenet(File diskDir) { + ProtoIndexSerialiser s = ProtoIndexSerialiser.forIndex(diskDir); + LiveArchiver,SimpleProgress> archiver = + (LiveArchiver,SimpleProgress>)(s.getChildSerialiser()); + ProtoIndexComponentSerialiser leaf = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_FILE_LOCAL, archiver); + String f = this.readStringFrom(new File(diskDir, LAST_DISK_FILENAME)); + if(f == null) { + if(diskDir.list().length == 0) { + System.err.println("Directory "+diskDir+" is empty. 
Nothing to merge."); + diskDir.delete(); + return; + } + // Ignore + System.err.println("Unable to merge old data "+diskDir); + return; + } else { + System.out.println("Continuing old bucket: "+f); + } + + ProtoIndex idxDisk = null; + try { + PullTask pull = new PullTask(f); + System.out.println("Pulling previous index "+f+" from disk so can update it."); + s.pull(pull); + System.out.println("Pulled previous index "+f+" from disk - updating..."); + idxDisk = pull.data; + if(idxDisk.getSerialiser().getLeafSerialiser() != archiver) + throw new IllegalStateException("Different serialiser: "+idxDisk.getSerialiser()+" should be "+archiver); + } catch (TaskAbortException e) { + System.err.println("Failed to download previous index for spider update: "+e); + e.printStackTrace(); + synchronized(freenetMergeSync) { + pushBroken = true; + } + return; + } + mergeToFreenet(idxDisk, diskDir); + } + + /** Delete everything in a directory. Only use this when we are *very sure* there is no + * important data below it! */ + private static boolean removeAll(File wd) { + if(!wd.isDirectory()) { + System.err.println("DELETING FILE "+wd); + if(!wd.delete() && wd.exists()) { + System.err.println("Could not delete file: " + wd); + return false; + } + } else { + for(File subfile: wd.listFiles()) { + if(!removeAll(subfile)) { + return false; + } + } + if(!wd.delete()) { + System.err.println("Could not delete directory: " + wd); + return false; + } + } + return true; + } + + private final Object inflateSync = new Object(); + + /** Merge from an on-disk index to an on-Freenet index. + * @param diskToMerge The on-disk index. + * @param diskDir The folder the on-disk index is stored in. 
+ */ + protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { + System.out.println("Merging on-disk index to Freenet: "+diskDir); + if (lastUploadURI == null) { + lastUploadURI = readStringFrom(new File(LAST_URL_FILENAME)); + } + setupFreenetCacheDir(); + + makeFreenetSerialisers(); + + updateOverallMetadata(diskToMerge); + + final SkeletonBTreeMap> newtrees = diskToMerge.ttab; + + // Do the upload + + // async merge + Closure>, TaskAbortException> clo = + createMergeFromTreeClosure(newtrees); + try { + long mergeStartTime = System.currentTimeMillis(); + assert(idxFreenet.ttab.isBare()); + Iterator it = + diskToMerge.ttab.keySetAutoDeflate().iterator(); + TreeSet terms = new TreeSet(); + while(it.hasNext()) terms.add(it.next()); + System.out.println("Merging "+terms.size()+" terms from disk to Freenet..."); + assert(terms.size() == diskToMerge.ttab.size()); + assert(idxFreenet.ttab.isBare()); + assert(diskToMerge.ttab.isBare()); + long entriesAdded = terms.size(); + // Run the actual merge. + idxFreenet.ttab.update(terms, null, clo, new TaskAbortExceptionConvertor()); + assert(idxFreenet.ttab.isBare()); + // Deflate the main tree. + newtrees.deflate(); + assert(diskToMerge.ttab.isBare()); + + // Push the top node to a CHK. + PushTask task4 = new PushTask(idxFreenet); + task4.meta = "Unknown"; + srl.push(task4); + + // Now wait for the inserts to finish. They are started asynchronously in the above merge. 
+ LiveArchiver, SimpleProgress> arch = srl.getChildSerialiser(); + arch.waitForAsyncInserts(); + + long mergeEndTime = System.currentTimeMillis(); + System.out.println(entriesAdded + " entries merged in " + (mergeEndTime-mergeStartTime) + " ms, root at " + task4.meta); + String uri = (String) task4.meta; + lastUploadURI = uri; + if(writeStringTo(new File(LAST_URL_FILENAME), uri)) { + newtrees.deflate(); + diskToMerge = null; + terms = null; + System.out.println("Finished with disk index "+diskDir); + removeAll(diskDir); + } + + // Create the USK to redirect to the CHK at the top of the index. + uploadUSKForFreenetIndex(uri); + + } catch (TaskAbortException e) { + System.err.println("Failed to upload index for spider: "+e); + e.printStackTrace(); + synchronized(freenetMergeSync) { + pushBroken = true; + } + } + } + + private void uploadUSKForFreenetIndex(String uri) { + final ClientPut usk = new ClientPut(lastUploadURI, "USKupload"); + usk.setTargetURI(uri); + connection.addFcpListener(new FcpAdapter() { + public void receivedPutFailed(FcpConnection fcpConnection, PutFailed result) { + System.out.println("Could not upload USK"); + synchronized (usk) { + usk.notify(); + } + connection.removeFcpListener(this); + } + + public void receivedPutSuccessful(FcpConnection fcpConnection, PutSuccessful result) { + System.out.println("USK uploaded"); + synchronized (usk) { + usk.notify(); + } + connection.removeFcpListener(this); + } + }); + + try { + connection.sendMessage(usk); + usk.wait(); + } catch (InterruptedException e) { + System.err.println("Could not upload USK"); + System.exit(1); + } catch (IOException e) { + System.err.println("IO Exception when uploading USK"); + System.exit(1); + } + } + + + /** Create a Closure which will merge the subtrees from one index (on disk) into the subtrees + * of another index (on Freenet). It will be called with each subtree from the on-Freenet + * index, and will merge data from the relevant on-disk subtree. 
Both subtrees are initially + * deflated, and should be deflated when we leave the method, to avoid running out of memory. + * @param newtrees The on-disk tree of trees to get data from. + * @return + */ + private Closure>, TaskAbortException> createMergeFromTreeClosure(final SkeletonBTreeMap> newtrees) { + return new + Closure>, TaskAbortException>() { + /*@Override**/ public void invoke(Map.Entry> entry) throws TaskAbortException { + String key = entry.getKey(); + SkeletonBTreeSet tree = entry.getValue(); + boolean newTree = false; + if (tree == null) { + entry.setValue(tree = makeEntryTree(leafsrl)); + newTree = true; + } + assert(tree.isBare()); + SortedSet data; + // Can't be run in parallel. + synchronized(inflateSync) { + newtrees.inflate(key, true); + SkeletonBTreeSet entries; + entries = newtrees.get(key); + // CONCURRENCY: Because the lower-level trees are packed by the top tree, the bottom + // trees (SkeletonBTreeSet's) are not independant of each other. When the newtrees + // inflate above runs, it can deflate a tree that is still in use by another instance + // of this callback. Therefore we must COPY IT AND DEFLATE IT INSIDE THE LOCK. + entries.inflate(); + data = new TreeSet(entries); + entries.deflate(); + assert(entries.isBare()); + } + if (tree != null) { + if (newTree) { + tree.addAll(data); + assert(tree.size() == data.size()); + } else { + int oldSize = tree.size(); + tree.update(data, null); + // Note that it is possible for data.size() + oldSize != tree.size(), because we might be merging data we've already merged. + // But most of the time it will add up. + } + tree.deflate(); + assert(tree.isBare()); + } + } + }; + } + + /** Update the overall metadata for the on-Freenet index from the on-disk index. 
*/ + private void updateOverallMetadata(ProtoIndex diskToMerge) { + idxFreenet.setName(diskToMerge.getName()); + idxFreenet.setOwnerEmail(diskToMerge.getOwnerEmail()); + idxFreenet.setOwner(diskToMerge.getOwner()); + // This is roughly accurate, it might not be exactly so if we process a bit out of order. + idxFreenet.setTotalPages(diskToMerge.getTotalPages() + Math.max(0,idxFreenet.getTotalPages())); + } + + /** Setup the serialisers for uploading to Freenet. These convert tree nodes to and from blocks + * on Freenet, essentially. */ + private void makeFreenetSerialisers() { + if(srl == null) { + srl = ProtoIndexSerialiser.forIndex(lastUploadURI, Priority.Bulk); + LiveArchiver,SimpleProgress> archiver = + (LiveArchiver,SimpleProgress>)(srl.getChildSerialiser()); + leafsrl = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_DEFAULT, archiver); + if(lastUploadURI == null) { + idxFreenet = new ProtoIndex("CHK@", "test", null, null, 0L); + // FIXME more hacks: It's essential that we use the same FreenetArchiver instance here. + leafsrl.setSerialiserFor(idxFreenet); + } else { + try { + PullTask pull = new PullTask(lastUploadURI); + System.out.println("Pulling previous index "+lastUploadURI+" so can update it."); + srl.pull(pull); + System.out.println("Pulled previous index "+lastUploadURI+" - updating..."); + idxFreenet = pull.data; + if(idxFreenet.getSerialiser().getLeafSerialiser() != archiver) + throw new IllegalStateException("Different serialiser: "+idxFreenet.getSerialiser()+" should be "+leafsrl); + } catch (TaskAbortException e) { + System.err.println("Failed to download previous index for spider update: "+e); + e.printStackTrace(); + synchronized(freenetMergeSync) { + pushBroken = true; + } + return; + } + } + } + } + + /** Set up the on-disk cache, which keeps a copy of everything we upload to Freenet, so we + * won't need to re-download it, which can be very slow and doesn't always succeed. 
*/ + private void setupFreenetCacheDir() { + File dir = new File(UploaderPaths.LIBRARY_CACHE); + dir.mkdir(); + } + + protected static SkeletonBTreeSet makeEntryTree(ProtoIndexComponentSerialiser leafsrl) { + SkeletonBTreeSet tree = new SkeletonBTreeSet(ProtoIndex.BTREE_NODE_MIN); + leafsrl.setSerialiserFor(tree); + return tree; + } +} diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java new file mode 100644 index 00000000..6e115e05 --- /dev/null +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -0,0 +1,145 @@ +/* This code is part of Freenet. It is distributed under the GNU General + * Public License, version 2 (or at your option any later version). See + * http://www.gnu.org/ for further details of the GPL. */ + +package freenet.library.uploader; + +import java.io.File; +import java.io.FilenameFilter; +import java.io.IOException; +import java.net.UnknownHostException; + +import net.pterodactylus.fcp.ClientHello; +import net.pterodactylus.fcp.CloseConnectionDuplicateClientName; +import net.pterodactylus.fcp.FcpAdapter; +import net.pterodactylus.fcp.FcpConnection; +import net.pterodactylus.fcp.FcpMessage; +import net.pterodactylus.fcp.NodeHello; + +/** + * Standalone program to do the merging. + * + * The ambition is to avoid having the merger running in the freenet process, + * instead run it as a separate java program. + * + * It reads and removes the files created by the plugin (by Spider) and + * delivers data using FCP. + * + * Initially it is the same jar used as plugin and for the separate process. + * + * + * Initial logic: + * Run once to merge once. If more than one of these merge jobs are + * started at the time, the FcpConnection will fail to open since + * they use the same name. + *

    Check if there are directories to merge. If so, merge the first of them (order is not important). Done! + *
      If there are no files to merge. Done! + *
        Fetch the index top to get the top fan-out. + *
          Get the first term in the first and create an index with all + * the contents from all the files with all terms from the same index. + * Rewrite all files with the rest of the terms. + *
            Merge that index. + *
              Done. + */ +final public class Merger { + public static void main(String[] argv) { + int exitStatus = 0; + + System.out.println("Separate program started."); + //if (!cwd.matches(".*/plugins")) { + // System.err.println("Should be started in the freenet directory."); + // System.exit(1); + //} + + // Now we are in the Freenet directory. + // The rest of the work is done here. + FcpConnection connection = null; + try { + try { + connection = new FcpConnection("127.0.0.1"); + connection.connect(); + } catch (UnknownHostException e) { + System.err.println("Cannot connect to Node"); + exitStatus = 1; + return; + } catch (IOException e) { + System.err.println("Cannot connect to Node"); + exitStatus = 1; + return; + } + final String clientName = "SpiderMerger"; + final FcpMessage hello = new ClientHello(clientName); + connection.addFcpListener(new FcpAdapter() { + public void receivedNodeHello(FcpConnection c, NodeHello nh) { + synchronized (hello) { + hello.notify(); + } + c.removeFcpListener(this); + } + + public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnection, CloseConnectionDuplicateClientName closeConnectionDuplicateClientName) { + System.out.println("Another " + clientName + " connected - Aborting."); + System.exit(1); + } + }); + + synchronized (hello) { + try { + connection.sendMessage(hello); + hello.wait(); + } catch (InterruptedException e) { + System.err.println("Waiting for connection interrupted."); + exitStatus = 1; + return; + } catch (IOException e) { + System.err.println("Hello cannot write."); + exitStatus = 1; + return; + } + } + System.out.println("Connected"); + + UploaderLibrary.init(connection); + + final String[] dirsToMerge; + File directory = new File("."); + dirsToMerge = directory.list(new FilenameFilter() { + + public boolean accept(File arg0, String arg1) { + if(!(arg1.toLowerCase().startsWith(UploaderPaths.DISK_DIR_PREFIX))) return false; + return true; + } + + }); + + System.out.println("There are 
" + dirsToMerge.length + " old directories to merge."); + if (dirsToMerge.length > 0) { + new DirectoryUploader(connection, new File(directory, dirsToMerge[0])).run(); + return; + } + + String[] filesToMerge = directory.list(new FilenameFilter() { + + public boolean accept(File arg0, String arg1) { + if(!(arg1.toLowerCase().startsWith(UploaderPaths.BASE_FILENAME_PUSH_DATA))) return false; + File f = new File(arg0, arg1); + if(!f.isFile()) return false; + if(f.length() == 0) { f.delete(); return false; } + return true; + } + + }); + + System.out.println("There are " + filesToMerge.length + " files to merge."); + for (String s : filesToMerge) { + System.out.println("File: " + s); + } + + } finally { + if (connection != null) { + connection.close(); + } + System.exit(exitStatus); + } + } +} diff --git a/uploader/src/freenet/library/uploader/UploaderLibrary.java b/uploader/src/freenet/library/uploader/UploaderLibrary.java new file mode 100644 index 00000000..c8fcc8c0 --- /dev/null +++ b/uploader/src/freenet/library/uploader/UploaderLibrary.java @@ -0,0 +1,107 @@ +/* This code is part of Freenet. It is distributed under the GNU General + * Public License, version 2 (or at your option any later version). See + * http://www.gnu.org/ for further details of the GPL. 
*/ +package freenet.library.uploader; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.net.MalformedURLException; +import java.security.MessageDigest; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import freenet.library.ArchiverFactory; +import freenet.library.io.ObjectStreamReader; +import freenet.library.io.ObjectStreamWriter; +import freenet.library.io.serial.LiveArchiver; +import freenet.library.util.exec.SimpleProgress; +import freenet.library.util.exec.TaskAbortException; + +import net.pterodactylus.fcp.FcpConnection; +import net.pterodactylus.fcp.Priority; + + + +/** + * Library class is the api for others to use search facilities, it is used by the interfaces + * @author MikeB + */ +final public class UploaderLibrary implements ArchiverFactory { + + public static final String BOOKMARK_PREFIX = "bookmark:"; + public static final String DEFAULT_INDEX_SITE = BOOKMARK_PREFIX + "liberty-of-information" + " " + BOOKMARK_PREFIX + "free-market-free-people" + " " + + BOOKMARK_PREFIX + "gotcha" + " " + BOOKMARK_PREFIX + "wanna" + " " + BOOKMARK_PREFIX + "wanna.old" + " " + BOOKMARK_PREFIX + "gogo"; + private static int version = 36; + public static final String plugName = "Library " + getVersion(); + + public static String getPlugName() { + return plugName; + } + + public static long getVersion() { + return version; + } + + /** + ** Library singleton. 
+ */ + private static UploaderLibrary lib; + + public static FcpConnection fcpConnection; + + public synchronized static void init(FcpConnection connection) { + fcpConnection = connection; + } + + + public static String convertToHex(byte[] data) { + StringBuilder buf = new StringBuilder(); + for (int i = 0; i < data.length; i++) { + int halfbyte = (data[i] >>> 4) & 0x0F; + int two_halfs = 0; + do { + if ((0 <= halfbyte) && (halfbyte <= 9)) + buf.append((char) ('0' + halfbyte)); + else + buf.append((char) ('a' + (halfbyte - 10))); + halfbyte = data[i] & 0x0F; + } while (two_halfs++ < 1); + } + return buf.toString(); + } + + //this function will return the String representation of the MD5 hash for the input string + public static String MD5(String text) { + try { + MessageDigest md = MessageDigest.getInstance("MD5"); + byte[] b = text.getBytes("UTF-8"); + md.update(b, 0, b.length); + byte[] md5hash = md.digest(); + return convertToHex(md5hash); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + public LiveArchiver newArchiver( + S rw, String mime, int size, freenet.library.Priority priorityLevel) { + // TODO Auto-generated method stub + return null; + } + + @Override + public LiveArchiver newArchiver( + S rw, String mime, int size, + LiveArchiver archiver) { + // TODO Auto-generated method stub + return null; + } +} diff --git a/uploader/src/freenet/library/uploader/UploaderPaths.java b/uploader/src/freenet/library/uploader/UploaderPaths.java new file mode 100644 index 00000000..76b04486 --- /dev/null +++ b/uploader/src/freenet/library/uploader/UploaderPaths.java @@ -0,0 +1,43 @@ +package freenet.library.uploader; + +public class UploaderPaths { + static final int MAX_HANDLING_COUNT = 5; + // When pushing is broken, allow max handling to reach this level before stalling forever to prevent running out of disk space. + + + /** idxDisk gets merged into idxFreenet this long after the last merge completed. 
*/ + static final long MAX_TIME = 24*60*60*1000L; + + /** idxDisk gets merged into idxFreenet after this many incoming updates from Spider. */ + static final int MAX_UPDATES = 16; + + /** idxDisk gets merged into idxFreenet after it has grown to this many terms. + * Note that the entire main tree of terms (not the sub-trees with the positions and urls in) must + * fit into memory during the merge process. */ + static final int MAX_TERMS = 100*1000; + + /** idxDisk gets merged into idxFreenet after it has grown to this many terms. + * Note that the entire main tree of terms (not the sub-trees with the positions and urls in) must + * fit into memory during the merge process. */ + static final int MAX_TERMS_NOT_UPLOADED = 10*1000; + + /** Maximum size of a single entry, in TermPageEntry count, on disk. If we exceed this we force an + * insert-to-freenet and move on to a new disk index. The problem is that the merge to Freenet has + * to keep the whole of each entry in RAM. This is only true for the data being merged in - the + * on-disk index - and not for the data on Freenet, which is pulled on demand. SCALABILITY */ + static final int MAX_DISK_ENTRY_SIZE = 10000; + + /** Like pushNumber, the number of the current disk dir, used to create idxDiskDir. 
*/ + static final String DISK_DIR_PREFIX = "library-temp-index-"; + + static final String LAST_URL_FILENAME = "library.index.lastpushed.chk"; + static final String PRIV_URI_FILENAME = "library.index.privkey"; + static final String PUB_URI_FILENAME = "library.index.pubkey"; + static final String EDITION_FILENAME = "library.index.next-edition"; + + static final String LAST_DISK_FILENAME = "library.index.lastpushed.disk"; + + static final String BASE_FILENAME_PUSH_DATA = "library.index.data."; + + static final String LIBRARY_CACHE = "library-spider-pushed-data-cache"; +} From 95f21f32aa217051340f023614618c218e9616db Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 4 Jan 2015 01:06:02 +0000 Subject: [PATCH 030/180] Create the uploader jar. --HG-- branch : eclipse-separation --- .hgignore | 1 + build.xml | 15 +++++++++++++++ 2 files changed, 16 insertions(+) diff --git a/.hgignore b/.hgignore index 91bc4210..de8451a4 100644 --- a/.hgignore +++ b/.hgignore @@ -3,4 +3,5 @@ shared/bin uploader/bin lib/snakeyaml-1.5.jar dist/Library.jar +dist/uploader.jar shared/TermEntryTest/test.yml diff --git a/build.xml b/build.xml index 29771340..6e22d0f2 100644 --- a/build.xml +++ b/build.xml @@ -257,6 +257,21 @@ + + + + + + + + + + + + + + From 0e29164e82796f6109d40af67d88076f7a34c0ac Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 4 Jan 2015 02:00:30 +0000 Subject: [PATCH 031/180] Reordered the removelistener to reduce concurrentexceptions. 
--HG-- branch : eclipse-separation --- uploader/src/freenet/library/uploader/Merger.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 6e115e05..e50b956f 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -69,19 +69,19 @@ public static void main(String[] argv) { } final String clientName = "SpiderMerger"; final FcpMessage hello = new ClientHello(clientName); - connection.addFcpListener(new FcpAdapter() { + FcpAdapter helloListener = new FcpAdapter() { public void receivedNodeHello(FcpConnection c, NodeHello nh) { synchronized (hello) { hello.notify(); } - c.removeFcpListener(this); } public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnection, CloseConnectionDuplicateClientName closeConnectionDuplicateClientName) { System.out.println("Another " + clientName + " connected - Aborting."); System.exit(1); } - }); + }; + connection.addFcpListener(helloListener); synchronized (hello) { try { @@ -95,8 +95,11 @@ public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnecti System.err.println("Hello cannot write."); exitStatus = 1; return; + } finally { + connection.removeFcpListener(helloListener); } } + helloListener = null; System.out.println("Connected"); UploaderLibrary.init(connection); From df0e4e0da59b454275aa32fbd8f23ff4b12cb035 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 4 Jan 2015 21:00:56 +0000 Subject: [PATCH 032/180] Added spider compatibility when reading the terms. Proof of concept file reading in Merger. 
--HG-- branch : eclipse-separation --- shared/src/freenet/copied/Base64.java | 249 ++++++++++++++++++ .../copied/IllegalBase64Exception.java | 16 ++ .../library/index/TermEntryReaderWriter.java | 97 ++++++- .../src/freenet/library/uploader/Merger.java | 75 +++++- 4 files changed, 434 insertions(+), 3 deletions(-) create mode 100644 shared/src/freenet/copied/Base64.java create mode 100644 shared/src/freenet/copied/IllegalBase64Exception.java diff --git a/shared/src/freenet/copied/Base64.java b/shared/src/freenet/copied/Base64.java new file mode 100644 index 00000000..1167ff3f --- /dev/null +++ b/shared/src/freenet/copied/Base64.java @@ -0,0 +1,249 @@ +package freenet.copied; + +import java.nio.charset.Charset; + + +/** + * This class provides encoding of byte arrays into Base64-encoded strings, + * and decoding the other way. + * + *

              NOTE! This is modified Base64 with slightly different characters than + * usual, so it won't require escaping when used in URLs. + * + *

              NOTE! This class only does the padding that's normal in Base64 + * if the 'true' flag is given to the encode() method. This is because + * Base64 requires that the length of the encoded text be a multiple + * of four characters, padded with '='. Without the 'true' flag, we don't + * add these '=' characters. + * + * @author Stephen Blackheath + */ +public class Base64 +{ + static final Charset UTF8 = Charset.forName("UTF-8"); + + private static char[] base64Alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789~-".toCharArray(); + + private static char[] base64StandardAlphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".toCharArray(); + + /** + * A reverse lookup table to convert base64 letters back into the + * a 6-bit sequence. + */ + private static byte[] base64Reverse; + private static byte[] base64StandardReverse; + + // Populate the base64Reverse lookup table from the base64Alphabet table. + static { + base64Reverse = new byte[128]; + base64StandardReverse = new byte[base64Reverse.length]; + + // Set all entries to 0xFF, which means that that particular letter + // is not a legal base64 letter. + for (int i = 0; i < base64Reverse.length; i++) { + base64Reverse[i] = (byte) 0xFF; + base64StandardReverse[i] = (byte) 0xFF; + } + for (int i = 0; i < base64Alphabet.length; i++) { + base64Reverse[base64Alphabet[i]] = (byte) i; + base64StandardReverse[base64StandardAlphabet[i]] = (byte) i; + } + } + + /** + * Encode to our shortened (non-standards-compliant) format. + */ + public static String encode(byte[] in) + { + return encode(in, false); + } + + /* FIXME: Figure out where this function is used and maybe remove it if its not + * used. Its old javadoc which has been here for a while fools the user into believing + * that the format is standard compliant */ + + /** + * Caller should specify equalsPad=true if they want a standards compliant padding, + * but not standard compliant encoding. 
+ */ + public static String encode(byte[] in, boolean equalsPad) { + return encode(in, equalsPad, base64Alphabet); + } + + /** + * Convenience method to encode a string, in our shortened format. + * + * Please use this to encode a string, rather than trying to encode the string + * yourself using the 0-arg String.getBytes() which is not deterministic. + */ + public static String encodeUTF8(String in) { + return encodeUTF8(in, false); + } + + /** + * Convenience method to encode a string. + * + * Please use this to encode a string, rather than trying to encode the string + * yourself using the 0-arg String.getBytes() which is not deterministic. + */ + public static String encodeUTF8(String in, boolean equalsPad) { + return encode(in.getBytes(UTF8), equalsPad, base64Alphabet); + } + + /** + * Convenience method to encode a string. + * + * Please use this to encode a string, rather than trying to encode the string + * yourself using the 0-arg String.getBytes() which is not deterministic. + */ + public static String encodeStandardUTF8(String in) { + return encodeStandard(in.getBytes(UTF8)); + } + + /** + * Standard compliant encoding. + */ + public static String encodeStandard(byte[] in) { + return encode(in, true, base64StandardAlphabet); + } + + /** + * Caller should specify equalsPad=true if they want a standards compliant encoding. 
+ */ + private static String encode(byte[] in, boolean equalsPad, char[] alphabet) + { + char[] out = new char[((in.length+2)/3)*4]; + int rem = in.length%3; + int o = 0; + for (int i = 0; i < in.length;) { + int val = (in[i++] & 0xFF) << 16; + if (i < in.length) + val |= (in[i++] & 0xFF) << 8; + if (i < in.length) + val |= (in[i++] & 0xFF); + out[o++] = alphabet[(val>>18) & 0x3F]; + out[o++] = alphabet[(val>>12) & 0x3F]; + out[o++] = alphabet[(val>>6) & 0x3F]; + out[o++] = alphabet[val & 0x3F]; + } + int outLen = out.length; + switch (rem) { + case 1: outLen -= 2; break; + case 2: outLen -= 1; break; + } + // Pad with '=' signs up to a multiple of four if requested. + if (equalsPad) + while (outLen < out.length) + out[outLen++] = '='; + return new String(out, 0, outLen); + } + + /** + * Handles the standards-compliant padding (padded with '=' signs) as well as our + * shortened form. + * @throws IllegalBase64Exception + */ + public static byte[] decode(String inStr) throws IllegalBase64Exception { + return decode(inStr, base64Reverse); + } + + /** + * Convenience method to decode into a string, in our shortened format. + * + * Please use this to decode into a string, rather than trying to decode the + * string yourself using new String(bytes[]) which is not deterministic. + */ + public static String decodeUTF8(String inStr) throws IllegalBase64Exception { + return new String(decode(inStr), UTF8); + } + + /** + * Handles the standards-compliant base64 encoding. + */ + public static byte[] decodeStandard(String inStr) throws IllegalBase64Exception { + return decode(inStr, base64StandardReverse); + } + + /** + * Handles the standards-compliant (padded with '=' signs) as well as our + * shortened form. + */ + private static byte[] decode(String inStr, byte[] reverseAlphabet) + throws IllegalBase64Exception + { + try { + char[] in = inStr.toCharArray(); + int inLength = in.length; + + // Strip trailing equals signs. 
+ while ((inLength > 0) && (in[inLength-1] == '=')) + inLength--; + + int blocks = inLength/4; + int remainder = inLength & 3; + // wholeInLen and wholeOutLen are the the length of the input and output + // sequences respectively, not including any partial block at the end. + int wholeInLen = blocks*4; + int wholeOutLen = blocks*3; + int outLen = wholeOutLen; + switch (remainder) { + case 1: throw new IllegalBase64Exception("illegal Base64 length"); + case 2: outLen = wholeOutLen+1; break; + case 3: outLen = wholeOutLen+2; break; + default: outLen = wholeOutLen; + } + byte[] out = new byte[outLen]; + int o = 0; + int i; + for (i = 0; i < wholeInLen;) { + int in1 = reverseAlphabet[in[i]]; + int in2 = reverseAlphabet[in[i+1]]; + int in3 = reverseAlphabet[in[i+2]]; + int in4 = reverseAlphabet[in[i+3]]; + int orValue = in1|in2|in3|in4; + if ((orValue & 0x80) != 0) + throw new IllegalBase64Exception("illegal Base64 character"); + int outVal = (in1 << 18) | (in2 << 12) | (in3 << 6) | in4; + out[o] = (byte) (outVal>>16); + out[o+1] = (byte) (outVal>>8); + out[o+2] = (byte) outVal; + i += 4; + o += 3; + } + int orValue; + switch (remainder) { + case 2: + { + int in1 = reverseAlphabet[in[i]]; + int in2 = reverseAlphabet[in[i+1]]; + orValue = in1|in2; + int outVal = (in1 << 18) | (in2 << 12); + out[o] = (byte) (outVal>>16); + } + break; + case 3: + { + int in1 = reverseAlphabet[in[i]]; + int in2 = reverseAlphabet[in[i+1]]; + int in3 = reverseAlphabet[in[i+2]]; + orValue = in1|in2|in3; + int outVal = (in1 << 18) | (in2 << 12) | (in3 << 6); + out[o] = (byte) (outVal>>16); + out[o+1] = (byte) (outVal>>8); + } + break; + default: + // Keep compiler happy + orValue = 0; + } + if ((orValue & 0x80) != 0) + throw new IllegalBase64Exception("illegal Base64 character"); + return out; + } + // Illegal characters can cause an ArrayIndexOutOfBoundsException when + // looking up reverseAlphabet. 
+ catch (ArrayIndexOutOfBoundsException e) { + throw new IllegalBase64Exception("illegal Base64 character"); + } + } +} diff --git a/shared/src/freenet/copied/IllegalBase64Exception.java b/shared/src/freenet/copied/IllegalBase64Exception.java new file mode 100644 index 00000000..e5706cc4 --- /dev/null +++ b/shared/src/freenet/copied/IllegalBase64Exception.java @@ -0,0 +1,16 @@ +package freenet.copied; + +/** + * This exception is thrown if a Base64-encoded string is of an illegal length + * or contains an illegal character. + */ +public class IllegalBase64Exception + extends Exception +{ + + private static final long serialVersionUID = -1; + public IllegalBase64Exception(String descr) + { + super(descr); + } +} diff --git a/shared/src/freenet/library/index/TermEntryReaderWriter.java b/shared/src/freenet/library/index/TermEntryReaderWriter.java index ba0c5c5b..8152501d 100644 --- a/shared/src/freenet/library/index/TermEntryReaderWriter.java +++ b/shared/src/freenet/library/index/TermEntryReaderWriter.java @@ -7,19 +7,27 @@ import freenet.library.io.DataFormatException; import freenet.library.io.ObjectStreamReader; import freenet.library.io.ObjectStreamWriter; +import freenet.copied.Base64; import java.util.Map; import java.util.HashMap; +import java.io.ByteArrayInputStream; import java.io.InputStream; import java.io.OutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; +import java.net.URLEncoder; /** ** Reads and writes {@link TermEntry}s in binary form, for performance. ** +** This needs to be able to read FreenetURI:s as generated from Spider but +** everything written uses the simpler String-version of the FreenetURI. +** To distringuish between them, when using the String-version, it is always +** preceeded with a short that is 0. 
+** ** @author infinity0 */ public class TermEntryReaderWriter implements ObjectStreamReader, ObjectStreamWriter { @@ -36,6 +44,89 @@ public static TermEntryReaderWriter getInstance() { return readObject(new DataInputStream(is)); } + static final byte CHK = 1; + static final byte SSK = 2; + static final byte KSK = 3; + static final byte USK = 4; + static final short ClientCHK_EXTRA_LENGTH = 5; + static final short ClientSSK_EXTRA_LENGTH = 5; + + /** + * This is to be able to read the data created by Spider. + */ + private String readFreenetURI(DataInputStream dis1) throws IOException { + int len = dis1.readShort(); + if (len == 0) { + // This is the new format. + return dis1.readUTF(); + } + byte[] buf = new byte[len]; + dis1.readFully(buf); + ByteArrayInputStream bais = new ByteArrayInputStream(buf); + DataInputStream dis = new DataInputStream(bais); + byte type = dis.readByte(); + String keyType; + if(type == CHK) + keyType = "CHK"; + else if(type == SSK) + keyType = "SSK"; + else if(type == KSK) + keyType = "KSK"; + else + throw new IOException("Unrecognized FreenetURI type " + type); + byte[] routingKey = null; + byte[] cryptoKey = null; + byte[] extra = null; + if((type == CHK) || (type == SSK)) { + // routingKey is a hash, so is exactly 32 bytes + routingKey = new byte[32]; + dis.readFully(routingKey); + // cryptoKey is a 256 bit AES key, so likewise + cryptoKey = new byte[32]; + dis.readFully(cryptoKey); + // Number of bytes of extra depends on key type + int extraLen; + extraLen = (type == CHK ? 
ClientCHK_EXTRA_LENGTH : ClientSSK_EXTRA_LENGTH); + extra = new byte[extraLen]; + dis.readFully(extra); + } + + String docName = null; + if(type != CHK) + docName = dis.readUTF(); + int count = dis.readInt(); + String[] metaStrings = new String[count]; + for(int i = 0; i < metaStrings.length; i++) + metaStrings[i] = dis.readUTF(); + + StringBuilder b = new StringBuilder(); + + b.append(keyType).append('@'); + if(!"KSK".equals(keyType)) { + if(routingKey != null) + b.append(Base64.encode(routingKey)); + if(cryptoKey != null) + b.append(',').append(Base64.encode(cryptoKey)); + if(extra != null) + b.append(',').append(Base64.encode(extra)); + if(docName != null) + b.append('/'); + } + + if(docName != null) + b.append(URLEncoder.encode(docName, "UTF-8")); + + //if(keyType.equals("USK")) { + // b.append('/'); + // b.append(suggestedEdition); + // } + + for(int i = 0; i < metaStrings.length; i++) { + b.append('/').append(URLEncoder.encode(metaStrings[i], "UTF-8")); + } + return b.toString(); + } + public TermEntry readObject(DataInputStream dis) throws IOException { long svuid = dis.readLong(); if (svuid != TermEntry.serialVersionUID) { @@ -52,9 +143,9 @@ public TermEntry readObject(DataInputStream dis) throws IOException { case TERM: return new TermTermEntry(subj, rel, dis.readUTF()); case INDEX: - return new TermIndexEntry(subj, rel, dis.readUTF()); + return new TermIndexEntry(subj, rel, readFreenetURI(dis)); case PAGE: - String page = dis.readUTF(); + String page = readFreenetURI(dis); int size = dis.readInt(); String title = null; if (size < 0) { @@ -88,10 +179,12 @@ public void writeObject(TermEntry en, DataOutputStream dos) throws IOException { dos.writeUTF(((TermTermEntry)en).term); return; case INDEX: + dos.writeShort(0); dos.writeUTF(((TermIndexEntry)en).index); return; case PAGE: TermPageEntry enn = (TermPageEntry)en; + dos.writeShort(0); dos.writeUTF(enn.page); int size = enn.hasPositions() ? 
enn.positionsSize() : 0; if(enn.title == null) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index e50b956f..e712fe6e 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -6,8 +6,16 @@ import java.io.File; import java.io.FilenameFilter; +import java.io.EOFException; import java.io.IOException; +import java.io.DataInputStream; +import java.io.FileInputStream; +import java.io.InputStream; import java.net.UnknownHostException; +import java.util.HashMap; +import java.util.Map; +import java.util.SortedSet; +import java.util.TreeSet; import net.pterodactylus.fcp.ClientHello; import net.pterodactylus.fcp.CloseConnectionDuplicateClientName; @@ -16,6 +24,10 @@ import net.pterodactylus.fcp.FcpMessage; import net.pterodactylus.fcp.NodeHello; +import freenet.library.index.ProtoIndex; +import freenet.library.index.TermEntry; +import freenet.library.index.TermEntryReaderWriter; + /** * Standalone program to do the merging. * @@ -42,6 +54,52 @@ *

                Done. */ final public class Merger { + /** Read the TermEntry's from the Bucket into newtrees and terms, and set up the index + * properties. + * @param data The Bucket containing TermPageEntry's etc serialised with TermEntryReaderWriter. + */ + private static Map> readTermsFrom(File f) { + Map> newtrees = new HashMap>(); + DataInputStream is = null; + try { + is = new DataInputStream(new FileInputStream(f)); + String line; + int laps = 0; + do { + line = is.readLine(); + System.out.println("Line: " + line); + if (laps > 100) { + System.err.println("Cannot get out of file header."); + System.exit(1); + } + } while (!"End".equals(line)); + try{ + while(true){ // Keep going til an EOFExcepiton is thrown + TermEntry readObject = TermEntryReaderWriter.getInstance().readObject(is); + SortedSet set = newtrees.get(readObject.subj); + if(set == null) + newtrees.put(readObject.subj, set = new TreeSet()); + set.add(readObject); + } + }catch(EOFException e){ + // EOF, do nothing + } + } catch (IOException ex) { + ex.printStackTrace(); + System.exit(1); + } finally { + try { + is.close(); + } catch (IOException e) { + System.err.println("Cannot close"); + System.exit(1); + } + } + return newtrees; + } + + + public static void main(String[] argv) { int exitStatus = 0; @@ -117,7 +175,8 @@ public boolean accept(File arg0, String arg1) { System.out.println("There are " + dirsToMerge.length + " old directories to merge."); if (dirsToMerge.length > 0) { - new DirectoryUploader(connection, new File(directory, dirsToMerge[0])).run(); + new DirectoryUploader(connection, + new File(directory, dirsToMerge[0])).run(); return; } @@ -136,6 +195,20 @@ public boolean accept(File arg0, String arg1) { System.out.println("There are " + filesToMerge.length + " files to merge."); for (String s : filesToMerge) { System.out.println("File: " + s); + Map> terms = readTermsFrom(new File(s)); + System.out.println("terms:"); + SortedSet ss = null; + for (String t : terms.keySet()) { + ss 
= terms.get(t); + System.out.println("\t" + t + ", " + + ss.size() + " elements"); + } + if (ss != null) { + System.out.println("\t\tLast entry:"); + for (TermEntry tt : ss) { + System.out.println("\t\t" + tt); + } + } } } finally { From 3c485c8834ce0ae41b50e226510062a2c728c8b4 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 4 Jan 2015 22:54:08 +0000 Subject: [PATCH 033/180] Improved the proof of concept. --HG-- branch : eclipse-separation --- uploader/src/freenet/library/uploader/Merger.java | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index e712fe6e..34b87986 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -26,6 +26,7 @@ import freenet.library.index.ProtoIndex; import freenet.library.index.TermEntry; +import freenet.library.index.TermPageEntry; import freenet.library.index.TermEntryReaderWriter; /** @@ -202,11 +203,16 @@ public boolean accept(File arg0, String arg1) { ss = terms.get(t); System.out.println("\t" + t + ", " + ss.size() + " elements"); - } - if (ss != null) { - System.out.println("\t\tLast entry:"); for (TermEntry tt : ss) { - System.out.println("\t\t" + tt); + if (tt.entryType() == TermEntry.EntryType.PAGE) { + TermPageEntry tpe = (TermPageEntry) tt; + System.out.println("\t" + tpe.page + ":"); + for (Map.Entry entry : + tpe.posFragments.entrySet()) { + System.out.println("\t\t" + entry.getKey() + + " - " + entry.getValue()); + } + } } } } From f861df0b1abd065377b1ceb58321a2bc3324453b Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 25 Jan 2015 23:01:57 +0000 Subject: [PATCH 034/180] Build more packages. 
--HG-- branch : eclipse-separation --- build.xml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/build.xml b/build.xml index 6e22d0f2..eb18e43e 100644 --- a/build.xml +++ b/build.xml @@ -238,7 +238,7 @@ + description="create jar packages"> @@ -255,11 +255,7 @@ - - - From 405c482447e0e7eb09229cb9c21d0cd30ae08a97 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 14 Feb 2015 16:36:32 +0000 Subject: [PATCH 035/180] Some more work, slowly approaching uploading. --HG-- branch : eclipse-separation --- .../src/freenet/library/ArchiverFactory.java | 13 +- .../library/uploader/DirectoryUploader.java | 150 +++++++++--------- .../src/freenet/library/uploader/Merger.java | 4 +- .../library/uploader/UploaderLibrary.java | 147 ++++++++++------- .../library/uploader/archiver/FcpReader.java | 99 ++++++++++++ .../library/uploader/archiver/FcpWriter.java | 72 +++++++++ 6 files changed, 342 insertions(+), 143 deletions(-) create mode 100644 uploader/src/freenet/library/uploader/archiver/FcpReader.java create mode 100644 uploader/src/freenet/library/uploader/archiver/FcpWriter.java diff --git a/shared/src/freenet/library/ArchiverFactory.java b/shared/src/freenet/library/ArchiverFactory.java index 952d7fdc..b9ae839c 100644 --- a/shared/src/freenet/library/ArchiverFactory.java +++ b/shared/src/freenet/library/ArchiverFactory.java @@ -6,8 +6,13 @@ import freenet.library.util.exec.SimpleProgress; public interface ArchiverFactory { - LiveArchiver - newArchiver(S rw, String mime, int size, Priority priorityLevel); - LiveArchiver - newArchiver(S rw, String mime, int size, LiveArchiver archiver); + + LiveArchiver + newArchiver(S rw, String mime, int size, + Priority priorityLevel); + + + LiveArchiver + newArchiver(S rw, String mime, int size, + LiveArchiver archiver); } diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index 13a9fbf9..1ca7c153 100644 --- 
a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -1,25 +1,17 @@ package freenet.library.uploader; import java.io.BufferedReader; -import java.io.EOFException; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; -import java.io.FileWriter; -import java.io.FilenameFilter; import java.io.IOException; -import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; -import java.net.MalformedURLException; -import java.util.ArrayList; -import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.SortedSet; import java.util.TreeSet; -import java.util.logging.Level; import freenet.library.Priority; import freenet.library.index.ProtoIndex; @@ -39,7 +31,6 @@ import net.pterodactylus.fcp.ClientPut; import net.pterodactylus.fcp.FcpAdapter; import net.pterodactylus.fcp.FcpConnection; -import net.pterodactylus.fcp.FcpMessage; import net.pterodactylus.fcp.PutFailed; import net.pterodactylus.fcp.PutSuccessful; @@ -54,57 +45,60 @@ class DirectoryUploader implements Runnable { } public void run() { - // doit. + mergeToFreenet(directory); } private String lastUploadURI; - private Object freenetMergeSync = new Object(); - private boolean freenetMergeRunning = false; - private boolean diskMergeRunning = false; static final int MAX_HANDLING_COUNT = 5; - // When pushing is broken, allow max handling to reach this level before stalling forever to prevent running out of disk space. - private int PUSH_BROKEN_MAX_HANDLING_COUNT = 10; - // Don't use too much disk space, take into account fact that Spider slows down over time. + // When pushing is broken, allow max handling to reach this level + // before stalling forever to prevent running out of disk space. - private boolean pushBroken; - - /** The temporary on-disk index. 
We merge stuff into this until it exceeds a threshold size, then - * we create a new diskIdx and merge the old one into the idxFreenet. */ + /** The temporary on-disk index. We merge stuff into this until it + * exceeds a threshold size, then we create a new diskIdx and + * merge the old one into the idxFreenet. */ ProtoIndex idxDisk; - /** idxDisk gets merged into idxFreenet this long after the last merge completed. */ + /** idxDisk gets merged into idxFreenet this long after the last + * merge completed. */ static final long MAX_TIME = 24*60*60*1000L; - /** idxDisk gets merged into idxFreenet after this many incoming updates from Spider. */ + /** idxDisk gets merged into idxFreenet after this many incoming + * updates from Spider. */ static final int MAX_UPDATES = 16; - /** idxDisk gets merged into idxFreenet after it has grown to this many terms. - * Note that the entire main tree of terms (not the sub-trees with the positions and urls in) must - * fit into memory during the merge process. */ + /** idxDisk gets merged into idxFreenet after it has grown to this + * many terms. Note that the entire main tree of terms (not the + * sub-trees with the positions and urls in) must fit into memory + * during the merge process. */ static final int MAX_TERMS = 100*1000; - /** idxDisk gets merged into idxFreenet after it has grown to this many terms. - * Note that the entire main tree of terms (not the sub-trees with the positions and urls in) must - * fit into memory during the merge process. */ + /** idxDisk gets merged into idxFreenet after it has grown to this + * many terms. Note that the entire main tree of terms (not the + * sub-trees with the positions and urls in) must fit into memory + * during the merge process. */ static final int MAX_TERMS_NOT_UPLOADED = 10*1000; - /** Maximum size of a single entry, in TermPageEntry count, on disk. If we exceed this we force an - * insert-to-freenet and move on to a new disk index. 
The problem is that the merge to Freenet has - * to keep the whole of each entry in RAM. This is only true for the data being merged in - the - * on-disk index - and not for the data on Freenet, which is pulled on demand. SCALABILITY */ + /** Maximum size of a single entry, in TermPageEntry count, on + * disk. If we exceed this we force an insert-to-freenet and move + * on to a new disk index. The problem is that the merge to + * Freenet has to keep the whole of each entry in RAM. This is + * only true for the data being merged in - the on-disk index - + * and not for the data on Freenet, which is pulled on + * demand. SCALABILITY */ static final int MAX_DISK_ENTRY_SIZE = 10000; - /** Like pushNumber, the number of the current disk dir, used to create idxDiskDir. */ + /** Like pushNumber, the number of the current disk dir, used to + * create idxDiskDir. */ private int dirNumber; static final String DISK_DIR_PREFIX = "library-temp-index-"; /** Directory the current idxDisk is saved in. */ File idxDiskDir; - private int mergedToDisk; ProtoIndexSerialiser srl = null; String lastDiskIndexName; - /** The uploaded index on Freenet. This never changes, it just gets updated. */ + /** The uploaded index on Freenet. This never changes, it just + * gets updated. */ ProtoIndex idxFreenet; // private final SpiderIndexURIs spiderIndexURIs; @@ -120,8 +114,10 @@ public void run() { static final String BASE_FILENAME_PUSH_DATA = "library.index.data."; - // This is a member variable because it is huge, and having huge stuff in local variables seems to upset the default garbage collector. - // It doesn't need to be synchronized because it's always used from mergeToDisk, which never runs in parallel. + // This is a member variable because it is huge, and having huge + // stuff in local variables seems to upset the default garbage + // collector. It doesn't need to be synchronized because it's + // always used from mergeToDisk, which never runs in parallel. 
private Map> newtrees; // Ditto private SortedSet terms; @@ -129,8 +125,6 @@ public void run() { ProtoIndexSerialiser srlDisk = null; private ProtoIndexComponentSerialiser leafsrlDisk; - private long lastMergedToFreenet = -1; - private boolean writeStringTo(File filename, String uri) { FileOutputStream fos = null; @@ -182,8 +176,8 @@ private String readStringFrom(File file) { /** Create a new on-disk index from terms and newtrees. - * @return True if the size of any one item in the index is so large that we must upload - * immediately to Freenet. + * @return True if the size of any one item in the index is so + * large that we must upload immediately to Freenet. * @throws TaskAbortException If something broke catastrophically. */ private boolean createDiskIndex() throws TaskAbortException { boolean tooBig = false; @@ -215,7 +209,6 @@ private boolean createDiskDir() { if(!(idxDiskDir.mkdir() || idxDiskDir.isDirectory())) { System.err.println("Unable to create new disk dir: "+idxDiskDir); synchronized(this) { - pushBroken = true; return false; } } @@ -232,7 +225,8 @@ private boolean makeDiskDirSerialiser() { leafsrlDisk = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_FILE_LOCAL, archiver); if(lastDiskIndexName == null) { idxDisk = new ProtoIndex("CHK@", "test", null, null, 0L); - // FIXME more hacks: It's essential that we use the same FileArchiver instance here. + // FIXME more hacks: It's essential that we use the + // same FileArchiver instance here. leafsrlDisk.setSerialiserFor(idxDisk); } else { try { @@ -246,9 +240,6 @@ private boolean makeDiskDirSerialiser() { } catch (TaskAbortException e) { System.err.println("Failed to download previous index for spider update: "+e); e.printStackTrace(); - synchronized(freenetMergeSync) { - pushBroken = true; - } return false; } } @@ -260,8 +251,9 @@ private boolean makeDiskDirSerialiser() { private ProtoIndexComponentSerialiser leafsrl; - /** Merge a disk dir to an on-Freenet index. 
Usually called on startup, i.e. we haven't just - * created the on-disk index so we need to setup the ProtoIndex etc. */ + /** Merge a disk dir to an on-Freenet index. Usually called on + * startup, i.e. we haven't just created the on-disk index so we + * need to setup the ProtoIndex etc. */ protected void mergeToFreenet(File diskDir) { ProtoIndexSerialiser s = ProtoIndexSerialiser.forIndex(diskDir); LiveArchiver,SimpleProgress> archiver = @@ -293,16 +285,13 @@ protected void mergeToFreenet(File diskDir) { } catch (TaskAbortException e) { System.err.println("Failed to download previous index for spider update: "+e); e.printStackTrace(); - synchronized(freenetMergeSync) { - pushBroken = true; - } return; } mergeToFreenet(idxDisk, diskDir); } - /** Delete everything in a directory. Only use this when we are *very sure* there is no - * important data below it! */ + /** Delete everything in a directory. Only use this when we are + * *very sure* there is no important data below it! */ private static boolean removeAll(File wd) { if(!wd.isDirectory()) { System.err.println("DELETING FILE "+wd); @@ -372,7 +361,8 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { task4.meta = "Unknown"; srl.push(task4); - // Now wait for the inserts to finish. They are started asynchronously in the above merge. + // Now wait for the inserts to finish. They are started + // asynchronously in the above merge. 
LiveArchiver, SimpleProgress> arch = srl.getChildSerialiser(); arch.waitForAsyncInserts(); @@ -394,9 +384,6 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { } catch (TaskAbortException e) { System.err.println("Failed to upload index for spider: "+e); e.printStackTrace(); - synchronized(freenetMergeSync) { - pushBroken = true; - } } } @@ -434,10 +421,12 @@ public void receivedPutSuccessful(FcpConnection fcpConnection, PutSuccessful res } - /** Create a Closure which will merge the subtrees from one index (on disk) into the subtrees - * of another index (on Freenet). It will be called with each subtree from the on-Freenet - * index, and will merge data from the relevant on-disk subtree. Both subtrees are initially - * deflated, and should be deflated when we leave the method, to avoid running out of memory. + /** Create a Closure which will merge the subtrees from one index + * (on disk) into the subtrees of another index (on Freenet). It + * will be called with each subtree from the on-Freenet index, and + * will merge data from the relevant on-disk subtree. Both + * subtrees are initially deflated, and should be deflated when we + * leave the method, to avoid running out of memory. * @param newtrees The on-disk tree of trees to get data from. * @return */ @@ -459,10 +448,14 @@ private Closure>, TaskAbortException> newtrees.inflate(key, true); SkeletonBTreeSet entries; entries = newtrees.get(key); - // CONCURRENCY: Because the lower-level trees are packed by the top tree, the bottom - // trees (SkeletonBTreeSet's) are not independant of each other. When the newtrees - // inflate above runs, it can deflate a tree that is still in use by another instance - // of this callback. Therefore we must COPY IT AND DEFLATE IT INSIDE THE LOCK. + // CONCURRENCY: Because the lower-level trees are + // packed by the top tree, the bottom trees + // (SkeletonBTreeSet's) are not independant of + // each other. 
When the newtrees inflate above + // runs, it can deflate a tree that is still in + // use by another instance of this + // callback. Therefore we must COPY IT AND DEFLATE + // IT INSIDE THE LOCK. entries.inflate(); data = new TreeSet(entries); entries.deflate(); @@ -475,8 +468,10 @@ private Closure>, TaskAbortException> } else { int oldSize = tree.size(); tree.update(data, null); - // Note that it is possible for data.size() + oldSize != tree.size(), because we might be merging data we've already merged. - // But most of the time it will add up. + // Note that it is possible for data.size() + + // oldSize != tree.size(), because we might be + // merging data we've already merged. But + // most of the time it will add up. } tree.deflate(); assert(tree.isBare()); @@ -485,17 +480,19 @@ private Closure>, TaskAbortException> }; } - /** Update the overall metadata for the on-Freenet index from the on-disk index. */ + /** Update the overall metadata for the on-Freenet index from the + * on-disk index. */ private void updateOverallMetadata(ProtoIndex diskToMerge) { idxFreenet.setName(diskToMerge.getName()); idxFreenet.setOwnerEmail(diskToMerge.getOwnerEmail()); idxFreenet.setOwner(diskToMerge.getOwner()); - // This is roughly accurate, it might not be exactly so if we process a bit out of order. + // This is roughly accurate, it might not be exactly so if we + // process a bit out of order. idxFreenet.setTotalPages(diskToMerge.getTotalPages() + Math.max(0,idxFreenet.getTotalPages())); } - /** Setup the serialisers for uploading to Freenet. These convert tree nodes to and from blocks - * on Freenet, essentially. */ + /** Setup the serialisers for uploading to Freenet. These convert + * tree nodes to and from blocks on Freenet, essentially. 
*/ private void makeFreenetSerialisers() { if(srl == null) { srl = ProtoIndexSerialiser.forIndex(lastUploadURI, Priority.Bulk); @@ -504,7 +501,8 @@ private void makeFreenetSerialisers() { leafsrl = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_DEFAULT, archiver); if(lastUploadURI == null) { idxFreenet = new ProtoIndex("CHK@", "test", null, null, 0L); - // FIXME more hacks: It's essential that we use the same FreenetArchiver instance here. + // FIXME more hacks: It's essential that we use the + // same FreenetArchiver instance here. leafsrl.setSerialiserFor(idxFreenet); } else { try { @@ -518,17 +516,15 @@ private void makeFreenetSerialisers() { } catch (TaskAbortException e) { System.err.println("Failed to download previous index for spider update: "+e); e.printStackTrace(); - synchronized(freenetMergeSync) { - pushBroken = true; - } return; } } } } - /** Set up the on-disk cache, which keeps a copy of everything we upload to Freenet, so we - * won't need to re-download it, which can be very slow and doesn't always succeed. */ + /** Set up the on-disk cache, which keeps a copy of everything we + * upload to Freenet, so we won't need to re-download it, which + * can be very slow and doesn't always succeed. 
*/ private void setupFreenetCacheDir() { File dir = new File(UploaderPaths.LIBRARY_CACHE); dir.mkdir(); diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 34b87986..6cbe1587 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -24,6 +24,7 @@ import net.pterodactylus.fcp.FcpMessage; import net.pterodactylus.fcp.NodeHello; +import freenet.library.FactoryRegister; import freenet.library.index.ProtoIndex; import freenet.library.index.TermEntry; import freenet.library.index.TermPageEntry; @@ -162,6 +163,7 @@ public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnecti System.out.println("Connected"); UploaderLibrary.init(connection); + FactoryRegister.register(UploaderLibrary.getInstance()); final String[] dirsToMerge; File directory = new File("."); @@ -221,7 +223,7 @@ public boolean accept(File arg0, String arg1) { if (connection != null) { connection.close(); } - System.exit(exitStatus); } + System.exit(exitStatus); } } diff --git a/uploader/src/freenet/library/uploader/UploaderLibrary.java b/uploader/src/freenet/library/uploader/UploaderLibrary.java index c8fcc8c0..ec1ad96e 100644 --- a/uploader/src/freenet/library/uploader/UploaderLibrary.java +++ b/uploader/src/freenet/library/uploader/UploaderLibrary.java @@ -7,6 +7,7 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.ObjectInputStream; +import java.lang.UnsupportedOperationException; import java.net.MalformedURLException; import java.security.MessageDigest; import java.util.ArrayList; @@ -21,6 +22,8 @@ import freenet.library.io.ObjectStreamReader; import freenet.library.io.ObjectStreamWriter; import freenet.library.io.serial.LiveArchiver; +import freenet.library.uploader.archiver.FcpReader; +import freenet.library.uploader.archiver.FcpWriter; import freenet.library.util.exec.SimpleProgress; import 
freenet.library.util.exec.TaskAbortException; @@ -35,73 +38,95 @@ */ final public class UploaderLibrary implements ArchiverFactory { - public static final String BOOKMARK_PREFIX = "bookmark:"; - public static final String DEFAULT_INDEX_SITE = BOOKMARK_PREFIX + "liberty-of-information" + " " + BOOKMARK_PREFIX + "free-market-free-people" + " " + - BOOKMARK_PREFIX + "gotcha" + " " + BOOKMARK_PREFIX + "wanna" + " " + BOOKMARK_PREFIX + "wanna.old" + " " + BOOKMARK_PREFIX + "gogo"; - private static int version = 36; - public static final String plugName = "Library " + getVersion(); + public static final String BOOKMARK_PREFIX = "bookmark:"; + public static final String DEFAULT_INDEX_SITE = BOOKMARK_PREFIX + "liberty-of-information" + " " + BOOKMARK_PREFIX + "free-market-free-people" + " " + + BOOKMARK_PREFIX + "gotcha" + " " + BOOKMARK_PREFIX + "wanna" + " " + BOOKMARK_PREFIX + "wanna.old" + " " + BOOKMARK_PREFIX + "gogo"; + private static int version = 36; + public static final String plugName = "Library " + getVersion(); - public static String getPlugName() { - return plugName; - } + public static String getPlugName() { + return plugName; + } - public static long getVersion() { - return version; - } + public static long getVersion() { + return version; + } - /** - ** Library singleton. - */ - private static UploaderLibrary lib; - - public static FcpConnection fcpConnection; - - public synchronized static void init(FcpConnection connection) { - fcpConnection = connection; - } + /** + ** Library singleton. 
+ */ + private static UploaderLibrary lib; + public static UploaderLibrary getInstance() { + if (lib == null) { + lib = new UploaderLibrary(); + } + return lib; + } + + public static FcpConnection fcpConnection; + + public synchronized static void init(FcpConnection connection) { + fcpConnection = connection; + } - public static String convertToHex(byte[] data) { - StringBuilder buf = new StringBuilder(); - for (int i = 0; i < data.length; i++) { - int halfbyte = (data[i] >>> 4) & 0x0F; - int two_halfs = 0; - do { - if ((0 <= halfbyte) && (halfbyte <= 9)) - buf.append((char) ('0' + halfbyte)); - else - buf.append((char) ('a' + (halfbyte - 10))); - halfbyte = data[i] & 0x0F; - } while (two_halfs++ < 1); - } - return buf.toString(); - } + public static String convertToHex(byte[] data) { + StringBuilder buf = new StringBuilder(); + for (int i = 0; i < data.length; i++) { + int halfbyte = (data[i] >>> 4) & 0x0F; + int two_halfs = 0; + do { + if ((0 <= halfbyte) && (halfbyte <= 9)) + buf.append((char) ('0' + halfbyte)); + else + buf.append((char) ('a' + (halfbyte - 10))); + halfbyte = data[i] & 0x0F; + } while (two_halfs++ < 1); + } + return buf.toString(); + } - //this function will return the String representation of the MD5 hash for the input string - public static String MD5(String text) { - try { - MessageDigest md = MessageDigest.getInstance("MD5"); - byte[] b = text.getBytes("UTF-8"); - md.update(b, 0, b.length); - byte[] md5hash = md.digest(); - return convertToHex(md5hash); - } catch (Exception e) { - throw new RuntimeException(e); - } - } + //this function will return the String representation of the MD5 hash for the input string + public static String MD5(String text) { + try { + MessageDigest md = MessageDigest.getInstance("MD5"); + byte[] b = text.getBytes("UTF-8"); + md.update(b, 0, b.length); + byte[] md5hash = md.digest(); + return convertToHex(md5hash); + } catch (Exception e) { + throw new RuntimeException(e); + } + } - @Override - public LiveArchiver 
newArchiver( - S rw, String mime, int size, freenet.library.Priority priorityLevel) { - // TODO Auto-generated method stub - return null; - } + @Override + public + LiveArchiver + newArchiver(S rw, String mime, int size, + freenet.library.Priority priorityLevel) { + if (rw instanceof ObjectStreamWriter) { + return new FcpReader(new File(UploaderPaths.LIBRARY_CACHE), + rw, mime, size, priorityLevel); + } else if (rw instanceof ObjectStreamReader) { + return new FcpWriter(fcpConnection, rw, mime, size, priorityLevel); + } else { + // This is a Shouldn't happen. + throw new IllegalArgumentException("Unknown reader/writer: " + rw); + } + } - @Override - public LiveArchiver newArchiver( - S rw, String mime, int size, - LiveArchiver archiver) { - // TODO Auto-generated method stub - return null; - } + @Override + public + LiveArchiver + newArchiver(S rw, String mime, int size, + LiveArchiver archiver) { + freenet.library.Priority priorityLevel = freenet.library.Priority.Bulk; + /* + if (archiver != null && + archiver isinstance ??) { + priorityLevel = ((??) 
archiver).getPriorityLevel(); + } + */ + return newArchiver(rw, mime, size, priorityLevel); + } } diff --git a/uploader/src/freenet/library/uploader/archiver/FcpReader.java b/uploader/src/freenet/library/uploader/archiver/FcpReader.java new file mode 100644 index 00000000..8f7645f8 --- /dev/null +++ b/uploader/src/freenet/library/uploader/archiver/FcpReader.java @@ -0,0 +1,99 @@ +package freenet.library.uploader.archiver; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; + +import net.pterodactylus.fcp.FcpConnection; +import freenet.library.Priority; +import freenet.library.io.ObjectStreamReader; +import freenet.library.io.serial.LiveArchiver; +import freenet.library.util.exec.SimpleProgress; +import freenet.library.util.exec.TaskAbortException; + +public class FcpReader implements + LiveArchiver { + private File cacheDir; + private ObjectStreamReader reader; + private String mimeType; + private int size; + private Priority priorityLevel; + + public FcpReader(File directory, ObjectStreamReader r, + String mime, int s, + Priority pl) { + cacheDir = directory; + reader = r; + mimeType = mime; + size = s; + priorityLevel = pl; + } + + @Override + public void pull(freenet.library.io.serial.Serialiser.PullTask task) + throws TaskAbortException { + pullLive(task, null); + } + + @Override + public void push(freenet.library.io.serial.Serialiser.PushTask task) + throws TaskAbortException { + throw new UnsupportedOperationException(); + } + + /** + * Initial implementation, fetch everything from the cache. This means + * that we cannot take over someone else's index. 
+ */ + @Override + public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, + SimpleProgress progress) throws TaskAbortException { + if (cacheDir.exists()) { + String cacheKey = null; + if (task.meta instanceof String) { + cacheKey = (String) task.meta; + } else if (task.meta instanceof byte[]) { + throw new UnsupportedOperationException( + "Not implemented yet."); + } + + try { + if(cacheDir != null && cacheDir.exists() && cacheDir.canRead()) { + File cached = new File(cacheDir, cacheKey); + if(cached.exists() && + cached.length() != 0 && + cached.canRead()) { + InputStream is = new FileInputStream(cached); + task.data = (T) reader.readObject(is); + is.close(); + } + } + + if (progress != null) { + progress.addPartDone(); + } + } catch (IOException e) { + System.out.println("IOException:"); + e.printStackTrace(); + throw new TaskAbortException("Failed to read content from local tempbucket", e, true); + } + return; + } + throw new UnsupportedOperationException( + "Cannot find the key " + + task.meta + + " in the cache."); + } + + @Override + public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, + SimpleProgress p) throws TaskAbortException { + throw new UnsupportedOperationException(); + } + + @Override + public void waitForAsyncInserts() throws TaskAbortException { + throw new UnsupportedOperationException(); + } +} diff --git a/uploader/src/freenet/library/uploader/archiver/FcpWriter.java b/uploader/src/freenet/library/uploader/archiver/FcpWriter.java new file mode 100644 index 00000000..7b1b876f --- /dev/null +++ b/uploader/src/freenet/library/uploader/archiver/FcpWriter.java @@ -0,0 +1,72 @@ +package freenet.library.uploader.archiver; + +import java.io.IOException; + +import net.pterodactylus.fcp.ClientGet; +import net.pterodactylus.fcp.ClientHello; +import net.pterodactylus.fcp.ClientPut; +import net.pterodactylus.fcp.CloseConnectionDuplicateClientName; +import net.pterodactylus.fcp.FcpAdapter; +import 
net.pterodactylus.fcp.FcpConnection; +import net.pterodactylus.fcp.FcpMessage; +import net.pterodactylus.fcp.NodeHello; +import freenet.library.Priority; +import freenet.library.io.ObjectStreamWriter; +import freenet.library.io.serial.LiveArchiver; +import freenet.library.util.exec.SimpleProgress; +import freenet.library.util.exec.TaskAbortException; + +public class FcpWriter implements + LiveArchiver { + private FcpConnection connection; + private ObjectStreamWriter writer; + private String mimeType; + private int size; + private Priority priorityLevel; + private String identifier; + + private static int identifierCounter = 0; + private static String getNewIdentifier() { + return "FcpWriter" + (++identifierCounter); + } + + public FcpWriter(FcpConnection fcpConnection, ObjectStreamWriter w, + String mime, int s, + Priority pl) { + connection = fcpConnection; + writer = w; + mimeType = mime; + size = s; + priorityLevel = pl; + identifier = getNewIdentifier(); + } + + @Override + public void pull(freenet.library.io.serial.Serialiser.PullTask task) + throws TaskAbortException { + pullLive(task, null); + } + + @Override + public void push(freenet.library.io.serial.Serialiser.PushTask task) + throws TaskAbortException { + throw new UnsupportedOperationException(); + } + + @Override + public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, + SimpleProgress p) throws TaskAbortException { + throw new UnsupportedOperationException(); + } + + @Override + public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, + SimpleProgress p) throws TaskAbortException { + throw new UnsupportedOperationException(); + } + + @Override + public void waitForAsyncInserts() throws TaskAbortException { + throw new UnsupportedOperationException(); + } +} From b7c47ca52cf094976270bc6598f422b4003c6562 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 15 Mar 2015 18:00:33 +0000 Subject: [PATCH 036/180] New workspace with project named a little differently. 
--HG-- branch : eclipse-separation --- .classpath | 2 +- shared/.classpath | 1 + uploader/.classpath | 4 ++-- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.classpath b/.classpath index 24982e93..aa1f13c2 100644 --- a/.classpath +++ b/.classpath @@ -5,6 +5,6 @@ - + diff --git a/shared/.classpath b/shared/.classpath index 977652bf..056e19e4 100644 --- a/shared/.classpath +++ b/shared/.classpath @@ -5,5 +5,6 @@ + diff --git a/uploader/.classpath b/uploader/.classpath index 19dc2af3..f760e33a 100644 --- a/uploader/.classpath +++ b/uploader/.classpath @@ -3,7 +3,7 @@ - - + + From 5f5e1a1a7e9ba7079a2756763fa815c7ea0e760b Mon Sep 17 00:00:00 2001 From: anonymous Date: Mon, 16 Mar 2015 05:55:47 +0000 Subject: [PATCH 037/180] Added debug trace. Fixes to get the merge working. --HG-- branch : eclipse-separation --- shared/src/freenet/copied/SHA256.java | 223 ++++++++++++++++++ .../library/util/exec/SimpleProgress.java | 5 +- src/plugins/Library/Main.java | 1 - .../library/uploader/DirectoryUploader.java | 9 +- .../library/uploader/archiver/FcpReader.java | 7 +- .../library/uploader/archiver/FcpWriter.java | 1 + 6 files changed, 240 insertions(+), 6 deletions(-) create mode 100644 shared/src/freenet/copied/SHA256.java diff --git a/shared/src/freenet/copied/SHA256.java b/shared/src/freenet/copied/SHA256.java new file mode 100644 index 00000000..bcc1d038 --- /dev/null +++ b/shared/src/freenet/copied/SHA256.java @@ -0,0 +1,223 @@ +/** +Cryptix General Licence +Copyright (C) 1995, 1996, 1997, 1998, 1999, 2000 +The Cryptix Foundation Limited. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the copyright notice, +this list of conditions and the following disclaimer. +2. 
Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in +the documentation and/or other materials provided with the +distribution. +THIS SOFTWARE IS PROVIDED BY THE CRYPTIX FOUNDATION LIMITED ``AS IS'' +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED +AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF +THE POSSIBILITY OF SUCH DAMAGE. + * + * Copyright (C) 2000 The Cryptix Foundation Limited. All rights reserved. + * + * Use, modification, copying and distribution of this software is subject to + * the terms and conditions of the Cryptix General Licence. You should have + * received a copy of the Cryptix General Licence along with this library; + * if not, you can download a copy from http://www.cryptix.org/ . + */ +// Copied from freenet.crypt +package freenet.copied; + +import java.io.IOException; +import java.io.InputStream; +import java.security.GeneralSecurityException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.security.Provider; +import java.security.Security; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +/** + * @author Jeroen C. 
van Gelderen (gelderen@cryptix.org) + */ +public class SHA256 { + /** Size (in bytes) of this hash */ + private static final int HASH_SIZE = 32; + + private static final int MESSAGE_DIGESTS_TO_CACHE = 16; + private static final ArrayList digests = new ArrayList(); + + /** + * It won't reset the Message Digest for you! + * @param InputStream + * @param MessageDigest + * @return + * @throws IOException + */ + public static void hash(InputStream is, MessageDigest md) throws IOException { + try { + byte[] buf = new byte[4096]; + int readBytes = is.read(buf); + while(readBytes > -1) { + md.update(buf, 0, readBytes); + readBytes = is.read(buf); + } + } finally { + is.close(); + } + } + + // From freenet.crypt.JceLoader + static public final Provider SUN; // optional, may be null + static private boolean checkUse(String prop) + { + return checkUse(prop, "true"); + } + static private boolean checkUse(String prop, String def) + { + return "true".equalsIgnoreCase(System.getProperty("freenet.jce."+prop, def)); + } + static { + SUN = checkUse("use.SUN") ? 
Security.getProvider("SUN") : null; + } + + // From freenet.crypt.Util + public static final Map mdProviders; + + static private long benchmark(MessageDigest md) throws GeneralSecurityException + { + long times = Long.MAX_VALUE; + byte[] input = new byte[1024]; + byte[] output = new byte[md.getDigestLength()]; + // warm-up + for (int i = 0; i < 32; i++) { + md.update(input, 0, input.length); + md.digest(output, 0, output.length); + System.arraycopy(output, 0, input, (i*output.length)%(input.length-output.length), output.length); + } + for (int i = 0; i < 128; i++) { + long startTime = System.nanoTime(); + for (int j = 0; j < 4; j++) { + for (int k = 0; k < 32; k ++) { + md.update(input, 0, input.length); + } + md.digest(output, 0, output.length); + } + long endTime = System.nanoTime(); + times = Math.min(endTime - startTime, times); + System.arraycopy(output, 0, input, 0, output.length); + } + return times; + } + + static { + try { + HashMap mdProviders_internal = new HashMap(); + + for (String algo: new String[] { + "SHA1", "MD5", "SHA-256", "SHA-384", "SHA-512" + }) { + final Provider sun = SUN; + MessageDigest md = MessageDigest.getInstance(algo); + md.digest(); + if (sun != null) { + // SUN provider is faster (in some configurations) + try { + MessageDigest sun_md = MessageDigest.getInstance(algo, sun); + sun_md.digest(); + if (md.getProvider() != sun_md.getProvider()) { + long time_def = benchmark(md); + long time_sun = benchmark(sun_md); + System.out.println(algo + " (" + md.getProvider() + "): " + time_def + "ns"); + System.out.println(algo + " (" + sun_md.getProvider() + "): " + time_sun + "ns"); + if (time_sun < time_def) { + md = sun_md; + } + } + } catch(GeneralSecurityException e) { + // ignore + System.err.println(algo + "@" + sun + " benchmark failed"); + } catch(Throwable e) { + // ignore + System.err.println(algo + "@" + sun + " benchmark failed"); + } + } + Provider mdProvider = md.getProvider(); + System.out.println(algo + ": using " + 
mdProvider); + mdProviders_internal.put(algo, mdProvider); + } + mdProviders = Collections.unmodifiableMap(mdProviders_internal); + } catch(NoSuchAlgorithmException e) { + // impossible + throw new Error(e); + } + } + + private static final Provider mdProvider = mdProviders.get("SHA-256"); + + /** + * Create a new SHA-256 MessageDigest + * Either succeed or stop the node. + */ + public static MessageDigest getMessageDigest() { + try { + MessageDigest md = null; + synchronized(digests) { + int x = digests.size(); + if(x == 0) md = null; + else md = digests.remove(x-1); + } + if(md == null) + md = MessageDigest.getInstance("SHA-256", mdProvider); + return md; + } catch(NoSuchAlgorithmException e2) { + //TODO: maybe we should point to a HOWTO for freejvms + System.err.println("Check your JVM settings especially the JCE!" + e2); + e2.printStackTrace(); + } + throw new RuntimeException(); + } + + /** + * Return a MessageDigest to the pool. + * Must be SHA-256 ! + */ + public static void returnMessageDigest(MessageDigest md256) { + if(md256 == null) + return; + String algo = md256.getAlgorithm(); + if(!(algo.equals("SHA-256") || algo.equals("SHA256"))) + throw new IllegalArgumentException("Should be SHA-256 but is " + algo); + md256.reset(); + synchronized (digests) { + int mdPoolSize = digests.size(); + if (mdPoolSize > MESSAGE_DIGESTS_TO_CACHE || noCache) { // don't cache too many of them + return; + } + digests.add(md256); + } + } + + public static byte[] digest(byte[] data) { + MessageDigest md = getMessageDigest(); + byte[] hash = md.digest(data); + returnMessageDigest(md); + return hash; + } + + public static int getDigestLength() { + return HASH_SIZE; + } + + private static boolean noCache = false; + +} diff --git a/shared/src/freenet/library/util/exec/SimpleProgress.java b/shared/src/freenet/library/util/exec/SimpleProgress.java index 17f6d537..c5bda165 100644 --- a/shared/src/freenet/library/util/exec/SimpleProgress.java +++ 
b/shared/src/freenet/library/util/exec/SimpleProgress.java @@ -91,9 +91,10 @@ public boolean finalizedTotal() { */ public synchronized void addPartDone() { if (pdone == known) { - throw new IllegalStateException("Can't increased parts done above parts known"); + throw new IllegalStateException("More parts done than known"); } pdone++; + System.err.println("DEBUG: " + this + " done " + pdone + "/" + known); if (finalizedTotal() && pdone == known) { inprogress = false; notifyAll(); @@ -116,12 +117,14 @@ public synchronized void addPartKnown(int parts, boolean finalise) { estimate = known + parts; known = estimate; estimate = ProgressParts.TOTAL_FINALIZED; + System.err.println("DEBUG: " + this + " finalise " + pdone + "/" + known); } else { if (finalizedTotal()) { throw new IllegalArgumentException("Cannot un-finalise a final total!"); } estimate = ProgressParts.ESTIMATE_UNKNOWN; known += parts; + System.err.println("DEBUG: " + this + " add " + pdone + "/" + known); } } diff --git a/src/plugins/Library/Main.java b/src/plugins/Library/Main.java index 9b4b9776..9bb4a619 100644 --- a/src/plugins/Library/Main.java +++ b/src/plugins/Library/Main.java @@ -72,7 +72,6 @@ import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.security.MessageDigest; -import plugins.Library.index.xml.LibrarianHandler; /** * Library class is the api for others to use search facilities, it is used by the interfaces diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index 1ca7c153..26ffe1c0 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -344,27 +344,34 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { diskToMerge.ttab.keySetAutoDeflate().iterator(); TreeSet terms = new TreeSet(); while(it.hasNext()) terms.add(it.next()); - System.out.println("Merging "+terms.size()+" 
terms from disk to Freenet..."); + System.out.println("Merging " + + terms.size() + + " terms from disk to Freenet..."); assert(terms.size() == diskToMerge.ttab.size()); assert(idxFreenet.ttab.isBare()); assert(diskToMerge.ttab.isBare()); long entriesAdded = terms.size(); // Run the actual merge. + System.out.println("Start update"); idxFreenet.ttab.update(terms, null, clo, new TaskAbortExceptionConvertor()); assert(idxFreenet.ttab.isBare()); // Deflate the main tree. + System.out.println("Start deflate"); newtrees.deflate(); assert(diskToMerge.ttab.isBare()); // Push the top node to a CHK. PushTask task4 = new PushTask(idxFreenet); task4.meta = "Unknown"; + System.out.println("Start pushing"); srl.push(task4); // Now wait for the inserts to finish. They are started // asynchronously in the above merge. LiveArchiver, SimpleProgress> arch = srl.getChildSerialiser(); + System.out.println("Start waiting"); arch.waitForAsyncInserts(); + System.out.println("Done waiting"); long mergeEndTime = System.currentTimeMillis(); System.out.println(entriesAdded + " entries merged in " + (mergeEndTime-mergeStartTime) + " ms, root at " + task4.meta); diff --git a/uploader/src/freenet/library/uploader/archiver/FcpReader.java b/uploader/src/freenet/library/uploader/archiver/FcpReader.java index 8f7645f8..17d8b52d 100644 --- a/uploader/src/freenet/library/uploader/archiver/FcpReader.java +++ b/uploader/src/freenet/library/uploader/archiver/FcpReader.java @@ -6,6 +6,8 @@ import java.io.InputStream; import net.pterodactylus.fcp.FcpConnection; +import freenet.copied.Base64; +import freenet.copied.SHA256; import freenet.library.Priority; import freenet.library.io.ObjectStreamReader; import freenet.library.io.serial.LiveArchiver; @@ -54,8 +56,7 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, if (task.meta instanceof String) { cacheKey = (String) task.meta; } else if (task.meta instanceof byte[]) { - throw new UnsupportedOperationException( - "Not implemented 
yet."); + cacheKey = Base64.encode(SHA256.digest((byte[]) task.meta)); } try { @@ -71,7 +72,7 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, } if (progress != null) { - progress.addPartDone(); + progress.addPartKnown(0, true); } } catch (IOException e) { System.out.println("IOException:"); diff --git a/uploader/src/freenet/library/uploader/archiver/FcpWriter.java b/uploader/src/freenet/library/uploader/archiver/FcpWriter.java index 7b1b876f..815e2820 100644 --- a/uploader/src/freenet/library/uploader/archiver/FcpWriter.java +++ b/uploader/src/freenet/library/uploader/archiver/FcpWriter.java @@ -56,6 +56,7 @@ public void push(freenet.library.io.serial.Serialiser.PushTask task) @Override public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, SimpleProgress p) throws TaskAbortException { + System.out.println("FcpWriter.pullLive()"); throw new UnsupportedOperationException(); } From a6635edae0601f9849d9c2a5e97d84297991fe60 Mon Sep 17 00:00:00 2001 From: anonymous Date: Fri, 20 Mar 2015 04:37:26 +0000 Subject: [PATCH 038/180] Abandoned the idea of separating the Archiver and joined together to a single one. 
--HG-- branch : eclipse-separation --- .../FcpReader.java => FcpArchiver.java} | 43 ++++++++--- .../library/uploader/UploaderLibrary.java | 15 +--- .../library/uploader/archiver/FcpWriter.java | 73 ------------------- 3 files changed, 36 insertions(+), 95 deletions(-) rename uploader/src/freenet/library/uploader/{archiver/FcpReader.java => FcpArchiver.java} (66%) delete mode 100644 uploader/src/freenet/library/uploader/archiver/FcpWriter.java diff --git a/uploader/src/freenet/library/uploader/archiver/FcpReader.java b/uploader/src/freenet/library/uploader/FcpArchiver.java similarity index 66% rename from uploader/src/freenet/library/uploader/archiver/FcpReader.java rename to uploader/src/freenet/library/uploader/FcpArchiver.java index 17d8b52d..2d7d45ec 100644 --- a/uploader/src/freenet/library/uploader/archiver/FcpReader.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -1,35 +1,56 @@ -package freenet.library.uploader.archiver; +package freenet.library.uploader; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; +import sun.reflect.generics.reflectiveObjects.NotImplementedException; + +import net.pterodactylus.fcp.ClientGet; +import net.pterodactylus.fcp.ClientHello; +import net.pterodactylus.fcp.ClientPut; +import net.pterodactylus.fcp.CloseConnectionDuplicateClientName; +import net.pterodactylus.fcp.FcpAdapter; import net.pterodactylus.fcp.FcpConnection; +import net.pterodactylus.fcp.FcpMessage; +import net.pterodactylus.fcp.NodeHello; import freenet.copied.Base64; import freenet.copied.SHA256; import freenet.library.Priority; import freenet.library.io.ObjectStreamReader; +import freenet.library.io.ObjectStreamWriter; import freenet.library.io.serial.LiveArchiver; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; -public class FcpReader implements - LiveArchiver { +public class FcpArchiver + implements LiveArchiver { + private 
FcpConnection connection; private File cacheDir; - private ObjectStreamReader reader; + private S readerWriter; private String mimeType; private int size; private Priority priorityLevel; + private String identifier; + + private static int identifierCounter = 0; + private static String getNewIdentifier() { + return "FcpWriter" + (++identifierCounter); + } - public FcpReader(File directory, ObjectStreamReader r, - String mime, int s, - Priority pl) { + public FcpArchiver(FcpConnection fcpConnection, + File directory, + S rw, + String mime, int s, + Priority pl) { + connection = fcpConnection; cacheDir = directory; - reader = r; + readerWriter = rw; mimeType = mime; size = s; priorityLevel = pl; + identifier = getNewIdentifier(); } @Override @@ -41,7 +62,7 @@ public void pull(freenet.library.io.serial.Serialiser.PullTask task) @Override public void push(freenet.library.io.serial.Serialiser.PushTask task) throws TaskAbortException { - throw new UnsupportedOperationException(); + throw new NotImplementedException(); } /** @@ -66,7 +87,7 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, cached.length() != 0 && cached.canRead()) { InputStream is = new FileInputStream(cached); - task.data = (T) reader.readObject(is); + task.data = (T) readerWriter.readObject(is); is.close(); } } @@ -90,7 +111,7 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, @Override public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, SimpleProgress p) throws TaskAbortException { - throw new UnsupportedOperationException(); + throw new NotImplementedException(); } @Override diff --git a/uploader/src/freenet/library/uploader/UploaderLibrary.java b/uploader/src/freenet/library/uploader/UploaderLibrary.java index ec1ad96e..8512ba23 100644 --- a/uploader/src/freenet/library/uploader/UploaderLibrary.java +++ b/uploader/src/freenet/library/uploader/UploaderLibrary.java @@ -22,8 +22,6 @@ import freenet.library.io.ObjectStreamReader; 
import freenet.library.io.ObjectStreamWriter; import freenet.library.io.serial.LiveArchiver; -import freenet.library.uploader.archiver.FcpReader; -import freenet.library.uploader.archiver.FcpWriter; import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; @@ -104,15 +102,10 @@ public static String MD5(String text) { LiveArchiver newArchiver(S rw, String mime, int size, freenet.library.Priority priorityLevel) { - if (rw instanceof ObjectStreamWriter) { - return new FcpReader(new File(UploaderPaths.LIBRARY_CACHE), - rw, mime, size, priorityLevel); - } else if (rw instanceof ObjectStreamReader) { - return new FcpWriter(fcpConnection, rw, mime, size, priorityLevel); - } else { - // This is a Shouldn't happen. - throw new IllegalArgumentException("Unknown reader/writer: " + rw); - } + return new FcpArchiver(fcpConnection, + new File(UploaderPaths.LIBRARY_CACHE), + rw, + mime, size, priorityLevel); } @Override diff --git a/uploader/src/freenet/library/uploader/archiver/FcpWriter.java b/uploader/src/freenet/library/uploader/archiver/FcpWriter.java deleted file mode 100644 index 815e2820..00000000 --- a/uploader/src/freenet/library/uploader/archiver/FcpWriter.java +++ /dev/null @@ -1,73 +0,0 @@ -package freenet.library.uploader.archiver; - -import java.io.IOException; - -import net.pterodactylus.fcp.ClientGet; -import net.pterodactylus.fcp.ClientHello; -import net.pterodactylus.fcp.ClientPut; -import net.pterodactylus.fcp.CloseConnectionDuplicateClientName; -import net.pterodactylus.fcp.FcpAdapter; -import net.pterodactylus.fcp.FcpConnection; -import net.pterodactylus.fcp.FcpMessage; -import net.pterodactylus.fcp.NodeHello; -import freenet.library.Priority; -import freenet.library.io.ObjectStreamWriter; -import freenet.library.io.serial.LiveArchiver; -import freenet.library.util.exec.SimpleProgress; -import freenet.library.util.exec.TaskAbortException; - -public class FcpWriter implements - LiveArchiver { - private FcpConnection 
connection; - private ObjectStreamWriter writer; - private String mimeType; - private int size; - private Priority priorityLevel; - private String identifier; - - private static int identifierCounter = 0; - private static String getNewIdentifier() { - return "FcpWriter" + (++identifierCounter); - } - - public FcpWriter(FcpConnection fcpConnection, ObjectStreamWriter w, - String mime, int s, - Priority pl) { - connection = fcpConnection; - writer = w; - mimeType = mime; - size = s; - priorityLevel = pl; - identifier = getNewIdentifier(); - } - - @Override - public void pull(freenet.library.io.serial.Serialiser.PullTask task) - throws TaskAbortException { - pullLive(task, null); - } - - @Override - public void push(freenet.library.io.serial.Serialiser.PushTask task) - throws TaskAbortException { - throw new UnsupportedOperationException(); - } - - @Override - public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, - SimpleProgress p) throws TaskAbortException { - System.out.println("FcpWriter.pullLive()"); - throw new UnsupportedOperationException(); - } - - @Override - public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, - SimpleProgress p) throws TaskAbortException { - throw new UnsupportedOperationException(); - } - - @Override - public void waitForAsyncInserts() throws TaskAbortException { - throw new UnsupportedOperationException(); - } -} From 1e8a345369cf4572ce69eea2ea88fc40571dae28 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 5 Apr 2015 05:54:51 +0000 Subject: [PATCH 039/180] Added upload to the fcp-adapter. 
--HG-- branch : eclipse-separation --- .../library/util/exec/SimpleProgress.java | 6 +- .../library/uploader/DirectoryUploader.java | 122 +++++- .../freenet/library/uploader/FcpArchiver.java | 364 +++++++++++++++++- .../src/freenet/library/uploader/Merger.java | 11 +- 4 files changed, 480 insertions(+), 23 deletions(-) diff --git a/shared/src/freenet/library/util/exec/SimpleProgress.java b/shared/src/freenet/library/util/exec/SimpleProgress.java index c5bda165..068c9a8c 100644 --- a/shared/src/freenet/library/util/exec/SimpleProgress.java +++ b/shared/src/freenet/library/util/exec/SimpleProgress.java @@ -94,7 +94,7 @@ public synchronized void addPartDone() { throw new IllegalStateException("More parts done than known"); } pdone++; - System.err.println("DEBUG: " + this + " done " + pdone + "/" + known); + // System.err.println("DEBUG: " + this + " done " + pdone + "/" + known); if (finalizedTotal() && pdone == known) { inprogress = false; notifyAll(); @@ -117,14 +117,14 @@ public synchronized void addPartKnown(int parts, boolean finalise) { estimate = known + parts; known = estimate; estimate = ProgressParts.TOTAL_FINALIZED; - System.err.println("DEBUG: " + this + " finalise " + pdone + "/" + known); + // System.err.println("DEBUG: " + this + " finalise " + pdone + "/" + known); } else { if (finalizedTotal()) { throw new IllegalArgumentException("Cannot un-finalise a final total!"); } estimate = ProgressParts.ESTIMATE_UNKNOWN; known += parts; - System.err.println("DEBUG: " + this + " add " + pdone + "/" + known); + // System.err.println("DEBUG: " + this + " add " + pdone + "/" + known); } } diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index 26ffe1c0..3937908c 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -1,12 +1,15 @@ package freenet.library.uploader; import 
java.io.BufferedReader; +import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; +import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; +import java.io.UnsupportedEncodingException; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; @@ -33,6 +36,8 @@ import net.pterodactylus.fcp.FcpConnection; import net.pterodactylus.fcp.PutFailed; import net.pterodactylus.fcp.PutSuccessful; +import net.pterodactylus.fcp.URIGenerated; +import net.pterodactylus.fcp.UploadFrom; class DirectoryUploader implements Runnable { @@ -49,6 +54,7 @@ public void run() { } private String lastUploadURI; + private boolean uskUploadDone; static final int MAX_HANDLING_COUNT = 5; // When pushing is broken, allow max handling to reach this level @@ -107,7 +113,7 @@ public void run() { static final String LAST_URL_FILENAME = "library.index.lastpushed.chk"; static final String PRIV_URI_FILENAME = "library.index.privkey"; static final String PUB_URI_FILENAME = "library.index.pubkey"; - static final String EDITION_FILENAME = "library.index.next-edition"; + static final String EDITION_FILENAME = "library.index.last-edition"; static final String LAST_DISK_FILENAME = "library.index.lastpushed.disk"; @@ -394,41 +400,137 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { } } + private String readFileLine(final String filename) { + File f = new File(filename); + FileInputStream fis; + try { + fis = new FileInputStream(f); + } catch (FileNotFoundException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + throw new RuntimeException(); + } + BufferedReader br = null; + String line; + try { + br = new BufferedReader(new InputStreamReader(fis, "UTF-8")); + line = br.readLine(); + } catch (UnsupportedEncodingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + throw new 
RuntimeException(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + throw new RuntimeException(); + } finally { + try { + if (br != null) { + br.close(); + } + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + return line; + } + + protected void writeFileLine(String filename, String string) { + File f = new File(filename); + FileOutputStream fos; + try { + fos = new FileOutputStream(f); + } catch (FileNotFoundException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + throw new RuntimeException(); + } + BufferedWriter bw = null; + try { + bw = new BufferedWriter(new OutputStreamWriter(fos, "UTF-8")); + bw.write(string); + } catch (UnsupportedEncodingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + throw new RuntimeException(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + throw new RuntimeException(); + } finally { + try { + if (bw != null) { + bw.close(); + } + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + } + private void uploadUSKForFreenetIndex(String uri) { - final ClientPut usk = new ClientPut(lastUploadURI, "USKupload"); + String insertURI = readFileLine(PRIV_URI_FILENAME); + String keyPart = insertURI.substring("freenet:SSK@".length()); + int lastEdition = Integer.parseInt(readFileLine(EDITION_FILENAME)); + final ClientPut usk = new ClientPut("USK@" + keyPart + "/" + (lastEdition + 1), + "USKupload", + UploadFrom.redirect); usk.setTargetURI(uri); - connection.addFcpListener(new FcpAdapter() { + uskUploadDone = false; + FcpAdapter fcpListener = new FcpAdapter() { public void receivedPutFailed(FcpConnection fcpConnection, PutFailed result) { + assert fcpConnection == connection; + assert result != null; System.out.println("Could not upload USK"); + uskUploadDone = true; synchronized (usk) { - usk.notify(); + usk.notifyAll(); } - 
connection.removeFcpListener(this); } public void receivedPutSuccessful(FcpConnection fcpConnection, PutSuccessful result) { + assert fcpConnection == connection; + assert result != null; System.out.println("USK uploaded"); + uskUploadDone = true; synchronized (usk) { - usk.notify(); + usk.notifyAll(); } - connection.removeFcpListener(this); } - }); + + public void receivedURIGenerated(FcpConnection fcpConnection, URIGenerated uriGenerated) { + assert fcpConnection == connection; + assert uriGenerated != null; + System.out.println("URI generated " + uriGenerated.getURI()); + int editionStartPos = uriGenerated.getURI().lastIndexOf('/') + 1; + writeFileLine(EDITION_FILENAME, uriGenerated.getURI().substring(editionStartPos)); + } + + }; + connection.addFcpListener(fcpListener); try { connection.sendMessage(usk); - usk.wait(); + while (!uskUploadDone) { + synchronized (usk) { + usk.wait(); + } + } } catch (InterruptedException e) { System.err.println("Could not upload USK"); System.exit(1); } catch (IOException e) { System.err.println("IO Exception when uploading USK"); System.exit(1); + } finally { + connection.removeFcpListener(fcpListener); } } - /** Create a Closure which will merge the subtrees from one index + /** Create a Closure which will merge the subtrees from one index * (on disk) into the subtrees of another index (on Freenet). It * will be called with each subtree from the on-Freenet index, and * will merge data from the relevant on-disk subtree. 
Both diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index 2d7d45ec..2129ec90 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -2,10 +2,15 @@ import java.io.File; import java.io.FileInputStream; +import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; - -import sun.reflect.generics.reflectiveObjects.NotImplementedException; +import java.io.PipedInputStream; +import java.io.PipedOutputStream; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; import net.pterodactylus.fcp.ClientGet; import net.pterodactylus.fcp.ClientHello; @@ -14,7 +19,14 @@ import net.pterodactylus.fcp.FcpAdapter; import net.pterodactylus.fcp.FcpConnection; import net.pterodactylus.fcp.FcpMessage; +import net.pterodactylus.fcp.FinishedCompression; import net.pterodactylus.fcp.NodeHello; +import net.pterodactylus.fcp.PutFailed; +import net.pterodactylus.fcp.PutFetchable; +import net.pterodactylus.fcp.PutSuccessful; +import net.pterodactylus.fcp.StartedCompression; +import net.pterodactylus.fcp.URIGenerated; +import net.pterodactylus.fcp.Verbosity; import freenet.copied.Base64; import freenet.copied.SHA256; import freenet.library.Priority; @@ -24,15 +36,25 @@ import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; + public class FcpArchiver implements LiveArchiver { private FcpConnection connection; private File cacheDir; - private S readerWriter; + private ObjectStreamReader reader; + private ObjectStreamWriter writer; private String mimeType; private int size; private Priority priorityLevel; private String identifier; + + /** + * Before synchronizing on stillRunning, be sure to synchronize + * connection! 
+ */ + private Map stillRunning = + new HashMap(); + private Thread cleanupThread; private static int identifierCounter = 0; private static String getNewIdentifier() { @@ -46,12 +68,23 @@ public FcpArchiver(FcpConnection fcpConnection, Priority pl) { connection = fcpConnection; cacheDir = directory; - readerWriter = rw; + reader = rw; + writer = rw; mimeType = mime; size = s; priorityLevel = pl; identifier = getNewIdentifier(); } + + private net.pterodactylus.fcp.Priority getPriority() { + switch (priorityLevel) { + case Interactive: + return net.pterodactylus.fcp.Priority.interactive; + case Bulk: + return net.pterodactylus.fcp.Priority.bulkSplitfile; + } + return net.pterodactylus.fcp.Priority.bulkSplitfile; + } @Override public void pull(freenet.library.io.serial.Serialiser.PullTask task) @@ -62,7 +95,7 @@ public void pull(freenet.library.io.serial.Serialiser.PullTask task) @Override public void push(freenet.library.io.serial.Serialiser.PushTask task) throws TaskAbortException { - throw new NotImplementedException(); + pushLive(task, null); } /** @@ -87,7 +120,7 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, cached.length() != 0 && cached.canRead()) { InputStream is = new FileInputStream(cached); - task.data = (T) readerWriter.readObject(is); + task.data = (T) reader.readObject(is); is.close(); } } @@ -107,15 +140,328 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, task.meta + " in the cache."); } + + private class PushAdapter extends FcpAdapter { + private ClientPut putter; + private String identifier; + private String token; + private String uri; + private int progressTotal; + private int progressCompleted; + private boolean done; + + public PushAdapter(ClientPut p, String i, String t) { + putter = p; + identifier = i; + token = t; + uri = null; + progressTotal = 0; + progressCompleted = 0; + synchronized (stillRunning) { + stillRunning.put(token, this); + printLeft(); + } + } + + /** + * Show the amount 
of outstanding work. + */ + void printLeft() { + int total = 0; + int completed = 0; + synchronized (stillRunning) { + for (Map.Entry entry : stillRunning.entrySet()) { + total += entry.getValue().progressTotal; + completed += entry.getValue().progressCompleted; + } + System.out.println("Outstanding " + stillRunning.size() + " jobs " + + "(" + completed + "/" + total + ")"); + } + } + + @Override + public void receivedPutSuccessful(FcpConnection c, PutSuccessful ps) { + assert c == connection; + assert ps != null; + if (!identifier.equals(ps.getIdentifier())) + return; + System.out.println("receivedPutSuccessful for " + token + ": " + ps); + markDone(); + } + + @Override + public void receivedPutFetchable(FcpConnection c, PutFetchable pf) { + assert c == connection; + assert pf != null; + if (!identifier.equals(pf.getIdentifier())) + return; + System.out.println("receivedPutFetchable for " + token + ": " + pf); + synchronized (this) { + this.notifyAll(); + } + } + + + @Override + public void receivedPutFailed(FcpConnection c, PutFailed pf) { + assert c == connection; + assert pf != null; + if (!identifier.equals(pf.getIdentifier())) + return; + synchronized (putter) { + putter.notify(); + } + System.out.println("receivedPutFailed for " + token + ": " + pf); + markDone(); + } + + @Override + public void receivedSimpleProgress(FcpConnection c, + net.pterodactylus.fcp.SimpleProgress sp) { + assert c == connection; + assert sp != null; + if (!identifier.equals(sp.getIdentifier())) + return; + if (sp.getFailed() > 0 || + sp.getFatallyFailed() > 0) { + System.out.println(token + "failed - aborted."); + markDone(); + } + progressCompleted = sp.getSucceeded(); + progressTotal = sp.getTotal(); + System.out.println("receivedSimpleProgess for " + token + ": " + + sp.getSucceeded() + "/" + sp.getTotal()); + if (sp.isFinalizedTotal() && + sp.getSucceeded() == sp.getTotal()) { + markDone(); + } + printLeft(); + } + + @Override + public void 
receivedStartedCompression(FcpConnection c, + StartedCompression startedCompression) { + assert c == connection; + assert startedCompression != null; + if (!identifier.equals(startedCompression.getIdentifier())) + return; + System.out.println("receivedStartedCompression for " + + token + ": " + + startedCompression); + } + + @Override + public void receviedFinishedCompression(FcpConnection c, + FinishedCompression finishedCompression) { + assert c == connection; + assert finishedCompression != null; + if (!identifier.equals(finishedCompression.getIdentifier())) + return; + System.out.println("receivedFinishedCompression for " + + token + ": " + + finishedCompression); + } + + public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { + assert c == connection; + assert uriGenerated != null; + if (!identifier.equals(uriGenerated.getIdentifier())) + return; + System.out.println("receivedURIGenerated for " + + token + ": " + + uriGenerated); + uri = uriGenerated.getURI(); + synchronized (this) { + this.notifyAll(); + } + } + + private void markDone() { + done = true; + synchronized (this) { + this.notifyAll(); + } + // Signal to the cleanup thread: + synchronized (stillRunning) { + stillRunning.notifyAll(); + } + } + + private void forgetAboutThis() { + assert done; + connection.removeFcpListener(this); + synchronized (stillRunning) { + stillRunning.remove(token); + stillRunning.notifyAll(); + printLeft(); + } + } + + boolean isDone() { + return done; + } + + String getURI() { + return uri; + } + }; + + + private static int counter = 1; @Override public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, - SimpleProgress p) throws TaskAbortException { - throw new NotImplementedException(); + SimpleProgress progress) throws TaskAbortException { + final String identifier = "FcpArchiver" + counter; + final String token = "FcpArchiverPushLive" + counter; + counter++; + final ClientPut putter = new ClientPut("CHK@", identifier); + 
putter.setClientToken(token); + putter.setEarlyEncode(true); + putter.setPriority(getPriority()); + putter.setVerbosity(Verbosity.ALL); + + File file = new File(cacheDir, token); + FileOutputStream fileOut = null; + try { + fileOut = new FileOutputStream(file); + writer.writeObject(task.data, fileOut); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } finally { + try { + fileOut.close(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + + final long dataLength = file.length(); + putter.setDataLength(dataLength); + + + PipedInputStream in = new PipedInputStream(); + putter.setPayloadInputStream(in); + + try { + final PipedOutputStream out = new PipedOutputStream(in); + final T data = task.data; + new Thread( + new Runnable() { + public void run () { + try { + // write to the PipedOutputStream + writer.writeObject(data, out); + out.close(); + } + catch (IOException e) { + // logging and exception handling should go here + e.printStackTrace(); + } + } + } + ).start(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + PushAdapter putterListener = new PushAdapter(putter, identifier, token); + connection.addFcpListener(putterListener); + try { + if (progress != null) { + progress.addPartKnown(1, true); + } + connection.sendMessage(putter); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + // Wait for identifier + synchronized (putterListener) { + while (putterListener.getURI() == null) { + try { + putterListener.wait(); + } catch (InterruptedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + } + + if (progress != null) { + progress.addPartDone(); + } + task.meta = putterListener.getURI(); + + // Moving file. 
+ file.renameTo(new File(cacheDir, putterListener.getURI())); + + startCleanupThread(); + } + + private synchronized void startCleanupThread() { + if (cleanupThread == null) { + cleanupThread = new Thread( + new Runnable() { + public void run () { + boolean moreJobs = false; + do { + if (moreJobs) { + synchronized (stillRunning) { + try { + stillRunning.wait(); + } catch (InterruptedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + Set copy; + synchronized (stillRunning) { + copy = new HashSet(stillRunning.values()); + } + for (PushAdapter pa : copy) { + if (pa.isDone()) { + pa.forgetAboutThis(); + } + } + } + synchronized (stillRunning) { + moreJobs = !stillRunning.isEmpty(); + } + } while (moreJobs); + removeCleanupThread(); + } + } + ); + cleanupThread.start(); + } + } + + private synchronized void removeCleanupThread() { + cleanupThread = null; } @Override public void waitForAsyncInserts() throws TaskAbortException { - throw new UnsupportedOperationException(); + boolean moreJobs = false; + do { + if (moreJobs) { + synchronized (stillRunning) { + try { + stillRunning.wait(); + } catch (InterruptedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + } + synchronized (stillRunning) { + moreJobs = !stillRunning.isEmpty(); + } + } while (moreJobs); } } diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 6cbe1587..daf64798 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -143,6 +143,14 @@ public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnecti }; connection.addFcpListener(helloListener); + FcpAdapter closeListener = new FcpAdapter() { + public void connectionClosed(FcpConnection fcpConnection, Throwable throwable) { + System.out.println("Connection Closed - Aborting."); + System.exit(1); + } + }; + 
connection.addFcpListener(closeListener); + synchronized (hello) { try { connection.sendMessage(hello); @@ -218,12 +226,13 @@ public boolean accept(File arg0, String arg1) { } } } - + connection.removeFcpListener(closeListener); } finally { if (connection != null) { connection.close(); } } + System.out.println("Upload completed."); System.exit(exitStatus); } } From 57cd5f0e73a5b170c60de2de0f2ed0f0e018c4db Mon Sep 17 00:00:00 2001 From: anonymous Date: Mon, 6 Apr 2015 12:11:43 +0000 Subject: [PATCH 040/180] Removed tabs. --HG-- branch : eclipse-separation --- .../src/freenet/library/uploader/Merger.java | 70 +++++++++---------- 1 file changed, 35 insertions(+), 35 deletions(-) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index daf64798..0b0eac92 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -57,24 +57,24 @@ */ final public class Merger { /** Read the TermEntry's from the Bucket into newtrees and terms, and set up the index - * properties. - * @param data The Bucket containing TermPageEntry's etc serialised with TermEntryReaderWriter. - */ + * properties. + * @param data The Bucket containing TermPageEntry's etc serialised with TermEntryReaderWriter. 
+ */ private static Map> readTermsFrom(File f) { - Map> newtrees = new HashMap>(); + Map> newtrees = new HashMap>(); DataInputStream is = null; try { is = new DataInputStream(new FileInputStream(f)); - String line; - int laps = 0; - do { - line = is.readLine(); - System.out.println("Line: " + line); - if (laps > 100) { - System.err.println("Cannot get out of file header."); - System.exit(1); - } - } while (!"End".equals(line)); + String line; + int laps = 0; + do { + line = is.readLine(); + System.out.println("Line: " + line); + if (laps > 100) { + System.err.println("Cannot get out of file header."); + System.exit(1); + } + } while (!"End".equals(line)); try{ while(true){ // Keep going til an EOFExcepiton is thrown TermEntry readObject = TermEntryReaderWriter.getInstance().readObject(is); @@ -88,14 +88,14 @@ private static Map> readTermsFrom(File f) { } } catch (IOException ex) { ex.printStackTrace(); - System.exit(1); + System.exit(1); } finally { - try { - is.close(); - } catch (IOException e) { - System.err.println("Cannot close"); - System.exit(1); - } + try { + is.close(); + } catch (IOException e) { + System.err.println("Cannot close"); + System.exit(1); + } } return newtrees; } @@ -144,7 +144,7 @@ public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnecti connection.addFcpListener(helloListener); FcpAdapter closeListener = new FcpAdapter() { - public void connectionClosed(FcpConnection fcpConnection, Throwable throwable) { + public void connectionClosed(FcpConnection fcpConnection, Throwable throwable) { System.out.println("Connection Closed - Aborting."); System.exit(1); } @@ -164,7 +164,7 @@ public void connectionClosed(FcpConnection fcpConnection, Throwable throwable) { exitStatus = 1; return; } finally { - connection.removeFcpListener(helloListener); + connection.removeFcpListener(helloListener); } } helloListener = null; @@ -175,7 +175,7 @@ public void connectionClosed(FcpConnection fcpConnection, Throwable throwable) { final 
String[] dirsToMerge; File directory = new File("."); - dirsToMerge = directory.list(new FilenameFilter() { + dirsToMerge = directory.list(new FilenameFilter() { public boolean accept(File arg0, String arg1) { if(!(arg1.toLowerCase().startsWith(UploaderPaths.DISK_DIR_PREFIX))) return false; @@ -187,7 +187,7 @@ public boolean accept(File arg0, String arg1) { System.out.println("There are " + dirsToMerge.length + " old directories to merge."); if (dirsToMerge.length > 0) { new DirectoryUploader(connection, - new File(directory, dirsToMerge[0])).run(); + new File(directory, dirsToMerge[0])).run(); return; } @@ -206,16 +206,16 @@ public boolean accept(File arg0, String arg1) { System.out.println("There are " + filesToMerge.length + " files to merge."); for (String s : filesToMerge) { System.out.println("File: " + s); - Map> terms = readTermsFrom(new File(s)); - System.out.println("terms:"); - SortedSet ss = null; - for (String t : terms.keySet()) { - ss = terms.get(t); - System.out.println("\t" + t + ", " + - ss.size() + " elements"); - for (TermEntry tt : ss) { - if (tt.entryType() == TermEntry.EntryType.PAGE) { - TermPageEntry tpe = (TermPageEntry) tt; + Map> terms = readTermsFrom(new File(s)); + System.out.println("terms:"); + SortedSet ss = null; + for (String t : terms.keySet()) { + ss = terms.get(t); + System.out.println("\t" + t + ", " + + ss.size() + " elements"); + for (TermEntry tt : ss) { + if (tt.entryType() == TermEntry.EntryType.PAGE) { + TermPageEntry tpe = (TermPageEntry) tt; System.out.println("\t" + tpe.page + ":"); for (Map.Entry entry : tpe.posFragments.entrySet()) { From 1c39122eee937ff21ebd4816b184f41e72eb94c0 Mon Sep 17 00:00:00 2001 From: anonymous Date: Mon, 6 Apr 2015 12:12:22 +0000 Subject: [PATCH 041/180] Use the file the second time, to avoid locking up the writer. 
--HG-- branch : eclipse-separation --- .../freenet/library/uploader/FcpArchiver.java | 44 +++++-------------- 1 file changed, 12 insertions(+), 32 deletions(-) diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index 2129ec90..2fdbf16b 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -2,6 +2,7 @@ import java.io.File; import java.io.FileInputStream; +import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; @@ -321,52 +322,32 @@ public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, putter.setPriority(getPriority()); putter.setVerbosity(Verbosity.ALL); + // Writing to file. File file = new File(cacheDir, token); FileOutputStream fileOut = null; try { fileOut = new FileOutputStream(file); writer.writeObject(task.data, fileOut); } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); + throw new TaskAbortException("Cannot write to file " + file, e); } finally { try { fileOut.close(); } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); + throw new TaskAbortException("Cannot close file " + file, e); } } final long dataLength = file.length(); putter.setDataLength(dataLength); - - PipedInputStream in = new PipedInputStream(); - putter.setPayloadInputStream(in); - + FileInputStream in; try { - final PipedOutputStream out = new PipedOutputStream(in); - final T data = task.data; - new Thread( - new Runnable() { - public void run () { - try { - // write to the PipedOutputStream - writer.writeObject(data, out); - out.close(); - } - catch (IOException e) { - // logging and exception handling should go here - e.printStackTrace(); - } - } - } - ).start(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); + in = new FileInputStream(file); + 
} catch (FileNotFoundException e) { + throw new TaskAbortException("Cannot read from file " + file, e); } + putter.setPayloadInputStream(in); PushAdapter putterListener = new PushAdapter(putter, identifier, token); connection.addFcpListener(putterListener); @@ -375,9 +356,9 @@ public void run () { progress.addPartKnown(1, true); } connection.sendMessage(putter); + in.close(); } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); + throw new TaskAbortException("Cannot send message", e); } // Wait for identifier @@ -386,8 +367,7 @@ public void run () { try { putterListener.wait(); } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); + throw new TaskAbortException("Iterrupted wait", e); } } } From 452d905185283d4a6b76ac87a47d94acf518db11 Mon Sep 17 00:00:00 2001 From: anonymous Date: Mon, 6 Apr 2015 12:18:41 +0000 Subject: [PATCH 042/180] Second attempt at removing the tabs. --HG-- branch : eclipse-separation --- .../src/freenet/library/uploader/Merger.java | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 0b0eac92..814f4817 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -216,15 +216,15 @@ public boolean accept(File arg0, String arg1) { for (TermEntry tt : ss) { if (tt.entryType() == TermEntry.EntryType.PAGE) { TermPageEntry tpe = (TermPageEntry) tt; - System.out.println("\t" + tpe.page + ":"); - for (Map.Entry entry : - tpe.posFragments.entrySet()) { - System.out.println("\t\t" + entry.getKey() + - " - " + entry.getValue()); - } - } - } - } + System.out.println("\t" + tpe.page + ":"); + for (Map.Entry entry : + tpe.posFragments.entrySet()) { + System.out.println("\t\t" + entry.getKey() + + " - " + entry.getValue()); + } + } + } + } } connection.removeFcpListener(closeListener); } 
finally { From 2b7e0e16bb384d0ecf1ab1685e4aac30a9dd277d Mon Sep 17 00:00:00 2001 From: anonymous Date: Wed, 8 Apr 2015 05:34:39 +0000 Subject: [PATCH 043/180] Moved the handling of the close-detection to get the right close message. --HG-- branch : eclipse-separation --- .../src/freenet/library/uploader/Merger.java | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 814f4817..ecd1eadf 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -114,6 +114,14 @@ public static void main(String[] argv) { // Now we are in the Freenet directory. // The rest of the work is done here. FcpConnection connection = null; + + FcpAdapter closeListener = new FcpAdapter() { + public void connectionClosed(FcpConnection fcpConnection, Throwable throwable) { + System.out.println("Connection Closed - Aborting."); + System.exit(1); + } + }; + try { try { connection = new FcpConnection("127.0.0.1"); @@ -143,12 +151,6 @@ public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnecti }; connection.addFcpListener(helloListener); - FcpAdapter closeListener = new FcpAdapter() { - public void connectionClosed(FcpConnection fcpConnection, Throwable throwable) { - System.out.println("Connection Closed - Aborting."); - System.exit(1); - } - }; connection.addFcpListener(closeListener); synchronized (hello) { @@ -226,8 +228,8 @@ public boolean accept(File arg0, String arg1) { } } } - connection.removeFcpListener(closeListener); } finally { + connection.removeFcpListener(closeListener); if (connection != null) { connection.close(); } From f60365a63a418d19e8a38d442d662d84da2d90e4 Mon Sep 17 00:00:00 2001 From: anonymous Date: Thu, 9 Apr 2015 21:35:27 +0000 Subject: [PATCH 044/180] Created the processing of files and creating the on file index. 
Removed the uploader functions from the plugin part. --HG-- branch : eclipse-separation --- src/plugins/Library/SpiderIndexURIs.java | 2 +- src/plugins/Library/SpiderIndexUploader.java | 940 +----------------- .../library/uploader/DirectoryCreator.java | 95 ++ .../freenet/library/uploader/FcpArchiver.java | 29 +- .../freenet/library/uploader/IndexPeeker.java | 24 + .../src/freenet/library/uploader/Merger.java | 171 ++-- .../library/uploader/TermEntryFileWriter.java | 62 ++ .../uploader/TermEntryReaderIterator.java | 80 ++ 8 files changed, 393 insertions(+), 1010 deletions(-) create mode 100644 uploader/src/freenet/library/uploader/DirectoryCreator.java create mode 100644 uploader/src/freenet/library/uploader/IndexPeeker.java create mode 100644 uploader/src/freenet/library/uploader/TermEntryFileWriter.java create mode 100644 uploader/src/freenet/library/uploader/TermEntryReaderIterator.java diff --git a/src/plugins/Library/SpiderIndexURIs.java b/src/plugins/Library/SpiderIndexURIs.java index b9e7c707..16487ca0 100644 --- a/src/plugins/Library/SpiderIndexURIs.java +++ b/src/plugins/Library/SpiderIndexURIs.java @@ -117,7 +117,7 @@ synchronized FreenetURI getPublicUSK() { private synchronized long getLastUploadedEdition() { /** If none uploaded, return -1, otherwise return the last uploaded version. 
*/ - return edition-1; + return edition; } } \ No newline at end of file diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index e7a51349..17012c10 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -1,58 +1,19 @@ package plugins.Library; -import java.io.BufferedReader; -import java.io.EOFException; import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.FileWriter; import java.io.FilenameFilter; import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStreamWriter; -import java.net.MalformedURLException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; -import java.util.Map.Entry; -import java.util.SortedSet; -import java.util.TreeSet; -import java.util.logging.Level; -import plugins.Library.client.FreenetArchiver; -import freenet.client.InsertException; import freenet.keys.FreenetURI; -import freenet.library.Priority; -import freenet.library.index.ProtoIndex; -import freenet.library.index.ProtoIndexComponentSerialiser; -import freenet.library.index.ProtoIndexSerialiser; -import freenet.library.index.TermEntry; -import freenet.library.index.TermEntryReaderWriter; -import freenet.library.io.serial.LiveArchiver; -import freenet.library.io.serial.Serialiser.PullTask; -import freenet.library.io.serial.Serialiser.PushTask; -import freenet.library.util.SkeletonBTreeMap; -import freenet.library.util.SkeletonBTreeSet; -import freenet.library.util.TaskAbortExceptionConvertor; -import freenet.library.util.exec.SimpleProgress; -import freenet.library.util.exec.TaskAbortException; -import freenet.library.util.func.Closure; -import freenet.node.RequestStarter; import freenet.pluginmanager.PluginNotFoundException; import freenet.pluginmanager.PluginReplySender; import 
freenet.pluginmanager.PluginRespirator; import freenet.support.Logger; -import freenet.support.MutableBoolean; import freenet.support.SimpleFieldSet; -import freenet.support.TimeUtil; import freenet.support.api.Bucket; import freenet.support.io.BucketTools; -import freenet.support.io.Closer; import freenet.support.io.FileBucket; -import freenet.support.io.FileUtil; -import freenet.support.io.LineReadingInputStream; + public class SpiderIndexUploader { @@ -67,839 +28,23 @@ public class SpiderIndexUploader { } private final PluginRespirator pr; - private Object freenetMergeSync = new Object(); - private boolean freenetMergeRunning = false; - private boolean diskMergeRunning = false; - - private final ArrayList toMergeToDisk = new ArrayList(); - static final int MAX_HANDLING_COUNT = 5; - // When pushing is broken, allow max handling to reach this level before stalling forever to prevent running out of disk space. - private int PUSH_BROKEN_MAX_HANDLING_COUNT = 10; - // Don't use too much disk space, take into account fact that Spider slows down over time. - - private boolean pushBroken; - - /** The temporary on-disk index. We merge stuff into this until it exceeds a threshold size, then - * we create a new diskIdx and merge the old one into the idxFreenet. */ - ProtoIndex idxDisk; - - /** idxDisk gets merged into idxFreenet this long after the last merge completed. */ - static final long MAX_TIME = 24*60*60*1000L; - - /** idxDisk gets merged into idxFreenet after this many incoming updates from Spider. */ - static final int MAX_UPDATES = 16; - - /** idxDisk gets merged into idxFreenet after it has grown to this many terms. - * Note that the entire main tree of terms (not the sub-trees with the positions and urls in) must - * fit into memory during the merge process. */ - static final int MAX_TERMS = 100*1000; - - /** idxDisk gets merged into idxFreenet after it has grown to this many terms. 
- * Note that the entire main tree of terms (not the sub-trees with the positions and urls in) must - * fit into memory during the merge process. */ - static final int MAX_TERMS_NOT_UPLOADED = 10*1000; - /** Maximum size of a single entry, in TermPageEntry count, on disk. If we exceed this we force an - * insert-to-freenet and move on to a new disk index. The problem is that the merge to Freenet has - * to keep the whole of each entry in RAM. This is only true for the data being merged in - the - * on-disk index - and not for the data on Freenet, which is pulled on demand. SCALABILITY */ - static final int MAX_DISK_ENTRY_SIZE = 10000; - - /** Like pushNumber, the number of the current disk dir, used to create idxDiskDir. */ - private int dirNumber; - static final String DISK_DIR_PREFIX = "library-temp-index-"; - /** Directory the current idxDisk is saved in. */ - File idxDiskDir; - private int mergedToDisk; - - ProtoIndexSerialiser srl = null; - FreenetURI lastUploadURI = null; - String lastDiskIndexName; - /** The uploaded index on Freenet. This never changes, it just gets updated. */ - ProtoIndex idxFreenet; - private final SpiderIndexURIs spiderIndexURIs; - long pushNumber; - static final String LAST_URL_FILENAME = "library.index.lastpushed.chk"; + private long pushNumber; static final String PRIV_URI_FILENAME = "library.index.privkey"; static final String PUB_URI_FILENAME = "library.index.pubkey"; - static final String EDITION_FILENAME = "library.index.next-edition"; - - static final String LAST_DISK_FILENAME = "library.index.lastpushed.disk"; + static final String EDITION_FILENAME = "library.index.last-edition"; static final String BASE_FILENAME_PUSH_DATA = "library.index.data."; - /** Merge from the Bucket chain to the on-disk idxDisk. 
*/ - protected void wrapMergeToDisk() { - spiderIndexURIs.loadSSKURIs(); - boolean first = true; - while(true) { - final Bucket data; - synchronized(freenetMergeSync) { - if(pushBroken) { - Logger.error(this, "Pushing broken"); - return; - } - if(first && diskMergeRunning) { - Logger.error(this, "Already running a handler!"); - return; - } else if((!first) && (!diskMergeRunning)) { - Logger.error(this, "Already running yet runningHandler is false?!"); - return; - } - first = false; - if(toMergeToDisk.size() == 0) { - if(logMINOR) Logger.minor(this, "Nothing to handle"); - diskMergeRunning = false; - freenetMergeSync.notifyAll(); - return; - } - data = toMergeToDisk.remove(0); - freenetMergeSync.notifyAll(); - diskMergeRunning = true; - } - try { - mergeToDisk(data); - } catch (Throwable t) { - // Failed. - synchronized(freenetMergeSync) { - diskMergeRunning = false; - pushBroken = true; - freenetMergeSync.notifyAll(); - } - if(t instanceof RuntimeException) - throw (RuntimeException)t; - if(t instanceof Error) - throw (Error)t; - } - } - } - - // This is a member variable because it is huge, and having huge stuff in local variables seems to upset the default garbage collector. - // It doesn't need to be synchronized because it's always used from mergeToDisk, which never runs in parallel. - private Map> newtrees; - // Ditto - private SortedSet terms; - - ProtoIndexSerialiser srlDisk = null; - private ProtoIndexComponentSerialiser leafsrlDisk; - - private long lastMergedToFreenet = -1; - - /** Merge a bucket of TermEntry's into an on-disk index. */ - private void mergeToDisk(Bucket data) { - - boolean newIndex = false; - - if(idxDiskDir == null) { - newIndex = true; - if(!createDiskDir()) return; - } - - if(!makeDiskDirSerialiser()) return; - - // Read data into newtrees and trees. 
- long entriesAdded = readTermsFrom(data); - - if(terms.size() == 0) { - System.out.println("Nothing to merge"); - synchronized(this) { - newtrees = null; - terms = null; - } - return; - } - - // Merge the new data to the disk index. - - try { - final MutableBoolean maxDiskEntrySizeExceeded = new MutableBoolean(); - maxDiskEntrySizeExceeded.value = false; - long mergeStartTime = System.currentTimeMillis(); - if(newIndex) { - if(createDiskIndex()) - maxDiskEntrySizeExceeded.value = true; - } else { - // async merge - Closure>, TaskAbortException> clo = - createMergeFromNewtreesClosure(maxDiskEntrySizeExceeded); - assert(idxDisk.ttab.isBare()); - System.out.println("Merging "+terms.size()+" terms, tree.size = "+idxDisk.ttab.size()+" from "+data+"..."); - idxDisk.ttab.update(terms, null, clo, new TaskAbortExceptionConvertor()); - - } - // Synchronize anyway so garbage collector knows about it. - synchronized(this) { - newtrees = null; - terms = null; - } - assert(idxDisk.ttab.isBare()); - PushTask task4 = new PushTask(idxDisk); - srlDisk.push(task4); - - long mergeEndTime = System.currentTimeMillis(); - System.out.print(entriesAdded + " entries merged to disk in " + (mergeEndTime-mergeStartTime) + " ms, root at " + task4.meta + ", "); - // FileArchiver produces a String, which is a filename not including the prefix or suffix. - String uri = (String)task4.meta; - lastDiskIndexName = uri; - System.out.println("Pushed new index to file "+uri); - if(writeStringTo(new File(LAST_DISK_FILENAME), uri) && - writeStringTo(new File(idxDiskDir, LAST_DISK_FILENAME), uri)) { - // Successfully uploaded and written new status. Can delete the incoming data. 
- data.free(); - } - - maybeMergeToFreenet(maxDiskEntrySizeExceeded); - } catch (TaskAbortException e) { - Logger.error(this, "Failed to upload index for spider: "+e, e); - System.err.println("Failed to upload index for spider: "+e); - e.printStackTrace(); - synchronized(freenetMergeSync) { - pushBroken = true; - } - } - } - - /** We have just written a Bucket of new data to an on-disk index. We may or may not want to - * upload to an on-Freenet index, depending on how big the data is etc. If we do, we will need - * to create a new on-disk index. - * @param maxDiskEntrySizeExceeded A flag object which is set (off-thread) if any single term - * in the index is very large. - */ - private void maybeMergeToFreenet(MutableBoolean maxDiskEntrySizeExceeded) { - // Maybe chain to mergeToFreenet ??? - - boolean termTooBig = false; - synchronized(maxDiskEntrySizeExceeded) { - termTooBig = maxDiskEntrySizeExceeded.value; - } - - mergedToDisk++; - if((lastMergedToFreenet > 0 && idxDisk.ttab.size() > MAX_TERMS) || - (idxDisk.ttab.size() > MAX_TERMS_NOT_UPLOADED) - || (mergedToDisk > MAX_UPDATES) || termTooBig || - (lastMergedToFreenet > 0 && (System.currentTimeMillis() - lastMergedToFreenet) > MAX_TIME)) { - - final ProtoIndex diskToMerge = idxDisk; - final File dir = idxDiskDir; - System.out.println("" + - idxDisk.ttab.size() + " terms in index, " + - mergedToDisk + " merges, " + - (lastMergedToFreenet <= 0 - ? 
"never merged to Freenet" - : ("last merged to Freenet "+TimeUtil.formatTime(System.currentTimeMillis() - lastMergedToFreenet)) + "ago")); - - System.out.print("Exceeded threshold for "); - if (lastMergedToFreenet > 0 && idxDisk.ttab.size() > MAX_TERMS) - System.out.print("terms, "); - if (idxDisk.ttab.size() > MAX_TERMS_NOT_UPLOADED) - System.out.print("not uploaded terms, "); - if (mergedToDisk > MAX_UPDATES) - System.out.print("updates, "); - if (termTooBig) - System.out.print("term too big, "); - if (lastMergedToFreenet > 0 && (System.currentTimeMillis() - lastMergedToFreenet) > MAX_TIME) - System.out.print("time since last merge, "); - System.out.println("starting new disk index and starting merge from disk to Freenet..."); - mergedToDisk = 0; - lastMergedToFreenet = -1; - idxDisk = null; - srlDisk = null; - leafsrlDisk = null; - idxDiskDir = null; - lastDiskIndexName = null; - - synchronized(freenetMergeSync) { - while(freenetMergeRunning) { - if(pushBroken) return; - System.err.println("Need to merge to Freenet, but last merge not finished yet. Waiting..."); - try { - freenetMergeSync.wait(); - } catch (InterruptedException e) { - // Ignore - } - } - if(pushBroken) return; - freenetMergeRunning = true; - } - - Runnable r = new Runnable() { - - public void run() { - try { - mergeToFreenet(diskToMerge, dir); - } catch (Throwable t) { - Logger.error(this, "Merge to Freenet failed: "+t, t); - System.err.println("Merge to Freenet failed: "+t); - t.printStackTrace(); - synchronized(freenetMergeSync) { - pushBroken = true; - } - } finally { - synchronized(freenetMergeSync) { - freenetMergeRunning = false; - if(!pushBroken) - lastMergedToFreenet = System.currentTimeMillis(); - freenetMergeSync.notifyAll(); - } - } - } - - }; - pr.getNode().executor.execute(r, "Library: Merge data from disk to Freenet"); - } else { - System.out.println("Not merging to Freenet yet: "+idxDisk.ttab.size()+" terms in index, "+mergedToDisk+" merges, "+(lastMergedToFreenet <= 0 ? 
"never merged to Freenet" : ("last merged to Freenet "+TimeUtil.formatTime(System.currentTimeMillis() - lastMergedToFreenet))+"ago")); - } - } - - private boolean writeURITo(File filename, FreenetURI uri) { - return writeStringTo(filename, uri.toString()); - } - - private boolean writeStringTo(File filename, String uri) { - FileOutputStream fos = null; - try { - fos = new FileOutputStream(filename); - OutputStreamWriter osw = new OutputStreamWriter(fos, "UTF-8"); - osw.write(uri.toString()); - osw.close(); - fos = null; - return true; - } catch (IOException e) { - Logger.error(this, "Failed to write to "+filename+" : "+uri, e); - System.out.println("Failed to write to "+filename+" : "+uri+" : "+e); - return false; - } finally { - Closer.close(fos); - } - } - - private String readStringFrom(File file) { - String ret; - FileInputStream fis = null; - try { - fis = new FileInputStream(file); - BufferedReader br = new BufferedReader(new InputStreamReader(fis, "UTF-8")); - ret = br.readLine(); - fis.close(); - fis = null; - return ret; - } catch (IOException e) { - // Ignore - return null; - } finally { - Closer.close(fis); - } - } - - private FreenetURI readURIFrom(File file) { - String s = readStringFrom(file); - if(s != null) { - try { - return new FreenetURI(s); - } catch (MalformedURLException e) { - // Ignore. - } - } - return null; - } - - /** Create a callback object which will do the merging of individual terms. This will be called - * for each term as it is unpacked from the existing on-disk index. It then merges in new data - * from newtrees and writes the subtree for the term back to disk. Most of the work is done in - * update() below. - * @param maxDiskEntrySizeExceeded Will be set if any single term is so large that we need to - * upload to Freenet immediately. 
*/ - private Closure>, TaskAbortException> createMergeFromNewtreesClosure(final MutableBoolean maxDiskEntrySizeExceeded) { - return new - Closure>, TaskAbortException>() { - /*@Override**/ public void invoke(Map.Entry> entry) throws TaskAbortException { - String key = entry.getKey(); - SkeletonBTreeSet tree = entry.getValue(); - if(logMINOR) Logger.minor(this, "Processing: "+key+" : "+tree); - if(tree != null) - Logger.debug(this, "Merging data (on disk) in term "+key); - else - Logger.debug(this, "Adding new term to disk index: "+key); - if (tree == null) { - entry.setValue(tree = makeEntryTree(leafsrlDisk)); - } - assert(tree.isBare()); - SortedSet toMerge = newtrees.get(key); - tree.update(toMerge, null); - if(toMerge.size() > MAX_DISK_ENTRY_SIZE) - synchronized(maxDiskEntrySizeExceeded) { - maxDiskEntrySizeExceeded.value = true; - } - toMerge = null; - newtrees.remove(key); - assert(tree.isBare()); - if(logMINOR) Logger.minor(this, "Updated: "+key+" : "+tree); - } - }; - } - - /** Create a new on-disk index from terms and newtrees. - * @return True if the size of any one item in the index is so large that we must upload - * immediately to Freenet. - * @throws TaskAbortException If something broke catastrophically. */ - private boolean createDiskIndex() throws TaskAbortException { - boolean tooBig = false; - // created a new index, fill it with data. - // DON'T MERGE, merge with a lot of data will deadlock. - // FIXME throw in update() if it will deadlock. - for(String key : terms) { - SkeletonBTreeSet tree = makeEntryTree(leafsrlDisk); - SortedSet toMerge = newtrees.get(key); - tree.addAll(toMerge); - if(toMerge.size() > MAX_DISK_ENTRY_SIZE) - tooBig = true; - toMerge = null; - tree.deflate(); - assert(tree.isBare()); - idxDisk.ttab.put(key, tree); - } - idxDisk.ttab.deflate(); - return tooBig; - } - - /** Read the TermEntry's from the Bucket into newtrees and terms, and set up the index - * properties. 
- * @param data The Bucket containing TermPageEntry's etc serialised with TermEntryReaderWriter. - */ - private long readTermsFrom(Bucket data) { - FileWriter w = null; - newtrees = new HashMap>(); - terms = new TreeSet(); - int entriesAdded = 0; - InputStream is = null; - try { - Logger.normal(this, "Bucket of buffer received, "+data.size()+" bytes"); - is = data.getInputStream(); - SimpleFieldSet fs = new SimpleFieldSet(new LineReadingInputStream(is), 1024, 512, true, true, true); - idxDisk.setName(fs.get("index.title")); - idxDisk.setOwnerEmail(fs.get("index.owner.email")); - idxDisk.setOwner(fs.get("index.owner.name")); - idxDisk.setTotalPages(fs.getLong("totalPages", -1)); - try{ - while(true){ // Keep going til an EOFExcepiton is thrown - TermEntry readObject = TermEntryReaderWriter.getInstance().readObject(is); - SortedSet set = newtrees.get(readObject.subj); - if(set == null) - newtrees.put(readObject.subj, set = new TreeSet()); - set.add(readObject); - terms.add(readObject.subj); - entriesAdded++; - } - }catch(EOFException e){ - // EOF, do nothing - } - } catch (IOException ex) { - java.util.logging.Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex); - } finally { - Closer.close(is); - } - return entriesAdded; - } - - /** Create a directory for an on-disk index. - * @return False if something broke and we can't continue. */ - private boolean createDiskDir() { - dirNumber++; - idxDiskDir = new File(DISK_DIR_PREFIX + Integer.toString(dirNumber)); - System.out.println("Created new disk dir for merging: "+idxDiskDir); - if(!(idxDiskDir.mkdir() || idxDiskDir.isDirectory())) { - Logger.error(this, "Unable to create new disk dir: "+idxDiskDir); - synchronized(this) { - pushBroken = true; - return false; - } - } - return true; - } - - /** Set up the serialisers for an on-disk index. - * @return False if something broke and we can't continue. 
*/ - private boolean makeDiskDirSerialiser() { - if(srlDisk == null) { - srlDisk = ProtoIndexSerialiser.forIndex(idxDiskDir); - LiveArchiver,SimpleProgress> archiver = - (LiveArchiver,SimpleProgress>)(srlDisk.getChildSerialiser()); - leafsrlDisk = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_FILE_LOCAL, archiver); - if(lastDiskIndexName == null) { - idxDisk = new ProtoIndex("CHK@", "test", null, null, 0L); - // FIXME more hacks: It's essential that we use the same FileArchiver instance here. - leafsrlDisk.setSerialiserFor(idxDisk); - } else { - try { - PullTask pull = new PullTask(lastDiskIndexName); - System.out.println("Pulling previous index "+lastDiskIndexName+" from disk so can update it."); - srlDisk.pull(pull); - System.out.println("Pulled previous index "+lastDiskIndexName+" from disk - updating..."); - idxDisk = pull.data; - if(idxDisk.getSerialiser().getLeafSerialiser() != archiver) - throw new IllegalStateException("Different serialiser: "+idxFreenet.getSerialiser()+" should be "+leafsrl); - } catch (TaskAbortException e) { - Logger.error(this, "Failed to download previous index for spider update: "+e, e); - System.err.println("Failed to download previous index for spider update: "+e); - e.printStackTrace(); - synchronized(freenetMergeSync) { - pushBroken = true; - } - return false; - } - } - } - return true; - } - static final String INDEX_DOCNAME = "index.yml"; - - private ProtoIndexComponentSerialiser leafsrl; - - /** Merge a disk dir to an on-Freenet index. Usually called on startup, i.e. we haven't just - * created the on-disk index so we need to setup the ProtoIndex etc. 
*/ - protected void mergeToFreenet(File diskDir) { - ProtoIndexSerialiser s = ProtoIndexSerialiser.forIndex(diskDir); - LiveArchiver,SimpleProgress> archiver = - (LiveArchiver,SimpleProgress>)(s.getChildSerialiser()); - ProtoIndexComponentSerialiser leaf = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_FILE_LOCAL, archiver); - String f = this.readStringFrom(new File(diskDir, LAST_DISK_FILENAME)); - if(f == null) { - if(diskDir.list().length == 0) { - System.err.println("Directory "+diskDir+" is empty. Nothing to merge."); - diskDir.delete(); - return; - } - // Ignore - System.err.println("Unable to merge old data "+diskDir); - return; - } else { - System.out.println("Continuing old bucket: "+f); - } - - ProtoIndex idxDisk = null; - try { - PullTask pull = new PullTask(f); - System.out.println("Pulling previous index "+f+" from disk so can update it."); - s.pull(pull); - System.out.println("Pulled previous index "+f+" from disk - updating..."); - idxDisk = pull.data; - if(idxDisk.getSerialiser().getLeafSerialiser() != archiver) - throw new IllegalStateException("Different serialiser: "+idxDisk.getSerialiser()+" should be "+archiver); - } catch (TaskAbortException e) { - Logger.error(this, "Failed to download previous index for spider update: "+e, e); - System.err.println("Failed to download previous index for spider update: "+e); - e.printStackTrace(); - synchronized(freenetMergeSync) { - pushBroken = true; - } - return; - } - mergeToFreenet(idxDisk, diskDir); - } - - private final Object inflateSync = new Object(); - - /** Merge from an on-disk index to an on-Freenet index. - * @param diskToMerge The on-disk index. - * @param diskDir The folder the on-disk index is stored in. 
- */ - protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { - System.out.println("Merging on-disk index to Freenet: "+diskDir); - if(lastUploadURI == null) { - lastUploadURI = readURIFrom(new File(LAST_URL_FILENAME)); - } - setupFreenetCacheDir(); - - makeFreenetSerialisers(); - - updateOverallMetadata(diskToMerge); - - final SkeletonBTreeMap> newtrees = diskToMerge.ttab; - - // Do the upload - - // async merge - Closure>, TaskAbortException> clo = - createMergeFromTreeClosure(newtrees); - try { - long mergeStartTime = System.currentTimeMillis(); - assert(idxFreenet.ttab.isBare()); - Iterator it = - diskToMerge.ttab.keySetAutoDeflate().iterator(); - TreeSet terms = new TreeSet(); - while(it.hasNext()) terms.add(it.next()); - System.out.println("Merging "+terms.size()+" terms from disk to Freenet..."); - assert(terms.size() == diskToMerge.ttab.size()); - assert(idxFreenet.ttab.isBare()); - assert(diskToMerge.ttab.isBare()); - long entriesAdded = terms.size(); - // Run the actual merge. - idxFreenet.ttab.update(terms, null, clo, new TaskAbortExceptionConvertor()); - assert(idxFreenet.ttab.isBare()); - // Deflate the main tree. - newtrees.deflate(); - assert(diskToMerge.ttab.isBare()); - - // Push the top node to a CHK. - PushTask task4 = new PushTask(idxFreenet); - task4.meta = FreenetURI.EMPTY_CHK_URI; - srl.push(task4); - - // Now wait for the inserts to finish. They are started asynchronously in the above merge. 
- FreenetArchiver> arch = - (FreenetArchiver>) srl.getChildSerialiser(); - arch.waitForAsyncInserts(); - - long mergeEndTime = System.currentTimeMillis(); - System.out.println(entriesAdded + " entries merged in " + (mergeEndTime-mergeStartTime) + " ms, root at " + task4.meta); - FreenetURI uri = (FreenetURI)task4.meta; - lastUploadURI = uri; - if(writeURITo(new File(LAST_URL_FILENAME), uri)) { - newtrees.deflate(); - diskToMerge = null; - terms = null; - System.out.println("Finished with disk index "+diskDir); - FileUtil.removeAll(diskDir); - } - - // Create the USK to redirect to the CHK at the top of the index. - uploadUSKForFreenetIndex(uri); - - } catch (TaskAbortException e) { - Logger.error(this, "Failed to upload index for spider: "+e, e); - System.err.println("Failed to upload index for spider: "+e); - e.printStackTrace(); - synchronized(freenetMergeSync) { - pushBroken = true; - } - } - } - - private void uploadUSKForFreenetIndex(FreenetURI uri) { - FreenetURI privUSK = spiderIndexURIs.getPrivateUSK(); - try { - FreenetURI tmp = pr.getHLSimpleClient().insertRedirect(privUSK, uri); - long ed; - synchronized(freenetMergeSync) { - ed = spiderIndexURIs.setEdition(tmp.getEdition()+1); - } - System.out.println("Uploaded index as USK to "+tmp); - - writeStringTo(new File(EDITION_FILENAME), Long.toString(ed)); - - } catch (InsertException e) { - System.err.println("Failed to upload USK for index update: "+e); - e.printStackTrace(); - Logger.error(this, "Failed to upload USK for index update", e); - } - } - - /** Create a Closure which will merge the subtrees from one index (on disk) into the subtrees - * of another index (on Freenet). It will be called with each subtree from the on-Freenet - * index, and will merge data from the relevant on-disk subtree. Both subtrees are initially - * deflated, and should be deflated when we leave the method, to avoid running out of memory. - * @param newtrees The on-disk tree of trees to get data from. 
- * @return - */ - private Closure>, TaskAbortException> createMergeFromTreeClosure(final SkeletonBTreeMap> newtrees) { - return new - Closure>, TaskAbortException>() { - /*@Override**/ public void invoke(Map.Entry> entry) throws TaskAbortException { - String key = entry.getKey(); - SkeletonBTreeSet tree = entry.getValue(); - if (logMINOR) Logger.minor(this, "Processing: "+key+" : "+tree); - boolean newTree = false; - if (tree == null) { - entry.setValue(tree = makeEntryTree(leafsrl)); - newTree = true; - } - assert(tree.isBare()); - SortedSet data; - // Can't be run in parallel. - synchronized(inflateSync) { - newtrees.inflate(key, true); - SkeletonBTreeSet entries; - entries = newtrees.get(key); - // CONCURRENCY: Because the lower-level trees are packed by the top tree, the bottom - // trees (SkeletonBTreeSet's) are not independant of each other. When the newtrees - // inflate above runs, it can deflate a tree that is still in use by another instance - // of this callback. Therefore we must COPY IT AND DEFLATE IT INSIDE THE LOCK. - entries.inflate(); - data = new TreeSet(entries); - entries.deflate(); - assert(entries.isBare()); - } - if (tree != null) { - if (newTree) { - tree.addAll(data); - assert(tree.size() == data.size()); - Logger.debug(this, "Added data to Freenet for term "+key+" : "+data.size()); - } else { - int oldSize = tree.size(); - tree.update(data, null); - // Note that it is possible for data.size() + oldSize != tree.size(), because we might be merging data we've already merged. - // But most of the time it will add up. - Logger.debug(this, "Merged data to Freenet in term "+key+" : "+data.size()+" + "+oldSize+" -> "+tree.size()); - } - tree.deflate(); - assert(tree.isBare()); - if(logMINOR) Logger.minor(this, "Updated: "+key+" : "+tree); - } - } - }; - } - - /** Update the overall metadata for the on-Freenet index from the on-disk index. 
*/ - private void updateOverallMetadata(ProtoIndex diskToMerge) { - idxFreenet.setName(diskToMerge.getName()); - idxFreenet.setOwnerEmail(diskToMerge.getOwnerEmail()); - idxFreenet.setOwner(diskToMerge.getOwner()); - // This is roughly accurate, it might not be exactly so if we process a bit out of order. - idxFreenet.setTotalPages(diskToMerge.getTotalPages() + Math.max(0,idxFreenet.getTotalPages())); - } - - /** Setup the serialisers for uploading to Freenet. These convert tree nodes to and from blocks - * on Freenet, essentially. */ - private void makeFreenetSerialisers() { - if(srl == null) { - srl = ProtoIndexSerialiser.forIndex(lastUploadURI.toASCIIString(), Priority.Bulk); - LiveArchiver,SimpleProgress> archiver = - (LiveArchiver,SimpleProgress>)(srl.getChildSerialiser()); - leafsrl = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_DEFAULT, archiver); - if(lastUploadURI == null) { - idxFreenet = new ProtoIndex("CHK@", "test", null, null, 0L); - // FIXME more hacks: It's essential that we use the same FreenetArchiver instance here. 
- leafsrl.setSerialiserFor(idxFreenet); - } else { - try { - PullTask pull = new PullTask(lastUploadURI); - System.out.println("Pulling previous index "+lastUploadURI+" so can update it."); - srl.pull(pull); - System.out.println("Pulled previous index "+lastUploadURI+" - updating..."); - idxFreenet = pull.data; - if(idxFreenet.getSerialiser().getLeafSerialiser() != archiver) - throw new IllegalStateException("Different serialiser: "+idxFreenet.getSerialiser()+" should be "+leafsrl); - } catch (TaskAbortException e) { - Logger.error(this, "Failed to download previous index for spider update: "+e, e); - System.err.println("Failed to download previous index for spider update: "+e); - e.printStackTrace(); - synchronized(freenetMergeSync) { - pushBroken = true; - } - return; - } - } - } - } - - /** Set up the on-disk cache, which keeps a copy of everything we upload to Freenet, so we - * won't need to re-download it, which can be very slow and doesn't always succeed. */ - private void setupFreenetCacheDir() { - if(FreenetArchiver.getCacheDir() == null) { - File dir = new File("library-spider-pushed-data-cache"); - dir.mkdir(); - FreenetArchiver.setCacheDir(dir); - } - } - - protected static SkeletonBTreeSet makeEntryTree(ProtoIndexComponentSerialiser leafsrl) { - SkeletonBTreeSet tree = new SkeletonBTreeSet(ProtoIndex.BTREE_NODE_MIN); - leafsrl.setSerialiserFor(tree); - return tree; - } public void start() { - final String[] oldToMerge; - synchronized(freenetMergeSync) { - oldToMerge = new File(".").list(new FilenameFilter() { - - public boolean accept(File arg0, String arg1) { - if(!(arg1.toLowerCase().startsWith(BASE_FILENAME_PUSH_DATA))) return false; - File f = new File(arg0, arg1); - if(!f.isFile()) return false; - if(f.length() == 0) { f.delete(); return false; } - String s = f.getName().substring(BASE_FILENAME_PUSH_DATA.length()); - pushNumber = Math.max(pushNumber, Long.parseLong(s)+1); - return true; - } - - }); - } - final String[] dirsToMerge; - 
synchronized(freenetMergeSync) { - dirsToMerge = new File(".").list(new FilenameFilter() { - - public boolean accept(File arg0, String arg1) { - if(!(arg1.toLowerCase().startsWith(DISK_DIR_PREFIX))) return false; - File f = new File(arg0, arg1); - String s = f.getName().substring(DISK_DIR_PREFIX.length()); - dirNumber = Math.max(dirNumber, Integer.parseInt(s)+1); - return true; - } - - }); - } - if(oldToMerge != null && oldToMerge.length > 0) { - System.out.println("Found "+oldToMerge.length+" buckets of old index data to merge..."); - Runnable r = new Runnable() { - - public void run() { - synchronized(freenetMergeSync) { - for(String filename : oldToMerge) { - File f = new File(filename); - toMergeToDisk.add(new FileBucket(f, true, false, false, true)); - } - } - wrapMergeToDisk(); - } - - }; - pr.getNode().executor.execute(r, "Library: handle index data from previous run"); - } - if(dirsToMerge != null && dirsToMerge.length > 0) { - System.out.println("Found "+dirsToMerge.length+" disk trees of old index data to merge..."); - Runnable r = new Runnable() { - - public void run() { - synchronized(freenetMergeSync) { - while(freenetMergeRunning) { - if(pushBroken) return; - System.err.println("Need to merge to Freenet, but last merge not finished yet. 
Waiting..."); - try { - freenetMergeSync.wait(); - } catch (InterruptedException e) { - // Ignore - } - } - if(pushBroken) return; - freenetMergeRunning = true; - } - try { - for(String filename : dirsToMerge) { - File f = new File(filename); - mergeToFreenet(f); - } - } finally { - synchronized(freenetMergeSync) { - freenetMergeRunning = false; - if(!pushBroken) - lastMergedToFreenet = System.currentTimeMillis(); - freenetMergeSync.notifyAll(); - } - } - - } - - }; - pr.getNode().executor.execute(r, "Library: handle trees from previous run"); - } + System.out.println("Started pass-though spider uploader."); } public void handlePushBuffer(SimpleFieldSet params, Bucket data) { - if(data.size() == 0) { Logger.error(this, "Bucket of data ("+data+") to push is empty", new Exception("error")); System.err.println("Bucket of data ("+data+")to push from Spider is empty"); @@ -907,9 +52,6 @@ public void handlePushBuffer(SimpleFieldSet params, Bucket data) { return; } - // Process data off-thread, but only one load at a time. - // Hence it won't stall Spider unless we get behind. - long pn; synchronized(this) { pn = pushNumber++; @@ -924,54 +66,41 @@ public void handlePushBuffer(SimpleFieldSet params, Bucket data) { } catch (IOException e1) { System.err.println("Unable to back up push data #"+pn+" : "+e1); e1.printStackTrace(); - Logger.error(this, "Unable to back up push data #"+pn, e1); - output = data; } - synchronized(freenetMergeSync) { - boolean waited = false; - while(toMergeToDisk.size() > MAX_HANDLING_COUNT && !pushBroken) { - Logger.error(this, "Spider feeding us data too fast, waiting for background process to finish. Ahead of us in the queue: "+toMergeToDisk.size()); + // Stall Spider if we get behind. 
+ int countFilesToMerge = 0; + int tooManyFilesToMerge = 0; + do { + if (tooManyFilesToMerge > 0) { + System.out.println("There are " + countFilesToMerge + " files to merge...stalling spider."); try { - waited = true; - freenetMergeSync.wait(); + Thread.sleep( + tooManyFilesToMerge * + tooManyFilesToMerge * + 100 * + 1000); } catch (InterruptedException e) { - // Ignore - } - } - toMergeToDisk.add(output); - if(pushBroken) { - if(toMergeToDisk.size() < PUSH_BROKEN_MAX_HANDLING_COUNT) - // We have written the data, it will be recovered after restart. - Logger.error(this, "Pushing is broken, failing"); - else { - // Wait forever to prevent running out of disk space. - // Spider is single threaded. - // FIXME: Use an error return or a throwable to shut down Spider. - while(true) { - try { - freenetMergeSync.wait(); - } catch (InterruptedException e) { - // Ignore - } - } + // TODO Auto-generated catch block + e.printStackTrace(); } - return; - } - if(waited) - Logger.error(this, "Waited for previous handler to go away, moving on..."); - //if(freenetMergeRunning) return; // Already running, no need to restart it. - if(diskMergeRunning) return; // Already running, no need to restart it. 
- } - Runnable r = new Runnable() { - - public void run() { -// wrapMergeToFreenet(); - wrapMergeToDisk(); } - - }; - pr.getNode().executor.execute(r, "Library: Handle data from Spider"); + String[] filesToMerge = new File(".").list(new FilenameFilter() { + + public boolean accept(File arg0, String arg1) { + if(!(arg1.toLowerCase().startsWith(BASE_FILENAME_PUSH_DATA))) return false; + File f = new File(arg0, arg1); + if(!f.isFile()) return false; + if(f.length() == 0) { f.delete(); return false; } + return true; + } + + }); + + countFilesToMerge = filesToMerge.length; + tooManyFilesToMerge = countFilesToMerge - 2; + } while (tooManyFilesToMerge > 0); + System.out.println("There are " + countFilesToMerge + " files to merge."); } public FreenetURI getPublicUSKURI() { @@ -989,7 +118,4 @@ public void handleGetSpiderURI(PluginReplySender replysender) { // Race condition, ignore. } } - - - } diff --git a/uploader/src/freenet/library/uploader/DirectoryCreator.java b/uploader/src/freenet/library/uploader/DirectoryCreator.java new file mode 100644 index 00000000..88e44921 --- /dev/null +++ b/uploader/src/freenet/library/uploader/DirectoryCreator.java @@ -0,0 +1,95 @@ +package freenet.library.uploader; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.util.Map; + +import freenet.library.index.ProtoIndex; +import freenet.library.index.ProtoIndexComponentSerialiser; +import freenet.library.index.ProtoIndexSerialiser; +import freenet.library.index.TermEntry; +import freenet.library.io.serial.LiveArchiver; +import freenet.library.io.serial.Serialiser.PushTask; +import freenet.library.util.SkeletonBTreeSet; +import freenet.library.util.exec.SimpleProgress; +import freenet.library.util.exec.TaskAbortException; + +class DirectoryCreator { + private ProtoIndex idxDisk; + private ProtoIndexComponentSerialiser leafsrlDisk; + private int countTerms; + private ProtoIndexSerialiser srlDisk; + private 
File newIndexDir; + + DirectoryCreator(File directory) { + int nextIndexDirNumber = 1; + String nextIndexDirName = UploaderPaths.DISK_DIR_PREFIX + nextIndexDirNumber; + newIndexDir = new File(directory, nextIndexDirName); + newIndexDir.mkdir(); + srlDisk = ProtoIndexSerialiser.forIndex(newIndexDir); + LiveArchiver, SimpleProgress> archiver = + (LiveArchiver, SimpleProgress>) srlDisk.getChildSerialiser(); + leafsrlDisk = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_FILE_LOCAL, archiver); + idxDisk = new ProtoIndex("CHK@", "test", null, null, 0L); + leafsrlDisk.setSerialiserFor(idxDisk); + + countTerms = 0; + } + + private static boolean writeStringTo(File filename, String uri) { + FileOutputStream fos = null; + try { + fos = new FileOutputStream(filename); + OutputStreamWriter osw = new OutputStreamWriter(fos, "UTF-8"); + osw.write(uri.toString()); + osw.close(); + fos = null; + return true; + } catch (IOException e) { + System.out.println("Failed to write to "+filename+" : "+uri+" : "+e); + return false; + } finally { + try { + if (fos != null) { + fos.close(); + } + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + } + + public void putEntry(TermEntry tt) throws TaskAbortException { + // TODO Auto-generated method stub + SkeletonBTreeSet tree; + if (idxDisk.ttab.containsKey(tt.subj)) { + // merge + tree = idxDisk.ttab.get(tt.subj); + } else { + tree = new SkeletonBTreeSet(ProtoIndex.BTREE_NODE_MIN); + leafsrlDisk.setSerialiserFor(tree); + } + tree.add(tt); + tree.deflate(); + assert(tree.isBare()); + idxDisk.ttab.put(tt.subj, tree); + countTerms++; + } + + public void done() throws TaskAbortException { + // TODO Auto-generated method stub + idxDisk.ttab.deflate(); + assert(idxDisk.ttab.isBare()); + PushTask task4 = new PushTask(idxDisk); + srlDisk.push(task4); + String uri = (String) task4.meta; + System.out.println("Created new directory, file root at " + uri + + " with " + countTerms + " 
terms."); + writeStringTo(new File(newIndexDir, UploaderPaths.LAST_DISK_FILENAME), uri); + } + + +} diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index 2fdbf16b..3845c3b3 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -6,22 +6,15 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.PipedInputStream; -import java.io.PipedOutputStream; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; -import net.pterodactylus.fcp.ClientGet; -import net.pterodactylus.fcp.ClientHello; import net.pterodactylus.fcp.ClientPut; -import net.pterodactylus.fcp.CloseConnectionDuplicateClientName; import net.pterodactylus.fcp.FcpAdapter; import net.pterodactylus.fcp.FcpConnection; -import net.pterodactylus.fcp.FcpMessage; import net.pterodactylus.fcp.FinishedCompression; -import net.pterodactylus.fcp.NodeHello; import net.pterodactylus.fcp.PutFailed; import net.pterodactylus.fcp.PutFetchable; import net.pterodactylus.fcp.PutSuccessful; @@ -186,7 +179,11 @@ public void receivedPutSuccessful(FcpConnection c, PutSuccessful ps) { assert ps != null; if (!identifier.equals(ps.getIdentifier())) return; - System.out.println("receivedPutSuccessful for " + token + ": " + ps); + System.out.println("receivedPutSuccessful for " + token); + System.out.println("Storing " + progressTotal + + " took " + + ((ps.getCompletionTime() - ps.getStartupTime()) / 1000) + "s"); + markDone(); } @@ -196,7 +193,7 @@ public void receivedPutFetchable(FcpConnection c, PutFetchable pf) { assert pf != null; if (!identifier.equals(pf.getIdentifier())) return; - System.out.println("receivedPutFetchable for " + token + ": " + pf); + System.out.println("receivedPutFetchable for " + token); synchronized (this) { this.notifyAll(); } @@ -212,7 +209,7 @@ public void 
receivedPutFailed(FcpConnection c, PutFailed pf) { synchronized (putter) { putter.notify(); } - System.out.println("receivedPutFailed for " + token + ": " + pf); + System.out.println("receivedPutFailed for " + token); markDone(); } @@ -246,9 +243,7 @@ public void receivedStartedCompression(FcpConnection c, assert startedCompression != null; if (!identifier.equals(startedCompression.getIdentifier())) return; - System.out.println("receivedStartedCompression for " + - token + ": " + - startedCompression); + System.out.println("receivedStartedCompression for " + token); } @Override @@ -258,9 +253,7 @@ public void receviedFinishedCompression(FcpConnection c, assert finishedCompression != null; if (!identifier.equals(finishedCompression.getIdentifier())) return; - System.out.println("receivedFinishedCompression for " + - token + ": " + - finishedCompression); + System.out.println("receivedFinishedCompression for " + token); } public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { @@ -268,9 +261,7 @@ public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { assert uriGenerated != null; if (!identifier.equals(uriGenerated.getIdentifier())) return; - System.out.println("receivedURIGenerated for " + - token + ": " + - uriGenerated); + System.out.println("receivedURIGenerated for " + token); uri = uriGenerated.getURI(); synchronized (this) { this.notifyAll(); diff --git a/uploader/src/freenet/library/uploader/IndexPeeker.java b/uploader/src/freenet/library/uploader/IndexPeeker.java new file mode 100644 index 00000000..f309c8d3 --- /dev/null +++ b/uploader/src/freenet/library/uploader/IndexPeeker.java @@ -0,0 +1,24 @@ +package freenet.library.uploader; + +class IndexPeeker { + + class Section { + + boolean contains(String subj) { + if (subj.substring(0, 1).equals("delta".substring(0, 1))) + return true; + return false; + } + + } + + boolean onTop(String subj) { + // TODO Auto-generated method stub + return false; + } + + Section 
getSectionFor(String string) { + return new Section(); + } + +} diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index ecd1eadf..2a30d2cf 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -5,17 +5,15 @@ package freenet.library.uploader; import java.io.File; +import java.io.FileNotFoundException; import java.io.FilenameFilter; -import java.io.EOFException; import java.io.IOException; import java.io.DataInputStream; import java.io.FileInputStream; -import java.io.InputStream; import java.net.UnknownHostException; -import java.util.HashMap; -import java.util.Map; -import java.util.SortedSet; -import java.util.TreeSet; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; import net.pterodactylus.fcp.ClientHello; import net.pterodactylus.fcp.CloseConnectionDuplicateClientName; @@ -25,10 +23,8 @@ import net.pterodactylus.fcp.NodeHello; import freenet.library.FactoryRegister; -import freenet.library.index.ProtoIndex; import freenet.library.index.TermEntry; -import freenet.library.index.TermPageEntry; -import freenet.library.index.TermEntryReaderWriter; +import freenet.library.util.exec.TaskAbortException; /** * Standalone program to do the merging. @@ -56,49 +52,28 @@ *
                  Done. */ final public class Merger { - /** Read the TermEntry's from the Bucket into newtrees and terms, and set up the index - * properties. - * @param data The Bucket containing TermPageEntry's etc serialised with TermEntryReaderWriter. - */ - private static Map> readTermsFrom(File f) { - Map> newtrees = new HashMap>(); - DataInputStream is = null; - try { - is = new DataInputStream(new FileInputStream(f)); - String line; - int laps = 0; - do { - line = is.readLine(); - System.out.println("Line: " + line); - if (laps > 100) { - System.err.println("Cannot get out of file header."); - System.exit(1); - } - } while (!"End".equals(line)); - try{ - while(true){ // Keep going til an EOFExcepiton is thrown - TermEntry readObject = TermEntryReaderWriter.getInstance().readObject(is); - SortedSet set = newtrees.get(readObject.subj); - if(set == null) - newtrees.put(readObject.subj, set = new TreeSet()); - set.add(readObject); - } - }catch(EOFException e){ - // EOF, do nothing - } - } catch (IOException ex) { - ex.printStackTrace(); - System.exit(1); - } finally { - try { - is.close(); - } catch (IOException e) { - System.err.println("Cannot close"); - System.exit(1); - } - } - return newtrees; - } + + static String[] getMatchingFiles(File directory, + final String baseFilename) { + return directory.list(new FilenameFilter() { + + public boolean accept(File arg0, String arg1) { + if (!(arg1.toLowerCase().startsWith(baseFilename))) { + return false; + } + File f = new File(arg0, arg1); + if (!f.isFile()) { + return false; + } + if (f.length() == 0) { + f.delete(); + return false; + } + return true; + } + + }); + } @@ -188,47 +163,77 @@ public boolean accept(File arg0, String arg1) { System.out.println("There are " + dirsToMerge.length + " old directories to merge."); if (dirsToMerge.length > 0) { + System.out.println("Merging the first one."); new DirectoryUploader(connection, new File(directory, dirsToMerge[0])).run(); return; } - String[] filesToMerge = 
directory.list(new FilenameFilter() { - - public boolean accept(File arg0, String arg1) { - if(!(arg1.toLowerCase().startsWith(UploaderPaths.BASE_FILENAME_PUSH_DATA))) return false; - File f = new File(arg0, arg1); - if(!f.isFile()) return false; - if(f.length() == 0) { f.delete(); return false; } - return true; - } - - }); + String filteredFilesBaseFilename = UploaderPaths.BASE_FILENAME_PUSH_DATA + "filtered."; + // Calculate the next name + int lastFoundFiltered = 0; + for (String filename : getMatchingFiles(directory, filteredFilesBaseFilename)) { + int numberFound = Integer.parseInt(filename.substring(filteredFilesBaseFilename.length())); + if (numberFound > lastFoundFiltered) { + lastFoundFiltered = numberFound; + } + } + System.out.println("Last found: " + lastFoundFiltered); + + String[] filesToMerge = getMatchingFiles(directory, UploaderPaths.BASE_FILENAME_PUSH_DATA); System.out.println("There are " + filesToMerge.length + " files to merge."); + + DirectoryCreator creator = new DirectoryCreator(directory); + IndexPeeker peeker = new IndexPeeker(); + Set toBeRemoved = new HashSet(); + TermEntryFileWriter notMerged = null; + for (String s : filesToMerge) { System.out.println("File: " + s); - Map> terms = readTermsFrom(new File(s)); - System.out.println("terms:"); - SortedSet ss = null; - for (String t : terms.keySet()) { - ss = terms.get(t); - System.out.println("\t" + t + ", " + - ss.size() + " elements"); - for (TermEntry tt : ss) { - if (tt.entryType() == TermEntry.EntryType.PAGE) { - TermPageEntry tpe = (TermPageEntry) tt; - System.out.println("\t" + tpe.page + ":"); - for (Map.Entry entry : - tpe.posFragments.entrySet()) { - System.out.println("\t\t" + entry.getKey() + - " - " + entry.getValue()); - } - } - } - } + File file = new File(s); + FileInputStream fileInputStream; + try { + fileInputStream = new FileInputStream(file); + } catch (FileNotFoundException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + return; + } + 
TermEntryReaderIterator teri = new TermEntryReaderIterator(new DataInputStream(fileInputStream)); + Iterator iterator = teri.iterator(); + IndexPeeker.Section section = peeker.getSectionFor("a"); + while (iterator.hasNext()) { + TermEntry tt = iterator.next(); + if (peeker.onTop(tt.subj) || + section.contains(tt.subj)) { + creator.putEntry(tt); + } else { + if (notMerged == null) { + lastFoundFiltered ++; + String filteredFilename = filteredFilesBaseFilename + lastFoundFiltered; + notMerged = new TermEntryFileWriter(teri.getHeader(), new File(directory, filteredFilename)); + } + notMerged.write(tt); + if (notMerged.isFull()) { + notMerged.close(); + notMerged = null; + } + } + } + toBeRemoved.add(file); + } + notMerged.close(); + notMerged = null; + creator.done(); + for (File file : toBeRemoved) { + System.out.println("Removing file " + file); + file.delete(); } - } finally { + } catch (TaskAbortException e) { + e.printStackTrace(); + return; + } finally { connection.removeFcpListener(closeListener); if (connection != null) { connection.close(); diff --git a/uploader/src/freenet/library/uploader/TermEntryFileWriter.java b/uploader/src/freenet/library/uploader/TermEntryFileWriter.java new file mode 100644 index 00000000..c5919553 --- /dev/null +++ b/uploader/src/freenet/library/uploader/TermEntryFileWriter.java @@ -0,0 +1,62 @@ +package freenet.library.uploader; + +import java.io.DataOutputStream; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.Map; +import java.util.Map.Entry; + +import freenet.library.index.TermEntry; +import freenet.library.index.TermEntryReaderWriter; + +class TermEntryFileWriter { + private DataOutputStream os; + int counter; + + public TermEntryFileWriter(Map params, + File file) { + counter = 0; + try { + os = new DataOutputStream(new FileOutputStream(file)); + } catch (FileNotFoundException e) { + e.printStackTrace(); + System.exit(1); + return; + 
} + try { + for (Entry entry : params.entrySet()) { + os.writeBytes(entry.getKey() + "=" + entry.getValue() + "\n"); + } + os.writeBytes("End\n"); + } catch (IOException e) { + e.printStackTrace(); + System.exit(1); + } + } + + void write(TermEntry tt) { + try { + TermEntryReaderWriter.getInstance().writeObject(tt, os); + } catch (IOException e) { + e.printStackTrace(); + System.exit(1); + } + counter ++; + } + + void close() { + try { + os.close(); + } catch (IOException e) { + e.printStackTrace(); + System.exit(1); + } + System.out.println("Written new file with " + counter + " entries."); + } + + public boolean isFull() { + return counter >= 100000; + } +} diff --git a/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java b/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java new file mode 100644 index 00000000..9afeeaa0 --- /dev/null +++ b/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java @@ -0,0 +1,80 @@ +package freenet.library.uploader; + +import java.io.DataInputStream; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +import freenet.library.index.TermEntry; +import freenet.library.index.TermEntryReaderWriter; + +class TermEntryReaderIterator implements Iterable { + private DataInputStream is; + private Map header; + + public TermEntryReaderIterator(DataInputStream s) { + is = s; + header = new HashMap(5); + String line = ""; + int laps = 0; + do { + try { + line = is.readLine(); + System.out.println("Line: " + line); + String[] parts = line.split("=", 2); + if (parts.length >= 2) { + header.put(parts[0], parts[1]); + } + } catch (IOException e) { + System.err.println("Error: Not closed header."); + System.exit(1); + } + if (laps > 100) { + System.err.println("Error: Cannot get out of file header."); + System.exit(1); + } + } while (!"End".equals(line)); + } + + @Override + public Iterator iterator() { + return new 
Iterator() { + TermEntry lastRead = null; + + @Override + public boolean hasNext() { + if (lastRead != null) { + return true; + } + lastRead = next(); + return lastRead != null; + } + + @Override + public TermEntry next() { + if (lastRead != null) { + TermEntry t = lastRead; + lastRead = null; + return t; + } + try { + return TermEntryReaderWriter.getInstance().readObject(is); + } catch (IOException e) { + return null; + } + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + + }; + } + + Map getHeader() { + return Collections.unmodifiableMap(header); + } +} \ No newline at end of file From 8450a131f1e3499f0576dcfab76962b2a88a4f68 Mon Sep 17 00:00:00 2001 From: anonymous Date: Fri, 10 Apr 2015 18:57:28 +0000 Subject: [PATCH 045/180] Let also this pool go down to 0 threads when idle so that the script can eventually finish. --HG-- branch : eclipse-separation --- shared/src/freenet/library/util/concurrent/Executors.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/shared/src/freenet/library/util/concurrent/Executors.java b/shared/src/freenet/library/util/concurrent/Executors.java index 254dd26c..9d604792 100644 --- a/shared/src/freenet/library/util/concurrent/Executors.java +++ b/shared/src/freenet/library/util/concurrent/Executors.java @@ -50,7 +50,7 @@ public class Executors { synchronized (Executors.class) { if (default_exec == null) { default_exec = new ThreadPoolExecutor( - 1, 0x40, 60, TimeUnit.SECONDS, + 0, 0x40, 60, TimeUnit.SECONDS, new LinkedBlockingQueue(), new ThreadPoolExecutor.CallerRunsPolicy() ); From dafbfcfb34c975fb0c83e290e1d42769283d5f0e Mon Sep 17 00:00:00 2001 From: anonymous Date: Fri, 10 Apr 2015 19:00:54 +0000 Subject: [PATCH 046/180] Tune the parameters for amount of stored entries and amount of files. 
--HG-- branch : eclipse-separation --- src/plugins/Library/SpiderIndexUploader.java | 2 +- uploader/src/freenet/library/uploader/TermEntryFileWriter.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index 17012c10..9b17dab4 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -98,7 +98,7 @@ public boolean accept(File arg0, String arg1) { }); countFilesToMerge = filesToMerge.length; - tooManyFilesToMerge = countFilesToMerge - 2; + tooManyFilesToMerge = countFilesToMerge - 20; } while (tooManyFilesToMerge > 0); System.out.println("There are " + countFilesToMerge + " files to merge."); } diff --git a/uploader/src/freenet/library/uploader/TermEntryFileWriter.java b/uploader/src/freenet/library/uploader/TermEntryFileWriter.java index c5919553..47b9e51a 100644 --- a/uploader/src/freenet/library/uploader/TermEntryFileWriter.java +++ b/uploader/src/freenet/library/uploader/TermEntryFileWriter.java @@ -57,6 +57,6 @@ void close() { } public boolean isFull() { - return counter >= 100000; + return counter >= 300000; } } From e2071fdd0d193267f6bbe665f11802f3d37c8919 Mon Sep 17 00:00:00 2001 From: anonymous Date: Fri, 10 Apr 2015 19:03:23 +0000 Subject: [PATCH 047/180] Don't finish the operation until the PutSuccessful is received. 
--HG-- branch : eclipse-separation --- uploader/src/freenet/library/uploader/FcpArchiver.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index 3845c3b3..ada8c81c 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -229,10 +229,6 @@ public void receivedSimpleProgress(FcpConnection c, progressTotal = sp.getTotal(); System.out.println("receivedSimpleProgess for " + token + ": " + sp.getSucceeded() + "/" + sp.getTotal()); - if (sp.isFinalizedTotal() && - sp.getSucceeded() == sp.getTotal()) { - markDone(); - } printLeft(); } From 1f370c3a7a8e8b475a3aa85a683e1efac8beb526 Mon Sep 17 00:00:00 2001 From: anonymous Date: Fri, 10 Apr 2015 19:05:08 +0000 Subject: [PATCH 048/180] First working implementation of the IndexPeeker to group Terms. Fixed the order of the processing the files, first the filtered files, then the new files. 
--HG-- branch : eclipse-separation --- .../freenet/library/uploader/IndexPeeker.java | 11 ++- .../src/freenet/library/uploader/Merger.java | 67 ++++++++++++------- .../library/uploader/UploaderPaths.java | 1 + 3 files changed, 53 insertions(+), 26 deletions(-) diff --git a/uploader/src/freenet/library/uploader/IndexPeeker.java b/uploader/src/freenet/library/uploader/IndexPeeker.java index f309c8d3..fa0cafac 100644 --- a/uploader/src/freenet/library/uploader/IndexPeeker.java +++ b/uploader/src/freenet/library/uploader/IndexPeeker.java @@ -4,8 +4,14 @@ class IndexPeeker { class Section { + private String center; + + public Section(String string) { + center = string; + } + boolean contains(String subj) { - if (subj.substring(0, 1).equals("delta".substring(0, 1))) + if (subj.substring(0, 1).equals(center.substring(0, 1))) return true; return false; } @@ -18,7 +24,8 @@ boolean onTop(String subj) { } Section getSectionFor(String string) { - return new Section(); + System.out.println("Grouping around " + string); + return new Section(string); } } diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 2a30d2cf..22949da1 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -141,7 +141,7 @@ public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnecti exitStatus = 1; return; } finally { - connection.removeFcpListener(helloListener); + connection.removeFcpListener(helloListener); } } helloListener = null; @@ -169,26 +169,36 @@ public boolean accept(File arg0, String arg1) { return; } - String filteredFilesBaseFilename = UploaderPaths.BASE_FILENAME_PUSH_DATA + "filtered."; // Calculate the next name int lastFoundFiltered = 0; - for (String filename : getMatchingFiles(directory, filteredFilesBaseFilename)) { - int numberFound = Integer.parseInt(filename.substring(filteredFilesBaseFilename.length())); + String[] filesToMerge1 = 
getMatchingFiles(directory, UploaderPaths.BASE_FILENAME_FILTERED_DATA); + System.out.println("There is " + filesToMerge1.length + " old files to merge."); + for (String filename : filesToMerge1) { + int numberFound = Integer.parseInt(filename.substring(UploaderPaths.BASE_FILENAME_FILTERED_DATA.length())); if (numberFound > lastFoundFiltered) { lastFoundFiltered = numberFound; } } System.out.println("Last found: " + lastFoundFiltered); - String[] filesToMerge = getMatchingFiles(directory, UploaderPaths.BASE_FILENAME_PUSH_DATA); + String[] filesToMerge2 = getMatchingFiles(directory, UploaderPaths.BASE_FILENAME_PUSH_DATA); - System.out.println("There are " + filesToMerge.length + " files to merge."); + System.out.println("There is " + filesToMerge2.length + " new files to merge."); DirectoryCreator creator = new DirectoryCreator(directory); IndexPeeker peeker = new IndexPeeker(); - Set toBeRemoved = new HashSet(); + IndexPeeker.Section section = null; + + Set toBeRemoved = new HashSet(); TermEntryFileWriter notMerged = null; - + String[] filesToMerge = new String[filesToMerge1.length + filesToMerge2.length]; + int pos = 0; + for (; pos < filesToMerge1.length; pos++) { + filesToMerge[pos] = filesToMerge1[pos]; + } + for (int i = 0; i < filesToMerge2.length; i++, pos++) { + filesToMerge[pos] = filesToMerge2[i]; + } for (String s : filesToMerge) { System.out.println("File: " + s); File file = new File(s); @@ -202,29 +212,38 @@ public boolean accept(File arg0, String arg1) { } TermEntryReaderIterator teri = new TermEntryReaderIterator(new DataInputStream(fileInputStream)); Iterator iterator = teri.iterator(); - IndexPeeker.Section section = peeker.getSectionFor("a"); while (iterator.hasNext()) { TermEntry tt = iterator.next(); - if (peeker.onTop(tt.subj) || - section.contains(tt.subj)) { + if (peeker.onTop(tt.subj)) { creator.putEntry(tt); - } else { - if (notMerged == null) { - lastFoundFiltered ++; - String filteredFilename = filteredFilesBaseFilename + 
lastFoundFiltered; - notMerged = new TermEntryFileWriter(teri.getHeader(), new File(directory, filteredFilename)); - } - notMerged.write(tt); - if (notMerged.isFull()) { - notMerged.close(); - notMerged = null; - } + continue; + } + + if (section == null) { + section = peeker.getSectionFor(tt.subj); + } + if (section.contains(tt.subj)) { + creator.putEntry(tt); + continue; + } + + if (notMerged == null) { + lastFoundFiltered ++; + String filteredFilename = UploaderPaths.BASE_FILENAME_FILTERED_DATA + lastFoundFiltered; + notMerged = new TermEntryFileWriter(teri.getHeader(), new File(directory, filteredFilename)); + } + notMerged.write(tt); + if (notMerged.isFull()) { + notMerged.close(); + notMerged = null; } } toBeRemoved.add(file); } - notMerged.close(); - notMerged = null; + if (notMerged != null) { + notMerged.close(); + notMerged = null; + } creator.done(); for (File file : toBeRemoved) { System.out.println("Removing file " + file); diff --git a/uploader/src/freenet/library/uploader/UploaderPaths.java b/uploader/src/freenet/library/uploader/UploaderPaths.java index 76b04486..35dbd27d 100644 --- a/uploader/src/freenet/library/uploader/UploaderPaths.java +++ b/uploader/src/freenet/library/uploader/UploaderPaths.java @@ -38,6 +38,7 @@ public class UploaderPaths { static final String LAST_DISK_FILENAME = "library.index.lastpushed.disk"; static final String BASE_FILENAME_PUSH_DATA = "library.index.data."; + static final String BASE_FILENAME_FILTERED_DATA = "library.index.filtered."; static final String LIBRARY_CACHE = "library-spider-pushed-data-cache"; } From b46dfb21eb7c2828fc98545f439ceca7295f89a5 Mon Sep 17 00:00:00 2001 From: anonymous Date: Fri, 10 Apr 2015 19:50:10 +0000 Subject: [PATCH 049/180] Refactored the interface to the IndexPeeker. 
--HG-- branch : eclipse-separation --- .../freenet/library/uploader/IndexPeeker.java | 46 ++++++++++--------- .../src/freenet/library/uploader/Merger.java | 13 +----- 2 files changed, 26 insertions(+), 33 deletions(-) diff --git a/uploader/src/freenet/library/uploader/IndexPeeker.java b/uploader/src/freenet/library/uploader/IndexPeeker.java index fa0cafac..5d5eabfb 100644 --- a/uploader/src/freenet/library/uploader/IndexPeeker.java +++ b/uploader/src/freenet/library/uploader/IndexPeeker.java @@ -1,31 +1,33 @@ package freenet.library.uploader; -class IndexPeeker { - - class Section { +import java.io.File; - private String center; +class IndexPeeker { + private File directory; + private String center = null; + + IndexPeeker(File dir) { + directory = dir; + } - public Section(String string) { - center = string; + /** + * If the subj is to be included. + * + * If subj is on top, include it. + * Let the first subj decide what part of the tree we match. + * Include subsequent terms if they are in the same part of the tree. + * + * @param subj The term to include. + * @return true if the term is included. 
+ */ + boolean include(String subj) { + if (center == null) { + System.out.println("Grouping around " + subj); + center = subj; } - - boolean contains(String subj) { - if (subj.substring(0, 1).equals(center.substring(0, 1))) - return true; - return false; + if (center.substring(0, 1).equals(subj.substring(0, 1))) { + return true; } - - } - - boolean onTop(String subj) { - // TODO Auto-generated method stub return false; } - - Section getSectionFor(String string) { - System.out.println("Grouping around " + string); - return new Section(string); - } - } diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 22949da1..8b990d07 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -186,8 +186,7 @@ public boolean accept(File arg0, String arg1) { System.out.println("There is " + filesToMerge2.length + " new files to merge."); DirectoryCreator creator = new DirectoryCreator(directory); - IndexPeeker peeker = new IndexPeeker(); - IndexPeeker.Section section = null; + IndexPeeker peeker = new IndexPeeker(directory); Set toBeRemoved = new HashSet(); TermEntryFileWriter notMerged = null; @@ -214,19 +213,11 @@ public boolean accept(File arg0, String arg1) { Iterator iterator = teri.iterator(); while (iterator.hasNext()) { TermEntry tt = iterator.next(); - if (peeker.onTop(tt.subj)) { + if (peeker.include(tt.subj)) { creator.putEntry(tt); continue; } - if (section == null) { - section = peeker.getSectionFor(tt.subj); - } - if (section.contains(tt.subj)) { - creator.putEntry(tt); - continue; - } - if (notMerged == null) { lastFoundFiltered ++; String filteredFilename = UploaderPaths.BASE_FILENAME_FILTERED_DATA + lastFoundFiltered; From d0a3190c504bb9e45e5da7471e5b630e93bff778 Mon Sep 17 00:00:00 2001 From: anonymous Date: Fri, 10 Apr 2015 20:26:25 +0000 Subject: [PATCH 050/180] Removed not used code and imports. 
--HG-- branch : eclipse-separation --- .../library/uploader/DirectoryUploader.java | 89 +------------------ .../library/uploader/UploaderLibrary.java | 14 --- 2 files changed, 1 insertion(+), 102 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index 3937908c..66b1aac3 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -94,9 +94,6 @@ public void run() { * demand. SCALABILITY */ static final int MAX_DISK_ENTRY_SIZE = 10000; - /** Like pushNumber, the number of the current disk dir, used to - * create idxDiskDir. */ - private int dirNumber; static final String DISK_DIR_PREFIX = "library-temp-index-"; /** Directory the current idxDisk is saved in. */ File idxDiskDir; @@ -119,18 +116,7 @@ public void run() { static final String BASE_FILENAME_PUSH_DATA = "library.index.data."; - - // This is a member variable because it is huge, and having huge - // stuff in local variables seems to upset the default garbage - // collector. It doesn't need to be synchronized because it's - // always used from mergeToDisk, which never runs in parallel. - private Map> newtrees; - // Ditto - private SortedSet terms; - ProtoIndexSerialiser srlDisk = null; - private ProtoIndexComponentSerialiser leafsrlDisk; - private boolean writeStringTo(File filename, String uri) { FileOutputStream fos = null; @@ -178,80 +164,8 @@ private String readStringFrom(File file) { e.printStackTrace(); } } - } - - - /** Create a new on-disk index from terms and newtrees. - * @return True if the size of any one item in the index is so - * large that we must upload immediately to Freenet. - * @throws TaskAbortException If something broke catastrophically. */ - private boolean createDiskIndex() throws TaskAbortException { - boolean tooBig = false; - // created a new index, fill it with data. 
- // DON'T MERGE, merge with a lot of data will deadlock. - // FIXME throw in update() if it will deadlock. - for(String key : terms) { - SkeletonBTreeSet tree = makeEntryTree(leafsrlDisk); - SortedSet toMerge = newtrees.get(key); - tree.addAll(toMerge); - if(toMerge.size() > MAX_DISK_ENTRY_SIZE) - tooBig = true; - toMerge = null; - tree.deflate(); - assert(tree.isBare()); - idxDisk.ttab.put(key, tree); - } - idxDisk.ttab.deflate(); - return tooBig; - } - - - /** Create a directory for an on-disk index. - * @return False if something broke and we can't continue. */ - private boolean createDiskDir() { - dirNumber++; - idxDiskDir = new File(DISK_DIR_PREFIX + Integer.toString(dirNumber)); - System.out.println("Created new disk dir for merging: "+idxDiskDir); - if(!(idxDiskDir.mkdir() || idxDiskDir.isDirectory())) { - System.err.println("Unable to create new disk dir: "+idxDiskDir); - synchronized(this) { - return false; - } - } - return true; - } + } - /** Set up the serialisers for an on-disk index. - * @return False if something broke and we can't continue. */ - private boolean makeDiskDirSerialiser() { - if(srlDisk == null) { - srlDisk = ProtoIndexSerialiser.forIndex(idxDiskDir); - LiveArchiver,SimpleProgress> archiver = - (LiveArchiver,SimpleProgress>)(srlDisk.getChildSerialiser()); - leafsrlDisk = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_FILE_LOCAL, archiver); - if(lastDiskIndexName == null) { - idxDisk = new ProtoIndex("CHK@", "test", null, null, 0L); - // FIXME more hacks: It's essential that we use the - // same FileArchiver instance here. 
- leafsrlDisk.setSerialiserFor(idxDisk); - } else { - try { - PullTask pull = new PullTask(lastDiskIndexName); - System.out.println("Pulling previous index "+lastDiskIndexName+" from disk so can update it."); - srlDisk.pull(pull); - System.out.println("Pulled previous index "+lastDiskIndexName+" from disk - updating..."); - idxDisk = pull.data; - if(idxDisk.getSerialiser().getLeafSerialiser() != archiver) - throw new IllegalStateException("Different serialiser: "+idxFreenet.getSerialiser()+" should be "+leafsrl); - } catch (TaskAbortException e) { - System.err.println("Failed to download previous index for spider update: "+e); - e.printStackTrace(); - return false; - } - } - } - return true; - } static final String INDEX_DOCNAME = "index.yml"; @@ -575,7 +489,6 @@ private Closure>, TaskAbortException> tree.addAll(data); assert(tree.size() == data.size()); } else { - int oldSize = tree.size(); tree.update(data, null); // Note that it is possible for data.size() + // oldSize != tree.size(), because we might be diff --git a/uploader/src/freenet/library/uploader/UploaderLibrary.java b/uploader/src/freenet/library/uploader/UploaderLibrary.java index 8512ba23..08b55786 100644 --- a/uploader/src/freenet/library/uploader/UploaderLibrary.java +++ b/uploader/src/freenet/library/uploader/UploaderLibrary.java @@ -4,29 +4,15 @@ package freenet.library.uploader; import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.ObjectInputStream; -import java.lang.UnsupportedOperationException; -import java.net.MalformedURLException; import java.security.MessageDigest; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; import freenet.library.ArchiverFactory; import freenet.library.io.ObjectStreamReader; import freenet.library.io.ObjectStreamWriter; import freenet.library.io.serial.LiveArchiver; import 
freenet.library.util.exec.SimpleProgress; -import freenet.library.util.exec.TaskAbortException; import net.pterodactylus.fcp.FcpConnection; -import net.pterodactylus.fcp.Priority; From 744bcf08d5a51168d1635302d75ca31270dda0e6 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 11 Apr 2015 05:18:59 +0000 Subject: [PATCH 051/180] Removed not used field. Added "assert". --HG-- branch : eclipse-separation --- uploader/src/freenet/library/uploader/FcpArchiver.java | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index ada8c81c..08c0b8b2 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -40,7 +40,6 @@ public class FcpArchiver private String mimeType; private int size; private Priority priorityLevel; - private String identifier; /** * Before synchronizing on stillRunning, be sure to synchronize @@ -50,10 +49,6 @@ public class FcpArchiver new HashMap(); private Thread cleanupThread; - private static int identifierCounter = 0; - private static String getNewIdentifier() { - return "FcpWriter" + (++identifierCounter); - } public FcpArchiver(FcpConnection fcpConnection, File directory, @@ -67,7 +62,6 @@ public FcpArchiver(FcpConnection fcpConnection, mimeType = mime; size = s; priorityLevel = pl; - identifier = getNewIdentifier(); } private net.pterodactylus.fcp.Priority getPriority() { @@ -300,6 +294,9 @@ String getURI() { @Override public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, SimpleProgress progress) throws TaskAbortException { + if (connection == null) { + throw new IllegalArgumentException("No connection."); + } final String identifier = "FcpArchiver" + counter; final String token = "FcpArchiverPushLive" + counter; counter++; From e46a7e12328e1ceeda3992b4fca62424db55bca5 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 11 Apr 
2015 13:29:25 +0000 Subject: [PATCH 052/180] Implementation that does the actual peeking in the Index to select only entries at a single location in the tree. --HG-- branch : eclipse-separation --- .../library/uploader/DirectoryUploader.java | 6 +- .../freenet/library/uploader/IndexPeeker.java | 59 +++++++++++++++++-- 2 files changed, 57 insertions(+), 8 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index 66b1aac3..1be18e40 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -141,7 +141,7 @@ private boolean writeStringTo(File filename, String uri) { } } - private String readStringFrom(File file) { + static String readStringFrom(File file) { String ret; FileInputStream fis = null; try { @@ -179,7 +179,7 @@ protected void mergeToFreenet(File diskDir) { LiveArchiver,SimpleProgress> archiver = (LiveArchiver,SimpleProgress>)(s.getChildSerialiser()); ProtoIndexComponentSerialiser leaf = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_FILE_LOCAL, archiver); - String f = this.readStringFrom(new File(diskDir, LAST_DISK_FILENAME)); + String f = DirectoryUploader.readStringFrom(new File(diskDir, LAST_DISK_FILENAME)); if(f == null) { if(diskDir.list().length == 0) { System.err.println("Directory "+diskDir+" is empty. 
Nothing to merge."); @@ -314,7 +314,7 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { } } - private String readFileLine(final String filename) { + static String readFileLine(final String filename) { File f = new File(filename); FileInputStream fis; try { diff --git a/uploader/src/freenet/library/uploader/IndexPeeker.java b/uploader/src/freenet/library/uploader/IndexPeeker.java index 5d5eabfb..ea39d79c 100644 --- a/uploader/src/freenet/library/uploader/IndexPeeker.java +++ b/uploader/src/freenet/library/uploader/IndexPeeker.java @@ -1,15 +1,47 @@ package freenet.library.uploader; import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.Set; + +import freenet.library.index.TermEntry; +import freenet.library.io.YamlReaderWriter; +import freenet.library.util.SkeletonBTreeMap; +import freenet.library.util.SkeletonBTreeSet; class IndexPeeker { private File directory; - private String center = null; - + private LinkedHashMap topTtab; + private Set topElements; + private boolean selected = false; + private String before; + private String after; + + private static final SkeletonBTreeMap> newtrees = + new SkeletonBTreeMap>(12); + IndexPeeker(File dir) { directory = dir; + String lastCHK = DirectoryUploader.readStringFrom(new File(directory, UploaderPaths.LAST_URL_FILENAME)); + String rootFilename = directory + "/" + UploaderPaths.LIBRARY_CACHE + "/" + lastCHK; + try { + LinkedHashMap top = (LinkedHashMap) new YamlReaderWriter().readObject(new FileInputStream(new File(rootFilename))); + LinkedHashMap ttab = (LinkedHashMap) top.get("ttab"); + topTtab = (LinkedHashMap) ttab.get("entries"); + } catch (IOException e) { + e.printStackTrace(); + System.exit(1); + } + topElements = new HashSet(topTtab.keySet()); } + private static int compare(String a, String b) { + return SkeletonBTreeMap.compare(a, b, newtrees.comparator()); + } + /** * If the subj is to 
be included. * @@ -21,11 +53,28 @@ class IndexPeeker { * @return true if the term is included. */ boolean include(String subj) { - if (center == null) { + if (topElements.contains(subj)) { + return true; + } + if (!selected) { System.out.println("Grouping around " + subj); - center = subj; + String previous = null; + String next = null; + for (String iter : topTtab.keySet()) { + next = iter; + if (compare(subj, next) < 0) { + break; + } + previous = iter; + next = null; + } + before = previous; + after = next; + selected = true; + topTtab = null; } - if (center.substring(0, 1).equals(subj.substring(0, 1))) { + if ((before == null || compare(before, subj) < 0) && + (after == null || compare(subj, after) < 0)) { return true; } return false; From e5f699c21dabe6ad627076085ec3eef149e61cb1 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 11 Apr 2015 14:07:16 +0000 Subject: [PATCH 053/180] Made the directory creator resilient against already existing directories. Moved around the logic to avoid connecting on FCP when just creating directory. Adjusted trace messages. 
--HG-- branch : eclipse-separation --- .../library/uploader/DirectoryCreator.java | 18 +- .../library/uploader/DirectoryUploader.java | 9 +- .../src/freenet/library/uploader/Merger.java | 159 +++++++++--------- 3 files changed, 95 insertions(+), 91 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DirectoryCreator.java b/uploader/src/freenet/library/uploader/DirectoryCreator.java index 88e44921..6d561737 100644 --- a/uploader/src/freenet/library/uploader/DirectoryCreator.java +++ b/uploader/src/freenet/library/uploader/DirectoryCreator.java @@ -22,11 +22,16 @@ class DirectoryCreator { private int countTerms; private ProtoIndexSerialiser srlDisk; private File newIndexDir; + private String nextIndexDirName; DirectoryCreator(File directory) { - int nextIndexDirNumber = 1; - String nextIndexDirName = UploaderPaths.DISK_DIR_PREFIX + nextIndexDirNumber; - newIndexDir = new File(directory, nextIndexDirName); + int nextIndexDirNumber = 0; + do { + nextIndexDirNumber ++; + nextIndexDirName = UploaderPaths.DISK_DIR_PREFIX + nextIndexDirNumber; + newIndexDir = new File(directory, nextIndexDirName); + } while (newIndexDir.exists()); + System.out.println("Writing into directory " + nextIndexDirName); newIndexDir.mkdir(); srlDisk = ProtoIndexSerialiser.forIndex(newIndexDir); LiveArchiver, SimpleProgress> archiver = @@ -86,10 +91,9 @@ public void done() throws TaskAbortException { PushTask task4 = new PushTask(idxDisk); srlDisk.push(task4); String uri = (String) task4.meta; - System.out.println("Created new directory, file root at " + uri + - " with " + countTerms + " terms."); writeStringTo(new File(newIndexDir, UploaderPaths.LAST_DISK_FILENAME), uri); + System.out.println("Created new directory " + nextIndexDirName + + ", file root at " + uri + + " with " + countTerms + " terms."); } - - } diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index 1be18e40..020d46cf 100644 --- 
a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -50,12 +50,12 @@ class DirectoryUploader implements Runnable { } public void run() { - mergeToFreenet(directory); + mergeToFreenet(directory); } private String lastUploadURI; private boolean uskUploadDone; - + static final int MAX_HANDLING_COUNT = 5; // When pushing is broken, allow max handling to reach this level // before stalling forever to prevent running out of disk space. @@ -182,7 +182,7 @@ protected void mergeToFreenet(File diskDir) { String f = DirectoryUploader.readStringFrom(new File(diskDir, LAST_DISK_FILENAME)); if(f == null) { if(diskDir.list().length == 0) { - System.err.println("Directory "+diskDir+" is empty. Nothing to merge."); + System.err.println("Directory " + diskDir + " is empty - removing. Nothing to merge."); diskDir.delete(); return; } @@ -214,7 +214,7 @@ protected void mergeToFreenet(File diskDir) { * *very sure* there is no important data below it! */ private static boolean removeAll(File wd) { if(!wd.isDirectory()) { - System.err.println("DELETING FILE "+wd); + System.out.println("DELETING FILE "+wd); if(!wd.delete() && wd.exists()) { System.err.println("Could not delete file: " + wd); return false; @@ -240,7 +240,6 @@ private static boolean removeAll(File wd) { * @param diskDir The folder the on-disk index is stored in. 
*/ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { - System.out.println("Merging on-disk index to Freenet: "+diskDir); if (lastUploadURI == null) { lastUploadURI = readStringFrom(new File(LAST_URL_FILENAME)); } diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 8b990d07..5566556f 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -80,7 +80,6 @@ public boolean accept(File arg0, String arg1) { public static void main(String[] argv) { int exitStatus = 0; - System.out.println("Separate program started."); //if (!cwd.matches(".*/plugins")) { // System.err.println("Should be started in the freenet directory."); // System.exit(1); @@ -89,84 +88,85 @@ public static void main(String[] argv) { // Now we are in the Freenet directory. // The rest of the work is done here. FcpConnection connection = null; - - FcpAdapter closeListener = new FcpAdapter() { - public void connectionClosed(FcpConnection fcpConnection, Throwable throwable) { - System.out.println("Connection Closed - Aborting."); - System.exit(1); - } - }; + FcpAdapter closeListener = null; try { - try { - connection = new FcpConnection("127.0.0.1"); - connection.connect(); - } catch (UnknownHostException e) { - System.err.println("Cannot connect to Node"); - exitStatus = 1; - return; - } catch (IOException e) { - System.err.println("Cannot connect to Node"); - exitStatus = 1; - return; - } - final String clientName = "SpiderMerger"; - final FcpMessage hello = new ClientHello(clientName); - FcpAdapter helloListener = new FcpAdapter() { - public void receivedNodeHello(FcpConnection c, NodeHello nh) { - synchronized (hello) { - hello.notify(); - } - } - - public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnection, CloseConnectionDuplicateClientName closeConnectionDuplicateClientName) { - System.out.println("Another " + clientName + " 
connected - Aborting."); - System.exit(1); - } - }; - connection.addFcpListener(helloListener); - - connection.addFcpListener(closeListener); - - synchronized (hello) { - try { - connection.sendMessage(hello); - hello.wait(); - } catch (InterruptedException e) { - System.err.println("Waiting for connection interrupted."); - exitStatus = 1; - return; - } catch (IOException e) { - System.err.println("Hello cannot write."); - exitStatus = 1; - return; - } finally { - connection.removeFcpListener(helloListener); - } - } - helloListener = null; - System.out.println("Connected"); - - UploaderLibrary.init(connection); - FactoryRegister.register(UploaderLibrary.getInstance()); - - final String[] dirsToMerge; - File directory = new File("."); - dirsToMerge = directory.list(new FilenameFilter() { - - public boolean accept(File arg0, String arg1) { - if(!(arg1.toLowerCase().startsWith(UploaderPaths.DISK_DIR_PREFIX))) return false; - return true; - } - - }); - - System.out.println("There are " + dirsToMerge.length + " old directories to merge."); - if (dirsToMerge.length > 0) { - System.out.println("Merging the first one."); - new DirectoryUploader(connection, - new File(directory, dirsToMerge[0])).run(); - return; + final String[] dirsToMerge; + File directory = new File("."); + dirsToMerge = directory.list(new FilenameFilter() { + + public boolean accept(File arg0, String arg1) { + if(!(arg1.toLowerCase().startsWith(UploaderPaths.DISK_DIR_PREFIX))) return false; + return true; + } + + }); + + if (dirsToMerge.length > 0) { + System.out.println("Merging directory " + dirsToMerge[0]); + closeListener = new FcpAdapter() { + public void connectionClosed(FcpConnection fcpConnection, Throwable throwable) { + System.out.println("Connection Closed - Aborting."); + System.exit(1); + } + }; + + try { + connection = new FcpConnection("127.0.0.1"); + connection.connect(); + } catch (UnknownHostException e) { + System.err.println("Cannot connect to Node"); + exitStatus = 1; + return; + } 
catch (IOException e) { + System.err.println("Cannot connect to Node"); + exitStatus = 1; + return; + } + final String clientName = "SpiderMerger"; + final FcpMessage hello = new ClientHello(clientName); + FcpAdapter helloListener = new FcpAdapter() { + public void receivedNodeHello(FcpConnection c, NodeHello nh) { + synchronized (hello) { + hello.notify(); + } + } + + public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnection, CloseConnectionDuplicateClientName closeConnectionDuplicateClientName) { + System.out.println("Another " + clientName + " connected - Aborting."); + System.exit(1); + } + }; + connection.addFcpListener(helloListener); + + connection.addFcpListener(closeListener); + + synchronized (hello) { + try { + connection.sendMessage(hello); + hello.wait(); + } catch (InterruptedException e) { + System.err.println("Waiting for connection interrupted."); + exitStatus = 1; + return; + } catch (IOException e) { + System.err.println("Hello cannot write."); + exitStatus = 1; + return; + } finally { + connection.removeFcpListener(helloListener); + } + } + helloListener = null; + System.out.println("Connected"); + + UploaderLibrary.init(connection); + FactoryRegister.register(UploaderLibrary.getInstance()); + + new DirectoryUploader(connection, + new File(directory, dirsToMerge[0])).run(); + System.out.println("Upload completed."); + return; } // Calculate the next name @@ -244,12 +244,13 @@ public boolean accept(File arg0, String arg1) { e.printStackTrace(); return; } finally { - connection.removeFcpListener(closeListener); + if (closeListener != null) { + connection.removeFcpListener(closeListener); + } if (connection != null) { connection.close(); } } - System.out.println("Upload completed."); System.exit(exitStatus); } } From fc0050e2f3d6df4c13356abf83c88565305fd85a Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 11 Apr 2015 14:12:38 +0000 Subject: [PATCH 054/180] Removed confusing trace message. 
--HG-- branch : eclipse-separation --- uploader/src/freenet/library/uploader/FcpArchiver.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index 08c0b8b2..de74080a 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -174,10 +174,6 @@ public void receivedPutSuccessful(FcpConnection c, PutSuccessful ps) { if (!identifier.equals(ps.getIdentifier())) return; System.out.println("receivedPutSuccessful for " + token); - System.out.println("Storing " + progressTotal + - " took " + - ((ps.getCompletionTime() - ps.getStartupTime()) / 1000) + "s"); - markDone(); } From c8f1ffda42a4243a38b98856d6ee4ee3630408e1 Mon Sep 17 00:00:00 2001 From: anonymous Date: Wed, 15 Apr 2015 02:58:21 +0000 Subject: [PATCH 055/180] Let the files be a little bigger. (1000000 entries ~ 200M). --HG-- branch : eclipse-separation --- uploader/src/freenet/library/uploader/TermEntryFileWriter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/uploader/src/freenet/library/uploader/TermEntryFileWriter.java b/uploader/src/freenet/library/uploader/TermEntryFileWriter.java index 47b9e51a..bf19668f 100644 --- a/uploader/src/freenet/library/uploader/TermEntryFileWriter.java +++ b/uploader/src/freenet/library/uploader/TermEntryFileWriter.java @@ -57,6 +57,6 @@ void close() { } public boolean isFull() { - return counter >= 300000; + return counter >= 1000000; } } From bf349a1717a08e7923915e75d55695f57cd9a320 Mon Sep 17 00:00:00 2001 From: anonymous Date: Wed, 15 Apr 2015 02:59:49 +0000 Subject: [PATCH 056/180] Buggfix: delay deflating the tree until it is all filled. 
--HG-- branch : eclipse-separation --- .../freenet/library/uploader/DirectoryCreator.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DirectoryCreator.java b/uploader/src/freenet/library/uploader/DirectoryCreator.java index 6d561737..12f1f121 100644 --- a/uploader/src/freenet/library/uploader/DirectoryCreator.java +++ b/uploader/src/freenet/library/uploader/DirectoryCreator.java @@ -5,6 +5,7 @@ import java.io.IOException; import java.io.OutputStreamWriter; import java.util.Map; +import java.util.Map.Entry; import freenet.library.index.ProtoIndex; import freenet.library.index.ProtoIndexComponentSerialiser; @@ -68,7 +69,6 @@ private static boolean writeStringTo(File filename, String uri) { } public void putEntry(TermEntry tt) throws TaskAbortException { - // TODO Auto-generated method stub SkeletonBTreeSet tree; if (idxDisk.ttab.containsKey(tt.subj)) { // merge @@ -78,14 +78,19 @@ public void putEntry(TermEntry tt) throws TaskAbortException { leafsrlDisk.setSerialiserFor(tree); } tree.add(tt); - tree.deflate(); - assert(tree.isBare()); + // tree.deflate(); + // assert(tree.isBare()); idxDisk.ttab.put(tt.subj, tree); countTerms++; } public void done() throws TaskAbortException { - // TODO Auto-generated method stub + for (Entry> entry : idxDisk.ttab.entrySet()) { + SkeletonBTreeSet tree = entry.getValue(); + tree.deflate(); + assert(tree.isBare()); + idxDisk.ttab.put(entry.getKey(), tree); + } idxDisk.ttab.deflate(); assert(idxDisk.ttab.isBare()); PushTask task4 = new PushTask(idxDisk); From f620113d37520b162283f8f3d878d4298754ac06 Mon Sep 17 00:00:00 2001 From: anonymous Date: Wed, 15 Apr 2015 03:01:02 +0000 Subject: [PATCH 057/180] Refactored IndexPeeker to make it easy to do more section at the same time. Changed trace output. Allow specifying directory to merge from. 
--HG-- branch : eclipse-separation --- .../freenet/library/uploader/IndexPeeker.java | 63 +++++++++++++------ .../src/freenet/library/uploader/Merger.java | 49 +++++++++++---- .../uploader/TermEntryReaderIterator.java | 1 - 3 files changed, 79 insertions(+), 34 deletions(-) diff --git a/uploader/src/freenet/library/uploader/IndexPeeker.java b/uploader/src/freenet/library/uploader/IndexPeeker.java index ea39d79c..6c0e810a 100644 --- a/uploader/src/freenet/library/uploader/IndexPeeker.java +++ b/uploader/src/freenet/library/uploader/IndexPeeker.java @@ -5,6 +5,8 @@ import java.io.IOException; import java.util.HashSet; import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; import java.util.Set; import freenet.library.index.TermEntry; @@ -16,14 +18,16 @@ class IndexPeeker { private File directory; private LinkedHashMap topTtab; private Set topElements; - private boolean selected = false; - private String before; - private String after; + private List activeSections = null; + private int maxSections; private static final SkeletonBTreeMap> newtrees = new SkeletonBTreeMap>(12); IndexPeeker(File dir) { + this(dir, 1); + } + IndexPeeker(File dir, int sections) { directory = dir; String lastCHK = DirectoryUploader.readStringFrom(new File(directory, UploaderPaths.LAST_URL_FILENAME)); String rootFilename = directory + "/" + UploaderPaths.LIBRARY_CACHE + "/" + lastCHK; @@ -36,12 +40,43 @@ class IndexPeeker { System.exit(1); } topElements = new HashSet(topTtab.keySet()); + activeSections = new LinkedList(); + maxSections = sections; } private static int compare(String a, String b) { return SkeletonBTreeMap.compare(a, b, newtrees.comparator()); } + class ChoosenSection { + String before; + String after; + + ChoosenSection(String subj) { + System.out.println("Grouping around " + subj); + String previous = null; + String next = null; + for (String iter : topTtab.keySet()) { + next = iter; + if (compare(subj, next) < 0) { + break; + } + previous = 
iter; + next = null; + } + before = previous; + after = next; + } + + boolean include(String subj) { + if ((before == null || compare(before, subj) < 0) && + (after == null || compare(subj, after) < 0)) { + return true; + } + return false; + } + } + /** * If the subj is to be included. * @@ -56,25 +91,13 @@ boolean include(String subj) { if (topElements.contains(subj)) { return true; } - if (!selected) { - System.out.println("Grouping around " + subj); - String previous = null; - String next = null; - for (String iter : topTtab.keySet()) { - next = iter; - if (compare(subj, next) < 0) { - break; - } - previous = iter; - next = null; + for (ChoosenSection section : activeSections) { + if (section.include(subj)) { + return true; } - before = previous; - after = next; - selected = true; - topTtab = null; } - if ((before == null || compare(before, subj) < 0) && - (after == null || compare(subj, after) < 0)) { + if (activeSections.size() < maxSections) { + activeSections.add(new ChoosenSection(subj)); return true; } return false; diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 5566556f..91bfbd35 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -11,6 +11,7 @@ import java.io.DataInputStream; import java.io.FileInputStream; import java.net.UnknownHostException; +import java.text.Format; import java.util.HashSet; import java.util.Iterator; import java.util.Set; @@ -91,17 +92,28 @@ public static void main(String[] argv) { FcpAdapter closeListener = null; try { - final String[] dirsToMerge; + String[] dirsToMerge = null; File directory = new File("."); - dirsToMerge = directory.list(new FilenameFilter() { - - public boolean accept(File arg0, String arg1) { - if(!(arg1.toLowerCase().startsWith(UploaderPaths.DISK_DIR_PREFIX))) return false; - return true; - } - - }); - + for (String arg : argv) { + if (new File(directory, 
arg).isDirectory()) { + dirsToMerge = new String[1]; + dirsToMerge[0] = arg; + } else { + System.out.println("No such directory " + arg); + } + break; + } + if (dirsToMerge == null) { + dirsToMerge = directory.list(new FilenameFilter() { + + public boolean accept(File arg0, String arg1) { + if(!(arg1.toLowerCase().startsWith(UploaderPaths.DISK_DIR_PREFIX))) return false; + return true; + } + + }); + } + if (dirsToMerge.length > 0) { System.out.println("Merging directory " + dirsToMerge[0]); closeListener = new FcpAdapter() { @@ -163,8 +175,8 @@ public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnecti UploaderLibrary.init(connection); FactoryRegister.register(UploaderLibrary.getInstance()); - new DirectoryUploader(connection, - new File(directory, dirsToMerge[0])).run(); + File directoryToMerge = new File(directory, dirsToMerge[0]); + new DirectoryUploader(connection, directoryToMerge).run(); System.out.println("Upload completed."); return; } @@ -198,6 +210,10 @@ public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnecti for (int i = 0; i < filesToMerge2.length; i++, pos++) { filesToMerge[pos] = filesToMerge2[i]; } + + int totalTerms = 0; + int movedTerms = 0; + for (String s : filesToMerge) { System.out.println("File: " + s); File file = new File(s); @@ -213,8 +229,10 @@ public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnecti Iterator iterator = teri.iterator(); while (iterator.hasNext()) { TermEntry tt = iterator.next(); + totalTerms ++; if (peeker.include(tt.subj)) { creator.putEntry(tt); + movedTerms ++; continue; } @@ -240,9 +258,14 @@ public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnecti System.out.println("Removing file " + file); file.delete(); } + double percentage = new Double(movedTerms).doubleValue() / new Double(totalTerms).doubleValue() * 100.0; + System.out.format("Processed %d/%d terms (%.2f%%).%n", + movedTerms, + totalTerms, + percentage); } catch 
(TaskAbortException e) { e.printStackTrace(); - return; + exitStatus = 1; } finally { if (closeListener != null) { connection.removeFcpListener(closeListener); diff --git a/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java b/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java index 9afeeaa0..9ddd894f 100644 --- a/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java +++ b/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java @@ -22,7 +22,6 @@ public TermEntryReaderIterator(DataInputStream s) { do { try { line = is.readLine(); - System.out.println("Line: " + line); String[] parts = line.split("=", 2); if (parts.length >= 2) { header.put(parts[0], parts[1]); From 380f66f1723a63f287deeaac64774173d43c5815 Mon Sep 17 00:00:00 2001 From: anonymous Date: Thu, 16 Apr 2015 05:08:40 +0000 Subject: [PATCH 058/180] Factored out the setting up and taking down of the Fcp session. --HG-- branch : eclipse-separation --- .../freenet/library/uploader/FcpSession.java | 96 +++++++++++++++++++ .../src/freenet/library/uploader/Merger.java | 82 ++-------------- 2 files changed, 106 insertions(+), 72 deletions(-) create mode 100644 uploader/src/freenet/library/uploader/FcpSession.java diff --git a/uploader/src/freenet/library/uploader/FcpSession.java b/uploader/src/freenet/library/uploader/FcpSession.java new file mode 100644 index 00000000..bb23d9dc --- /dev/null +++ b/uploader/src/freenet/library/uploader/FcpSession.java @@ -0,0 +1,96 @@ +package freenet.library.uploader; + +import java.io.IOException; +import java.net.UnknownHostException; + +import net.pterodactylus.fcp.ClientHello; +import net.pterodactylus.fcp.CloseConnectionDuplicateClientName; +import net.pterodactylus.fcp.FcpAdapter; +import net.pterodactylus.fcp.FcpConnection; +import net.pterodactylus.fcp.FcpMessage; +import net.pterodactylus.fcp.NodeHello; + +public class FcpSession { + + private FcpAdapter closeListener; + private FcpConnection connection; + private 
int exitStatus; + + public FcpSession() { + exitStatus = 0; + + closeListener = new FcpAdapter() { + public void connectionClosed(FcpConnection fcpConnection, Throwable throwable) { + System.out.println("Connection Closed - Aborting."); + System.exit(1); + } + }; + + try { + connection = new FcpConnection("127.0.0.1"); + connection.connect(); + } catch (UnknownHostException e) { + System.err.println("Cannot connect to Node"); + exitStatus = 1; + return; + } catch (IOException e) { + System.err.println("Cannot connect to Node"); + exitStatus = 1; + return; + } + final String clientName = "SpiderMerger"; + final FcpMessage hello = new ClientHello(clientName); + FcpAdapter helloListener = new FcpAdapter() { + public void receivedNodeHello(FcpConnection c, NodeHello nh) { + synchronized (hello) { + hello.notify(); + } + } + + public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnection, CloseConnectionDuplicateClientName closeConnectionDuplicateClientName) { + System.out.println("Another " + clientName + " connected - Aborting."); + System.exit(1); + } + }; + connection.addFcpListener(helloListener); + + connection.addFcpListener(closeListener); + + synchronized (hello) { + try { + connection.sendMessage(hello); + hello.wait(); + } catch (InterruptedException e) { + System.err.println("Waiting for connection interrupted."); + exitStatus = 1; + return; + } catch (IOException e) { + System.err.println("Hello cannot write."); + exitStatus = 1; + return; + } finally { + connection.removeFcpListener(helloListener); + } + } + helloListener = null; + System.out.println("Connected"); + + } + + public void close() { + if (closeListener != null) { + connection.removeFcpListener(closeListener); + } + if (connection != null) { + connection.close(); + } + } + + public FcpConnection getConnection() { + return connection; + } + + public int getStatus() { + return exitStatus; + } +} diff --git a/uploader/src/freenet/library/uploader/Merger.java 
b/uploader/src/freenet/library/uploader/Merger.java index 91bfbd35..95850488 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -6,22 +6,13 @@ import java.io.File; import java.io.FileNotFoundException; -import java.io.FilenameFilter; -import java.io.IOException; -import java.io.DataInputStream; +import java.io.FilenameFilter;import java.io.DataInputStream; import java.io.FileInputStream; -import java.net.UnknownHostException; -import java.text.Format; import java.util.HashSet; import java.util.Iterator; import java.util.Set; -import net.pterodactylus.fcp.ClientHello; -import net.pterodactylus.fcp.CloseConnectionDuplicateClientName; -import net.pterodactylus.fcp.FcpAdapter; import net.pterodactylus.fcp.FcpConnection; -import net.pterodactylus.fcp.FcpMessage; -import net.pterodactylus.fcp.NodeHello; import freenet.library.FactoryRegister; import freenet.library.index.TermEntry; @@ -54,6 +45,10 @@ */ final public class Merger { + private static FcpSession session; + + + static String[] getMatchingFiles(File directory, final String baseFilename) { return directory.list(new FilenameFilter() { @@ -89,7 +84,6 @@ public static void main(String[] argv) { // Now we are in the Freenet directory. // The rest of the work is done here. 
FcpConnection connection = null; - FcpAdapter closeListener = null; try { String[] dirsToMerge = null; @@ -116,62 +110,8 @@ public boolean accept(File arg0, String arg1) { if (dirsToMerge.length > 0) { System.out.println("Merging directory " + dirsToMerge[0]); - closeListener = new FcpAdapter() { - public void connectionClosed(FcpConnection fcpConnection, Throwable throwable) { - System.out.println("Connection Closed - Aborting."); - System.exit(1); - } - }; - - try { - connection = new FcpConnection("127.0.0.1"); - connection.connect(); - } catch (UnknownHostException e) { - System.err.println("Cannot connect to Node"); - exitStatus = 1; - return; - } catch (IOException e) { - System.err.println("Cannot connect to Node"); - exitStatus = 1; - return; - } - final String clientName = "SpiderMerger"; - final FcpMessage hello = new ClientHello(clientName); - FcpAdapter helloListener = new FcpAdapter() { - public void receivedNodeHello(FcpConnection c, NodeHello nh) { - synchronized (hello) { - hello.notify(); - } - } - - public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnection, CloseConnectionDuplicateClientName closeConnectionDuplicateClientName) { - System.out.println("Another " + clientName + " connected - Aborting."); - System.exit(1); - } - }; - connection.addFcpListener(helloListener); - - connection.addFcpListener(closeListener); - - synchronized (hello) { - try { - connection.sendMessage(hello); - hello.wait(); - } catch (InterruptedException e) { - System.err.println("Waiting for connection interrupted."); - exitStatus = 1; - return; - } catch (IOException e) { - System.err.println("Hello cannot write."); - exitStatus = 1; - return; - } finally { - connection.removeFcpListener(helloListener); - } - } - helloListener = null; - System.out.println("Connected"); - + session = new FcpSession(); + connection = session.getConnection(); UploaderLibrary.init(connection); FactoryRegister.register(UploaderLibrary.getInstance()); @@ -267,12 
+207,10 @@ public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnecti e.printStackTrace(); exitStatus = 1; } finally { - if (closeListener != null) { - connection.removeFcpListener(closeListener); + session.close(); + if (exitStatus == 0) { + exitStatus = session.getStatus(); } - if (connection != null) { - connection.close(); - } } System.exit(exitStatus); } From 4053787f6802a12c06f67158cc246c92eac6db4e Mon Sep 17 00:00:00 2001 From: anonymous Date: Thu, 16 Apr 2015 05:52:17 +0000 Subject: [PATCH 059/180] Buggfix for the non-session case. --HG-- branch : eclipse-separation --- uploader/src/freenet/library/uploader/Merger.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 95850488..223fe9d2 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -207,9 +207,11 @@ public boolean accept(File arg0, String arg1) { e.printStackTrace(); exitStatus = 1; } finally { - session.close(); - if (exitStatus == 0) { - exitStatus = session.getStatus(); + if (session != null) { + session.close(); + if (exitStatus == 0) { + exitStatus = session.getStatus(); + } } } System.exit(exitStatus); From 195c8df6d89182ad3de7bb9c8ac864a0d04d75b5 Mon Sep 17 00:00:00 2001 From: anonymous Date: Fri, 17 Apr 2015 06:09:23 +0000 Subject: [PATCH 060/180] Changed the handling of files to process all not previously terms each time a selected is processed. 
--HG-- branch : eclipse-separation --- .../src/freenet/library/uploader/Merger.java | 229 +++++++++++------- .../library/uploader/UploaderPaths.java | 4 +- 2 files changed, 148 insertions(+), 85 deletions(-) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 223fe9d2..0dded72e 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -8,14 +8,20 @@ import java.io.FileNotFoundException; import java.io.FilenameFilter;import java.io.DataInputStream; import java.io.FileInputStream; +import java.util.ArrayList; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.TreeMap; import net.pterodactylus.fcp.FcpConnection; import freenet.library.FactoryRegister; import freenet.library.index.TermEntry; +import freenet.library.util.BTreeMap.PairIterable; import freenet.library.util.exec.TaskAbortException; /** @@ -47,7 +53,9 @@ final public class Merger { private static FcpSession session; - + private static final String SELECTED = UploaderPaths.BASE_FILENAME_DATA + "selected."; + private static final String FILTERED = UploaderPaths.BASE_FILENAME_DATA + "filtered."; + private static final String PROCESSED = UploaderPaths.BASE_FILENAME_DATA + "processed."; static String[] getMatchingFiles(File directory, final String baseFilename) { @@ -121,88 +129,7 @@ public boolean accept(File arg0, String arg1) { return; } - // Calculate the next name - int lastFoundFiltered = 0; - String[] filesToMerge1 = getMatchingFiles(directory, UploaderPaths.BASE_FILENAME_FILTERED_DATA); - System.out.println("There is " + filesToMerge1.length + " old files to merge."); - for (String filename : filesToMerge1) { - int numberFound = Integer.parseInt(filename.substring(UploaderPaths.BASE_FILENAME_FILTERED_DATA.length())); - if (numberFound > lastFoundFiltered) { - 
lastFoundFiltered = numberFound; - } - } - System.out.println("Last found: " + lastFoundFiltered); - - String[] filesToMerge2 = getMatchingFiles(directory, UploaderPaths.BASE_FILENAME_PUSH_DATA); - - System.out.println("There is " + filesToMerge2.length + " new files to merge."); - - DirectoryCreator creator = new DirectoryCreator(directory); - IndexPeeker peeker = new IndexPeeker(directory); - - Set toBeRemoved = new HashSet(); - TermEntryFileWriter notMerged = null; - String[] filesToMerge = new String[filesToMerge1.length + filesToMerge2.length]; - int pos = 0; - for (; pos < filesToMerge1.length; pos++) { - filesToMerge[pos] = filesToMerge1[pos]; - } - for (int i = 0; i < filesToMerge2.length; i++, pos++) { - filesToMerge[pos] = filesToMerge2[i]; - } - - int totalTerms = 0; - int movedTerms = 0; - - for (String s : filesToMerge) { - System.out.println("File: " + s); - File file = new File(s); - FileInputStream fileInputStream; - try { - fileInputStream = new FileInputStream(file); - } catch (FileNotFoundException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - return; - } - TermEntryReaderIterator teri = new TermEntryReaderIterator(new DataInputStream(fileInputStream)); - Iterator iterator = teri.iterator(); - while (iterator.hasNext()) { - TermEntry tt = iterator.next(); - totalTerms ++; - if (peeker.include(tt.subj)) { - creator.putEntry(tt); - movedTerms ++; - continue; - } - - if (notMerged == null) { - lastFoundFiltered ++; - String filteredFilename = UploaderPaths.BASE_FILENAME_FILTERED_DATA + lastFoundFiltered; - notMerged = new TermEntryFileWriter(teri.getHeader(), new File(directory, filteredFilename)); - } - notMerged.write(tt); - if (notMerged.isFull()) { - notMerged.close(); - notMerged = null; - } - } - toBeRemoved.add(file); - } - if (notMerged != null) { - notMerged.close(); - notMerged = null; - } - creator.done(); - for (File file : toBeRemoved) { - System.out.println("Removing file " + file); - file.delete(); - } - double 
percentage = new Double(movedTerms).doubleValue() / new Double(totalTerms).doubleValue() * 100.0; - System.out.format("Processed %d/%d terms (%.2f%%).%n", - movedTerms, - totalTerms, - percentage); + createMergeDirectory(directory); } catch (TaskAbortException e) { e.printStackTrace(); exitStatus = 1; @@ -216,4 +143,140 @@ public boolean accept(File arg0, String arg1) { } System.exit(exitStatus); } + + + private static void createMergeDirectory(File directory) throws TaskAbortException { + String[] selectedFilesToMerge = getMatchingFiles(directory, SELECTED); + System.out.println("There is " + selectedFilesToMerge.length + " selected files."); + + String [] filteredFilesToMerge = getMatchingFiles(directory, FILTERED); + System.out.println("There is " + filteredFilesToMerge.length + " filtered files."); + + String [] processedFilesToMerge = getMatchingFiles(directory, PROCESSED); + System.out.println("There is " + processedFilesToMerge.length + " processed files."); + + String[] newFilesToMerge = getMatchingFiles(directory, UploaderPaths.BASE_FILENAME_PUSH_DATA); + System.out.println("There is " + newFilesToMerge.length + " new files."); + + // Calculate the next number of filtered and processed files. 
+ int lastFoundNumber = 0; + for (String filename : filteredFilesToMerge) { + int numberFound = Integer.parseInt(filename.substring(FILTERED.length())); + if (numberFound > lastFoundNumber) { + lastFoundNumber = numberFound; + } + } + for (String filename : processedFilesToMerge) { + int numberFound = Integer.parseInt(filename.substring(PROCESSED.length())); + if (numberFound > lastFoundNumber) { + lastFoundNumber = numberFound; + } + } + System.out.println("Last found: " + lastFoundNumber); + + int lastSelected = 0; + + DirectoryCreator creator = new DirectoryCreator(directory); + + Map writers = + new HashMap(); + IndexPeeker peeker = new IndexPeeker(directory); + + Set toBeRemoved = new HashSet(); + List filesToMerge = new ArrayList(); + String restBase; + if (selectedFilesToMerge.length > 0) { + filesToMerge.add(selectedFilesToMerge[0]); + restBase = PROCESSED; + } else { + for (int i = 0; i < filteredFilesToMerge.length; i++) { + filesToMerge.add(filteredFilesToMerge[i]); + } + restBase = FILTERED; + } + for (int i = 0; i < processedFilesToMerge.length; i++) { + filesToMerge.add(processedFilesToMerge[i]); + } + for (int i = 0; i < newFilesToMerge.length; i++) { + filesToMerge.add(newFilesToMerge[i]); + } + TermEntryFileWriter notMerged = null; + + int totalTerms = 0; + int movedTerms = 0; + + for (String s : filesToMerge) { + System.out.println("File: " + s); + File file = new File(s); + FileInputStream fileInputStream; + try { + fileInputStream = new FileInputStream(file); + } catch (FileNotFoundException e) { + e.printStackTrace(); + return; + } + TermEntryReaderIterator teri = new TermEntryReaderIterator(new DataInputStream(fileInputStream)); + Iterator iterator = teri.iterator(); + while (iterator.hasNext()) { + TermEntry tt = iterator.next(); + totalTerms ++; + if (peeker.include(tt.subj)) { + creator.putEntry(tt); + movedTerms ++; + continue; + } + + if (selectedFilesToMerge.length == 0) { + // They are all to be sorted. 
+ boolean found = false; + for (Map.Entry entry : writers.entrySet()) { + if (entry.getKey().include(tt.subj)) { + entry.getValue().write(tt); + found = true; + break; + } + } + if (found) { + continue; + } else if (writers.size() < 50) { + lastSelected ++; + String selectedFilename = SELECTED + lastSelected; + IndexPeeker p = new IndexPeeker(directory); + TermEntryFileWriter t = new TermEntryFileWriter(teri.getHeader(), + new File(directory, selectedFilename)); + if (p.include(tt.subj)) { + writers.put(p, t); + t.write(tt); + } + continue; + } + } + if (notMerged == null) { + lastFoundNumber ++; + String restFilename = restBase + lastFoundNumber; + notMerged = new TermEntryFileWriter(teri.getHeader(), new File(directory, restFilename)); + } + notMerged.write(tt); + if (notMerged.isFull()) { + notMerged.close(); + notMerged = null; + } + } + toBeRemoved.add(file); + } + if (notMerged != null) { + notMerged.close(); + notMerged = null; + } + creator.done(); + for (File file : toBeRemoved) { + System.out.println("Removing file " + file); + file.delete(); + } + double percentage = new Double(movedTerms).doubleValue() / new Double(totalTerms).doubleValue() * 100.0; + System.out.format("Processed %d/%d terms (%.2f%%).%n", + movedTerms, + totalTerms, + percentage); + } } diff --git a/uploader/src/freenet/library/uploader/UploaderPaths.java b/uploader/src/freenet/library/uploader/UploaderPaths.java index 35dbd27d..5be870e2 100644 --- a/uploader/src/freenet/library/uploader/UploaderPaths.java +++ b/uploader/src/freenet/library/uploader/UploaderPaths.java @@ -37,8 +37,8 @@ public class UploaderPaths { static final String LAST_DISK_FILENAME = "library.index.lastpushed.disk"; - static final String BASE_FILENAME_PUSH_DATA = "library.index.data."; - static final String BASE_FILENAME_FILTERED_DATA = "library.index.filtered."; + static final String BASE_FILENAME_DATA = "library.index."; + static final String BASE_FILENAME_PUSH_DATA = BASE_FILENAME_DATA + "data."; static final 
String LIBRARY_CACHE = "library-spider-pushed-data-cache"; } From 9f9b52746cde7449fd3c8015cc08322c33774ab0 Mon Sep 17 00:00:00 2001 From: anonymous Date: Fri, 24 Apr 2015 20:38:51 +0000 Subject: [PATCH 061/180] Made it possible to specify the client name to FcpSession. --HG-- branch : eclipse-separation --- build.xml | 4 +-- .../freenet/library/uploader/FcpSession.java | 27 +++++-------------- .../src/freenet/library/uploader/Merger.java | 3 ++- 3 files changed, 11 insertions(+), 23 deletions(-) diff --git a/build.xml b/build.xml index eb18e43e..3435160f 100644 --- a/build.xml +++ b/build.xml @@ -6,8 +6,8 @@ - - + + diff --git a/uploader/src/freenet/library/uploader/FcpSession.java b/uploader/src/freenet/library/uploader/FcpSession.java index bb23d9dc..5f51632b 100644 --- a/uploader/src/freenet/library/uploader/FcpSession.java +++ b/uploader/src/freenet/library/uploader/FcpSession.java @@ -1,7 +1,6 @@ package freenet.library.uploader; import java.io.IOException; -import java.net.UnknownHostException; import net.pterodactylus.fcp.ClientHello; import net.pterodactylus.fcp.CloseConnectionDuplicateClientName; @@ -16,7 +15,11 @@ public class FcpSession { private FcpConnection connection; private int exitStatus; - public FcpSession() { + public FcpSession() throws IllegalStateException, IOException { + this("SpiderMerger"); + } + + public FcpSession(final String clientName) throws IllegalStateException, IOException { exitStatus = 0; closeListener = new FcpAdapter() { @@ -26,19 +29,8 @@ public void connectionClosed(FcpConnection fcpConnection, Throwable throwable) { } }; - try { - connection = new FcpConnection("127.0.0.1"); - connection.connect(); - } catch (UnknownHostException e) { - System.err.println("Cannot connect to Node"); - exitStatus = 1; - return; - } catch (IOException e) { - System.err.println("Cannot connect to Node"); - exitStatus = 1; - return; - } - final String clientName = "SpiderMerger"; + connection = new FcpConnection("127.0.0.1"); + 
connection.connect(); final FcpMessage hello = new ClientHello(clientName); FcpAdapter helloListener = new FcpAdapter() { public void receivedNodeHello(FcpConnection c, NodeHello nh) { @@ -64,17 +56,12 @@ public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnecti System.err.println("Waiting for connection interrupted."); exitStatus = 1; return; - } catch (IOException e) { - System.err.println("Hello cannot write."); - exitStatus = 1; - return; } finally { connection.removeFcpListener(helloListener); } } helloListener = null; System.out.println("Connected"); - } public void close() { diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 0dded72e..4b65c6b7 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -8,6 +8,7 @@ import java.io.FileNotFoundException; import java.io.FilenameFilter;import java.io.DataInputStream; import java.io.FileInputStream; +import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -130,7 +131,7 @@ public boolean accept(File arg0, String arg1) { } createMergeDirectory(directory); - } catch (TaskAbortException e) { + } catch (TaskAbortException | IllegalStateException | IOException e) { e.printStackTrace(); exitStatus = 1; } finally { From ede1ff6e861c7173f0183228d6fdb553c14d7a34 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 25 Apr 2015 19:26:35 +0000 Subject: [PATCH 062/180] Process the files in the correct order. Reprocess the selected files if needed. 
--HG-- branch : eclipse-separation --- uploader/.classpath | 1 + .../src/freenet/library/uploader/Merger.java | 90 +++++++++++++++++-- .../uploader/MergerComparatorTest.java | 30 +++++++ 3 files changed, 114 insertions(+), 7 deletions(-) create mode 100644 uploader/test/freenet/library/uploader/MergerComparatorTest.java diff --git a/uploader/.classpath b/uploader/.classpath index f760e33a..4198d1ae 100644 --- a/uploader/.classpath +++ b/uploader/.classpath @@ -5,5 +5,6 @@ + diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 4b65c6b7..c2afd976 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -10,6 +10,8 @@ import java.io.FileInputStream; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -57,10 +59,64 @@ final public class Merger { private static final String SELECTED = UploaderPaths.BASE_FILENAME_DATA + "selected."; private static final String FILTERED = UploaderPaths.BASE_FILENAME_DATA + "filtered."; private static final String PROCESSED = UploaderPaths.BASE_FILENAME_DATA + "processed."; + + static final Comparator comparator = new StringNumberComparator(); + + static class StringNumberComparator implements Comparator { + @Override + public int compare(String a, String b) { + int ai; + int bi; + for (ai = 0, bi = 0; ai < a.length() && bi < b.length(); ai++, bi++) { + if (a.substring(ai, ai + 1).matches("[0-9]") + && a.substring(bi, bi + 1).matches("[0-9]")) { + int aii; + for (aii = ai + 1; aii < a.length(); aii++) { + if (!a.substring(aii, aii + 1).matches("[0-9]")) { + break; + } + } + int bii; + for (bii = bi + 1; bii < b.length(); bii++) { + if (!b.substring(bii, bii + 1).matches("[0-9]")) { + break; + } + } + try { + int ret = Integer.valueOf(a.substring(ai, aii)).compareTo( + 
Integer.valueOf(b.substring(bi, bii))); + if (ret != 0) { + return ret; + } + + ai = aii - 1; + bi = bii - 1; + continue; + } catch (NumberFormatException e) { + continue; + } + } + int ret = a.charAt(ai) - b.charAt(bi); + if (ret != 0) { + return ret; + } + } + if (ai < a.length()) { + return 1; + } + if (bi < b.length()) { + return -1; + } + return 0; + } + } + /** + * Return an array with the filenames in order. + */ static String[] getMatchingFiles(File directory, final String baseFilename) { - return directory.list(new FilenameFilter() { + String[] array = directory.list(new FilenameFilter() { public boolean accept(File arg0, String arg1) { if (!(arg1.toLowerCase().startsWith(baseFilename))) { @@ -76,12 +132,12 @@ public boolean accept(File arg0, String arg1) { } return true; } - }); + Arrays.sort(array, comparator); + return array; } - public static void main(String[] argv) { int exitStatus = 0; @@ -176,6 +232,12 @@ private static void createMergeDirectory(File directory) throws TaskAbortExcepti System.out.println("Last found: " + lastFoundNumber); int lastSelected = 0; + for (String filename : selectedFilesToMerge) { + int numberFound = Integer.parseInt(filename.substring(SELECTED.length())); + if (numberFound > lastSelected) { + lastSelected = numberFound; + } + } DirectoryCreator creator = new DirectoryCreator(directory); @@ -186,10 +248,24 @@ private static void createMergeDirectory(File directory) throws TaskAbortExcepti Set toBeRemoved = new HashSet(); List filesToMerge = new ArrayList(); String restBase; + boolean createSelectedFiles = false; if (selectedFilesToMerge.length > 0) { - filesToMerge.add(selectedFilesToMerge[0]); - restBase = PROCESSED; + if (processedFilesToMerge.length > 1 + && processedFilesToMerge.length * selectedFilesToMerge.length > filteredFilesToMerge.length) { + createSelectedFiles = true; + for (int i = 0; i < selectedFilesToMerge.length; i++) { + filesToMerge.add(selectedFilesToMerge[i]); + } + for (int i = 0; i < 
filteredFilesToMerge.length; i++) { + filesToMerge.add(filteredFilesToMerge[i]); + } + restBase = FILTERED; + } else { + filesToMerge.add(selectedFilesToMerge[0]); + restBase = PROCESSED; + } } else { + createSelectedFiles = true; for (int i = 0; i < filteredFilesToMerge.length; i++) { filesToMerge.add(filteredFilesToMerge[i]); } @@ -227,7 +303,7 @@ private static void createMergeDirectory(File directory) throws TaskAbortExcepti continue; } - if (selectedFilesToMerge.length == 0) { + if (createSelectedFiles) { // They are all to be sorted. boolean found = false; for (Map.Entry entry : writers.entrySet()) { @@ -239,7 +315,7 @@ private static void createMergeDirectory(File directory) throws TaskAbortExcepti } if (found) { continue; - } else if (writers.size() < 50) { + } else if (writers.size() < 10 * (filteredFilesToMerge.length + processedFilesToMerge.length)) { lastSelected ++; String selectedFilename = SELECTED + lastSelected; IndexPeeker p = new IndexPeeker(directory); diff --git a/uploader/test/freenet/library/uploader/MergerComparatorTest.java b/uploader/test/freenet/library/uploader/MergerComparatorTest.java new file mode 100644 index 00000000..2102b66d --- /dev/null +++ b/uploader/test/freenet/library/uploader/MergerComparatorTest.java @@ -0,0 +1,30 @@ +package freenet.library.uploader; + +import junit.framework.TestCase; + +public class MergerComparatorTest extends TestCase { + + public void testComparator() { + assertTrue(Merger.comparator.compare("a", "b") < 0); + assertTrue(Merger.comparator.compare("b", "a") > 0); + assertTrue(Merger.comparator.compare("a", "a") == 0); + + assertTrue(Merger.comparator.compare("3", "5") < 0); + assertTrue(Merger.comparator.compare("7", "5") > 0); + assertTrue(Merger.comparator.compare("4", "4") == 0); + + assertTrue(Merger.comparator.compare("a", "ab") < 0); + assertTrue(Merger.comparator.compare("ab", "a") > 0); + + assertTrue(Merger.comparator.compare("abc4", "abc00004") == 0); + 
assertTrue(Merger.comparator.compare("abc4", "abc00005") < 0); + assertTrue(Merger.comparator.compare("abc5", "abc00004") > 0); + assertTrue(Merger.comparator.compare("abc00003", "abc4") < 0); + assertTrue(Merger.comparator.compare("abc00004", "abc3") > 0); + + assertTrue(Merger.comparator.compare("abc4a", "abc00004a") == 0); + assertTrue(Merger.comparator.compare("abc4a", "abc00004b") < 0); + assertTrue(Merger.comparator.compare("abc4b", "abc00004a") > 0); + } + +} From e292cce07703c42671113734bf58b00f1e16f35a Mon Sep 17 00:00:00 2001 From: anonymous Date: Tue, 5 May 2015 06:09:34 +0000 Subject: [PATCH 063/180] Factored out the FileClientPutter. --HG-- branch : eclipse-separation --- .../freenet/library/uploader/FcpArchiver.java | 271 ++---------------- .../library/uploader/FileClientPutter.java | 266 +++++++++++++++++ 2 files changed, 283 insertions(+), 254 deletions(-) create mode 100644 uploader/src/freenet/library/uploader/FileClientPutter.java diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index de74080a..7e1cb2ce 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -2,25 +2,11 @@ import java.io.File; import java.io.FileInputStream; -import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import net.pterodactylus.fcp.ClientPut; -import net.pterodactylus.fcp.FcpAdapter; import net.pterodactylus.fcp.FcpConnection; -import net.pterodactylus.fcp.FinishedCompression; -import net.pterodactylus.fcp.PutFailed; -import net.pterodactylus.fcp.PutFetchable; -import net.pterodactylus.fcp.PutSuccessful; -import net.pterodactylus.fcp.StartedCompression; -import net.pterodactylus.fcp.URIGenerated; -import net.pterodactylus.fcp.Verbosity; import 
freenet.copied.Base64; import freenet.copied.SHA256; import freenet.library.Priority; @@ -31,9 +17,9 @@ import freenet.library.util.exec.TaskAbortException; -public class FcpArchiver +public class FcpArchiver + extends FileClientPutter implements LiveArchiver { - private FcpConnection connection; private File cacheDir; private ObjectStreamReader reader; private ObjectStreamWriter writer; @@ -41,21 +27,13 @@ public class FcpArchiver private int size; private Priority priorityLevel; - /** - * Before synchronizing on stillRunning, be sure to synchronize - * connection! - */ - private Map stillRunning = - new HashMap(); - private Thread cleanupThread; - public FcpArchiver(FcpConnection fcpConnection, File directory, S rw, String mime, int s, Priority pl) { - connection = fcpConnection; + super(fcpConnection); cacheDir = directory; reader = rw; writer = rw; @@ -64,7 +42,8 @@ public FcpArchiver(FcpConnection fcpConnection, priorityLevel = pl; } - private net.pterodactylus.fcp.Priority getPriority() { + @Override + protected net.pterodactylus.fcp.Priority getPriority() { switch (priorityLevel) { case Interactive: return net.pterodactylus.fcp.Priority.interactive; @@ -129,163 +108,6 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, " in the cache."); } - private class PushAdapter extends FcpAdapter { - private ClientPut putter; - private String identifier; - private String token; - private String uri; - private int progressTotal; - private int progressCompleted; - private boolean done; - - public PushAdapter(ClientPut p, String i, String t) { - putter = p; - identifier = i; - token = t; - uri = null; - progressTotal = 0; - progressCompleted = 0; - synchronized (stillRunning) { - stillRunning.put(token, this); - printLeft(); - } - } - - /** - * Show the amount of outstanding work. 
- */ - void printLeft() { - int total = 0; - int completed = 0; - synchronized (stillRunning) { - for (Map.Entry entry : stillRunning.entrySet()) { - total += entry.getValue().progressTotal; - completed += entry.getValue().progressCompleted; - } - System.out.println("Outstanding " + stillRunning.size() + " jobs " + - "(" + completed + "/" + total + ")"); - } - } - - @Override - public void receivedPutSuccessful(FcpConnection c, PutSuccessful ps) { - assert c == connection; - assert ps != null; - if (!identifier.equals(ps.getIdentifier())) - return; - System.out.println("receivedPutSuccessful for " + token); - markDone(); - } - - @Override - public void receivedPutFetchable(FcpConnection c, PutFetchable pf) { - assert c == connection; - assert pf != null; - if (!identifier.equals(pf.getIdentifier())) - return; - System.out.println("receivedPutFetchable for " + token); - synchronized (this) { - this.notifyAll(); - } - } - - - @Override - public void receivedPutFailed(FcpConnection c, PutFailed pf) { - assert c == connection; - assert pf != null; - if (!identifier.equals(pf.getIdentifier())) - return; - synchronized (putter) { - putter.notify(); - } - System.out.println("receivedPutFailed for " + token); - markDone(); - } - - @Override - public void receivedSimpleProgress(FcpConnection c, - net.pterodactylus.fcp.SimpleProgress sp) { - assert c == connection; - assert sp != null; - if (!identifier.equals(sp.getIdentifier())) - return; - if (sp.getFailed() > 0 || - sp.getFatallyFailed() > 0) { - System.out.println(token + "failed - aborted."); - markDone(); - } - progressCompleted = sp.getSucceeded(); - progressTotal = sp.getTotal(); - System.out.println("receivedSimpleProgess for " + token + ": " + - sp.getSucceeded() + "/" + sp.getTotal()); - printLeft(); - } - - @Override - public void receivedStartedCompression(FcpConnection c, - StartedCompression startedCompression) { - assert c == connection; - assert startedCompression != null; - if 
(!identifier.equals(startedCompression.getIdentifier())) - return; - System.out.println("receivedStartedCompression for " + token); - } - - @Override - public void receviedFinishedCompression(FcpConnection c, - FinishedCompression finishedCompression) { - assert c == connection; - assert finishedCompression != null; - if (!identifier.equals(finishedCompression.getIdentifier())) - return; - System.out.println("receivedFinishedCompression for " + token); - } - - public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { - assert c == connection; - assert uriGenerated != null; - if (!identifier.equals(uriGenerated.getIdentifier())) - return; - System.out.println("receivedURIGenerated for " + token); - uri = uriGenerated.getURI(); - synchronized (this) { - this.notifyAll(); - } - } - - private void markDone() { - done = true; - synchronized (this) { - this.notifyAll(); - } - // Signal to the cleanup thread: - synchronized (stillRunning) { - stillRunning.notifyAll(); - } - } - - private void forgetAboutThis() { - assert done; - connection.removeFcpListener(this); - synchronized (stillRunning) { - stillRunning.remove(token); - stillRunning.notifyAll(); - printLeft(); - } - } - - boolean isDone() { - return done; - } - - String getURI() { - return uri; - } - }; - - - private static int counter = 1; @Override public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, @@ -293,16 +115,10 @@ public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, if (connection == null) { throw new IllegalArgumentException("No connection."); } - final String identifier = "FcpArchiver" + counter; + final String token = "FcpArchiverPushLive" + counter; - counter++; - final ClientPut putter = new ClientPut("CHK@", identifier); - putter.setClientToken(token); - putter.setEarlyEncode(true); - putter.setPriority(getPriority()); - putter.setVerbosity(Verbosity.ALL); - - // Writing to file. + + // Writing to file. 
File file = new File(cacheDir, token); FileOutputStream fileOut = null; try { @@ -317,31 +133,19 @@ public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, throw new TaskAbortException("Cannot close file " + file, e); } } - - final long dataLength = file.length(); - putter.setDataLength(dataLength); - - FileInputStream in; + + PushAdapter putterListener; try { - in = new FileInputStream(file); - } catch (FileNotFoundException e) { - throw new TaskAbortException("Cannot read from file " + file, e); + putterListener = startFileUpload(token, file); + } catch (IOException e1) { + throw new TaskAbortException("Cannot start upload of file " + file, e1); } - putter.setPayloadInputStream(in); - PushAdapter putterListener = new PushAdapter(putter, identifier, token); - connection.addFcpListener(putterListener); - try { - if (progress != null) { - progress.addPartKnown(1, true); - } - connection.sendMessage(putter); - in.close(); - } catch (IOException e) { - throw new TaskAbortException("Cannot send message", e); - } + if (progress != null) { + progress.addPartKnown(1, true); + } - // Wait for identifier + // Wait for identifier synchronized (putterListener) { while (putterListener.getURI() == null) { try { @@ -363,47 +167,6 @@ public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, startCleanupThread(); } - private synchronized void startCleanupThread() { - if (cleanupThread == null) { - cleanupThread = new Thread( - new Runnable() { - public void run () { - boolean moreJobs = false; - do { - if (moreJobs) { - synchronized (stillRunning) { - try { - stillRunning.wait(); - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - Set copy; - synchronized (stillRunning) { - copy = new HashSet(stillRunning.values()); - } - for (PushAdapter pa : copy) { - if (pa.isDone()) { - pa.forgetAboutThis(); - } - } - } - synchronized (stillRunning) { - moreJobs = !stillRunning.isEmpty(); - } - } while 
(moreJobs); - removeCleanupThread(); - } - } - ); - cleanupThread.start(); - } - } - - private synchronized void removeCleanupThread() { - cleanupThread = null; - } @Override public void waitForAsyncInserts() throws TaskAbortException { diff --git a/uploader/src/freenet/library/uploader/FileClientPutter.java b/uploader/src/freenet/library/uploader/FileClientPutter.java new file mode 100644 index 00000000..91df1269 --- /dev/null +++ b/uploader/src/freenet/library/uploader/FileClientPutter.java @@ -0,0 +1,266 @@ +package freenet.library.uploader; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import net.pterodactylus.fcp.ClientPut; +import net.pterodactylus.fcp.FcpAdapter; +import net.pterodactylus.fcp.FcpConnection; +import net.pterodactylus.fcp.FinishedCompression; +import net.pterodactylus.fcp.PutFailed; +import net.pterodactylus.fcp.PutFetchable; +import net.pterodactylus.fcp.PutSuccessful; +import net.pterodactylus.fcp.StartedCompression; +import net.pterodactylus.fcp.URIGenerated; +import net.pterodactylus.fcp.Verbosity; + +class FileClientPutter { + protected FcpConnection connection; + + /** + * Before synchronizing on stillRunning, be sure to synchronize + * connection! 
+ */ + protected Map stillRunning = + new HashMap(); + private Thread cleanupThread; + + FileClientPutter(FcpConnection fcpConnection) { + connection = fcpConnection; + } + + protected net.pterodactylus.fcp.Priority getPriority() { + return net.pterodactylus.fcp.Priority.bulkSplitfile; + } + + protected class PushAdapter extends FcpAdapter { + private ClientPut putter; + private String identifier; + private String token; + private String uri; + private int progressTotal; + private int progressCompleted; + private boolean done; + + public PushAdapter(ClientPut p, String i, String t) { + putter = p; + identifier = i; + token = t; + uri = null; + progressTotal = 0; + progressCompleted = 0; + synchronized (stillRunning) { + stillRunning.put(token, this); + printLeft(); + } + } + + /** + * Show the amount of outstanding work. + */ + void printLeft() { + int total = 0; + int completed = 0; + synchronized (stillRunning) { + for (Map.Entry entry : stillRunning.entrySet()) { + total += entry.getValue().progressTotal; + completed += entry.getValue().progressCompleted; + } + System.out.println("Outstanding " + stillRunning.size() + " jobs " + + "(" + completed + "/" + total + ")"); + } + } + + @Override + public void receivedPutSuccessful(FcpConnection c, PutSuccessful ps) { + assert c == connection; + assert ps != null; + if (!identifier.equals(ps.getIdentifier())) + return; + System.out.println("receivedPutSuccessful for " + token); + markDone(); + } + + @Override + public void receivedPutFetchable(FcpConnection c, PutFetchable pf) { + assert c == connection; + assert pf != null; + if (!identifier.equals(pf.getIdentifier())) + return; + System.out.println("receivedPutFetchable for " + token); + synchronized (this) { + this.notifyAll(); + } + } + + + @Override + public void receivedPutFailed(FcpConnection c, PutFailed pf) { + assert c == connection; + assert pf != null; + if (!identifier.equals(pf.getIdentifier())) + return; + synchronized (putter) { + putter.notify(); + } + 
System.out.println("receivedPutFailed for " + token); + System.exit(1); + markDone(); + } + + @Override + public void receivedSimpleProgress(FcpConnection c, + net.pterodactylus.fcp.SimpleProgress sp) { + assert c == connection; + assert sp != null; + if (!identifier.equals(sp.getIdentifier())) + return; + if (sp.getFailed() > 0 || + sp.getFatallyFailed() > 0) { + System.out.println(token + " failed - aborted."); + markDone(); + } + progressCompleted = sp.getSucceeded(); + progressTotal = sp.getTotal(); + System.out.println("receivedSimpleProgess for " + token + ": " + + sp.getSucceeded() + "/" + sp.getTotal()); + printLeft(); + } + + @Override + public void receivedStartedCompression(FcpConnection c, + StartedCompression startedCompression) { + assert c == connection; + assert startedCompression != null; + if (!identifier.equals(startedCompression.getIdentifier())) + return; + System.out.println("receivedStartedCompression for " + token); + } + + @Override + public void receviedFinishedCompression(FcpConnection c, + FinishedCompression finishedCompression) { + assert c == connection; + assert finishedCompression != null; + if (!identifier.equals(finishedCompression.getIdentifier())) + return; + System.out.println("receivedFinishedCompression for " + token); + } + + public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { + assert c == connection; + assert uriGenerated != null; + if (!identifier.equals(uriGenerated.getIdentifier())) + return; + System.out.println("receivedURIGenerated for " + token); + uri = uriGenerated.getURI(); + synchronized (this) { + this.notifyAll(); + } + } + + private void markDone() { + done = true; + synchronized (this) { + this.notifyAll(); + } + // Signal to the cleanup thread: + synchronized (stillRunning) { + stillRunning.notifyAll(); + } + } + + private void forgetAboutThis() { + assert done; + connection.removeFcpListener(this); + synchronized (stillRunning) { + stillRunning.remove(token); + 
stillRunning.notifyAll(); + printLeft(); + } + } + + boolean isDone() { + return done; + } + + String getURI() { + return uri; + } + }; + + + protected static int counter = 1; + + PushAdapter startFileUpload(final String token, File file) + throws IOException { + final String identifier = "FcpArchiver" + counter; + counter++; + final ClientPut putter = new ClientPut("CHK@", identifier); + putter.setClientToken(token); + putter.setEarlyEncode(true); + putter.setPriority(getPriority()); + putter.setVerbosity(Verbosity.ALL); + + final long dataLength = file.length(); + putter.setDataLength(dataLength); + + FileInputStream in = new FileInputStream(file); + putter.setPayloadInputStream(in); + + PushAdapter putterListener = new PushAdapter(putter, identifier, token); + connection.addFcpListener(putterListener); + + connection.sendMessage(putter); + in.close(); + return putterListener; + } + + protected synchronized void startCleanupThread() { + if (cleanupThread == null) { + cleanupThread = new Thread( + new Runnable() { + public void run () { + boolean moreJobs = false; + do { + if (moreJobs) { + synchronized (stillRunning) { + try { + stillRunning.wait(); + } catch (InterruptedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + Set copy; + synchronized (stillRunning) { + copy = new HashSet(stillRunning.values()); + } + for (PushAdapter pa : copy) { + if (pa.isDone()) { + pa.forgetAboutThis(); + } + } + } + synchronized (stillRunning) { + moreJobs = !stillRunning.isEmpty(); + } + } while (moreJobs); + removeCleanupThread(); + } + } + ); + cleanupThread.start(); + } + } + + private synchronized void removeCleanupThread() { + cleanupThread = null; + } + +} From 60731c0b26b52fc67a7d5dcd37acdff01603041e Mon Sep 17 00:00:00 2001 From: anonymous Date: Wed, 6 May 2015 20:11:24 +0000 Subject: [PATCH 064/180] Moved the cleanup thread entirely into the FileClientPutter. Added explicit exceptions in some exotic cases. 
--HG-- branch : eclipse-separation --- .../src/freenet/library/uploader/FcpArchiver.java | 10 +++++++--- .../freenet/library/uploader/FileClientPutter.java | 13 +++++++++++-- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index 7e1cb2ce..0b12bfaa 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -78,6 +78,8 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, cacheKey = (String) task.meta; } else if (task.meta instanceof byte[]) { cacheKey = Base64.encode(SHA256.digest((byte[]) task.meta)); + } else { + throw new IllegalArgumentException("Cannot understand task.meta: " + task.meta); } try { @@ -89,7 +91,11 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, InputStream is = new FileInputStream(cached); task.data = (T) reader.readObject(is); is.close(); + } else { + throw new RuntimeException("Failed to read content from " + cached); } + } else { + throw new RuntimeException("Failed to read content from " + cacheDir); } if (progress != null) { @@ -98,7 +104,7 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, } catch (IOException e) { System.out.println("IOException:"); e.printStackTrace(); - throw new TaskAbortException("Failed to read content from local tempbucket", e, true); + throw new TaskAbortException("Failed to read content from " + cacheKey, e, true); } return; } @@ -163,8 +169,6 @@ public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, // Moving file. 
file.renameTo(new File(cacheDir, putterListener.getURI())); - - startCleanupThread(); } diff --git a/uploader/src/freenet/library/uploader/FileClientPutter.java b/uploader/src/freenet/library/uploader/FileClientPutter.java index 91df1269..a37c57bb 100644 --- a/uploader/src/freenet/library/uploader/FileClientPutter.java +++ b/uploader/src/freenet/library/uploader/FileClientPutter.java @@ -37,6 +37,12 @@ class FileClientPutter { protected net.pterodactylus.fcp.Priority getPriority() { return net.pterodactylus.fcp.Priority.bulkSplitfile; } + + int getQueuedSize() { + synchronized (stillRunning) { + return stillRunning.size(); + } + } protected class PushAdapter extends FcpAdapter { private ClientPut putter; @@ -71,7 +77,7 @@ void printLeft() { total += entry.getValue().progressTotal; completed += entry.getValue().progressCompleted; } - System.out.println("Outstanding " + stillRunning.size() + " jobs " + + System.out.println("Outstanding " + stillRunning.size() + " ClientPut jobs " + "(" + completed + "/" + total + ")"); } } @@ -218,10 +224,13 @@ PushAdapter startFileUpload(final String token, File file) connection.sendMessage(putter); in.close(); + + startCleanupThread(); + return putterListener; } - protected synchronized void startCleanupThread() { + private synchronized void startCleanupThread() { if (cleanupThread == null) { cleanupThread = new Thread( new Runnable() { From 94cf0376a89609a05e2261ca4e34029ef993b29e Mon Sep 17 00:00:00 2001 From: anonymous Date: Wed, 6 May 2015 21:28:16 +0000 Subject: [PATCH 065/180] Reverted back to 1148 since it didn't work. 
--HG-- branch : eclipse-separation --- .../freenet/library/uploader/FcpArchiver.java | 281 ++++++++++++++++-- .../library/uploader/FileClientPutter.java | 275 ----------------- 2 files changed, 257 insertions(+), 299 deletions(-) delete mode 100644 uploader/src/freenet/library/uploader/FileClientPutter.java diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index 0b12bfaa..de74080a 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -2,11 +2,25 @@ import java.io.File; import java.io.FileInputStream; +import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import net.pterodactylus.fcp.ClientPut; +import net.pterodactylus.fcp.FcpAdapter; import net.pterodactylus.fcp.FcpConnection; +import net.pterodactylus.fcp.FinishedCompression; +import net.pterodactylus.fcp.PutFailed; +import net.pterodactylus.fcp.PutFetchable; +import net.pterodactylus.fcp.PutSuccessful; +import net.pterodactylus.fcp.StartedCompression; +import net.pterodactylus.fcp.URIGenerated; +import net.pterodactylus.fcp.Verbosity; import freenet.copied.Base64; import freenet.copied.SHA256; import freenet.library.Priority; @@ -17,9 +31,9 @@ import freenet.library.util.exec.TaskAbortException; -public class FcpArchiver - extends FileClientPutter +public class FcpArchiver implements LiveArchiver { + private FcpConnection connection; private File cacheDir; private ObjectStreamReader reader; private ObjectStreamWriter writer; @@ -27,13 +41,21 @@ public class FcpArchiver private int size; private Priority priorityLevel; + /** + * Before synchronizing on stillRunning, be sure to synchronize + * connection! 
+ */ + private Map stillRunning = + new HashMap(); + private Thread cleanupThread; + public FcpArchiver(FcpConnection fcpConnection, File directory, S rw, String mime, int s, Priority pl) { - super(fcpConnection); + connection = fcpConnection; cacheDir = directory; reader = rw; writer = rw; @@ -42,8 +64,7 @@ public FcpArchiver(FcpConnection fcpConnection, priorityLevel = pl; } - @Override - protected net.pterodactylus.fcp.Priority getPriority() { + private net.pterodactylus.fcp.Priority getPriority() { switch (priorityLevel) { case Interactive: return net.pterodactylus.fcp.Priority.interactive; @@ -78,8 +99,6 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, cacheKey = (String) task.meta; } else if (task.meta instanceof byte[]) { cacheKey = Base64.encode(SHA256.digest((byte[]) task.meta)); - } else { - throw new IllegalArgumentException("Cannot understand task.meta: " + task.meta); } try { @@ -91,11 +110,7 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, InputStream is = new FileInputStream(cached); task.data = (T) reader.readObject(is); is.close(); - } else { - throw new RuntimeException("Failed to read content from " + cached); } - } else { - throw new RuntimeException("Failed to read content from " + cacheDir); } if (progress != null) { @@ -104,7 +119,7 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, } catch (IOException e) { System.out.println("IOException:"); e.printStackTrace(); - throw new TaskAbortException("Failed to read content from " + cacheKey, e, true); + throw new TaskAbortException("Failed to read content from local tempbucket", e, true); } return; } @@ -114,6 +129,163 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, " in the cache."); } + private class PushAdapter extends FcpAdapter { + private ClientPut putter; + private String identifier; + private String token; + private String uri; + private int progressTotal; + private int 
progressCompleted; + private boolean done; + + public PushAdapter(ClientPut p, String i, String t) { + putter = p; + identifier = i; + token = t; + uri = null; + progressTotal = 0; + progressCompleted = 0; + synchronized (stillRunning) { + stillRunning.put(token, this); + printLeft(); + } + } + + /** + * Show the amount of outstanding work. + */ + void printLeft() { + int total = 0; + int completed = 0; + synchronized (stillRunning) { + for (Map.Entry entry : stillRunning.entrySet()) { + total += entry.getValue().progressTotal; + completed += entry.getValue().progressCompleted; + } + System.out.println("Outstanding " + stillRunning.size() + " jobs " + + "(" + completed + "/" + total + ")"); + } + } + + @Override + public void receivedPutSuccessful(FcpConnection c, PutSuccessful ps) { + assert c == connection; + assert ps != null; + if (!identifier.equals(ps.getIdentifier())) + return; + System.out.println("receivedPutSuccessful for " + token); + markDone(); + } + + @Override + public void receivedPutFetchable(FcpConnection c, PutFetchable pf) { + assert c == connection; + assert pf != null; + if (!identifier.equals(pf.getIdentifier())) + return; + System.out.println("receivedPutFetchable for " + token); + synchronized (this) { + this.notifyAll(); + } + } + + + @Override + public void receivedPutFailed(FcpConnection c, PutFailed pf) { + assert c == connection; + assert pf != null; + if (!identifier.equals(pf.getIdentifier())) + return; + synchronized (putter) { + putter.notify(); + } + System.out.println("receivedPutFailed for " + token); + markDone(); + } + + @Override + public void receivedSimpleProgress(FcpConnection c, + net.pterodactylus.fcp.SimpleProgress sp) { + assert c == connection; + assert sp != null; + if (!identifier.equals(sp.getIdentifier())) + return; + if (sp.getFailed() > 0 || + sp.getFatallyFailed() > 0) { + System.out.println(token + "failed - aborted."); + markDone(); + } + progressCompleted = sp.getSucceeded(); + progressTotal = sp.getTotal(); 
+ System.out.println("receivedSimpleProgess for " + token + ": " + + sp.getSucceeded() + "/" + sp.getTotal()); + printLeft(); + } + + @Override + public void receivedStartedCompression(FcpConnection c, + StartedCompression startedCompression) { + assert c == connection; + assert startedCompression != null; + if (!identifier.equals(startedCompression.getIdentifier())) + return; + System.out.println("receivedStartedCompression for " + token); + } + + @Override + public void receviedFinishedCompression(FcpConnection c, + FinishedCompression finishedCompression) { + assert c == connection; + assert finishedCompression != null; + if (!identifier.equals(finishedCompression.getIdentifier())) + return; + System.out.println("receivedFinishedCompression for " + token); + } + + public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { + assert c == connection; + assert uriGenerated != null; + if (!identifier.equals(uriGenerated.getIdentifier())) + return; + System.out.println("receivedURIGenerated for " + token); + uri = uriGenerated.getURI(); + synchronized (this) { + this.notifyAll(); + } + } + + private void markDone() { + done = true; + synchronized (this) { + this.notifyAll(); + } + // Signal to the cleanup thread: + synchronized (stillRunning) { + stillRunning.notifyAll(); + } + } + + private void forgetAboutThis() { + assert done; + connection.removeFcpListener(this); + synchronized (stillRunning) { + stillRunning.remove(token); + stillRunning.notifyAll(); + printLeft(); + } + } + + boolean isDone() { + return done; + } + + String getURI() { + return uri; + } + }; + + + private static int counter = 1; @Override public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, @@ -121,10 +293,16 @@ public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, if (connection == null) { throw new IllegalArgumentException("No connection."); } - + final String identifier = "FcpArchiver" + counter; final String token = 
"FcpArchiverPushLive" + counter; - - // Writing to file. + counter++; + final ClientPut putter = new ClientPut("CHK@", identifier); + putter.setClientToken(token); + putter.setEarlyEncode(true); + putter.setPriority(getPriority()); + putter.setVerbosity(Verbosity.ALL); + + // Writing to file. File file = new File(cacheDir, token); FileOutputStream fileOut = null; try { @@ -139,19 +317,31 @@ public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, throw new TaskAbortException("Cannot close file " + file, e); } } - - PushAdapter putterListener; + + final long dataLength = file.length(); + putter.setDataLength(dataLength); + + FileInputStream in; try { - putterListener = startFileUpload(token, file); - } catch (IOException e1) { - throw new TaskAbortException("Cannot start upload of file " + file, e1); + in = new FileInputStream(file); + } catch (FileNotFoundException e) { + throw new TaskAbortException("Cannot read from file " + file, e); } + putter.setPayloadInputStream(in); - if (progress != null) { - progress.addPartKnown(1, true); - } + PushAdapter putterListener = new PushAdapter(putter, identifier, token); + connection.addFcpListener(putterListener); + try { + if (progress != null) { + progress.addPartKnown(1, true); + } + connection.sendMessage(putter); + in.close(); + } catch (IOException e) { + throw new TaskAbortException("Cannot send message", e); + } - // Wait for identifier + // Wait for identifier synchronized (putterListener) { while (putterListener.getURI() == null) { try { @@ -169,8 +359,51 @@ public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, // Moving file. 
file.renameTo(new File(cacheDir, putterListener.getURI())); + + startCleanupThread(); } + private synchronized void startCleanupThread() { + if (cleanupThread == null) { + cleanupThread = new Thread( + new Runnable() { + public void run () { + boolean moreJobs = false; + do { + if (moreJobs) { + synchronized (stillRunning) { + try { + stillRunning.wait(); + } catch (InterruptedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + Set copy; + synchronized (stillRunning) { + copy = new HashSet(stillRunning.values()); + } + for (PushAdapter pa : copy) { + if (pa.isDone()) { + pa.forgetAboutThis(); + } + } + } + synchronized (stillRunning) { + moreJobs = !stillRunning.isEmpty(); + } + } while (moreJobs); + removeCleanupThread(); + } + } + ); + cleanupThread.start(); + } + } + + private synchronized void removeCleanupThread() { + cleanupThread = null; + } @Override public void waitForAsyncInserts() throws TaskAbortException { diff --git a/uploader/src/freenet/library/uploader/FileClientPutter.java b/uploader/src/freenet/library/uploader/FileClientPutter.java deleted file mode 100644 index a37c57bb..00000000 --- a/uploader/src/freenet/library/uploader/FileClientPutter.java +++ /dev/null @@ -1,275 +0,0 @@ -package freenet.library.uploader; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import net.pterodactylus.fcp.ClientPut; -import net.pterodactylus.fcp.FcpAdapter; -import net.pterodactylus.fcp.FcpConnection; -import net.pterodactylus.fcp.FinishedCompression; -import net.pterodactylus.fcp.PutFailed; -import net.pterodactylus.fcp.PutFetchable; -import net.pterodactylus.fcp.PutSuccessful; -import net.pterodactylus.fcp.StartedCompression; -import net.pterodactylus.fcp.URIGenerated; -import net.pterodactylus.fcp.Verbosity; - -class FileClientPutter { - protected FcpConnection connection; - - /** - * 
Before synchronizing on stillRunning, be sure to synchronize - * connection! - */ - protected Map stillRunning = - new HashMap(); - private Thread cleanupThread; - - FileClientPutter(FcpConnection fcpConnection) { - connection = fcpConnection; - } - - protected net.pterodactylus.fcp.Priority getPriority() { - return net.pterodactylus.fcp.Priority.bulkSplitfile; - } - - int getQueuedSize() { - synchronized (stillRunning) { - return stillRunning.size(); - } - } - - protected class PushAdapter extends FcpAdapter { - private ClientPut putter; - private String identifier; - private String token; - private String uri; - private int progressTotal; - private int progressCompleted; - private boolean done; - - public PushAdapter(ClientPut p, String i, String t) { - putter = p; - identifier = i; - token = t; - uri = null; - progressTotal = 0; - progressCompleted = 0; - synchronized (stillRunning) { - stillRunning.put(token, this); - printLeft(); - } - } - - /** - * Show the amount of outstanding work. 
- */ - void printLeft() { - int total = 0; - int completed = 0; - synchronized (stillRunning) { - for (Map.Entry entry : stillRunning.entrySet()) { - total += entry.getValue().progressTotal; - completed += entry.getValue().progressCompleted; - } - System.out.println("Outstanding " + stillRunning.size() + " ClientPut jobs " + - "(" + completed + "/" + total + ")"); - } - } - - @Override - public void receivedPutSuccessful(FcpConnection c, PutSuccessful ps) { - assert c == connection; - assert ps != null; - if (!identifier.equals(ps.getIdentifier())) - return; - System.out.println("receivedPutSuccessful for " + token); - markDone(); - } - - @Override - public void receivedPutFetchable(FcpConnection c, PutFetchable pf) { - assert c == connection; - assert pf != null; - if (!identifier.equals(pf.getIdentifier())) - return; - System.out.println("receivedPutFetchable for " + token); - synchronized (this) { - this.notifyAll(); - } - } - - - @Override - public void receivedPutFailed(FcpConnection c, PutFailed pf) { - assert c == connection; - assert pf != null; - if (!identifier.equals(pf.getIdentifier())) - return; - synchronized (putter) { - putter.notify(); - } - System.out.println("receivedPutFailed for " + token); - System.exit(1); - markDone(); - } - - @Override - public void receivedSimpleProgress(FcpConnection c, - net.pterodactylus.fcp.SimpleProgress sp) { - assert c == connection; - assert sp != null; - if (!identifier.equals(sp.getIdentifier())) - return; - if (sp.getFailed() > 0 || - sp.getFatallyFailed() > 0) { - System.out.println(token + " failed - aborted."); - markDone(); - } - progressCompleted = sp.getSucceeded(); - progressTotal = sp.getTotal(); - System.out.println("receivedSimpleProgess for " + token + ": " + - sp.getSucceeded() + "/" + sp.getTotal()); - printLeft(); - } - - @Override - public void receivedStartedCompression(FcpConnection c, - StartedCompression startedCompression) { - assert c == connection; - assert startedCompression != null; - if 
(!identifier.equals(startedCompression.getIdentifier())) - return; - System.out.println("receivedStartedCompression for " + token); - } - - @Override - public void receviedFinishedCompression(FcpConnection c, - FinishedCompression finishedCompression) { - assert c == connection; - assert finishedCompression != null; - if (!identifier.equals(finishedCompression.getIdentifier())) - return; - System.out.println("receivedFinishedCompression for " + token); - } - - public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { - assert c == connection; - assert uriGenerated != null; - if (!identifier.equals(uriGenerated.getIdentifier())) - return; - System.out.println("receivedURIGenerated for " + token); - uri = uriGenerated.getURI(); - synchronized (this) { - this.notifyAll(); - } - } - - private void markDone() { - done = true; - synchronized (this) { - this.notifyAll(); - } - // Signal to the cleanup thread: - synchronized (stillRunning) { - stillRunning.notifyAll(); - } - } - - private void forgetAboutThis() { - assert done; - connection.removeFcpListener(this); - synchronized (stillRunning) { - stillRunning.remove(token); - stillRunning.notifyAll(); - printLeft(); - } - } - - boolean isDone() { - return done; - } - - String getURI() { - return uri; - } - }; - - - protected static int counter = 1; - - PushAdapter startFileUpload(final String token, File file) - throws IOException { - final String identifier = "FcpArchiver" + counter; - counter++; - final ClientPut putter = new ClientPut("CHK@", identifier); - putter.setClientToken(token); - putter.setEarlyEncode(true); - putter.setPriority(getPriority()); - putter.setVerbosity(Verbosity.ALL); - - final long dataLength = file.length(); - putter.setDataLength(dataLength); - - FileInputStream in = new FileInputStream(file); - putter.setPayloadInputStream(in); - - PushAdapter putterListener = new PushAdapter(putter, identifier, token); - connection.addFcpListener(putterListener); - - 
connection.sendMessage(putter); - in.close(); - - startCleanupThread(); - - return putterListener; - } - - private synchronized void startCleanupThread() { - if (cleanupThread == null) { - cleanupThread = new Thread( - new Runnable() { - public void run () { - boolean moreJobs = false; - do { - if (moreJobs) { - synchronized (stillRunning) { - try { - stillRunning.wait(); - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - Set copy; - synchronized (stillRunning) { - copy = new HashSet(stillRunning.values()); - } - for (PushAdapter pa : copy) { - if (pa.isDone()) { - pa.forgetAboutThis(); - } - } - } - synchronized (stillRunning) { - moreJobs = !stillRunning.isEmpty(); - } - } while (moreJobs); - removeCleanupThread(); - } - } - ); - cleanupThread.start(); - } - } - - private synchronized void removeCleanupThread() { - cleanupThread = null; - } - -} From 36f4017f2a52e93e7833d6644febb6b602ac49e9 Mon Sep 17 00:00:00 2001 From: anonymous Date: Sun, 24 May 2015 07:55:54 +0000 Subject: [PATCH 066/180] Initial revision of an index downloader. 
--HG-- branch : eclipse-separation --- .../freenet/library/uploader/DownloadAll.java | 803 ++++++++++++++++++ 1 file changed, 803 insertions(+) create mode 100644 uploader/src/freenet/library/uploader/DownloadAll.java diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java new file mode 100644 index 00000000..ae735c1f --- /dev/null +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -0,0 +1,803 @@ +/* + */ + +package freenet.library.uploader; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Formatter; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Random; +import java.util.Set; +import java.util.WeakHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingQueue; + +import net.pterodactylus.fcp.AllData; +import net.pterodactylus.fcp.ClientGet; +import net.pterodactylus.fcp.ClientPut; +import net.pterodactylus.fcp.FcpAdapter; +import net.pterodactylus.fcp.FcpConnection; +import net.pterodactylus.fcp.GetFailed; +import net.pterodactylus.fcp.Priority; +import net.pterodactylus.fcp.PutSuccessful; +import net.pterodactylus.fcp.SubscribeUSK; +import net.pterodactylus.fcp.SubscribedUSKUpdate; +import net.pterodactylus.fcp.URIGenerated; +import net.pterodactylus.fcp.Verbosity; +import freenet.library.io.YamlReaderWriter; +import freenet.library.io.serial.Packer; +import freenet.library.io.serial.Packer.BinInfo; + +/** + * Class to download the entire index. 
+ */ +public class DownloadAll { + private static final int PARALLEL_JOBS = 10; + public final Map stillRunning = new HashMap(); + private String uri; + private String newUri; + private int edition; + private FcpConnection connection; + private static int getterCounter = 0; + private static int uploadCounter = 0; + private LinkedBlockingQueue objectQueue = + new LinkedBlockingQueue(); + private Thread cleanupThread; + private List roots = new ArrayList(); + + private ExecutorService uploadStarter = null; + private Map> ongoingUploads = null; + + private int successful = 0; + private int successfulBlocks = 0; + private long successfulBytes = 0; + private int failed = 0; + private int avoidFetching = 0; + + private Random rand = new Random(); + + public DownloadAll(String u) { + uri = u; + } + + public static class WeakHashSet + implements Set { + /** + * We just use the keys and let all values be TOKEN. + */ + private Map map = new WeakHashMap(); + private static Object TOKEN = new Object(); + + @Override + public boolean add(T arg0) { + if (map.containsKey(arg0)) { + return false; + } else { + map.put(arg0, TOKEN); + return true; + } + } + + @Override + public boolean addAll(Collection arg0) { + boolean retval = false; + for (T ele : arg0) { + if (add(ele)) { + retval = true; + } + } + return retval; + } + + @Override + public void clear() { + map.clear(); + } + + @Override + public boolean contains(Object arg0) { + return map.containsKey(arg0); + } + + @Override + public boolean containsAll(Collection arg0) { + for (Object ele : arg0) { + if (!contains(ele)) { + return false; + } + } + return true; + } + + @Override + public boolean isEmpty() { + return map.isEmpty(); + } + + @Override + public Iterator iterator() { + return map.keySet().iterator(); + } + + @Override + public boolean remove(Object arg0) { + return map.remove(arg0) != null; + } + + @Override + public boolean removeAll(Collection arg0) { + boolean retval = true; + for (Object ele : arg0) { + if 
(!remove(ele)) { + retval = false; + } + } + return retval; + } + + @Override + public boolean retainAll(Collection arg0) { + boolean retval = false; + for (T ele : map.keySet()) { + if (!arg0.contains(ele)) { + if (map.remove(ele) != null) { + retval = true; + } + } + } + return retval; + } + + @Override + public int size() { + return map.size(); + } + + @Override + public Object[] toArray() { + return map.keySet().toArray(); + } + + @Override + public T[] toArray(T[] arg0) { + return map.keySet().toArray(arg0); + } + + } + + /** + * A class to keep track of what pages are fetched and how they are related + * to other fetched pages. The purpose of this is to avoid fetching stuff + * related only to "old" editions. + */ + private static class FetchedPage { + /** + * This is really a Set but there is no WeakSet so we use the keys + * and let all values be TOKEN. + */ + private Set parents = new WeakHashSet(); + private Set children = new HashSet(); + + private String uri; + private boolean succeeded; + private boolean failed; + + FetchedPage(String u) { + uri = u; + } + + void addParent(FetchedPage fp) { + parents.add(fp); + } + + void addChild(FetchedPage fp) { + children.add(fp); + } + + FetchedPage newChild(String u) { + FetchedPage child = new FetchedPage(u); + child.addParent(this); + addChild(child); + return child; + } + + String getURI() { + return uri; + } + + String getPath() { + for (FetchedPage parent : parents) { + return parent.getPath() + "->" + uri; + } + return uri; + } + + boolean hasParent() { + return !parents.isEmpty(); + } + + int getTreeSize() { + int size = 1; + for (FetchedPage child : children) { + size += child.getTreeSize(); + } + return size; + } + + int getTreeSizeSucceeded() { + int size = succeeded ? 1 : 0; + for (FetchedPage child : children) { + size += child.getTreeSizeSucceeded(); + } + return size; + } + + int getTreeSizeFailed() { + int size = failed ? 
1 : 0; + for (FetchedPage child : children) { + size += child.getTreeSizeFailed(); + } + return size; + } + + void didFail() { + failed = true; + } + + void didSucceed() { + failed = false; + succeeded = true; + } + + public FetchedPage findUri(String u) { + if (u.equals(uri)) { + return this; + } + for (FetchedPage child : children) { + FetchedPage found = child.findUri(u); + if (found != null) { + return found; + } + } + return null; + } + } + + private class USKUpdateAdapter extends FcpAdapter { + + private boolean updated = false; + private Object subscriber; + + public USKUpdateAdapter(Object s) { + subscriber = s; + } + + @Override + public void receivedSubscribedUSKUpdate(FcpConnection fcpConnection, SubscribedUSKUpdate subscribedUSKUpdate) { + assert fcpConnection == connection; + if (subscribedUSKUpdate.getNewKnownGood()) { + updated = true; + newUri = subscribedUSKUpdate.getURI(); + edition = subscribedUSKUpdate.getEdition(); + synchronized (subscriber) { + subscriber.notify(); + } + } + } + + public void restart() { + if (updated) { + updated = false; + System.out.println("Found: " + newUri + " Edition: " + edition); + FetchedPage rootPage = new FetchedPage(newUri); + synchronized (roots) { + roots.add(rootPage); + while (roots.size() > 2) { + roots.remove(0); + } + } + new GetAdapter(rootPage.newChild(newUri)); + } + } + } + + private class GetAdapter extends FcpAdapter { + private ClientGet getter; + private String token; + private FetchedPage page; + private int progressTotal; + private int progressRequired; + private int progressCompleted; + private boolean done; + + public GetAdapter(FetchedPage u) { + page = u; + getterCounter ++; + token = "Getter" + getterCounter; + getter = new ClientGet(page.getURI(), token); + getter.setPriority(Priority.prefetch); + getter.setVerbosity(Verbosity.ALL); + + waitForSlot(); + connection.addFcpListener(this); + try { + connection.sendMessage(getter); + } catch (IOException e) { + e.printStackTrace(); + 
System.exit(1); + } + synchronized (stillRunning) { + stillRunning.put(page, this); + stillRunning.notifyAll(); + } + } + + /** + * Show the amount of outstanding work. + */ + void printLeft() { + int total = 0; + int required = 0; + int completed = 0; + synchronized (stillRunning) { + for (GetAdapter value : stillRunning.values()) { + total += value.progressTotal; + required += value.progressRequired; + completed += value.progressCompleted; + } + System.out.println("Outstanding " + stillRunning.size() + " ClientGet jobs " + + "(" + completed + "/" + required + "/" + total + ") "); + } + showProgress(); + } + + private void processUri(String uri) { + synchronized (roots) { + for (FetchedPage root : roots) { + FetchedPage foundChild = root.findUri(uri); + if (foundChild != null) { + page.addChild(foundChild); + foundChild.addParent(page); + return; + } + } + } + objectQueue.offer(page.newChild(uri)); + } + + @Override + public void receivedAllData(FcpConnection c, AllData ad) { + assert c == connection; + assert ad != null; + if (!token.equals(ad.getIdentifier())) { + return; + } + System.out.println("receivedAllData for " + token + + " adding to the " + objectQueue.size() + " elements in the queue."); + page.didSucceed(); + try { + try { + Map map = + (LinkedHashMap) + new YamlReaderWriter().readObject(ad.getPayloadInputStream()); + if (map.containsKey("ttab")) { + Map map2 = (Map) map.get("ttab"); + if (map2.containsKey("entries")) { + System.out.println("Contains ttab.entries"); + Map entries = + (Map) map2.get("entries"); + for (BinInfo value : entries.values()) { + try { + String u = (String) value.getID(); + processUri(u); + } catch (ClassCastException e) { + System.out.println("Cannot process " + value.getID()); + } + } + Map subnodes = + (Map) map2.get("subnodes"); + for (String key : subnodes.keySet()) { + processUri(key); + } + return; + } + } + if (map.containsKey("lkey") && + map.containsKey("rkey") && + map.containsKey("entries")) { + 
System.out.println("Contains entries"); + Map entries = + (Map) map.get("entries"); + for (BinInfo value : entries.values()) { + try { + String u = (String) value.getID(); + processUri(u); + } catch (ClassCastException e) { + System.out.println("Cannot process " + value.getID()); + } + } + return; + } + Entry entry = map.entrySet().iterator().next(); + if (entry.getValue() instanceof Map) { + Map map2 = (Map) entry.getValue(); + if (map2.containsKey("node_min") + && map2.containsKey("size") + && map2.containsKey("entries")) { + return; + } + } + System.out.println("Cannot understand contents: " + map); + System.exit(1); + } catch (IOException e) { + e.printStackTrace(); + System.exit(1); + } + } finally { + markDone(); + System.out.println("receivedAllData for " + token + " done."); + successful ++; + successfulBlocks += progressCompleted; + successfulBytes += ad.getDataLength(); + showProgress(); + } + } + + @Override + public void receivedGetFailed(FcpConnection c, GetFailed gf) { + assert c == connection; + assert gf != null; + if (!token.equals(gf.getIdentifier())) { + return; + } + synchronized (getter) { + getter.notify(); + } + System.out.println("receivedGetFailed for " + token + " (" + page + ")."); + // System.exit(1); + page.didFail(); + markDone(); + failed ++; + showProgress(); + upload(page.getURI(), new Runnable() { + public void run() { + objectQueue.offer(page); + } + }); + } + + /** + * We have detected that we cannot download a certain CHK. + * + * If we are running on the host where this CHK is actually cached, + * lets upload it from the cache in an attempt to repair. + * + * @param filename of the file to upload. 
+ */ + public boolean upload(final String filename, final Runnable callback) { + final File dir = new File(".", UploaderPaths.LIBRARY_CACHE); + if (!dir.canRead()) { + return false; + } + final File file = new File(dir, filename); + if (!file.canRead()) { + System.err.println("Cannot find " + file + " in the cache."); + return false; + } + if (uploadStarter == null) { + uploadStarter = Executors.newSingleThreadExecutor(); + uploadStarter.execute(new Runnable() { + public void run() { + connection.addFcpListener(new FcpAdapter() { + @Override + public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { + assert c == connection; + assert uriGenerated != null; + String identifier = uriGenerated.getIdentifier(); + String chk = ongoingUploads.get(identifier).getKey(); + if (!uriGenerated.getURI().equals(chk)) { + System.err.println("Were supposed to upload " + chk + + " but calculated to " + uriGenerated.getURI()); + System.exit(1); + } + } + + @Override + public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) { + assert c == connection; + assert putSuccessful != null; + String identifier = putSuccessful.getIdentifier(); + ongoingUploads.get(identifier).getValue().run(); + ongoingUploads.remove(identifier); + }; + }); + } + }); + ongoingUploads = new HashMap>(); + } + uploadStarter.execute(new Runnable() { + public void run() { + uploadCounter++; + final String identifier = "Upload" + uploadCounter; + ongoingUploads.put(identifier, new AbstractMap.SimpleImmutableEntry(filename, callback)); + final ClientPut putter = new ClientPut("CHK@", identifier); + putter.setEarlyEncode(true); + putter.setPriority(net.pterodactylus.fcp.Priority.bulkSplitfile); + putter.setVerbosity(Verbosity.NONE); + final long dataLength = file.length(); + putter.setDataLength(dataLength); + FileInputStream in; + try { + in = new FileInputStream(file); + putter.setPayloadInputStream(in); + connection.sendMessage(putter); + in.close(); + in = null; + } 
catch (IOException e) { + e.printStackTrace(); + System.err.println("Upload failed for " + file); + } + } + }); + return true; + } + + @Override + public void receivedSimpleProgress(FcpConnection c, + net.pterodactylus.fcp.SimpleProgress sp) { + assert c == connection; + assert sp != null; + if (!token.equals(sp.getIdentifier())) { + return; + } + progressTotal = sp.getTotal(); + progressRequired = sp.getRequired(); + progressCompleted = sp.getSucceeded(); + printLeft(); + } + + + private void markDone() { + done = true; + synchronized (this) { + this.notifyAll(); + } + // Signal to the cleanup thread: + synchronized (stillRunning) { + stillRunning.notifyAll(); + } + } + + private void forgetAboutThis() { + assert done; + connection.removeFcpListener(this); + synchronized (stillRunning) { + stillRunning.remove(page); + // Signal to the + stillRunning.notifyAll(); + printLeft(); + } + } + + boolean isDone() { + return done; + } + }; + + + public void doit() { + FcpSession session; + try { + session = new FcpSession("DownloaderFor" + uri); + } catch (IllegalStateException | IOException e1) { + e1.printStackTrace(); + return; + } + try { + connection = session.getConnection(); + if (connection == null) { + throw new IllegalArgumentException("No connection."); + } + final SubscribeUSK subscriber = new SubscribeUSK(uri + "-1", "USK"); + subscriber.setActive(true); + + final USKUpdateAdapter subscriberListener = new USKUpdateAdapter(subscriber); + connection.addFcpListener(subscriberListener); + + synchronized (subscriber) { + try { + connection.sendMessage(subscriber); + subscriber.wait(); + } catch (InterruptedException e) { + throw new RuntimeException("Waiting for connection interrupted."); + } catch (IOException e) { + throw new RuntimeException("Hello cannot write."); + } + } + subscriberListener.restart(); + + boolean moreJobs = false; + do { + if (moreJobs) { + synchronized (stillRunning) { + try { + System.out.println("Queue empty. 
" + + "Still running " + + stillRunning.size() + "."); + stillRunning.wait(20000); + } catch (InterruptedException e) { + e.printStackTrace(); + System.exit(1); + } + } + } + boolean empty = true; + do { + if (!empty) { + try { + FetchedPage taken = objectQueue.take(); + while (!taken.hasParent()) { + taken = null; + avoidFetching++; + if (objectQueue.isEmpty()) { + break; + } + taken = objectQueue.take(); + } + // Randomize the order by rotating the queue + int maxLaps = objectQueue.size() / PARALLEL_JOBS; + if (maxLaps == 0) { + maxLaps = 1; + } + int rotateLaps = rand.nextInt(maxLaps); + for (int i = 0; i < rotateLaps; i++) { + objectQueue.offer(taken); + taken = objectQueue.take(); + while (!taken.hasParent()) { + taken = null; + avoidFetching++; + if (objectQueue.isEmpty()) { + break; + } + taken = objectQueue.take(); + } + } + System.out.println("Rotated " + rotateLaps); + if (taken == null) { + break; + } + new GetAdapter(taken); + } catch (InterruptedException e) { + e.printStackTrace(); + System.exit(1); + } + } + subscriberListener.restart(); + empty = objectQueue.isEmpty(); + } while (!empty); + synchronized (stillRunning) { + moreJobs = !stillRunning.isEmpty(); + } + } while (moreJobs); + connection.removeFcpListener(subscriberListener); + } finally { + removeCleanupThread(); + session.close(); + connection = null; + } + showProgress(); + } + + + private void showProgress() { + System.out.println("Fetches: Successful: " + successful + + " blocks: " + successfulBlocks + + " bytes: " + successfulBytes + + " Failed: " + failed + + " Avoided: " + avoidFetching + "."); + + StringBuilder sb = new StringBuilder(); + synchronized (roots) { + for (FetchedPage root : roots) { + if (sb.length() > 0) { + sb.append(", "); + } + String edition = root.getURI().substring(uri.length()); + sb.append(edition); + int treeSize = root.getTreeSize(); + int succeeded = root.getTreeSizeSucceeded(); + int failed = root.getTreeSizeFailed(); + if (failed > 0) { + sb.append(new 
Formatter().format(" FAILED: %.2f%%.", 100.0 * failed / (failed + succeeded))); + } + sb.append(new Formatter().format(" Fetched: %.2f%%.", 100.0 * (failed + succeeded) / treeSize)); + sb.append(" ("); + sb.append(succeeded); + sb.append("/"); + sb.append(treeSize); + + if (failed > 0) { + sb.append(" (and "); + sb.append(failed); + sb.append(" failed)"); + } + + sb.append(")"); + } + } + + System.out.println("Editions: " + sb.toString()); + } + + public static void main(String[] argv) { + new DownloadAll(argv[1]).doit(); + } + + public void waitForSlot() { + startCleanupThread(); + synchronized (stillRunning) { + try { + while (stillRunning.size() >= PARALLEL_JOBS) { + stillRunning.wait(); + } + } catch (InterruptedException e) { + e.printStackTrace(); + System.exit(1); + } + } + } + + private synchronized void startCleanupThread() { + if (cleanupThread == null) { + cleanupThread = new Thread( + new Runnable() { + public void run () { + boolean moreJobs = false; + do { + if (moreJobs) { + synchronized (stillRunning) { + try { + stillRunning.wait(1234567); + } catch (InterruptedException e) { + e.printStackTrace(); + System.exit(1); + } + } + Set copy; + synchronized (stillRunning) { + copy = new HashSet(stillRunning.values()); + } + for (GetAdapter ga : copy) { + if (ga.isDone()) { + ga.forgetAboutThis(); + } + } + } + synchronized (stillRunning) { + moreJobs = !stillRunning.isEmpty(); + } + } while (moreJobs); + removeCleanupThread(); + } + } + ); + cleanupThread.start(); + } + } + + private synchronized void removeCleanupThread() { + cleanupThread = null; + } +} From 5298395d16122cd7bc8e6eef7d1cff2ca0f56b64 Mon Sep 17 00:00:00 2001 From: anonymous Date: Wed, 3 Jun 2015 05:31:52 +0000 Subject: [PATCH 067/180] Improved code quality. 
--HG-- branch : eclipse-separation --- .../freenet/library/uploader/FcpSession.java | 52 ++++++++++--------- 1 file changed, 27 insertions(+), 25 deletions(-) diff --git a/uploader/src/freenet/library/uploader/FcpSession.java b/uploader/src/freenet/library/uploader/FcpSession.java index 5f51632b..3d73b623 100644 --- a/uploader/src/freenet/library/uploader/FcpSession.java +++ b/uploader/src/freenet/library/uploader/FcpSession.java @@ -10,19 +10,19 @@ import net.pterodactylus.fcp.NodeHello; public class FcpSession { - - private FcpAdapter closeListener; - private FcpConnection connection; - private int exitStatus; + + private FcpAdapter closeListener; + private FcpConnection connection; + private int exitStatus; - public FcpSession() throws IllegalStateException, IOException { - this("SpiderMerger"); - } - - public FcpSession(final String clientName) throws IllegalStateException, IOException { - exitStatus = 0; + public FcpSession() throws IllegalStateException, IOException { + this("SpiderMerger"); + } + + public FcpSession(final String clientName) throws IllegalStateException, IOException { + exitStatus = 0; - closeListener = new FcpAdapter() { + closeListener = new FcpAdapter() { public void connectionClosed(FcpConnection fcpConnection, Throwable throwable) { System.out.println("Connection Closed - Aborting."); System.exit(1); @@ -57,27 +57,29 @@ public void receivedCloseConnectionDuplicateClientName(FcpConnection fcpConnecti exitStatus = 1; return; } finally { - connection.removeFcpListener(helloListener); + connection.removeFcpListener(helloListener); } } helloListener = null; System.out.println("Connected"); - } - - public void close() { - if (closeListener != null) { - connection.removeFcpListener(closeListener); - } + } + + public void close() { + if (closeListener != null) { + connection.removeFcpListener(closeListener); + closeListener = null; + } if (connection != null) { connection.close(); + connection = null; } - } + } - public FcpConnection 
getConnection() { - return connection; - } + public FcpConnection getConnection() { + return connection; + } - public int getStatus() { - return exitStatus; - } + public int getStatus() { + return exitStatus; + } } From 5809473f988b829b55c4e1e4417683a233c145df Mon Sep 17 00:00:00 2001 From: anonymous Date: Wed, 3 Jun 2015 05:36:52 +0000 Subject: [PATCH 068/180] New calculation of estimate of entire tree. Fixed command line argument parsing. --HG-- branch : eclipse-separation --- .../freenet/library/uploader/DownloadAll.java | 98 ++++++++++++++++--- 1 file changed, 84 insertions(+), 14 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index ae735c1f..33c788d8 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -191,11 +191,17 @@ private static class FetchedPage { private Set children = new HashSet(); private String uri; + int level; private boolean succeeded; private boolean failed; FetchedPage(String u) { + this(u, 0); + } + + FetchedPage(String u, int l) { uri = u; + level = l; } void addParent(FetchedPage fp) { @@ -207,7 +213,7 @@ void addChild(FetchedPage fp) { } FetchedPage newChild(String u) { - FetchedPage child = new FetchedPage(u); + FetchedPage child = new FetchedPage(u, level + 1); child.addParent(this); addChild(child); return child; @@ -235,6 +241,18 @@ int getTreeSize() { } return size; } + + void addPerLevel(Map result) { + if (!result.containsKey(level)) { + result.put(level, 0); + } + if (!succeeded && !failed) { + result.put(level, result.get(level) + 1); + } + for (FetchedPage child : children) { + child.addPerLevel(result); + } + } int getTreeSizeSucceeded() { int size = succeeded ? 
1 : 0; @@ -312,6 +330,47 @@ public void restart() { } } } + + + class StatisticsAccumulator { + private int count = 0; + private int sum = 0; + + void addSample(int found) { + count++; + sum += found; + } + + double getMean() { + return 1.0 * sum / count; + } + } + + private Map statistics = new HashMap(); + private void addFoundChildren(int level, int foundChildren) { + if (!statistics.containsKey(level)) { + statistics.put(level, new StatisticsAccumulator()); + } + statistics.get(level).addSample(foundChildren); + } + + private double getEstimatedPagesLeft(FetchedPage page) { + double estimate = 0.0; + double extra = 0.0; + Map pagesPerLevel = new HashMap(); + page.addPerLevel(pagesPerLevel); + for (int level = 1; pagesPerLevel.containsKey(level); level++) { + if (!statistics.containsKey(level)) { + return Double.POSITIVE_INFINITY; + } + extra += pagesPerLevel.get(level); + estimate += extra; + extra = extra * statistics.get(level).getMean(); + } + return estimate; + } + + private class GetAdapter extends FcpAdapter { private ClientGet getter; @@ -363,18 +422,19 @@ void printLeft() { showProgress(); } - private void processUri(String uri) { + private boolean processUri(String uri) { synchronized (roots) { for (FetchedPage root : roots) { FetchedPage foundChild = root.findUri(uri); if (foundChild != null) { page.addChild(foundChild); foundChild.addParent(page); - return; + return false; } } } objectQueue.offer(page.newChild(uri)); + return true; } @Override @@ -387,6 +447,7 @@ public void receivedAllData(FcpConnection c, AllData ad) { System.out.println("receivedAllData for " + token + " adding to the " + objectQueue.size() + " elements in the queue."); page.didSucceed(); + int foundChildren = 0; try { try { Map map = @@ -401,7 +462,10 @@ public void receivedAllData(FcpConnection c, AllData ad) { for (BinInfo value : entries.values()) { try { String u = (String) value.getID(); - processUri(u); + if (processUri(u)) { + foundChildren ++; + } + } catch 
(ClassCastException e) { System.out.println("Cannot process " + value.getID()); } @@ -409,7 +473,9 @@ public void receivedAllData(FcpConnection c, AllData ad) { Map subnodes = (Map) map2.get("subnodes"); for (String key : subnodes.keySet()) { - processUri(key); + if (processUri(key)) { + foundChildren ++; + } } return; } @@ -423,7 +489,9 @@ public void receivedAllData(FcpConnection c, AllData ad) { for (BinInfo value : entries.values()) { try { String u = (String) value.getID(); - processUri(u); + if (processUri(u)) { + foundChildren ++; + } } catch (ClassCastException e) { System.out.println("Cannot process " + value.getID()); } @@ -446,6 +514,7 @@ public void receivedAllData(FcpConnection c, AllData ad) { System.exit(1); } } finally { + addFoundChildren(page.level, foundChildren); markDone(); System.out.println("receivedAllData for " + token + " done."); successful ++; @@ -455,7 +524,7 @@ public void receivedAllData(FcpConnection c, AllData ad) { } } - @Override + @Override public void receivedGetFailed(FcpConnection c, GetFailed gf) { assert c == connection; assert gf != null; @@ -716,22 +785,23 @@ private void showProgress() { } String edition = root.getURI().substring(uri.length()); sb.append(edition); - int treeSize = root.getTreeSize(); int succeeded = root.getTreeSizeSucceeded(); int failed = root.getTreeSizeFailed(); if (failed > 0) { sb.append(new Formatter().format(" FAILED: %.2f%%.", 100.0 * failed / (failed + succeeded))); } - sb.append(new Formatter().format(" Fetched: %.2f%%.", 100.0 * (failed + succeeded) / treeSize)); + double estimate = getEstimatedPagesLeft(root); + if (estimate < Double.POSITIVE_INFINITY) { + sb.append(new Formatter().format(" Fetched: %.2f%%.", + 100.0 * (failed + succeeded) / (estimate + failed + succeeded))); + } sb.append(" ("); sb.append(succeeded); - sb.append("/"); - sb.append(treeSize); if (failed > 0) { - sb.append(" (and "); + sb.append(" and "); sb.append(failed); - sb.append(" failed)"); + sb.append(" failed"); } 
sb.append(")"); @@ -742,7 +812,7 @@ private void showProgress() { } public static void main(String[] argv) { - new DownloadAll(argv[1]).doit(); + new DownloadAll(argv[0]).doit(); } public void waitForSlot() { From b1d5a919452028c2e80e026892152a708667a562 Mon Sep 17 00:00:00 2001 From: anonymous Date: Wed, 3 Jun 2015 05:45:12 +0000 Subject: [PATCH 069/180] Added function to include the contents of several selected file, if small. --HG-- branch : eclipse-separation --- .../freenet/library/uploader/IndexPeeker.java | 8 + .../src/freenet/library/uploader/Merger.java | 162 +++++++++++++----- 2 files changed, 125 insertions(+), 45 deletions(-) diff --git a/uploader/src/freenet/library/uploader/IndexPeeker.java b/uploader/src/freenet/library/uploader/IndexPeeker.java index 6c0e810a..32811977 100644 --- a/uploader/src/freenet/library/uploader/IndexPeeker.java +++ b/uploader/src/freenet/library/uploader/IndexPeeker.java @@ -102,4 +102,12 @@ boolean include(String subj) { } return false; } + + void roomForOne() { + maxSections = activeSections.size() + 1; + } + + void roomForNone() { + maxSections = activeSections.size(); + } } diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index c2afd976..978e4ac6 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -6,7 +6,8 @@ import java.io.File; import java.io.FileNotFoundException; -import java.io.FilenameFilter;import java.io.DataInputStream; +import java.io.FilenameFilter; +import java.io.DataInputStream; import java.io.FileInputStream; import java.io.IOException; import java.util.ArrayList; @@ -48,8 +49,12 @@ *
                    Fetch the index top to get the top fan-out. *
                      Get the first term in the first and create an index with all * the contents from all the files with all terms from the same index. - * Rewrite all files with the rest of the terms. + *
                        While processing, group into some selected files. The rest goes + * into the filtered files. *
                          Merge that index. + *
                            If there are selected files, get the first term from the first one of them instead. + *
                              If there is room for more in the same go, get the next selected file. + *
                                Rewrite all not filtered files getting the matched terms. *
                                  Done. */ final public class Merger { @@ -59,6 +64,8 @@ final public class Merger { private static final String SELECTED = UploaderPaths.BASE_FILENAME_DATA + "selected."; private static final String FILTERED = UploaderPaths.BASE_FILENAME_DATA + "filtered."; private static final String PROCESSED = UploaderPaths.BASE_FILENAME_DATA + "processed."; + + private static int MIN_MOVED_TERMS = 40000; static final Comparator comparator = new StringNumberComparator(); @@ -203,19 +210,19 @@ public boolean accept(File arg0, String arg1) { private static void createMergeDirectory(File directory) throws TaskAbortException { - String[] selectedFilesToMerge = getMatchingFiles(directory, SELECTED); + final String[] selectedFilesToMerge = getMatchingFiles(directory, SELECTED); System.out.println("There is " + selectedFilesToMerge.length + " selected files."); - String [] filteredFilesToMerge = getMatchingFiles(directory, FILTERED); + final String [] filteredFilesToMerge = getMatchingFiles(directory, FILTERED); System.out.println("There is " + filteredFilesToMerge.length + " filtered files."); - String [] processedFilesToMerge = getMatchingFiles(directory, PROCESSED); + final String [] processedFilesToMerge = getMatchingFiles(directory, PROCESSED); System.out.println("There is " + processedFilesToMerge.length + " processed files."); - String[] newFilesToMerge = getMatchingFiles(directory, UploaderPaths.BASE_FILENAME_PUSH_DATA); + final String[] newFilesToMerge = getMatchingFiles(directory, UploaderPaths.BASE_FILENAME_PUSH_DATA); System.out.println("There is " + newFilesToMerge.length + " new files."); - // Calculate the next number of filtered and processed files. + // Calculate the last number of filtered and processed files. 
int lastFoundNumber = 0; for (String filename : filteredFilesToMerge) { int numberFound = Integer.parseInt(filename.substring(FILTERED.length())); @@ -243,46 +250,106 @@ private static void createMergeDirectory(File directory) throws TaskAbortExcepti Map writers = new HashMap(); - IndexPeeker peeker = new IndexPeeker(directory); + IndexPeeker creatorPeeker = new IndexPeeker(directory); Set toBeRemoved = new HashSet(); - List filesToMerge = new ArrayList(); - String restBase; - boolean createSelectedFiles = false; - if (selectedFilesToMerge.length > 0) { - if (processedFilesToMerge.length > 1 - && processedFilesToMerge.length * selectedFilesToMerge.length > filteredFilesToMerge.length) { - createSelectedFiles = true; - for (int i = 0; i < selectedFilesToMerge.length; i++) { - filesToMerge.add(selectedFilesToMerge[i]); - } - for (int i = 0; i < filteredFilesToMerge.length; i++) { - filesToMerge.add(filteredFilesToMerge[i]); + + class ProcessedFilenames implements Iterator { + String restBase; + boolean createSelectedFiles = false; + boolean processingSelectedFile = false; + int movedTerms = 0; + private boolean doSelected = false; + private boolean doAllSelected = false; + private boolean doFiltered = false; + private boolean doProcessed = false; + private boolean doNew = true; + private int nextSelected = 0; + private int nextFiltered = 0; + private int nextProcessed = 0; + private int nextNew = 0; + + ProcessedFilenames() { + if (selectedFilesToMerge.length > 0) { + if (processedFilesToMerge.length > 1 + && processedFilesToMerge.length * selectedFilesToMerge.length > filteredFilesToMerge.length) { + createSelectedFiles = true; + doAllSelected = true; + doFiltered = true; + restBase = FILTERED; + } else { + doSelected = true; + restBase = PROCESSED; + } + } else { + createSelectedFiles = true; + doFiltered = true; + restBase = FILTERED; } - restBase = FILTERED; - } else { - filesToMerge.add(selectedFilesToMerge[0]); - restBase = PROCESSED; + doProcessed = true; + 
doNew = true; + } + + @Override + public boolean hasNext() { + if (doAllSelected && nextSelected < selectedFilesToMerge.length) { + return true; + } + if (doSelected && + nextSelected < selectedFilesToMerge.length && + movedTerms < MIN_MOVED_TERMS) { + return true; + } + if (doFiltered && nextFiltered < filteredFilesToMerge.length) { + return true; + } + if (doProcessed && nextProcessed < processedFilesToMerge.length) { + return true; + } + if (doNew && nextNew < newFilesToMerge.length) { + return true; + } + return false; } - } else { - createSelectedFiles = true; - for (int i = 0; i < filteredFilesToMerge.length; i++) { - filesToMerge.add(filteredFilesToMerge[i]); - } - restBase = FILTERED; - } - for (int i = 0; i < processedFilesToMerge.length; i++) { - filesToMerge.add(processedFilesToMerge[i]); - } - for (int i = 0; i < newFilesToMerge.length; i++) { - filesToMerge.add(newFilesToMerge[i]); - } + + @Override + public String next() { + processingSelectedFile = false; + if (doAllSelected && nextSelected < selectedFilesToMerge.length) { + return selectedFilesToMerge[nextSelected++]; + } else if (doSelected && + nextSelected < selectedFilesToMerge.length && + movedTerms < MIN_MOVED_TERMS) { + processingSelectedFile = true; + System.out.println("So far " + movedTerms + " terms are moved."); + return selectedFilesToMerge[nextSelected++]; + } else if (doFiltered && nextFiltered < filteredFilesToMerge.length) { + return filteredFilesToMerge[nextFiltered++]; + } else if (doProcessed && nextProcessed < processedFilesToMerge.length) { + return processedFilesToMerge[nextProcessed++]; + } else if (doNew && nextNew < newFilesToMerge.length) { + return newFilesToMerge[nextNew++]; + } else { + throw new IllegalArgumentException("next() called after hasNext() returned false."); + } + } + + @Override + public void remove() { + throw new IllegalArgumentException("Not implemented"); + } + }; + final ProcessedFilenames processedFilenames = new ProcessedFilenames(); 
TermEntryFileWriter notMerged = null; int totalTerms = 0; - int movedTerms = 0; - for (String s : filesToMerge) { + for (String s : new Iterable() { + @Override + public Iterator iterator() { + return processedFilenames; + } + }) { System.out.println("File: " + s); File file = new File(s); FileInputStream fileInputStream; @@ -297,13 +364,18 @@ private static void createMergeDirectory(File directory) throws TaskAbortExcepti while (iterator.hasNext()) { TermEntry tt = iterator.next(); totalTerms ++; - if (peeker.include(tt.subj)) { + if (processedFilenames.processingSelectedFile) { + creatorPeeker.roomForOne(); + } else { + creatorPeeker.roomForNone(); + } + if (creatorPeeker.include(tt.subj)) { creator.putEntry(tt); - movedTerms ++; + processedFilenames.movedTerms ++; continue; } - if (createSelectedFiles) { + if (processedFilenames.createSelectedFiles) { // They are all to be sorted. boolean found = false; for (Map.Entry entry : writers.entrySet()) { @@ -330,7 +402,7 @@ private static void createMergeDirectory(File directory) throws TaskAbortExcepti } if (notMerged == null) { lastFoundNumber ++; - String restFilename = restBase + lastFoundNumber; + String restFilename = processedFilenames.restBase + lastFoundNumber; notMerged = new TermEntryFileWriter(teri.getHeader(), new File(directory, restFilename)); } notMerged.write(tt); @@ -350,9 +422,9 @@ private static void createMergeDirectory(File directory) throws TaskAbortExcepti System.out.println("Removing file " + file); file.delete(); } - double percentage = new Double(movedTerms).doubleValue() / new Double(totalTerms).doubleValue() * 100.0; + double percentage = new Double(processedFilenames.movedTerms).doubleValue() / new Double(totalTerms).doubleValue() * 100.0; System.out.format("Processed %d/%d terms (%.2f%%).%n", - movedTerms, + processedFilenames.movedTerms, totalTerms, percentage); } From 0aa5f83476ef7766212b8654f73fbb6d961d177d Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 6 Jun 2015 16:48:01 +0000 
Subject: [PATCH 070/180] Adjustments in the calculation of how many selected to include. --HG-- branch : eclipse-separation --- .../library/uploader/DirectoryCreator.java | 6 +- .../src/freenet/library/uploader/Merger.java | 618 +++++++++--------- 2 files changed, 315 insertions(+), 309 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DirectoryCreator.java b/uploader/src/freenet/library/uploader/DirectoryCreator.java index 12f1f121..cf11f2b2 100644 --- a/uploader/src/freenet/library/uploader/DirectoryCreator.java +++ b/uploader/src/freenet/library/uploader/DirectoryCreator.java @@ -78,11 +78,13 @@ public void putEntry(TermEntry tt) throws TaskAbortException { leafsrlDisk.setSerialiserFor(tree); } tree.add(tt); - // tree.deflate(); - // assert(tree.isBare()); idxDisk.ttab.put(tt.subj, tree); countTerms++; } + + public int size() { + return idxDisk.ttab.size(); + } public void done() throws TaskAbortException { for (Entry> entry : idxDisk.ttab.entrySet()) { diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 978e4ac6..44d389c8 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -10,22 +10,18 @@ import java.io.DataInputStream; import java.io.FileInputStream; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; -import java.util.List; import java.util.Map; import java.util.Set; -import java.util.TreeMap; import net.pterodactylus.fcp.FcpConnection; import freenet.library.FactoryRegister; import freenet.library.index.TermEntry; -import freenet.library.util.BTreeMap.PairIterable; import freenet.library.util.exec.TaskAbortException; /** @@ -58,91 +54,89 @@ *
                                    Done. */ final public class Merger { - - private static FcpSession session; + + private static FcpSession session; - private static final String SELECTED = UploaderPaths.BASE_FILENAME_DATA + "selected."; - private static final String FILTERED = UploaderPaths.BASE_FILENAME_DATA + "filtered."; - private static final String PROCESSED = UploaderPaths.BASE_FILENAME_DATA + "processed."; - - private static int MIN_MOVED_TERMS = 40000; - - static final Comparator comparator = new StringNumberComparator(); - - static class StringNumberComparator implements Comparator { - @Override - public int compare(String a, String b) { - int ai; - int bi; - for (ai = 0, bi = 0; ai < a.length() && bi < b.length(); ai++, bi++) { - if (a.substring(ai, ai + 1).matches("[0-9]") - && a.substring(bi, bi + 1).matches("[0-9]")) { - int aii; - for (aii = ai + 1; aii < a.length(); aii++) { - if (!a.substring(aii, aii + 1).matches("[0-9]")) { - break; - } - } - int bii; - for (bii = bi + 1; bii < b.length(); bii++) { - if (!b.substring(bii, bii + 1).matches("[0-9]")) { - break; - } - } - try { - int ret = Integer.valueOf(a.substring(ai, aii)).compareTo( - Integer.valueOf(b.substring(bi, bii))); - if (ret != 0) { - return ret; - } - - ai = aii - 1; - bi = bii - 1; - continue; - } catch (NumberFormatException e) { - continue; - } - } - int ret = a.charAt(ai) - b.charAt(bi); - if (ret != 0) { - return ret; - } - } - if (ai < a.length()) { - return 1; - } - if (bi < b.length()) { - return -1; - } - return 0; - } - } + private static final String SELECTED = UploaderPaths.BASE_FILENAME_DATA + "selected."; + private static final String FILTERED = UploaderPaths.BASE_FILENAME_DATA + "filtered."; + private static final String PROCESSED = UploaderPaths.BASE_FILENAME_DATA + "processed."; + + static final Comparator comparator = new StringNumberComparator(); + + static class StringNumberComparator implements Comparator { + @Override + public int compare(String a, String b) { 
+ int ai; + int bi; + for (ai = 0, bi = 0; ai < a.length() && bi < b.length(); ai++, bi++) { + if (a.substring(ai, ai + 1).matches("[0-9]") + && a.substring(bi, bi + 1).matches("[0-9]")) { + int aii; + for (aii = ai + 1; aii < a.length(); aii++) { + if (!a.substring(aii, aii + 1).matches("[0-9]")) { + break; + } + } + int bii; + for (bii = bi + 1; bii < b.length(); bii++) { + if (!b.substring(bii, bii + 1).matches("[0-9]")) { + break; + } + } + try { + int ret = Integer.valueOf(a.substring(ai, aii)).compareTo( + Integer.valueOf(b.substring(bi, bii))); + if (ret != 0) { + return ret; + } + + ai = aii - 1; + bi = bii - 1; + continue; + } catch (NumberFormatException e) { + continue; + } + } + int ret = a.charAt(ai) - b.charAt(bi); + if (ret != 0) { + return ret; + } + } + if (ai < a.length()) { + return 1; + } + if (bi < b.length()) { + return -1; + } + return 0; + } + } - /** - * Return an array with the filenames in order. - */ - static String[] getMatchingFiles(File directory, - final String baseFilename) { - String[] array = directory.list(new FilenameFilter() { - - public boolean accept(File arg0, String arg1) { - if (!(arg1.toLowerCase().startsWith(baseFilename))) { - return false; - } - File f = new File(arg0, arg1); - if (!f.isFile()) { - return false; - } - if (f.length() == 0) { - f.delete(); - return false; - } - return true; - } - }); - Arrays.sort(array, comparator); - return array; - } + /** + * Return an array with the filenames in order. 
+ */ + static String[] getMatchingFiles(File directory, + final String baseFilename) { + String[] array = directory.list(new FilenameFilter() { + + public boolean accept(File arg0, String arg1) { + if (!(arg1.toLowerCase().startsWith(baseFilename))) { + return false; + } + File f = new File(arg0, arg1); + if (!f.isFile()) { + return false; + } + if (f.length() == 0) { + f.delete(); + return false; + } + return true; + } + }); + Arrays.sort(array, comparator); + return array; + } public static void main(String[] argv) { @@ -158,59 +152,59 @@ public static void main(String[] argv) { FcpConnection connection = null; try { - String[] dirsToMerge = null; - File directory = new File("."); - for (String arg : argv) { - if (new File(directory, arg).isDirectory()) { - dirsToMerge = new String[1]; - dirsToMerge[0] = arg; - } else { - System.out.println("No such directory " + arg); - } - break; - } - if (dirsToMerge == null) { - dirsToMerge = directory.list(new FilenameFilter() { - - public boolean accept(File arg0, String arg1) { - if(!(arg1.toLowerCase().startsWith(UploaderPaths.DISK_DIR_PREFIX))) return false; - return true; - } - - }); - } + String[] dirsToMerge = null; + File directory = new File("."); + for (String arg : argv) { + if (new File(directory, arg).isDirectory()) { + dirsToMerge = new String[1]; + dirsToMerge[0] = arg; + } else { + System.out.println("No such directory " + arg); + } + break; + } + if (dirsToMerge == null) { + dirsToMerge = directory.list(new FilenameFilter() { + + public boolean accept(File arg0, String arg1) { + if(!(arg1.toLowerCase().startsWith(UploaderPaths.DISK_DIR_PREFIX))) return false; + return true; + } + + }); + } - if (dirsToMerge.length > 0) { - System.out.println("Merging directory " + dirsToMerge[0]); - session = new FcpSession(); - connection = session.getConnection(); - UploaderLibrary.init(connection); - FactoryRegister.register(UploaderLibrary.getInstance()); - - File directoryToMerge = new File(directory, dirsToMerge[0]); - 
new DirectoryUploader(connection, directoryToMerge).run(); - System.out.println("Upload completed."); - return; + if (dirsToMerge.length > 0) { + System.out.println("Merging directory " + dirsToMerge[0]); + session = new FcpSession(); + connection = session.getConnection(); + UploaderLibrary.init(connection); + FactoryRegister.register(UploaderLibrary.getInstance()); + + File directoryToMerge = new File(directory, dirsToMerge[0]); + new DirectoryUploader(connection, directoryToMerge).run(); + System.out.println("Upload completed."); + return; } - createMergeDirectory(directory); + createMergeDirectory(directory); } catch (TaskAbortException | IllegalStateException | IOException e) { - e.printStackTrace(); - exitStatus = 1; - } finally { - if (session != null) { - session.close(); - if (exitStatus == 0) { - exitStatus = session.getStatus(); - } - } + e.printStackTrace(); + exitStatus = 1; + } finally { + if (session != null) { + session.close(); + if (exitStatus == 0) { + exitStatus = session.getStatus(); + } + } } System.exit(exitStatus); } - private static void createMergeDirectory(File directory) throws TaskAbortException { - final String[] selectedFilesToMerge = getMatchingFiles(directory, SELECTED); + private static void createMergeDirectory(File directory) throws TaskAbortException { + final String[] selectedFilesToMerge = getMatchingFiles(directory, SELECTED); System.out.println("There is " + selectedFilesToMerge.length + " selected files."); final String [] filteredFilesToMerge = getMatchingFiles(directory, FILTERED); @@ -224,208 +218,218 @@ private static void createMergeDirectory(File directory) throws TaskAbortExcepti // Calculate the last number of filtered and processed files. 
int lastFoundNumber = 0; - for (String filename : filteredFilesToMerge) { - int numberFound = Integer.parseInt(filename.substring(FILTERED.length())); - if (numberFound > lastFoundNumber) { - lastFoundNumber = numberFound; - } + for (String filename : filteredFilesToMerge) { + int numberFound = Integer.parseInt(filename.substring(FILTERED.length())); + if (numberFound > lastFoundNumber) { + lastFoundNumber = numberFound; + } } - for (String filename : processedFilesToMerge) { - int numberFound = Integer.parseInt(filename.substring(PROCESSED.length())); - if (numberFound > lastFoundNumber) { - lastFoundNumber = numberFound; - } + for (String filename : processedFilesToMerge) { + int numberFound = Integer.parseInt(filename.substring(PROCESSED.length())); + if (numberFound > lastFoundNumber) { + lastFoundNumber = numberFound; + } } System.out.println("Last found: " + lastFoundNumber); int lastSelected = 0; - for (String filename : selectedFilesToMerge) { - int numberFound = Integer.parseInt(filename.substring(SELECTED.length())); - if (numberFound > lastSelected) { - lastSelected = numberFound; - } + for (String filename : selectedFilesToMerge) { + int numberFound = Integer.parseInt(filename.substring(SELECTED.length())); + if (numberFound > lastSelected) { + lastSelected = numberFound; + } } - DirectoryCreator creator = new DirectoryCreator(directory); + final DirectoryCreator creator = new DirectoryCreator(directory); Map writers = - new HashMap(); + new HashMap(); IndexPeeker creatorPeeker = new IndexPeeker(directory); - Set toBeRemoved = new HashSet(); - - class ProcessedFilenames implements Iterator { - String restBase; - boolean createSelectedFiles = false; - boolean processingSelectedFile = false; - int movedTerms = 0; - private boolean doSelected = false; - private boolean doAllSelected = false; - private boolean doFiltered = false; - private boolean doProcessed = false; - private boolean doNew = true; - private int nextSelected = 0; - private int nextFiltered 
= 0; - private int nextProcessed = 0; - private int nextNew = 0; + Set toBeRemoved = new HashSet(); + + class ProcessedFilenames implements Iterator { + String restBase; + boolean createSelectedFiles = false; + boolean processingSelectedFile = false; + int movedTerms = 0; + private boolean doSelected = false; + private boolean doAllSelected = false; + private boolean doFiltered = false; + private boolean doProcessed = false; + private boolean doNew = true; + private int nextSelected = 0; + private int nextFiltered = 0; + private int nextProcessed = 0; + private int nextNew = 0; + + ProcessedFilenames() { + if (selectedFilesToMerge.length > 0) { + if (processedFilesToMerge.length > 1 + && processedFilesToMerge.length * selectedFilesToMerge.length > filteredFilesToMerge.length) { + createSelectedFiles = true; + doAllSelected = true; + doFiltered = true; + restBase = FILTERED; + } else { + doSelected = true; + restBase = PROCESSED; + } + } else { + createSelectedFiles = true; + doFiltered = true; + restBase = FILTERED; + } + doProcessed = true; + doNew = true; + } + + private boolean addAnotherSelectedFile() { + return nextSelected < 20 && + creator.size() < 10000 && + movedTerms < 200000 && + nextSelected * 8 + movedTerms * 0.001 + creator.size() * 0.0001 - 6 < 100; + } - ProcessedFilenames() { - if (selectedFilesToMerge.length > 0) { - if (processedFilesToMerge.length > 1 - && processedFilesToMerge.length * selectedFilesToMerge.length > filteredFilesToMerge.length) { - createSelectedFiles = true; - doAllSelected = true; - doFiltered = true; - restBase = FILTERED; - } else { - doSelected = true; - restBase = PROCESSED; - } - } else { - createSelectedFiles = true; - doFiltered = true; - restBase = FILTERED; - } - doProcessed = true; - doNew = true; - } - - @Override - public boolean hasNext() { - if (doAllSelected && nextSelected < selectedFilesToMerge.length) { - return true; - } - if (doSelected && - nextSelected < selectedFilesToMerge.length && - movedTerms < 
MIN_MOVED_TERMS) { - return true; - } - if (doFiltered && nextFiltered < filteredFilesToMerge.length) { - return true; - } - if (doProcessed && nextProcessed < processedFilesToMerge.length) { - return true; - } - if (doNew && nextNew < newFilesToMerge.length) { - return true; - } - return false; - } + @Override + public boolean hasNext() { + if (doAllSelected && nextSelected < selectedFilesToMerge.length) { + return true; + } + if (doSelected && + nextSelected < selectedFilesToMerge.length && + addAnotherSelectedFile()) { + return true; + } + if (doFiltered && nextFiltered < filteredFilesToMerge.length) { + return true; + } + if (doProcessed && nextProcessed < processedFilesToMerge.length) { + return true; + } + if (doNew && nextNew < newFilesToMerge.length) { + return true; + } + return false; + } - @Override - public String next() { - processingSelectedFile = false; - if (doAllSelected && nextSelected < selectedFilesToMerge.length) { - return selectedFilesToMerge[nextSelected++]; - } else if (doSelected && - nextSelected < selectedFilesToMerge.length && - movedTerms < MIN_MOVED_TERMS) { - processingSelectedFile = true; - System.out.println("So far " + movedTerms + " terms are moved."); - return selectedFilesToMerge[nextSelected++]; - } else if (doFiltered && nextFiltered < filteredFilesToMerge.length) { - return filteredFilesToMerge[nextFiltered++]; - } else if (doProcessed && nextProcessed < processedFilesToMerge.length) { - return processedFilesToMerge[nextProcessed++]; - } else if (doNew && nextNew < newFilesToMerge.length) { - return newFilesToMerge[nextNew++]; - } else { - throw new IllegalArgumentException("next() called after hasNext() returned false."); - } - } + @Override + public String next() { + processingSelectedFile = false; + if (doSelected && + nextSelected < selectedFilesToMerge.length && + addAnotherSelectedFile()) { + processingSelectedFile = true; + return selectedFilesToMerge[nextSelected++]; + } else if (doAllSelected && nextSelected < 
selectedFilesToMerge.length) { + return selectedFilesToMerge[nextSelected++]; + } else if (doFiltered && nextFiltered < filteredFilesToMerge.length) { + return filteredFilesToMerge[nextFiltered++]; + } else if (doProcessed && nextProcessed < processedFilesToMerge.length) { + return processedFilesToMerge[nextProcessed++]; + } else if (doNew && nextNew < newFilesToMerge.length) { + return newFilesToMerge[nextNew++]; + } else { + throw new IllegalArgumentException("next() called after hasNext() returned false."); + } + } - @Override - public void remove() { - throw new IllegalArgumentException("Not implemented"); - } - }; - final ProcessedFilenames processedFilenames = new ProcessedFilenames(); + @Override + public void remove() { + throw new IllegalArgumentException("Not implemented"); + } + }; + final ProcessedFilenames processedFilenames = new ProcessedFilenames(); TermEntryFileWriter notMerged = null; int totalTerms = 0; for (String s : new Iterable() { - @Override - public Iterator iterator() { - return processedFilenames; - } + @Override + public Iterator iterator() { + return processedFilenames; + } }) { System.out.println("File: " + s); - File file = new File(s); - FileInputStream fileInputStream; - try { - fileInputStream = new FileInputStream(file); - } catch (FileNotFoundException e) { - e.printStackTrace(); - return; - } - TermEntryReaderIterator teri = new TermEntryReaderIterator(new DataInputStream(fileInputStream)); - Iterator iterator = teri.iterator(); - while (iterator.hasNext()) { - TermEntry tt = iterator.next(); - totalTerms ++; - if (processedFilenames.processingSelectedFile) { - creatorPeeker.roomForOne(); - } else { - creatorPeeker.roomForNone(); - } - if (creatorPeeker.include(tt.subj)) { - creator.putEntry(tt); - processedFilenames.movedTerms ++; - continue; - } - - if (processedFilenames.createSelectedFiles) { - // They are all to be sorted. 
- boolean found = false; - for (Map.Entry entry : writers.entrySet()) { - if (entry.getKey().include(tt.subj)) { - entry.getValue().write(tt); - found = true; - break; - } - } - if (found) { - continue; - } else if (writers.size() < 10 * (filteredFilesToMerge.length + processedFilesToMerge.length)) { - lastSelected ++; - String selectedFilename = SELECTED + lastSelected; - IndexPeeker p = new IndexPeeker(directory); - TermEntryFileWriter t = new TermEntryFileWriter(teri.getHeader(), - new File(directory, selectedFilename)); - if (p.include(tt.subj)) { - writers.put(p, t); - t.write(tt); - } - continue; - } - } - if (notMerged == null) { - lastFoundNumber ++; - String restFilename = processedFilenames.restBase + lastFoundNumber; - notMerged = new TermEntryFileWriter(teri.getHeader(), new File(directory, restFilename)); - } - notMerged.write(tt); - if (notMerged.isFull()) { - notMerged.close(); - notMerged = null; - } - } - toBeRemoved.add(file); + File file = new File(s); + FileInputStream fileInputStream; + try { + fileInputStream = new FileInputStream(file); + } catch (FileNotFoundException e) { + e.printStackTrace(); + return; + } + TermEntryReaderIterator teri = new TermEntryReaderIterator(new DataInputStream(fileInputStream)); + Iterator iterator = teri.iterator(); + while (iterator.hasNext()) { + TermEntry tt = iterator.next(); + totalTerms ++; + if (processedFilenames.processingSelectedFile) { + creatorPeeker.roomForOne(); + } else { + creatorPeeker.roomForNone(); + } + if (creatorPeeker.include(tt.subj)) { + creator.putEntry(tt); + processedFilenames.movedTerms ++; + continue; + } + + if (processedFilenames.createSelectedFiles) { + // They are all to be sorted. 
+ boolean found = false; + for (Map.Entry entry : writers.entrySet()) { + if (entry.getKey().include(tt.subj)) { + entry.getValue().write(tt); + found = true; + break; + } + } + if (found) { + continue; + } else if (writers.size() < 10 * (filteredFilesToMerge.length + processedFilesToMerge.length)) { + lastSelected ++; + String selectedFilename = SELECTED + lastSelected; + IndexPeeker p = new IndexPeeker(directory); + TermEntryFileWriter t = new TermEntryFileWriter(teri.getHeader(), + new File(directory, selectedFilename)); + if (p.include(tt.subj)) { + writers.put(p, t); + t.write(tt); + } + continue; + } + } + if (notMerged == null) { + lastFoundNumber ++; + String restFilename = processedFilenames.restBase + lastFoundNumber; + notMerged = new TermEntryFileWriter(teri.getHeader(), new File(directory, restFilename)); + } + notMerged.write(tt); + if (notMerged.isFull()) { + notMerged.close(); + notMerged = null; + } + } + if (processedFilenames.processingSelectedFile) { + System.out.println("Items: " + processedFilenames.movedTerms + + " Entries: " + creator.size()); + } + toBeRemoved.add(file); } if (notMerged != null) { - notMerged.close(); - notMerged = null; + notMerged.close(); + notMerged = null; } - creator.done(); + creator.done(); for (File file : toBeRemoved) { - System.out.println("Removing file " + file); - file.delete(); + System.out.println("Removing file " + file); + file.delete(); } double percentage = new Double(processedFilenames.movedTerms).doubleValue() / new Double(totalTerms).doubleValue() * 100.0; System.out.format("Processed %d/%d terms (%.2f%%).%n", - processedFilenames.movedTerms, - totalTerms, - percentage); - } + processedFilenames.movedTerms, + totalTerms, + percentage); + } } From c9441df72f5a221a611180f9c4e8b53a52f16f5e Mon Sep 17 00:00:00 2001 From: anonymous Date: Sat, 6 Jun 2015 16:49:32 +0000 Subject: [PATCH 071/180] Calculate how many uploads that are resurrected. 
--HG-- branch : eclipse-separation --- .../src/freenet/library/uploader/DownloadAll.java | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 33c788d8..ea5eca85 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -64,6 +64,7 @@ public class DownloadAll { private int successfulBlocks = 0; private long successfulBytes = 0; private int failed = 0; + private int recreated = 0; private int avoidFetching = 0; private Random rand = new Random(); @@ -223,13 +224,6 @@ String getURI() { return uri; } - String getPath() { - for (FetchedPage parent : parents) { - return parent.getPath() + "->" + uri; - } - return uri; - } - boolean hasParent() { return !parents.isEmpty(); } @@ -543,6 +537,7 @@ public void receivedGetFailed(FcpConnection c, GetFailed gf) { upload(page.getURI(), new Runnable() { public void run() { objectQueue.offer(page); + recreated ++; } }); } @@ -775,6 +770,7 @@ private void showProgress() { " blocks: " + successfulBlocks + " bytes: " + successfulBytes + " Failed: " + failed + + " Recreated: " + recreated + " Avoided: " + avoidFetching + "."); StringBuilder sb = new StringBuilder(); @@ -793,7 +789,7 @@ private void showProgress() { double estimate = getEstimatedPagesLeft(root); if (estimate < Double.POSITIVE_INFINITY) { sb.append(new Formatter().format(" Fetched: %.2f%%.", - 100.0 * (failed + succeeded) / (estimate + failed + succeeded))); + 100.0 * (failed + succeeded) / (estimate + succeeded))); } sb.append(" ("); sb.append(succeeded); From d349b19412aad505fb334de48b38f4ec36081e8c Mon Sep 17 00:00:00 2001 From: anonymous Date: Mon, 8 Jun 2015 05:23:59 +0000 Subject: [PATCH 072/180] The new known good getter changed name in FCPlib. Updated the log messages to make it cleaner. 
--HG-- branch : eclipse-separation --- .../src/freenet/library/uploader/DownloadAll.java | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index ea5eca85..2303f53c 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -299,7 +299,8 @@ public USKUpdateAdapter(Object s) { @Override public void receivedSubscribedUSKUpdate(FcpConnection fcpConnection, SubscribedUSKUpdate subscribedUSKUpdate) { assert fcpConnection == connection; - if (subscribedUSKUpdate.getNewKnownGood()) { + if (subscribedUSKUpdate.isNewKnownGood() && + subscribedUSKUpdate.getEdition() > edition) { updated = true; newUri = subscribedUSKUpdate.getURI(); edition = subscribedUSKUpdate.getEdition(); @@ -413,7 +414,6 @@ void printLeft() { System.out.println("Outstanding " + stillRunning.size() + " ClientGet jobs " + "(" + completed + "/" + required + "/" + total + ") "); } - showProgress(); } private boolean processUri(String uri) { @@ -510,7 +510,6 @@ public void receivedAllData(FcpConnection c, AllData ad) { } finally { addFoundChildren(page.level, foundChildren); markDone(); - System.out.println("receivedAllData for " + token + " done."); successful ++; successfulBlocks += progressCompleted; successfulBytes += ad.getDataLength(); @@ -528,7 +527,7 @@ public void receivedGetFailed(FcpConnection c, GetFailed gf) { synchronized (getter) { getter.notify(); } - System.out.println("receivedGetFailed for " + token + " (" + page + ")."); + System.out.println("receivedGetFailed for " + token + " (" + page.getURI() + ")."); // System.exit(1); page.didFail(); markDone(); @@ -593,6 +592,7 @@ public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) } uploadStarter.execute(new Runnable() { public void run() { + System.out.println("Ressurrecting " + filename); uploadCounter++; 
final String identifier = "Upload" + uploadCounter; ongoingUploads.put(identifier, new AbstractMap.SimpleImmutableEntry(filename, callback)); @@ -766,11 +766,15 @@ public void doit() { private void showProgress() { + String recreatedMessage = ""; + if (recreated > 0) { + recreatedMessage = " Recreated: " + recreated; + } System.out.println("Fetches: Successful: " + successful + " blocks: " + successfulBlocks + " bytes: " + successfulBytes + " Failed: " + failed + - " Recreated: " + recreated + + recreatedMessage + " Avoided: " + avoidFetching + "."); StringBuilder sb = new StringBuilder(); From e7c183e27338c5b95c77658811981f70aa41df9e Mon Sep 17 00:00:00 2001 From: anonymous Date: Mon, 8 Jun 2015 05:54:38 +0000 Subject: [PATCH 073/180] Attempted to tune to a better approximation of the size of an upload. --HG-- branch : eclipse-separation --- uploader/src/freenet/library/uploader/Merger.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 44d389c8..2dbd8c0d 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -288,7 +288,7 @@ private boolean addAnotherSelectedFile() { return nextSelected < 20 && creator.size() < 10000 && movedTerms < 200000 && - nextSelected * 8 + movedTerms * 0.001 + creator.size() * 0.0001 - 6 < 100; + nextSelected * 2.687 + movedTerms * 0.001097 + creator.size() * 0.0 - 1.6463 < 100; } @Override From 1efc22ac72217a9c1c6b5ceb13d7a204fda730f9 Mon Sep 17 00:00:00 2001 From: anonymous Date: Mon, 8 Jun 2015 05:56:51 +0000 Subject: [PATCH 074/180] Changed the eclipse project names and location of the fcp lib. 
--HG-- branch : eclipse-separation --- .classpath | 2 +- build.xml | 3 +-- shared/.classpath | 2 +- uploader/.classpath | 4 ++-- 4 files changed, 5 insertions(+), 6 deletions(-) diff --git a/.classpath b/.classpath index aa1f13c2..24982e93 100644 --- a/.classpath +++ b/.classpath @@ -5,6 +5,6 @@ - + diff --git a/build.xml b/build.xml index 3435160f..193c3a99 100644 --- a/build.xml +++ b/build.xml @@ -4,7 +4,7 @@ - + @@ -264,7 +264,6 @@ - diff --git a/shared/.classpath b/shared/.classpath index 056e19e4..76c9471b 100644 --- a/shared/.classpath +++ b/shared/.classpath @@ -3,8 +3,8 @@ - + diff --git a/uploader/.classpath b/uploader/.classpath index 4198d1ae..7531b275 100644 --- a/uploader/.classpath +++ b/uploader/.classpath @@ -3,8 +3,8 @@ - - + + From 7988d48318cf8363ccf3fa2800c21305b29f5a12 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Thu, 11 Jun 2015 23:44:17 +0200 Subject: [PATCH 075/180] Updated the Eclipse project name. Ignore more eclipse directories. --- .project | 2 +- shared/.gitignore | 1 + uploader/.gitignore | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 shared/.gitignore create mode 100644 uploader/.gitignore diff --git a/.project b/.project index b40f21ce..1c45eab8 100644 --- a/.project +++ b/.project @@ -1,6 +1,6 @@ - Library + library-plugin diff --git a/shared/.gitignore b/shared/.gitignore new file mode 100644 index 00000000..5e56e040 --- /dev/null +++ b/shared/.gitignore @@ -0,0 +1 @@ +/bin diff --git a/uploader/.gitignore b/uploader/.gitignore new file mode 100644 index 00000000..5e56e040 --- /dev/null +++ b/uploader/.gitignore @@ -0,0 +1 @@ +/bin From 30f9bef970c6dda3c298559d67047b60a95d5c97 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Fri, 12 Jun 2015 01:08:34 +0200 Subject: [PATCH 076/180] Slow down the build up of the queue of uploads. 
--- .../freenet/library/uploader/FcpArchiver.java | 34 +++++-------------- 1 file changed, 8 insertions(+), 26 deletions(-) diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index de74080a..3ddae28c 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -14,11 +14,9 @@ import net.pterodactylus.fcp.ClientPut; import net.pterodactylus.fcp.FcpAdapter; import net.pterodactylus.fcp.FcpConnection; -import net.pterodactylus.fcp.FinishedCompression; import net.pterodactylus.fcp.PutFailed; import net.pterodactylus.fcp.PutFetchable; import net.pterodactylus.fcp.PutSuccessful; -import net.pterodactylus.fcp.StartedCompression; import net.pterodactylus.fcp.URIGenerated; import net.pterodactylus.fcp.Verbosity; import freenet.copied.Base64; @@ -37,8 +35,7 @@ public class FcpArchiver private File cacheDir; private ObjectStreamReader reader; private ObjectStreamWriter writer; - private String mimeType; - private int size; + private int totalBlocksStillUploading = 0; private Priority priorityLevel; /** @@ -59,8 +56,6 @@ public FcpArchiver(FcpConnection fcpConnection, cacheDir = directory; reader = rw; writer = rw; - mimeType = mime; - size = s; priorityLevel = pl; } @@ -162,6 +157,7 @@ void printLeft() { total += entry.getValue().progressTotal; completed += entry.getValue().progressCompleted; } + totalBlocksStillUploading = total - completed; System.out.println("Outstanding " + stillRunning.size() + " jobs " + "(" + completed + "/" + total + ")"); } @@ -221,26 +217,6 @@ public void receivedSimpleProgress(FcpConnection c, sp.getSucceeded() + "/" + sp.getTotal()); printLeft(); } - - @Override - public void receivedStartedCompression(FcpConnection c, - StartedCompression startedCompression) { - assert c == connection; - assert startedCompression != null; - if (!identifier.equals(startedCompression.getIdentifier())) - return; - 
System.out.println("receivedStartedCompression for " + token); - } - - @Override - public void receviedFinishedCompression(FcpConnection c, - FinishedCompression finishedCompression) { - assert c == connection; - assert finishedCompression != null; - if (!identifier.equals(finishedCompression.getIdentifier())) - return; - System.out.println("receivedFinishedCompression for " + token); - } public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { assert c == connection; @@ -290,6 +266,12 @@ String getURI() { @Override public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, SimpleProgress progress) throws TaskAbortException { + // Slow down the build up of the queue. + try { + Thread.sleep(1 + totalBlocksStillUploading * totalBlocksStillUploading); + } catch (InterruptedException e1) { + throw new RuntimeException("Unexpected interrupt"); + } if (connection == null) { throw new IllegalArgumentException("No connection."); } From e5904b290250dd8c44e27043fdeebcfbc4ee729c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Fri, 12 Jun 2015 08:16:53 +0200 Subject: [PATCH 077/180] Tuned the size of the uploads down a bit. 
--- uploader/src/freenet/library/uploader/Merger.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 2dbd8c0d..039f5fa6 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -288,7 +288,7 @@ private boolean addAnotherSelectedFile() { return nextSelected < 20 && creator.size() < 10000 && movedTerms < 200000 && - nextSelected * 2.687 + movedTerms * 0.001097 + creator.size() * 0.0 - 1.6463 < 100; + nextSelected * 2.687 + movedTerms * 0.001097 + creator.size() * 0.0 - 1.6463 < 90; } @Override From 7f1c88e782da6d62c49f3e2b27a7544b0f744d7c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Wed, 8 Jul 2015 12:26:28 +0200 Subject: [PATCH 078/180] Reduce ant warnings. --- build.xml | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/build.xml b/build.xml index 193c3a99..dbc7fb16 100644 --- a/build.xml +++ b/build.xml @@ -122,7 +122,8 @@ - + @@ -130,11 +131,13 @@ - + - + @@ -148,7 +151,8 @@ - + @@ -168,7 +172,8 @@ - + From 0bda01ebac0fe53804d12c95dd8397e0fdf80321 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Thu, 9 Jul 2015 12:51:04 +0200 Subject: [PATCH 079/180] Moved the index selection to a separate web page. 
--- src/plugins/Library/Library.java | 6 + src/plugins/Library/ui/ConfigPageToadlet.java | 319 ++++++++++++++++++ src/plugins/Library/ui/MainPage.java | 33 +- src/plugins/Library/ui/WebInterface.java | 9 + 4 files changed, 345 insertions(+), 22 deletions(-) create mode 100644 src/plugins/Library/ui/ConfigPageToadlet.java diff --git a/src/plugins/Library/Library.java b/src/plugins/Library/Library.java index 6cd79439..f318c4ab 100644 --- a/src/plugins/Library/Library.java +++ b/src/plugins/Library/Library.java @@ -13,6 +13,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -74,6 +75,8 @@ final public class Library implements URLUpdateHook, ArchiverFactory { private static int version = 36; public static final String plugName = "Library " + getVersion(); + + public static String getPlugName() { return plugName; } @@ -255,6 +258,9 @@ public synchronized void saveState(){ private Map bookmarkCallbacks = new HashMap(); + /** Set of all the enabled indices */ + public Set selectedIndices = new HashSet(); + /** ** Get the index type giving a {@code FreenetURI}. This must not contain ** a metastring (end with "/") or be a USK. diff --git a/src/plugins/Library/ui/ConfigPageToadlet.java b/src/plugins/Library/ui/ConfigPageToadlet.java new file mode 100644 index 00000000..745af0ef --- /dev/null +++ b/src/plugins/Library/ui/ConfigPageToadlet.java @@ -0,0 +1,319 @@ +/* This code is part of Freenet. It is distributed under the GNU General + * Public License, version 2 (or at your option any later version). See + * http://www.gnu.org/ for further details of the GPL. 
*/ +package plugins.Library.ui; + +import plugins.Library.Library; + +import freenet.client.HighLevelSimpleClient; +import freenet.clients.http.PageNode; +import freenet.clients.http.RedirectException; +import freenet.clients.http.Toadlet; +import freenet.clients.http.ToadletContext; +import freenet.clients.http.ToadletContextClosedException; +import freenet.keys.FreenetURI; +import freenet.node.NodeClientCore; +import freenet.pluginmanager.PluginRespirator; +import freenet.support.HTMLNode; +import freenet.support.MultiValueTable; +import freenet.support.api.HTTPRequest; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URI; + +/** + * Encapsulates the ConfigPage in a Toadlet + * @author Debora Wöpcke + */ +public class ConfigPageToadlet extends Toadlet { + static final String PATH = "/config/library/"; + private NodeClientCore core; + private final Library library; + private final PluginRespirator pr; + + public ConfigPageToadlet(HighLevelSimpleClient client, + Library library, + NodeClientCore core, + PluginRespirator pr) { + super(client); + this.core = core; + this.library = library; + this.pr = pr; + } + + @Override + public String path() { + return PATH; + } + + public String menu() { + return "FProxyToadlet.categoryBrowsing"; + } + + /** post commands */ + private static enum Commands { + /** saves selected bookmarks */ + select, + + /** adds a new index to the bookmarks in Library */ + addbookmark, + + /** deletes a bookmark from the Library, requires an integer parameter between 0 and the number of bookmarks */ + removebookmark + } + + + /** + * Class containing errors to be shown in the page. + */ + private class ConfigPageError { + String message; + + // Key error + boolean keyError; + String key; + String uri; + + /** + * Constructor for key error. 
+ */ + ConfigPageError(String m, String k, String u) { + keyError = true; + message = m; + key = k; + uri = u; + } + } + + /** + * @param ctx + */ + private void configForm(final ToadletContext ctx, + final ConfigPageError pageError) { + PageNode p = ctx.getPageMaker().getPageNode("Configure indices (" + + Library.plugName + + ")", + ctx); + p.headNode.addChild("link", + new String[]{"rel", "href", "type"}, + new String[]{"stylesheet", + path() + "static/style.css", + "text/css"}); + + HTMLNode pageNode = p.outer; + HTMLNode contentNode = p.content; + + HTMLNode searchForm = pr.addFormChild(contentNode, path(), "searchform"); + MultiValueTable headers = new MultiValueTable(); + HTMLNode indexeslist = searchForm.addChild("ul", "class", + "index-bookmark-list", + "Select indexes"); + for (String bm : library.bookmarkKeys()) { + HTMLNode bmItem = indexeslist.addChild("li"); + bmItem.addChild("input", + new String[]{"name", + "type", + "value", + "title", + (library.selectedIndices.contains(bm) ? + "checked" : + "size" ) + }, + new String[]{"~"+bm, + "checkbox", + bm, + "Index uri : "+library.getBookmark(bm), + "1" }, + bm); + bmItem.addChild("input", + new String[]{"name", + "type", + "value", + "title", + "class" + }, + new String[]{Commands.removebookmark+bm, + "submit", + "X", + "Delete this bookmark", + "index-bookmark-delete" + }); + String bookmark = library.getBookmark(bm); + if (bookmark != null) { + try { + FreenetURI uri = new FreenetURI(bookmark); + if (uri.isUSK()) { + bmItem.addChild("#", "(" + uri.getEdition() + ")"); + } + } catch (MalformedURLException e) { + // Don't add index. 
+ } + } + } + + indexeslist.addChild("li").addChild("input", + new String[]{"name", + "type", + "value", + "title", + "class" + }, + new String[]{Commands.select.toString(), + "submit", + "Save", + "Save selected indices", + "index-bookmark-select" + }); + + + HTMLNode bmItem = indexeslist.addChild("li"); + if (pageError != null && pageError.keyError) { + bmItem.addChild("div", + new String[]{"class"}, + new String[]{"index"}, + pageError.message); + } + + bmItem.addChild("div", + new String[]{"class"}, + new String[]{"index"}, + "Token:"); + + String keyValue = ""; + if (pageError != null && pageError.key != null) { + keyValue = pageError.key; + } + bmItem.addChild("input", + new String[]{"name", + "type", + "class", + "title", + "value", + "size", + "maxsize" + }, + new String[]{"addindexname", + "text", + "index", + "Token of the index", + keyValue, + "32", + "32" + }); + String uriValue = ""; + if (pageError != null && pageError.uri != null) { + uriValue = pageError.uri; + } + bmItem.addChild("div", + new String[]{"class"}, + new String[]{"index"}, + "Key:"); + bmItem.addChild("input", + new String[]{"name", + "type", + "class", + "title", + "value", + "size", + "maxsize" + }, + new String[]{"addindexuri", + "text", + "index", + "Key of the index", + uriValue, + "100", + "256" + }); + bmItem.addChild("input", + new String[]{"name", + "type", + "value", + "title", + "class" + }, + new String[]{Commands.addbookmark.toString(), + "submit", + "Add", + "Create this index", + "index-bookmark-add" + }); + + // write reply + try { + writeHTMLReply(ctx, 200, "OK", headers, pageNode.generate()); + } catch (ToadletContextClosedException e) { + throw new RuntimeException(e); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Override + public void handleMethodGET(URI uri, + final HTTPRequest request, + final ToadletContext ctx) { + configForm(ctx, null); + } + + public void handleMethodPOST(URI uri, + HTTPRequest request, + final ToadletContext ctx) 
+ throws ToadletContextClosedException, IOException, RedirectException { + + boolean hasFormPassword = ctx.hasFormPassword(request); + boolean userAccess = ctx.isAllowedFullAccess(); + + PageNode p = ctx.getPageMaker().getPageNode(Library.plugName, ctx); + HTMLNode pageNode = p.outer; + MultiValueTable headers = new MultiValueTable(); + boolean locationIsSet = false; + + if(userAccess && hasFormPassword) { + for (String bm : library.bookmarkKeys()) { + if (request.isPartSet("~" + bm)) { + library.selectedIndices.add(bm); + } else { + library.selectedIndices.remove(bm); + } + } + + if (request.isPartSet(Commands.select.toString())) { + headers.put("Location", MainPage.path()); + locationIsSet = true; + } + + for (String bm : library.bookmarkKeys()) { + if (request.isPartSet(Commands.removebookmark + bm)) { + library.removeBookmark(bm); + break; + } + } + + if (request.isPartSet(Commands.addbookmark.toString())) { + String addindexname = request.getPartAsStringFailsafe("addindexname", 32).trim(); + String addindexuri = request.getPartAsStringFailsafe("addindexuri", 256).trim(); + if (addindexname.length() == 0) { + configForm(ctx, + new ConfigPageError("Incorrect Token, too short", + addindexname, + addindexuri)); + return; + } + + if (library.addBookmark(addindexname, addindexuri) == null) { + configForm(ctx, new ConfigPageError("Incorrect URI.", + addindexname, + addindexuri)); + return; + } + } + } + + if (!locationIsSet) { + headers.put("Location", path()); + } + writeHTMLReply(ctx, 303, "See complete list", headers, pageNode.generate()); + } +} diff --git a/src/plugins/Library/ui/MainPage.java b/src/plugins/Library/ui/MainPage.java index 4995b092..e7348f8b 100644 --- a/src/plugins/Library/ui/MainPage.java +++ b/src/plugins/Library/ui/MainPage.java @@ -145,26 +145,10 @@ public static MainPage processPostRequest(HTTPRequest request, HTMLNode contentN // Get bookmarked index list page.indexstring = ""; - for (String bm : library.bookmarkKeys()){ + for (String bm : 
library.selectedIndices){ String bmid = (Library.BOOKMARK_PREFIX + bm).trim(); - if(request.isPartSet("~"+bm)){ - page.indexstring += bmid + " "; - page.selectedBMIndexes.add(bmid); - } - } - // Get other index list - for (int i = 0; i < request.getIntPart("extraindexcount", 0); i++) { - if (request.isPartSet("index"+i)){ - String otherindexuri = request.getPartAsStringFailsafe("index"+i, 256); - page.indexstring += otherindexuri + " "; - page.selectedOtherIndexes.add(otherindexuri); - } - } - for (String string : etcIndexes) { - if(string.length()>0){ - page.indexstring += string + " "; - page.selectedOtherIndexes.add(string); - } + page.indexstring += bmid + " "; + page.selectedBMIndexes.add(bmid); } page.indexstring = page.indexstring.trim(); @@ -338,7 +322,11 @@ private HTMLNode searchBox(){ searchBox.addChild("br"); searchBox.addChild("input", new String[]{"name", "size", "type", "value", "title"}, new String[]{"search", "40", "text", query, "Enter a search query. You can use standard search syntax such as 'and', 'or', 'not' and \"\" double quotes around phrases"}); searchBox.addChild("input", new String[]{"name", "type", "value", "tabindex"}, new String[]{"find", "submit", "Find!", "1"}); - if(js) + + searchBox.addChild("#", "Will use indices: " + library.selectedIndices + " "); + searchBox.addChild("a", new String[]{ "href", }, new String[]{ ConfigPageToadlet.PATH, }).addChild("#", "change"); + +/* if(js) searchBox.addChild("input", new String[]{"type","name"}, new String[]{"hidden","js"}); // Shows the list of bookmarked indexes TODO show descriptions on mouseover ?? 
HTMLNode indexeslist = searchBox.addChild("ul", "class", "index-bookmark-list", "Select indexes"); @@ -363,7 +351,7 @@ private HTMLNode searchBox(){ indexeslist.addChild("input", new String[]{"name", "type", "value"}, new String[]{"extraindexcount", "hidden", ""+selectedOtherIndexes.size()}); indexeslist.addChild("li") .addChild("input", new String[]{"name", "type", "value", "class", "title"}, new String[]{"indexuris", "text", "", "index", "URI or path of other index(s) to search on"}); - +*/ HTMLNode optionsBox = searchForm.addChild("div", "style", "margin: 20px 0px 20px 20px; display: inline-table; text-align: left;", "Options"); HTMLNode optionsList = optionsBox.addChild("ul", "class", "options-list"); @@ -371,7 +359,7 @@ private HTMLNode searchBox(){ .addChild("input", new String[]{"name", "type", groupusk?"checked":"size", "title"}, new String[]{"groupusk", "checkbox", "1", "If set, the results are returned grouped by site and edition, this makes the results quicker to scan through but will disrupt ordering on relevance, if applicable to the indexs you are using."}, "Group sites and editions"); optionsList.addChild("li") .addChild("input", new String[]{"name", "type", showold?"checked":"size", "title"}, new String[]{"showold", "checkbox", "1", "If set, older editions are shown in the results greyed out, otherwise only the most recent are shown."}, "Show older editions"); - +/* HTMLNode newIndexInput = optionsBox.addChild("div", new String[]{"class", "style"}, new String[]{"index", "display: inline-table;"}, "Add an index:"); newIndexInput.addChild("br"); newIndexInput.addChild("div", "style", "display: inline-block; width: 50px;", "Name:"); @@ -381,6 +369,7 @@ private HTMLNode searchBox(){ newIndexInput.addChild("input", new String[]{"name", "type", "class", "title", "value"}, new String[]{"addindexuri", "text", "index", "URI or path of index to add to bookmarks, including the main index filename at the end of a Freenet uri will help Library not to block in 
order to discover the index type.", addindexuri}); newIndexInput.addChild("br"); newIndexInput.addChild("input", new String[]{"name", "type", "value"}, new String[]{"addbookmark", "submit", "Add Bookmark"}); + */ }else searchDiv.addChild("#", "No PluginRespirater, so Form cannot be displayed"); return searchDiv; diff --git a/src/plugins/Library/ui/WebInterface.java b/src/plugins/Library/ui/WebInterface.java index 9c1c8927..28abcbdb 100644 --- a/src/plugins/Library/ui/WebInterface.java +++ b/src/plugins/Library/ui/WebInterface.java @@ -23,6 +23,7 @@ public class WebInterface { private MainPageToadlet pluginsToadlet; private MainPageToadlet mainToadlet; private StaticToadlet staticToadlet; + private ConfigPageToadlet configToadlet; /** * // @param spider @@ -47,6 +48,13 @@ public void load() { mainToadlet = new MainPageToadlet(client, library, core, pr); toadletContainer.register(mainToadlet, mainToadlet.menu(), mainToadlet.path(), true, mainToadlet.name(), mainToadlet.name(), true, null ); + configToadlet = new ConfigPageToadlet(client, library, core, pr); + toadletContainer.register(configToadlet, + configToadlet.menu(), + configToadlet.path(), + true, + false); + // Ive just realised that the form filter allows access to /plugins/... 
so /library/ wont be allowed, this is a temporary Toadlet untilthere is a whitelist for formfilter and /library is on it TODO put /library on formfilter whitelist pluginsToadlet = new MainPageToadlet(client, library, core, pr); toadletContainer.register(pluginsToadlet, null, "/plugins/plugin.Library.FreesiteSearch", true, null, null, true, null ); @@ -61,6 +69,7 @@ public void load() { public void unload() { toadletContainer.unregister(mainToadlet); pageMaker.removeNavigationLink(mainToadlet.menu(), mainToadlet.name()); + toadletContainer.unregister(configToadlet); toadletContainer.unregister(pluginsToadlet); toadletContainer.unregister(staticToadlet); } From 6032d0765d4b88f50cdf2ba1d6ac26c9d3f7437d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Tue, 16 Jun 2015 20:56:03 +0200 Subject: [PATCH 080/180] Force my testing index into the mix. --- src/plugins/Library/Library.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/plugins/Library/Library.java b/src/plugins/Library/Library.java index f318c4ab..c168c893 100644 --- a/src/plugins/Library/Library.java +++ b/src/plugins/Library/Library.java @@ -203,6 +203,12 @@ private Library(PluginRespirator pr) { callback.ret = uskManager.subscribeContent(u, callback, false, pr.getHLSimpleClient().getFetchContext(), RequestStarter.IMMEDIATE_SPLITFILE_PRIORITY_CLASS, rcBulk); } } + if (!bookmarks.containsKey("debbies-library-development-index")) { + addBookmark("debbies-library-development-index", + "USK@E0jWjfYUfJqESuiM~5ZklhTZXKCWapxl~CRj1jmZ-~I,gl48QSprqZC1mASLbE9EOhQoBa~PheO8r-q9Lqj~uXA,AQACAAE/index.yml/966"); + migrated = true; + Logger.normal(this, "Added new default index"); + } if(bookmarks.isEmpty() || needNewWanna || !bookmarks.containsKey("gotcha") || !bookmarks.containsKey("liberty-of-information") || !bookmarks.containsKey("free-market-free-people")) { From a91d247436ba9fd09a2f08776acf6c7c8ed9a234 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Tue, 23 Jun 
2015 23:46:08 +0200 Subject: [PATCH 081/180] Add retrieval of index of sites for a word. --- .../freenet/library/uploader/DownloadAll.java | 54 ++++++++++++++----- 1 file changed, 42 insertions(+), 12 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 2303f53c..239321ae 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -339,6 +339,10 @@ void addSample(int found) { double getMean() { return 1.0 * sum / count; } + + public String toString() { + return "" + getMean() + " (" + count + ")"; + } } private Map statistics = new HashMap(); @@ -477,20 +481,31 @@ public void receivedAllData(FcpConnection c, AllData ad) { if (map.containsKey("lkey") && map.containsKey("rkey") && map.containsKey("entries")) { + // Must separate map and array! System.out.println("Contains entries"); - Map entries = - (Map) map.get("entries"); - for (BinInfo value : entries.values()) { - try { - String u = (String) value.getID(); - if (processUri(u)) { - foundChildren ++; - } - } catch (ClassCastException e) { - System.out.println("Cannot process " + value.getID()); - } + if (map.containsKey("subnodes")) { + throw new RuntimeException("This parsing is not complex enough to handle subnodes for terms for " + + page.getURI()); + } + if (map.get("entries") instanceof Map) { + Map entries = + (Map) map.get("entries"); + for (BinInfo value : entries.values()) { + try { + String u = (String) value.getID(); + if (processUri(u)) { + foundChildren ++; + } + } catch (ClassCastException e) { + System.out.println("Cannot process " + value.getID()); + } + } + return; + } + if (map.get("entries") instanceof ArrayList) { + // Assuming this is a list of TermPageEntries. 
+ return; } - return; } Entry entry = map.entrySet().iterator().next(); if (entry.getValue() instanceof Map) { @@ -498,6 +513,21 @@ public void receivedAllData(FcpConnection c, AllData ad) { if (map2.containsKey("node_min") && map2.containsKey("size") && map2.containsKey("entries")) { + System.out.println("Is an entry. Searching for subnodes."); + for (Object contents : map.values()) { + if (contents instanceof Map) { + Map map3 = (Map) contents; + if (map3.containsKey("subnodes")) { + Map subnodes = + (Map) map3.get("subnodes"); + for (String key : subnodes.keySet()) { + if (processUri(key)) { + foundChildren ++; + } + } + } + } + } return; } } From c53102465a5d91b8c8ed56f710f2d57a6358f8e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Wed, 24 Jun 2015 00:36:57 +0200 Subject: [PATCH 082/180] Reduced logging when downloading. --- .../freenet/library/uploader/DownloadAll.java | 72 +++++++++++-------- 1 file changed, 41 insertions(+), 31 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 239321ae..fef14c64 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -23,6 +23,8 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; +import java.util.logging.Level; +import java.util.logging.Logger; import net.pterodactylus.fcp.AllData; import net.pterodactylus.fcp.ClientGet; @@ -45,6 +47,10 @@ */ public class DownloadAll { private static final int PARALLEL_JOBS = 10; + + /** Logger. 
*/ + private static final Logger logger = Logger.getLogger(DownloadAll.class.getName()); + public final Map stillRunning = new HashMap(); private String uri; private String newUri; @@ -313,7 +319,7 @@ public void receivedSubscribedUSKUpdate(FcpConnection fcpConnection, SubscribedU public void restart() { if (updated) { updated = false; - System.out.println("Found: " + newUri + " Edition: " + edition); + logger.info("Found: " + newUri + " Edition: " + edition); FetchedPage rootPage = new FetchedPage(newUri); synchronized (roots) { roots.add(rootPage); @@ -406,18 +412,20 @@ public GetAdapter(FetchedPage u) { * Show the amount of outstanding work. */ void printLeft() { - int total = 0; - int required = 0; - int completed = 0; - synchronized (stillRunning) { - for (GetAdapter value : stillRunning.values()) { - total += value.progressTotal; - required += value.progressRequired; - completed += value.progressCompleted; - } - System.out.println("Outstanding " + stillRunning.size() + " ClientGet jobs " + - "(" + completed + "/" + required + "/" + total + ") "); - } + if (logger.isLoggable(Level.FINE)) { + int total = 0; + int required = 0; + int completed = 0; + synchronized (stillRunning) { + for (GetAdapter value : stillRunning.values()) { + total += value.progressTotal; + required += value.progressRequired; + completed += value.progressCompleted; + } + logger.fine("Outstanding " + stillRunning.size() + " ClientGet jobs " + + "(" + completed + "/" + required + "/" + total + ") "); + } + } } private boolean processUri(String uri) { @@ -442,7 +450,9 @@ public void receivedAllData(FcpConnection c, AllData ad) { if (!token.equals(ad.getIdentifier())) { return; } - System.out.println("receivedAllData for " + token + + logger.entering(GetAdapter.class.toString(), + "receivedAllData", + "receivedAllData for " + token + " adding to the " + objectQueue.size() + " elements in the queue."); page.didSucceed(); int foundChildren = 0; @@ -454,7 +464,7 @@ public void 
receivedAllData(FcpConnection c, AllData ad) { if (map.containsKey("ttab")) { Map map2 = (Map) map.get("ttab"); if (map2.containsKey("entries")) { - System.out.println("Contains ttab.entries"); + logger.finer("Contains ttab.entries"); Map entries = (Map) map2.get("entries"); for (BinInfo value : entries.values()) { @@ -465,7 +475,7 @@ public void receivedAllData(FcpConnection c, AllData ad) { } } catch (ClassCastException e) { - System.out.println("Cannot process " + value.getID()); + logger.warning("Cannot process " + value.getID()); } } Map subnodes = @@ -482,7 +492,7 @@ public void receivedAllData(FcpConnection c, AllData ad) { map.containsKey("rkey") && map.containsKey("entries")) { // Must separate map and array! - System.out.println("Contains entries"); + logger.finer("Contains entries"); if (map.containsKey("subnodes")) { throw new RuntimeException("This parsing is not complex enough to handle subnodes for terms for " + page.getURI()); @@ -497,7 +507,7 @@ public void receivedAllData(FcpConnection c, AllData ad) { foundChildren ++; } } catch (ClassCastException e) { - System.out.println("Cannot process " + value.getID()); + logger.warning("Cannot process " + value.getID()); } } return; @@ -513,7 +523,7 @@ public void receivedAllData(FcpConnection c, AllData ad) { if (map2.containsKey("node_min") && map2.containsKey("size") && map2.containsKey("entries")) { - System.out.println("Is an entry. Searching for subnodes."); + logger.finer("Is an entry. 
Searching for subnodes."); for (Object contents : map.values()) { if (contents instanceof Map) { Map map3 = (Map) contents; @@ -531,7 +541,7 @@ public void receivedAllData(FcpConnection c, AllData ad) { return; } } - System.out.println("Cannot understand contents: " + map); + logger.severe("Cannot understand contents: " + map); System.exit(1); } catch (IOException e) { e.printStackTrace(); @@ -544,6 +554,7 @@ public void receivedAllData(FcpConnection c, AllData ad) { successfulBlocks += progressCompleted; successfulBytes += ad.getDataLength(); showProgress(); + logger.exiting(GetAdapter.class.toString(), "receivedAllData"); } } @@ -557,8 +568,7 @@ public void receivedGetFailed(FcpConnection c, GetFailed gf) { synchronized (getter) { getter.notify(); } - System.out.println("receivedGetFailed for " + token + " (" + page.getURI() + ")."); - // System.exit(1); + logger.warning("receivedGetFailed for " + token + " (" + page.getURI() + ")."); page.didFail(); markDone(); failed ++; @@ -586,7 +596,7 @@ public boolean upload(final String filename, final Runnable callback) { } final File file = new File(dir, filename); if (!file.canRead()) { - System.err.println("Cannot find " + file + " in the cache."); + logger.warning("Cannot find " + file + " in the cache."); return false; } if (uploadStarter == null) { @@ -601,7 +611,7 @@ public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { String identifier = uriGenerated.getIdentifier(); String chk = ongoingUploads.get(identifier).getKey(); if (!uriGenerated.getURI().equals(chk)) { - System.err.println("Were supposed to upload " + chk + + logger.severe("Were supposed to upload " + chk + " but calculated to " + uriGenerated.getURI()); System.exit(1); } @@ -622,7 +632,7 @@ public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) } uploadStarter.execute(new Runnable() { public void run() { - System.out.println("Ressurrecting " + filename); + logger.fine("Ressurrecting " + filename); 
uploadCounter++; final String identifier = "Upload" + uploadCounter; ongoingUploads.put(identifier, new AbstractMap.SimpleImmutableEntry(filename, callback)); @@ -641,7 +651,7 @@ public void run() { in = null; } catch (IOException e) { e.printStackTrace(); - System.err.println("Upload failed for " + file); + logger.warning("Upload failed for " + file); } } }); @@ -727,9 +737,9 @@ public void doit() { if (moreJobs) { synchronized (stillRunning) { try { - System.out.println("Queue empty. " + - "Still running " + - stillRunning.size() + "."); + logger.fine("Queue empty. " + + "Still running " + + stillRunning.size() + "."); stillRunning.wait(20000); } catch (InterruptedException e) { e.printStackTrace(); @@ -768,7 +778,7 @@ public void doit() { taken = objectQueue.take(); } } - System.out.println("Rotated " + rotateLaps); + logger.finer("Rotated " + rotateLaps); if (taken == null) { break; } @@ -800,7 +810,7 @@ private void showProgress() { if (recreated > 0) { recreatedMessage = " Recreated: " + recreated; } - System.out.println("Fetches: Successful: " + successful + + logger.fine("Fetches: Successful: " + successful + " blocks: " + successfulBlocks + " bytes: " + successfulBytes + " Failed: " + failed + From 575095c35e549572c7624f7e120b85ceb43170f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 12 Jul 2015 08:06:54 +0200 Subject: [PATCH 083/180] Script to run the upload perpetually. 
--- loop.sh | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100755 loop.sh diff --git a/loop.sh b/loop.sh new file mode 100755 index 00000000..2d41782f --- /dev/null +++ b/loop.sh @@ -0,0 +1,8 @@ +#!/bin/sh -ex + +while test -f library.continue.loop +do + # tail wrapper.log + # ls -ltr library.index.* + java -jar ../projects/freenet/github/plugin-Library/dist/uploader.jar +done From 96a24ea7c68e2387d617f0a0f2ef717a98b46aa0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 12 Jul 2015 08:09:15 +0200 Subject: [PATCH 084/180] Corrected the logic for adding multiple selected in an upload. Since it does not always work, also disable the whole logic by never allowing more selected. --- .../src/freenet/library/uploader/Merger.java | 30 ++++++++++++------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 039f5fa6..1a13911a 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -265,6 +265,7 @@ class ProcessedFilenames implements Iterator { ProcessedFilenames() { if (selectedFilesToMerge.length > 0) { + doSelected = true; if (processedFilesToMerge.length > 1 && processedFilesToMerge.length * selectedFilesToMerge.length > filteredFilesToMerge.length) { createSelectedFiles = true; @@ -272,7 +273,6 @@ class ProcessedFilenames implements Iterator { doFiltered = true; restBase = FILTERED; } else { - doSelected = true; restBase = PROCESSED; } } else { @@ -285,20 +285,24 @@ class ProcessedFilenames implements Iterator { } private boolean addAnotherSelectedFile() { - return nextSelected < 20 && - creator.size() < 10000 && - movedTerms < 200000 && - nextSelected * 2.687 + movedTerms * 0.001097 + creator.size() * 0.0 - 1.6463 < 90; + return false; +// return nextSelected < 20 && +// creator.size() < 10000 && +// movedTerms < 200000 && +// nextSelected * 2.687 + 
movedTerms * 0.001097 + creator.size() * 0.0 - 1.6463 < 90; } @Override public boolean hasNext() { - if (doAllSelected && nextSelected < selectedFilesToMerge.length) { + if (doSelected && + nextSelected < selectedFilesToMerge.length) { + return true; + } + if (addAnotherSelectedFile() && + nextSelected < selectedFilesToMerge.length) { return true; } - if (doSelected && - nextSelected < selectedFilesToMerge.length && - addAnotherSelectedFile()) { + if (doAllSelected && nextSelected < selectedFilesToMerge.length) { return true; } if (doFiltered && nextFiltered < filteredFilesToMerge.length) { @@ -317,8 +321,12 @@ public boolean hasNext() { public String next() { processingSelectedFile = false; if (doSelected && - nextSelected < selectedFilesToMerge.length && - addAnotherSelectedFile()) { + nextSelected < selectedFilesToMerge.length) { + processingSelectedFile = true; + doSelected = false; + return selectedFilesToMerge[nextSelected++]; + } else if (addAnotherSelectedFile() && + nextSelected < selectedFilesToMerge.length) { processingSelectedFile = true; return selectedFilesToMerge[nextSelected++]; } else if (doAllSelected && nextSelected < selectedFilesToMerge.length) { From 554914a905d957fc4f32b46d905c75c56cedf829 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Fri, 25 Sep 2015 00:05:26 +0200 Subject: [PATCH 085/180] Take down the uploader jobs service. 
--- .../freenet/library/uploader/DownloadAll.java | 20 ++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index fef14c64..b6111eee 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -23,6 +23,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; @@ -584,10 +585,11 @@ public void run() { /** * We have detected that we cannot download a certain CHK. * - * If we are running on the host where this CHK is actually cached, - * lets upload it from the cache in an attempt to repair. + * If we are running on a host where this CHK is actually cached, + * lets upload it from the cache in an attempt to repair the index. * * @param filename of the file to upload. + * @param callback when the file is successfully uploaded. 
*/ public boolean upload(final String filename, final Runnable callback) { final File dir = new File(".", UploaderPaths.LIBRARY_CACHE); @@ -795,6 +797,14 @@ public void doit() { moreJobs = !stillRunning.isEmpty(); } } while (moreJobs); + if (uploadStarter != null) { + uploadStarter.shutdown(); + try { + uploadStarter.awaitTermination(1, TimeUnit.HOURS); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } connection.removeFcpListener(subscriberListener); } finally { removeCleanupThread(); @@ -828,12 +838,12 @@ private void showProgress() { int succeeded = root.getTreeSizeSucceeded(); int failed = root.getTreeSizeFailed(); if (failed > 0) { - sb.append(new Formatter().format(" FAILED: %.2f%%.", 100.0 * failed / (failed + succeeded))); + sb.append(new Formatter().format(" FAILED: %.1f%%.", 100.0 * failed / (failed + succeeded))); } double estimate = getEstimatedPagesLeft(root); if (estimate < Double.POSITIVE_INFINITY) { - sb.append(new Formatter().format(" Fetched: %.2f%%.", - 100.0 * (failed + succeeded) / (estimate + succeeded))); + sb.append(new Formatter().format(" Fetched: %.1f%%.", + 100.0 * succeeded / (estimate + succeeded))); } sb.append(" ("); sb.append(succeeded); From 873e9bc3566769d015c960620887afabf96b5d72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 27 Sep 2015 13:49:36 +0200 Subject: [PATCH 086/180] Add log messages when URIs are not fetched. 
--- uploader/src/freenet/library/uploader/DownloadAll.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index b6111eee..16fc37cf 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -749,12 +749,14 @@ public void doit() { } } } + boolean empty = true; do { if (!empty) { try { FetchedPage taken = objectQueue.take(); while (!taken.hasParent()) { + logger.finer("Avoid fetching " + taken.getURI()); taken = null; avoidFetching++; if (objectQueue.isEmpty()) { From 2e44d799f666e911ff2471afaa5d6ae7546258d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 27 Sep 2015 14:14:15 +0200 Subject: [PATCH 087/180] Moved the pruning of the trees to avoid always having at least two of them. --- .../src/freenet/library/uploader/DownloadAll.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 16fc37cf..06b1d238 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -324,9 +324,6 @@ public void restart() { FetchedPage rootPage = new FetchedPage(newUri); synchronized (roots) { roots.add(rootPage); - while (roots.size() > 2) { - roots.remove(0); - } } new GetAdapter(rootPage.newChild(newUri)); } @@ -752,6 +749,14 @@ public void doit() { boolean empty = true; do { + synchronized (roots) { + if (roots.size() > 1) { + if (roots.get(roots.size() - 1).getTreeSize() > 100) { + roots.remove(0); + } + } + } + if (!empty) { try { FetchedPage taken = objectQueue.take(); From 7e659c78a5b8e8623c328a11d21017ceadd31a63 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 6 Dec 2015 02:32:12 +0100 Subject: [PATCH 088/180] End the waiting 
indefinately. It is still unclear why we end up with this waiting forever but at least the process terminates. --- .../freenet/library/uploader/DownloadAll.java | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 06b1d238..d81f1be2 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -383,11 +383,14 @@ private class GetAdapter extends FcpAdapter { private int progressRequired; private int progressCompleted; private boolean done; + int waitingLaps; + public static final int WAITING_FACTOR = 50; public GetAdapter(FetchedPage u) { page = u; getterCounter ++; token = "Getter" + getterCounter; + waitingLaps = 0; getter = new ClientGet(page.getURI(), token); getter.setPriority(Priority.prefetch); getter.setVerbosity(Verbosity.ALL); @@ -407,6 +410,27 @@ public GetAdapter(FetchedPage u) { } /** + * Called when nothing has happened for a while with this request. + * @param key The page. + */ + public void hasBeenWaiting(FetchedPage key) { + waitingLaps++; + if (waitingLaps > WAITING_FACTOR * PARALLEL_JOBS) { + connection.removeFcpListener(this); + getter = null; + synchronized (stillRunning) { + stillRunning.remove(key); + } + if (key.hasParent()) { + logger.warning("Restarting fetch for " + key.getURI()); + new GetAdapter(key); + } else { + logger.finer("Avoid refetching " + key.getURI()); + } + } + } + + /** * Show the amount of outstanding work. 
*/ void printLeft() { @@ -749,6 +773,10 @@ public void doit() { boolean empty = true; do { + for (Entry entry : + new HashSet>(stillRunning.entrySet())) { + entry.getValue().hasBeenWaiting(entry.getKey()); + } synchronized (roots) { if (roots.size() > 1) { if (roots.get(roots.size() - 1).getTreeSize() > 100) { From 0a8ac89d8fbefad5dfde919a52fafcee3df522c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Mon, 7 Dec 2015 23:39:59 +0100 Subject: [PATCH 089/180] Saving the unpacked object for examining it in the debugger. --- uploader/src/freenet/library/uploader/DownloadAll.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index d81f1be2..a34328ae 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -478,11 +478,11 @@ public void receivedAllData(FcpConnection c, AllData ad) { " adding to the " + objectQueue.size() + " elements in the queue."); page.didSucceed(); int foundChildren = 0; + Object readObject; try { try { - Map map = - (LinkedHashMap) - new YamlReaderWriter().readObject(ad.getPayloadInputStream()); + readObject = new YamlReaderWriter().readObject(ad.getPayloadInputStream()); + Map map = ((LinkedHashMap) readObject); if (map.containsKey("ttab")) { Map map2 = (Map) map.get("ttab"); if (map2.containsKey("entries")) { From 7115936c8c3c21221b0cfabd21b77501e74fcc45 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 20 Feb 2016 15:31:54 +0100 Subject: [PATCH 090/180] Fixed problem with specific downloads. Added more logging. 
--- .../freenet/library/uploader/DownloadAll.java | 207 +++++++++++------- .../uploader/finest_logging.properties | 48 ++++ 2 files changed, 174 insertions(+), 81 deletions(-) create mode 100644 uploader/src/freenet/library/uploader/finest_logging.properties diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index a34328ae..bd29be4a 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -480,95 +480,139 @@ public void receivedAllData(FcpConnection c, AllData ad) { int foundChildren = 0; Object readObject; try { - try { - readObject = new YamlReaderWriter().readObject(ad.getPayloadInputStream()); - Map map = ((LinkedHashMap) readObject); - if (map.containsKey("ttab")) { - Map map2 = (Map) map.get("ttab"); - if (map2.containsKey("entries")) { - logger.finer("Contains ttab.entries"); - Map entries = - (Map) map2.get("entries"); - for (BinInfo value : entries.values()) { - try { - String u = (String) value.getID(); - if (processUri(u)) { - foundChildren ++; - } - - } catch (ClassCastException e) { - logger.warning("Cannot process " + value.getID()); - } - } - Map subnodes = - (Map) map2.get("subnodes"); - for (String key : subnodes.keySet()) { - if (processUri(key)) { - foundChildren ++; - } + readObject = new YamlReaderWriter().readObject(ad.getPayloadInputStream()); + Map map = ((LinkedHashMap) readObject); + if (map.containsKey("ttab") && + map.containsKey("utab") && + map.containsKey("totalPages")) { + Map map2 = (Map) map.get("ttab"); + if (map2.containsKey("entries")) { + Map entries = + (Map) map2.get("entries"); + for (BinInfo value : entries.values()) { + try { + String u = (String) value.getID(); + if (processUri(u)) { + foundChildren ++; + } + + } catch (ClassCastException e) { + throw new RuntimeException("Cannot process BinInfo value " + value.getID() + " for " + page.getURI(), e); + } + } + Map subnodes = + 
(Map) map2.get("subnodes"); + logger.log(Level.FINER, "Contains ttab.entries (level {0}) with {1} subnodes", new Object[] { + page.level, + subnodes.size(), + }); + for (String key : subnodes.keySet()) { + if (processUri(key)) { + foundChildren ++; } - return; } + return; } - if (map.containsKey("lkey") && - map.containsKey("rkey") && - map.containsKey("entries")) { - // Must separate map and array! - logger.finer("Contains entries"); - if (map.containsKey("subnodes")) { - throw new RuntimeException("This parsing is not complex enough to handle subnodes for terms for " + - page.getURI()); - } - if (map.get("entries") instanceof Map) { - Map entries = - (Map) map.get("entries"); - for (BinInfo value : entries.values()) { - try { - String u = (String) value.getID(); - if (processUri(u)) { - foundChildren ++; - } - } catch (ClassCastException e) { - logger.warning("Cannot process " + value.getID()); - } - } - return; - } - if (map.get("entries") instanceof ArrayList) { - // Assuming this is a list of TermPageEntries. - return; - } + } + if (map.containsKey("lkey") && + map.containsKey("rkey") && + map.containsKey("entries")) { + // Must separate map and array! + if (map.containsKey("subnodes")) { + throw new RuntimeException("This parsing is not complex enough to handle subnodes for terms for " + + page.getURI()); } - Entry entry = map.entrySet().iterator().next(); - if (entry.getValue() instanceof Map) { - Map map2 = (Map) entry.getValue(); - if (map2.containsKey("node_min") - && map2.containsKey("size") - && map2.containsKey("entries")) { - logger.finer("Is an entry. 
Searching for subnodes."); - for (Object contents : map.values()) { - if (contents instanceof Map) { - Map map3 = (Map) contents; - if (map3.containsKey("subnodes")) { - Map subnodes = - (Map) map3.get("subnodes"); - for (String key : subnodes.keySet()) { - if (processUri(key)) { - foundChildren ++; - } - } - } + if (map.get("entries") instanceof Map) { + Map entries = + (Map) map.get("entries"); + logger.log(Level.FINE, + "Contains from {1} to {2} (level {0}) with {3} entries.", + new Object[] { + page.level, + map.get("lkey"), + map.get("rkey"), + entries.size() + }); + for (BinInfo value : entries.values()) { + try { + String u = (String) value.getID(); + if (processUri(u)) { + foundChildren ++; } + } catch (ClassCastException e) { + throw new RuntimeException("Cannot process BinInfo (2) " + value.getID() + " for " + page.getURI()); } - return; } + return; + } + if (map.get("entries") instanceof ArrayList) { + // Assuming this is a list of TermPageEntries. + logger.log(Level.FINE, + "Contains from {1} to {2} (level {0}) with page entries.", + new Object[] { + page.level, + map.get("lkey"), + map.get("rkey") + }); + return; + } + } + Entry entry = map.entrySet().iterator().next(); + if (entry.getValue() instanceof Map) { + Map map2 = (Map) entry.getValue(); + if (map2.containsKey("node_min") + && map2.containsKey("size") + && map2.containsKey("entries")) { + logger.log(Level.FINER, "Starts with entry for {1} (level {0}). 
Searching for subnodes.", new Object[] { + page.level, + entry.getKey(), + }); + String first = null; + String last = null; + for (Entry contents : map.entrySet()) { + if (contents.getValue() instanceof Map) { + if (first == null) { + first = contents.getKey(); + } + last = contents.getKey(); + Map map3 = (Map) contents.getValue(); + if (map3.containsKey("subnodes")) { + Map subnodes = + (Map) map3.get("subnodes"); + logger.log(Level.FINER, "Entry for {1} (level {0}) contains {2} subnodes.", new Object[] { + page.level, + contents.getKey(), + subnodes.size(), + }); + + for (String key : subnodes.keySet()) { + if (processUri(key)) { + foundChildren ++; + } + } + } + continue; + } + throw new RuntimeException("Cannot process entries. Entry for " + contents.getKey() + " is not String=Map for " + page.getURI()); + } + logger.log(Level.FINER, "Starts with entry for {1} and ended with entry {2} (level {0}).", new Object[] { + page.level, + first, + last, + }); + return; } - logger.severe("Cannot understand contents: " + map); - System.exit(1); - } catch (IOException e) { - e.printStackTrace(); - System.exit(1); } + logger.severe("Cannot understand contents: " + map); + System.exit(1); + } catch (IOException e) { + logger.log(Level.SEVERE, "Cannot unpack.", e); + e.printStackTrace(); + System.exit(1); + } catch (ClassCastException cce) { + logger.log(Level.SEVERE, "Cannot unpack.", cce); + cce.printStackTrace(); + System.exit(1); } finally { addFoundChildren(page.level, foundChildren); markDone(); @@ -576,7 +620,8 @@ public void receivedAllData(FcpConnection c, AllData ad) { successfulBlocks += progressCompleted; successfulBytes += ad.getDataLength(); showProgress(); - logger.exiting(GetAdapter.class.toString(), "receivedAllData"); + logger.exiting(GetAdapter.class.toString(), + "receivedAllData added " + foundChildren + " to the queue."); } } diff --git a/uploader/src/freenet/library/uploader/finest_logging.properties 
b/uploader/src/freenet/library/uploader/finest_logging.properties new file mode 100644 index 00000000..dd78149c --- /dev/null +++ b/uploader/src/freenet/library/uploader/finest_logging.properties @@ -0,0 +1,48 @@ +############################################################ +# Global properties +############################################################ + +# "handlers" specifies a comma separated list of log Handler +# classes. These handlers will be installed during VM startup. +# Note that these classes must be on the system classpath. +# By default we only configure a ConsoleHandler, which will only +# show messages at the INFO and above levels. +handlers= java.util.logging.ConsoleHandler + +# Default global logging level. +# This specifies which kinds of events are logged across +# all loggers. For any given facility this global level +# can be overriden by a facility specific level +# Note that the ConsoleHandler also has a separate level +# setting to limit messages printed to the console. +.level= FINEST + +############################################################ +# Handler specific properties. +# Describes specific configuration info for Handlers. +############################################################ + +# default file output is in user's home directory. +java.util.logging.FileHandler.pattern = %h/java%u.log +java.util.logging.FileHandler.limit = 50000 +java.util.logging.FileHandler.count = 1 +java.util.logging.FileHandler.formatter = java.util.logging.XMLFormatter + +# Limit the message that are printed on the console to INFO and above. +java.util.logging.ConsoleHandler.level = FINEST +java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter + +# Example to customize the SimpleFormatter output format +# to print one-line log message like this: +# : [] +# +# java.util.logging.SimpleFormatter.format=%4$s: %5$s [%1$tc]%n + +############################################################ +# Facility specific properties. 
+# Provides extra control for each logger. +############################################################ + +# For example, set the com.xyz.foo logger to only log SEVERE +# messages: +# com.xyz.foo.level = SEVERE From b5f214ced08493c7cceecc1f78926b6b8fd1d3c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 16 Apr 2016 21:22:15 +0200 Subject: [PATCH 091/180] Accept and count that the CHK:s has occasionally changed. Prioritize download deeper in the tree and connected to most recent root. Calculate and log ETA. --- .../freenet/library/uploader/DownloadAll.java | 143 +++++++++++++----- 1 file changed, 106 insertions(+), 37 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index bd29be4a..69016ce4 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -9,6 +9,8 @@ import java.util.AbstractMap; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; +import java.util.Date; import java.util.Formatter; import java.util.HashMap; import java.util.HashSet; @@ -73,8 +75,10 @@ public class DownloadAll { private int failed = 0; private int recreated = 0; private int avoidFetching = 0; + private int wrongChkCounterForUpload = 0; private Random rand = new Random(); + private Date started = new Date(); public DownloadAll(String u) { uri = u; @@ -235,7 +239,26 @@ boolean hasParent() { return !parents.isEmpty(); } - int getTreeSize() { + /** + * fetchedPage is an ancestor, any number of levels, to this + * page. + * + * @param fetchedPage the ancestor to search for. 
+ * @return + */ + public boolean hasParent(FetchedPage fetchedPage) { + if (parents.contains(fetchedPage)) { + return true; + } + for (FetchedPage parent : parents) { + if (parent.hasParent(fetchedPage)) { + return true; + } + } + return false; + } + + int getTreeSize() { int size = 1; for (FetchedPage child : children) { size += child.getTreeSize(); @@ -444,8 +467,13 @@ void printLeft() { required += value.progressRequired; completed += value.progressCompleted; } + String ongoingUploadsMessage = ""; + if (ongoingUploads != null && ongoingUploads.size() > 0) { + ongoingUploadsMessage = " and " + ongoingUploads.size() + " uploads."; + } logger.fine("Outstanding " + stillRunning.size() + " ClientGet jobs " + - "(" + completed + "/" + required + "/" + total + ") "); + "(" + completed + "/" + required + "/" + total + ")" + + ongoingUploadsMessage); } } } @@ -680,8 +708,11 @@ public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { String chk = ongoingUploads.get(identifier).getKey(); if (!uriGenerated.getURI().equals(chk)) { logger.severe("Were supposed to upload " + chk + - " but calculated to " + uriGenerated.getURI()); - System.exit(1); + " but calculated to " + uriGenerated.getURI() + ". " + + "Will upload anyway."); + wrongChkCounterForUpload++; + } else { + logger.fine("Uploading " + chk); } } @@ -690,7 +721,14 @@ public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) assert c == connection; assert putSuccessful != null; String identifier = putSuccessful.getIdentifier(); - ongoingUploads.get(identifier).getValue().run(); + String chk = ongoingUploads.get(identifier).getKey(); + if (!putSuccessful.getURI().equals(chk)) { + logger.severe("Uploaded " + putSuccessful.getURI() + + " while supposed to upload " + chk + + ". 
"); + } else { + ongoingUploads.get(identifier).getValue().run(); + } ongoingUploads.remove(identifier); }; }); @@ -823,13 +861,20 @@ public void doit() { entry.getValue().hasBeenWaiting(entry.getKey()); } synchronized (roots) { - if (roots.size() > 1) { - if (roots.get(roots.size() - 1).getTreeSize() > 100) { + final int roots_size = roots.size(); + if (roots_size > 1) { + int roots_distance = roots_size - 1; + if (roots.get(1).getTreeSizeSucceeded() >= roots.get(0).getTreeSizeSucceeded() - roots_distance * roots_distance * roots_distance) { roots.remove(0); } } } + FetchedPage lastRoot; + synchronized (roots) { + lastRoot = roots.get(roots.size() - 1); + } + if (!empty) { try { FetchedPage taken = objectQueue.take(); @@ -847,20 +892,24 @@ public void doit() { if (maxLaps == 0) { maxLaps = 1; } - int rotateLaps = rand.nextInt(maxLaps); - for (int i = 0; i < rotateLaps; i++) { + int toRotate = rand.nextInt(maxLaps); + int rotated = 0; + assert taken.level > 0; + for (int i = 0; i < toRotate; i += taken.hasParent(lastRoot) ? 
taken.level * taken.level * taken.level : 1) { objectQueue.offer(taken); taken = objectQueue.take(); - while (!taken.hasParent()) { + while (!taken.hasParent()) { taken = null; avoidFetching++; if (objectQueue.isEmpty()) { break; } taken = objectQueue.take(); + assert taken.level > 0; } + rotated++; } - logger.finer("Rotated " + rotateLaps); + logger.finer("Rotated " + rotated + " (count to " + toRotate + ")."); if (taken == null) { break; } @@ -900,42 +949,62 @@ private void showProgress() { if (recreated > 0) { recreatedMessage = " Recreated: " + recreated; } + String wrongChkCounterForUploadMessage = ""; + if (wrongChkCounterForUpload > 0) { + wrongChkCounterForUploadMessage = " WrongChkUploaded: " + wrongChkCounterForUpload; + } logger.fine("Fetches: Successful: " + successful + " blocks: " + successfulBlocks + " bytes: " + successfulBytes + " Failed: " + failed + recreatedMessage + + wrongChkCounterForUploadMessage + " Avoided: " + avoidFetching + "."); StringBuilder sb = new StringBuilder(); + List copiedRoots; synchronized (roots) { - for (FetchedPage root : roots) { - if (sb.length() > 0) { - sb.append(", "); - } - String edition = root.getURI().substring(uri.length()); - sb.append(edition); - int succeeded = root.getTreeSizeSucceeded(); - int failed = root.getTreeSizeFailed(); - if (failed > 0) { - sb.append(new Formatter().format(" FAILED: %.1f%%.", 100.0 * failed / (failed + succeeded))); - } - double estimate = getEstimatedPagesLeft(root); - if (estimate < Double.POSITIVE_INFINITY) { - sb.append(new Formatter().format(" Fetched: %.1f%%.", - 100.0 * succeeded / (estimate + succeeded))); - } - sb.append(" ("); - sb.append(succeeded); - - if (failed > 0) { - sb.append(" and "); - sb.append(failed); - sb.append(" failed"); - } - - sb.append(")"); + copiedRoots = new ArrayList(roots); + } + Collections.reverse(copiedRoots); + boolean first = true; + for (FetchedPage root : copiedRoots) { + if (sb.length() > 0) { + sb.append(", "); } + String edition = 
root.getURI().substring(uri.length()); + sb.append(edition); + int succeeded = root.getTreeSizeSucceeded(); + int failed = root.getTreeSizeFailed(); + if (failed > 0) { + sb.append(new Formatter().format(" FAILED: %.1f%%.", 100.0 * failed / (failed + succeeded))); + } + double estimate = getEstimatedPagesLeft(root); + if (estimate < Double.POSITIVE_INFINITY) { + final double fractionDone = 1.0 * succeeded / (estimate + succeeded); + sb.append(new Formatter().format(" Fetched: %.1f%%.", + 100.0 * fractionDone)); + if (first) { + logger.log(Level.FINER, "ETA: {0,date}, Started: {1,date}. Done {2,number,percent}.", + new Object[] { + new Date(new Double(1.0 / fractionDone * (new Date().getTime() - started.getTime())).longValue() + + started.getTime()), + started, + fractionDone, + }); + first = false; + } + } + sb.append(" ("); + sb.append(succeeded); + + if (failed > 0) { + sb.append(" and "); + sb.append(failed); + sb.append(" failed"); + } + + sb.append(")"); } System.out.println("Editions: " + sb.toString()); From 06d0b392e5154f8a31c3ab98ea572447eb82d8e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Wed, 27 Apr 2016 20:28:18 +0200 Subject: [PATCH 092/180] Adjusted the logging. --- .../src/freenet/library/uploader/DownloadAll.java | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 69016ce4..5ffc7c51 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -457,7 +457,7 @@ public void hasBeenWaiting(FetchedPage key) { * Show the amount of outstanding work. 
*/ void printLeft() { - if (logger.isLoggable(Level.FINE)) { + if (logger.isLoggable(Level.FINEST)) { int total = 0; int required = 0; int completed = 0; @@ -471,7 +471,7 @@ void printLeft() { if (ongoingUploads != null && ongoingUploads.size() > 0) { ongoingUploadsMessage = " and " + ongoingUploads.size() + " uploads."; } - logger.fine("Outstanding " + stillRunning.size() + " ClientGet jobs " + + logger.finest("Outstanding " + stillRunning.size() + " ClientGet jobs " + "(" + completed + "/" + required + "/" + total + ")" + ongoingUploadsMessage); } @@ -707,12 +707,12 @@ public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { String identifier = uriGenerated.getIdentifier(); String chk = ongoingUploads.get(identifier).getKey(); if (!uriGenerated.getURI().equals(chk)) { - logger.severe("Were supposed to upload " + chk + - " but calculated to " + uriGenerated.getURI() + ". " + + logger.severe("Were supposed to resurrect " + chk + + " but the URI calculated to " + uriGenerated.getURI() + ". " + "Will upload anyway."); wrongChkCounterForUpload++; } else { - logger.fine("Uploading " + chk); + logger.fine("Resurrecting " + chk); } } @@ -738,7 +738,6 @@ public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) } uploadStarter.execute(new Runnable() { public void run() { - logger.fine("Ressurrecting " + filename); uploadCounter++; final String identifier = "Upload" + uploadCounter; ongoingUploads.put(identifier, new AbstractMap.SimpleImmutableEntry(filename, callback)); From 551672fcd34973a0f2649a9dea3757577955102f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 1 May 2016 14:48:30 +0200 Subject: [PATCH 093/180] Reduce the amount of downloading while there is uploading. Also add logging of completed uploads. 
--- .../freenet/library/uploader/DownloadAll.java | 79 ++++++++++++++++--- 1 file changed, 68 insertions(+), 11 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 5ffc7c51..49a98087 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -6,6 +6,7 @@ import java.io.File; import java.io.FileInputStream; import java.io.IOException; +import java.text.MessageFormat; import java.util.AbstractMap; import java.util.ArrayList; import java.util.Collection; @@ -49,7 +50,7 @@ * Class to download the entire index. */ public class DownloadAll { - private static final int PARALLEL_JOBS = 10; + private static final int PARALLEL_JOBS = 30; /** Logger. */ private static final Logger logger = Logger.getLogger(DownloadAll.class.getName()); @@ -67,7 +68,7 @@ public class DownloadAll { private List roots = new ArrayList(); private ExecutorService uploadStarter = null; - private Map> ongoingUploads = null; + private Map ongoingUploads = null; private int successful = 0; private int successfulBlocks = 0; @@ -397,6 +398,38 @@ private double getEstimatedPagesLeft(FetchedPage page) { } + private static class OngoingUpload { + private final Date started = new Date(); + private final String filename; + private final Runnable callback; + + public OngoingUpload(String fname, Runnable cback) { + filename = fname; + callback = cback; + } + + Date getStarted() { + return started; + } + + String getFilename() { + return filename; + } + + void complete() { + final long millis = new Date().getTime() - started.getTime(); + final long seconds = millis / 1000; + final long minutes = seconds / 60; + final long hours = minutes / 60; + logger.log(Level.FINE, "Upload completed after {0,number}:{1,number,00}:{2,number,00}.", + new Object[] { + hours, + minutes % 60, + seconds % 60, + }); + callback.run(); + } + } private class GetAdapter 
extends FcpAdapter { private ClientGet getter; @@ -468,8 +501,18 @@ void printLeft() { completed += value.progressCompleted; } String ongoingUploadsMessage = ""; - if (ongoingUploads != null && ongoingUploads.size() > 0) { - ongoingUploadsMessage = " and " + ongoingUploads.size() + " uploads."; + if (logger.isLoggable(Level.FINEST) && ongoingUploadsSize() > 0) { + Date oldest = null; + for (Map.Entry entry : ongoingUploads.entrySet()) { + if (oldest == null || oldest.compareTo(entry.getValue().getStarted()) > 0) { + oldest = entry.getValue().getStarted(); + } + } + ongoingUploadsMessage = " and " + ongoingUploads.size() + " uploads"; + if (oldest != null && new Date().getTime() - oldest.getTime() > TimeUnit.HOURS.toMillis(5)) { + ongoingUploadsMessage += new MessageFormat(", oldest from {0,date,long}").format(new Object[] { oldest }); + } + ongoingUploadsMessage += "."; } logger.finest("Outstanding " + stillRunning.size() + " ClientGet jobs " + "(" + completed + "/" + required + "/" + total + ")" + @@ -705,7 +748,7 @@ public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { assert c == connection; assert uriGenerated != null; String identifier = uriGenerated.getIdentifier(); - String chk = ongoingUploads.get(identifier).getKey(); + String chk = ongoingUploads.get(identifier).getFilename(); if (!uriGenerated.getURI().equals(chk)) { logger.severe("Were supposed to resurrect " + chk + " but the URI calculated to " + uriGenerated.getURI() + ". " + @@ -721,26 +764,30 @@ public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) assert c == connection; assert putSuccessful != null; String identifier = putSuccessful.getIdentifier(); - String chk = ongoingUploads.get(identifier).getKey(); + final OngoingUpload foundUpload = ongoingUploads.get(identifier); + String chk = foundUpload.getFilename(); if (!putSuccessful.getURI().equals(chk)) { logger.severe("Uploaded " + putSuccessful.getURI() + " while supposed to upload " + chk + ". 
"); } else { - ongoingUploads.get(identifier).getValue().run(); + foundUpload.complete(); } ongoingUploads.remove(identifier); + synchronized (stillRunning) { + stillRunning.notifyAll(); + } }; }); } }); - ongoingUploads = new HashMap>(); + ongoingUploads = new HashMap(); } uploadStarter.execute(new Runnable() { public void run() { uploadCounter++; final String identifier = "Upload" + uploadCounter; - ongoingUploads.put(identifier, new AbstractMap.SimpleImmutableEntry(filename, callback)); + ongoingUploads.put(identifier, new OngoingUpload(filename, callback)); final ClientPut putter = new ClientPut("CHK@", identifier); putter.setEarlyEncode(true); putter.setPriority(net.pterodactylus.fcp.Priority.bulkSplitfile); @@ -887,7 +934,7 @@ public void doit() { taken = objectQueue.take(); } // Randomize the order by rotating the queue - int maxLaps = objectQueue.size() / PARALLEL_JOBS; + int maxLaps = objectQueue.size(); if (maxLaps == 0) { maxLaps = 1; } @@ -1013,11 +1060,21 @@ public static void main(String[] argv) { new DownloadAll(argv[0]).doit(); } + private int ongoingUploadsSize() { + if (ongoingUploads == null) { + return 0; + } + + synchronized (ongoingUploads) { + return ongoingUploads.size(); + } + } + public void waitForSlot() { startCleanupThread(); synchronized (stillRunning) { try { - while (stillRunning.size() >= PARALLEL_JOBS) { + while (stillRunning.size() + ongoingUploadsSize() * ongoingUploadsSize() >= PARALLEL_JOBS) { stillRunning.wait(); } } catch (InterruptedException e) { From b9191b88f9b0558982e89bc7ecb95ac22664905b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Wed, 4 May 2016 07:14:21 +0200 Subject: [PATCH 094/180] Added max queue-len. Buggfixes: * Concurrent modification in parent lists in tree. * Pace fetches. 
--- .../src/freenet/library/uploader/DownloadAll.java | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 49a98087..ee577b51 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -77,6 +77,7 @@ public class DownloadAll { private int recreated = 0; private int avoidFetching = 0; private int wrongChkCounterForUpload = 0; + private int maxObjectQueueSize = 0; private Random rand = new Random(); private Date started = new Date(); @@ -200,8 +201,8 @@ private static class FetchedPage { * This is really a Set but there is no WeakSet so we use the keys * and let all values be TOKEN. */ - private Set parents = new WeakHashSet(); - private Set children = new HashSet(); + private Set parents = Collections.synchronizedSet(new WeakHashSet()); + private Set children = Collections.synchronizedSet(new HashSet()); private String uri; int level; @@ -543,10 +544,15 @@ public void receivedAllData(FcpConnection c, AllData ad) { if (!token.equals(ad.getIdentifier())) { return; } - logger.entering(GetAdapter.class.toString(), + final int objectQueueSize = objectQueue.size(); + if (objectQueueSize > maxObjectQueueSize) { + maxObjectQueueSize = objectQueueSize; + } + logger.entering(GetAdapter.class.toString(), "receivedAllData", "receivedAllData for " + token + - " adding to the " + objectQueue.size() + " elements in the queue."); + " adding to the " + objectQueueSize + " elements in the queue " + + "(max " + maxObjectQueueSize + ")."); page.didSucceed(); int foundChildren = 0; Object readObject; @@ -1074,6 +1080,7 @@ public void waitForSlot() { startCleanupThread(); synchronized (stillRunning) { try { + stillRunning.wait(TimeUnit.SECONDS.toMillis(1)); while (stillRunning.size() + ongoingUploadsSize() * ongoingUploadsSize() >= PARALLEL_JOBS) { stillRunning.wait(); } 
From d134cb4754d8bfa4f578a9d7518af5793683a2ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 14 May 2016 10:15:51 +0200 Subject: [PATCH 095/180] Count and log failed uploads. --- .../freenet/library/uploader/DownloadAll.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index ee577b51..25ae6a6e 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -37,6 +37,7 @@ import net.pterodactylus.fcp.FcpConnection; import net.pterodactylus.fcp.GetFailed; import net.pterodactylus.fcp.Priority; +import net.pterodactylus.fcp.PutFailed; import net.pterodactylus.fcp.PutSuccessful; import net.pterodactylus.fcp.SubscribeUSK; import net.pterodactylus.fcp.SubscribedUSKUpdate; @@ -75,6 +76,7 @@ public class DownloadAll { private long successfulBytes = 0; private int failed = 0; private int recreated = 0; + private int failedRecreated = 0; private int avoidFetching = 0; private int wrongChkCounterForUpload = 0; private int maxObjectQueueSize = 0; @@ -784,6 +786,21 @@ public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) stillRunning.notifyAll(); } }; + + @Override + public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { + assert c == connection; + assert putFailed != null; + String identifier = putFailed.getIdentifier(); + final OngoingUpload foundUpload = ongoingUploads.get(identifier); + String chk = foundUpload.getFilename(); + logger.severe("Uploaded " + chk + " failed."); + failedRecreated++; + ongoingUploads.remove(identifier); + synchronized (stillRunning) { + stillRunning.notifyAll(); + } + } }); } }); @@ -1001,6 +1018,9 @@ private void showProgress() { if (recreated > 0) { recreatedMessage = " Recreated: " + recreated; } + if (failedRecreated > 0) { + recreatedMessage += " Recreation failed: " + 
failedRecreated; + } String wrongChkCounterForUploadMessage = ""; if (wrongChkCounterForUpload > 0) { wrongChkCounterForUploadMessage = " WrongChkUploaded: " + wrongChkCounterForUpload; From b7e5966366e3535d97d715c0a2688a810fe52672 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 14 May 2016 19:53:21 +0200 Subject: [PATCH 096/180] Factored out the parsing of the contents of the files. --- .../freenet/library/uploader/DownloadAll.java | 351 ++++++++++-------- 1 file changed, 186 insertions(+), 165 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 25ae6a6e..165aeddc 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -6,6 +6,7 @@ import java.io.File; import java.io.FileInputStream; import java.io.IOException; +import java.io.InputStream; import java.text.MessageFormat; import java.util.AbstractMap; import java.util.ArrayList; @@ -434,6 +435,182 @@ void complete() { } } + /** + * Show the amount of outstanding work. 
+ */ + void printLeft() { + if (logger.isLoggable(Level.FINEST)) { + int total = 0; + int required = 0; + int completed = 0; + synchronized (stillRunning) { + for (GetAdapter value : stillRunning.values()) { + total += value.progressTotal; + required += value.progressRequired; + completed += value.progressCompleted; + } + String ongoingUploadsMessage = ""; + if (logger.isLoggable(Level.FINEST) && ongoingUploadsSize() > 0) { + Date oldest = null; + for (Map.Entry entry : ongoingUploads.entrySet()) { + if (oldest == null || oldest.compareTo(entry.getValue().getStarted()) > 0) { + oldest = entry.getValue().getStarted(); + } + } + ongoingUploadsMessage = " and " + ongoingUploads.size() + " uploads"; + if (oldest != null && new Date().getTime() - oldest.getTime() > TimeUnit.HOURS.toMillis(5)) { + ongoingUploadsMessage += new MessageFormat(", oldest from {0,date,long}").format(new Object[] { oldest }); + } + ongoingUploadsMessage += "."; + } + logger.finest("Outstanding " + stillRunning.size() + " ClientGet jobs " + + "(" + completed + "/" + required + "/" + total + ")" + + ongoingUploadsMessage); + } + } + } + + interface UriProcessor { + public boolean processUri(String uri); + } + + private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriProcessor, String page_uri, int page_level) + throws IOException { + int foundChildren = 0; + try { + Object readObject = new YamlReaderWriter().readObject(inputStream); + Map map = ((LinkedHashMap) readObject); + if (map.containsKey("ttab") && + map.containsKey("utab") && + map.containsKey("totalPages")) { + Map map2 = (Map) map.get("ttab"); + if (map2.containsKey("entries")) { + Map entries = + (Map) map2.get("entries"); + for (BinInfo value : entries.values()) { + try { + String u = (String) value.getID(); + if (uriProcessor.processUri(u)) { + foundChildren ++; + } + + } catch (ClassCastException e) { + throw new RuntimeException("Cannot process BinInfo value " + value.getID() + " for " + page_uri, e); + } + } 
+ Map subnodes = + (Map) map2.get("subnodes"); + logger.log(Level.FINER, "Contains ttab.entries (level {0}) with {1} subnodes", new Object[] { + page_level, + subnodes.size(), + }); + for (String key : subnodes.keySet()) { + if (uriProcessor.processUri(key)) { + foundChildren ++; + } + } + return; + } + } + if (map.containsKey("lkey") && + map.containsKey("rkey") && + map.containsKey("entries")) { + // Must separate map and array! + if (map.containsKey("subnodes")) { + throw new RuntimeException("This parsing is not complex enough to handle subnodes for terms for " + + page_uri); + } + if (map.get("entries") instanceof Map) { + Map entries = + (Map) map.get("entries"); + logger.log(Level.FINE, + "Contains from {1} to {2} (level {0}) with {3} entries.", + new Object[] { + page_level, + map.get("lkey"), + map.get("rkey"), + entries.size() + }); + for (BinInfo value : entries.values()) { + try { + String u = (String) value.getID(); + if (uriProcessor.processUri(u)) { + foundChildren ++; + } + } catch (ClassCastException e) { + throw new RuntimeException("Cannot process BinInfo (2) " + value.getID() + " for " + page_uri); + } + } + return; + } + if (map.get("entries") instanceof ArrayList) { + // Assuming this is a list of TermPageEntries. + logger.log(Level.FINE, + "Contains from {1} to {2} (level {0}) with page entries.", + new Object[] { + page_level, + map.get("lkey"), + map.get("rkey") + }); + return; + } + } + Entry entry = map.entrySet().iterator().next(); + if (entry.getValue() instanceof Map) { + Map map2 = (Map) entry.getValue(); + if (map2.containsKey("node_min") + && map2.containsKey("size") + && map2.containsKey("entries")) { + logger.log(Level.FINER, "Starts with entry for {1} (level {0}). 
Searching for subnodes.", new Object[] { + page_level, + entry.getKey(), + }); + String first = null; + String last = null; + for (Entry contents : map.entrySet()) { + if (contents.getValue() instanceof Map) { + if (first == null) { + first = contents.getKey(); + } + last = contents.getKey(); + Map map3 = (Map) contents.getValue(); + if (map3.containsKey("subnodes")) { + Map subnodes = + (Map) map3.get("subnodes"); + logger.log(Level.FINER, "Entry for {1} (level {0}) contains {2} subnodes.", new Object[] { + page_level, + contents.getKey(), + subnodes.size(), + }); + + for (String key : subnodes.keySet()) { + if (uriProcessor.processUri(key)) { + foundChildren ++; + } + } + } + continue; + } + throw new RuntimeException("Cannot process entries. Entry for " + contents.getKey() + " is not String=Map for " + page_uri); + } + logger.log(Level.FINER, "Starts with entry for {1} and ended with entry {2} (level {0}).", new Object[] { + page_level, + first, + last, + }); + return; + } + } + logger.severe("Cannot understand contents: " + map); + System.exit(1); + } finally { + addFoundChildren(page_level, foundChildren); + logger.exiting(GetAdapter.class.toString(), + "receivedAllData added " + foundChildren + " to the queue."); + } + + } + private class GetAdapter extends FcpAdapter { private ClientGet getter; private String token; @@ -489,42 +666,8 @@ public void hasBeenWaiting(FetchedPage key) { } } - /** - * Show the amount of outstanding work. 
- */ - void printLeft() { - if (logger.isLoggable(Level.FINEST)) { - int total = 0; - int required = 0; - int completed = 0; - synchronized (stillRunning) { - for (GetAdapter value : stillRunning.values()) { - total += value.progressTotal; - required += value.progressRequired; - completed += value.progressCompleted; - } - String ongoingUploadsMessage = ""; - if (logger.isLoggable(Level.FINEST) && ongoingUploadsSize() > 0) { - Date oldest = null; - for (Map.Entry entry : ongoingUploads.entrySet()) { - if (oldest == null || oldest.compareTo(entry.getValue().getStarted()) > 0) { - oldest = entry.getValue().getStarted(); - } - } - ongoingUploadsMessage = " and " + ongoingUploads.size() + " uploads"; - if (oldest != null && new Date().getTime() - oldest.getTime() > TimeUnit.HOURS.toMillis(5)) { - ongoingUploadsMessage += new MessageFormat(", oldest from {0,date,long}").format(new Object[] { oldest }); - } - ongoingUploadsMessage += "."; - } - logger.finest("Outstanding " + stillRunning.size() + " ClientGet jobs " + - "(" + completed + "/" + required + "/" + total + ")" + - ongoingUploadsMessage); - } - } - } - private boolean processUri(String uri) { + private boolean processAnUri(String uri) { synchronized (roots) { for (FetchedPage root : roots) { FetchedPage foundChild = root.findUri(uri); @@ -556,134 +699,15 @@ public void receivedAllData(FcpConnection c, AllData ad) { " adding to the " + objectQueueSize + " elements in the queue " + "(max " + maxObjectQueueSize + ")."); page.didSucceed(); - int foundChildren = 0; - Object readObject; + UriProcessor uriProcessor = new UriProcessor() { + @Override + public boolean processUri(String uri) { + return processAnUri(uri); + } + }; + final InputStream inputStream = ad.getPayloadInputStream(); try { - readObject = new YamlReaderWriter().readObject(ad.getPayloadInputStream()); - Map map = ((LinkedHashMap) readObject); - if (map.containsKey("ttab") && - map.containsKey("utab") && - map.containsKey("totalPages")) { - Map map2 = 
(Map) map.get("ttab"); - if (map2.containsKey("entries")) { - Map entries = - (Map) map2.get("entries"); - for (BinInfo value : entries.values()) { - try { - String u = (String) value.getID(); - if (processUri(u)) { - foundChildren ++; - } - - } catch (ClassCastException e) { - throw new RuntimeException("Cannot process BinInfo value " + value.getID() + " for " + page.getURI(), e); - } - } - Map subnodes = - (Map) map2.get("subnodes"); - logger.log(Level.FINER, "Contains ttab.entries (level {0}) with {1} subnodes", new Object[] { - page.level, - subnodes.size(), - }); - for (String key : subnodes.keySet()) { - if (processUri(key)) { - foundChildren ++; - } - } - return; - } - } - if (map.containsKey("lkey") && - map.containsKey("rkey") && - map.containsKey("entries")) { - // Must separate map and array! - if (map.containsKey("subnodes")) { - throw new RuntimeException("This parsing is not complex enough to handle subnodes for terms for " + - page.getURI()); - } - if (map.get("entries") instanceof Map) { - Map entries = - (Map) map.get("entries"); - logger.log(Level.FINE, - "Contains from {1} to {2} (level {0}) with {3} entries.", - new Object[] { - page.level, - map.get("lkey"), - map.get("rkey"), - entries.size() - }); - for (BinInfo value : entries.values()) { - try { - String u = (String) value.getID(); - if (processUri(u)) { - foundChildren ++; - } - } catch (ClassCastException e) { - throw new RuntimeException("Cannot process BinInfo (2) " + value.getID() + " for " + page.getURI()); - } - } - return; - } - if (map.get("entries") instanceof ArrayList) { - // Assuming this is a list of TermPageEntries. 
- logger.log(Level.FINE, - "Contains from {1} to {2} (level {0}) with page entries.", - new Object[] { - page.level, - map.get("lkey"), - map.get("rkey") - }); - return; - } - } - Entry entry = map.entrySet().iterator().next(); - if (entry.getValue() instanceof Map) { - Map map2 = (Map) entry.getValue(); - if (map2.containsKey("node_min") - && map2.containsKey("size") - && map2.containsKey("entries")) { - logger.log(Level.FINER, "Starts with entry for {1} (level {0}). Searching for subnodes.", new Object[] { - page.level, - entry.getKey(), - }); - String first = null; - String last = null; - for (Entry contents : map.entrySet()) { - if (contents.getValue() instanceof Map) { - if (first == null) { - first = contents.getKey(); - } - last = contents.getKey(); - Map map3 = (Map) contents.getValue(); - if (map3.containsKey("subnodes")) { - Map subnodes = - (Map) map3.get("subnodes"); - logger.log(Level.FINER, "Entry for {1} (level {0}) contains {2} subnodes.", new Object[] { - page.level, - contents.getKey(), - subnodes.size(), - }); - - for (String key : subnodes.keySet()) { - if (processUri(key)) { - foundChildren ++; - } - } - } - continue; - } - throw new RuntimeException("Cannot process entries. 
Entry for " + contents.getKey() + " is not String=Map for " + page.getURI()); - } - logger.log(Level.FINER, "Starts with entry for {1} and ended with entry {2} (level {0}).", new Object[] { - page.level, - first, - last, - }); - return; - } - } - logger.severe("Cannot understand contents: " + map); - System.exit(1); + readAndProcessYamlData(inputStream, uriProcessor, page.getURI(), page.level); } catch (IOException e) { logger.log(Level.SEVERE, "Cannot unpack.", e); e.printStackTrace(); @@ -693,14 +717,11 @@ public void receivedAllData(FcpConnection c, AllData ad) { cce.printStackTrace(); System.exit(1); } finally { - addFoundChildren(page.level, foundChildren); markDone(); successful ++; successfulBlocks += progressCompleted; successfulBytes += ad.getDataLength(); showProgress(); - logger.exiting(GetAdapter.class.toString(), - "receivedAllData added " + foundChildren + " to the queue."); } } From d128eb47e531b358628278bb60e38bf8323f50d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 15 May 2016 09:30:16 +0200 Subject: [PATCH 097/180] Change back to 10 simultaneous fetches. 30 doesn't seem quicker. --- uploader/src/freenet/library/uploader/DownloadAll.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 165aeddc..7857d1c5 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -52,7 +52,7 @@ * Class to download the entire index. */ public class DownloadAll { - private static final int PARALLEL_JOBS = 30; + private static final int PARALLEL_JOBS = 10; /** Logger. 
*/ private static final Logger logger = Logger.getLogger(DownloadAll.class.getName()); From 267368c97033c8df0ff70a4a027c96f8dc343576 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 15 May 2016 09:13:29 +0200 Subject: [PATCH 098/180] Added a function to move all files connected from a CHK. --- .../freenet/library/uploader/DownloadAll.java | 69 ++++++++++++++++++- 1 file changed, 66 insertions(+), 3 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 7857d1c5..22bfddf0 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -7,8 +7,9 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; import java.text.MessageFormat; -import java.util.AbstractMap; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -897,7 +898,7 @@ boolean isDone() { }; - public void doit() { + public void doDownload() { FcpSession session; try { session = new FcpSession("DownloaderFor" + uri); @@ -1102,9 +1103,71 @@ private void showProgress() { System.out.println("Editions: " + sb.toString()); } + + public void doMove() { + int count = 0; + File toDirectory = new File("../" + UploaderPaths.LIBRARY_CACHE + ".new"); + if (!toDirectory.mkdir()) { + System.err.println("Could not create the directory " + toDirectory); + System.exit(1); + } + final FetchedPage fetchedPage = new FetchedPage(uri); + roots.add(fetchedPage); + objectQueue.add(fetchedPage); + while (objectQueue.size() > 0) { + FetchedPage page; + try { + page = objectQueue.take(); + } catch (InterruptedException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + System.exit(1); + return; + } + final FetchedPage finalPage = page; + FileInputStream inputStream; + try { + 
Files.createLink(Paths.get(toDirectory.getPath(), page.uri), Paths.get(page.uri)); + inputStream = new FileInputStream(page.uri); + count++; + System.out.println("Read file " + count + " in " + page.uri + " level " + page.level + " left: " + objectQueue.size()); + } catch (IOException e) { + System.out.println("Cannot find file " + page.uri); + e.printStackTrace(); + System.exit(1); + return; + } + try { + readAndProcessYamlData(inputStream, + new UriProcessor() { + Set seen = new HashSet(); + @Override + public boolean processUri(String uri) { + if (seen.contains(uri)) { + return false; + } + seen.add(uri); + objectQueue.offer(finalPage.newChild(uri)); + return true; + } + + }, page.getURI(), page.level); + } catch (IOException e) { + System.out.println("Cannot read file " + page.uri); + e.printStackTrace(); + System.exit(1); + return; + } + + } + } public static void main(String[] argv) { - new DownloadAll(argv[0]).doit(); + if (argv.length > 1 && argv[0].equals("--move")) { + new DownloadAll(argv[1]).doMove(); + } else { + new DownloadAll(argv[0]).doDownload(); + } } private int ongoingUploadsSize() { From 2a94540fc868c43cc6dd4462c0fa16022bcaad6e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Fri, 10 Jun 2016 06:14:04 +0200 Subject: [PATCH 099/180] Fixed some exceptions. Adjusted logging. Added an overview of what logging is done on each level. --- .../freenet/library/uploader/DownloadAll.java | 48 +++++++++++++++---- 1 file changed, 38 insertions(+), 10 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 22bfddf0..e3bb5cf3 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -1,6 +1,14 @@ /* */ +/* + * Log levels used: + * None/Warning: Serious events and small problems. + * FINE: Stats for fetches and overview of contents of fetched keys. Minor events. 
+ * FINER: Queue additions, length, ETA, rotations. + * FINEST: Really minor events. + */ + package freenet.library.uploader; import java.io.File; @@ -245,6 +253,17 @@ boolean hasParent() { return !parents.isEmpty(); } + private FetchedPage[] getParents() { + // Even though parents and children are synchronized we + // encountered some ConcurrentModificationException when + // fetching them through iterators so we avoid that. + return parents.toArray(new FetchedPage[0]); + } + + private FetchedPage[] getChildren() { + return children.toArray(new FetchedPage[0]); + } + /** * fetchedPage is an ancestor, any number of levels, to this * page. @@ -256,8 +275,8 @@ public boolean hasParent(FetchedPage fetchedPage) { if (parents.contains(fetchedPage)) { return true; } - for (FetchedPage parent : parents) { - if (parent.hasParent(fetchedPage)) { + for (FetchedPage parent : getParents()) { + if (parent != null && parent.hasParent(fetchedPage)) { return true; } } @@ -266,7 +285,7 @@ public boolean hasParent(FetchedPage fetchedPage) { int getTreeSize() { int size = 1; - for (FetchedPage child : children) { + for (FetchedPage child : getChildren()) { size += child.getTreeSize(); } return size; @@ -286,7 +305,7 @@ void addPerLevel(Map result) { int getTreeSizeSucceeded() { int size = succeeded ? 1 : 0; - for (FetchedPage child : children) { + for (FetchedPage child : getChildren()) { size += child.getTreeSizeSucceeded(); } return size; @@ -294,7 +313,7 @@ int getTreeSizeSucceeded() { int getTreeSizeFailed() { int size = failed ? 
1 : 0; - for (FetchedPage child : children) { + for (FetchedPage child : getChildren()) { size += child.getTreeSizeFailed(); } return size; @@ -313,7 +332,7 @@ public FetchedPage findUri(String u) { if (u.equals(uri)) { return this; } - for (FetchedPage child : children) { + for (FetchedPage child : getChildren()) { FetchedPage found = child.findUri(u); if (found != null) { return found; @@ -785,7 +804,7 @@ public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { "Will upload anyway."); wrongChkCounterForUpload++; } else { - logger.fine("Resurrecting " + chk); + logger.finest("Resurrecting " + chk); } } @@ -846,7 +865,7 @@ public void run() { connection.sendMessage(putter); in.close(); in = null; - } catch (IOException e) { + } catch (IOException | NullPointerException e) { e.printStackTrace(); logger.warning("Upload failed for " + file); } @@ -1000,7 +1019,7 @@ public void doDownload() { } rotated++; } - logger.finer("Rotated " + rotated + " (count to " + toRotate + ")."); + logger.finest("Rotated " + rotated + " (count to " + toRotate + ")."); if (taken == null) { break; } @@ -1184,7 +1203,7 @@ public void waitForSlot() { startCleanupThread(); synchronized (stillRunning) { try { - stillRunning.wait(TimeUnit.SECONDS.toMillis(1)); + stillRunning.wait(TimeUnit.SECONDS.toMillis(3)); while (stillRunning.size() + ongoingUploadsSize() * ongoingUploadsSize() >= PARALLEL_JOBS) { stillRunning.wait(); } @@ -1235,5 +1254,14 @@ public void run () { private synchronized void removeCleanupThread() { cleanupThread = null; + + Set copy; + synchronized (stillRunning) { + copy = new HashSet(stillRunning.values()); + } + for (GetAdapter ga : copy) { + ga.markDone(); + ga.forgetAboutThis(); + } } } From 8bd931cf9bffcb4f256a06646e2de2546a573fdf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Tue, 16 Aug 2016 20:39:31 +0200 Subject: [PATCH 100/180] Factored out aging of running fetches. Fix of race condition. 
--- .../src/freenet/library/uploader/DownloadAll.java | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index e3bb5cf3..267d1d51 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -916,6 +916,15 @@ boolean isDone() { } }; + private void ageRunning() { + final HashSet> stillRunningCopy; + synchronized (stillRunning) { + stillRunningCopy = new HashSet>(stillRunning.entrySet()); + } + for (Entry entry : stillRunningCopy) { + entry.getValue().hasBeenWaiting(entry.getKey()); + } + } public void doDownload() { FcpSession session; @@ -966,10 +975,7 @@ public void doDownload() { boolean empty = true; do { - for (Entry entry : - new HashSet>(stillRunning.entrySet())) { - entry.getValue().hasBeenWaiting(entry.getKey()); - } + ageRunning(); synchronized (roots) { final int roots_size = roots.size(); if (roots_size > 1) { From 1cfdf5ba4d33f50725861e3a8f55410ba15a27b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Wed, 24 Aug 2016 08:12:48 +0200 Subject: [PATCH 101/180] Clean the title from not-printable characters. 
--- .../library/index/TermEntryReaderWriter.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/shared/src/freenet/library/index/TermEntryReaderWriter.java b/shared/src/freenet/library/index/TermEntryReaderWriter.java index 8152501d..08d06f89 100644 --- a/shared/src/freenet/library/index/TermEntryReaderWriter.java +++ b/shared/src/freenet/library/index/TermEntryReaderWriter.java @@ -11,6 +11,7 @@ import java.util.Map; import java.util.HashMap; +import java.util.regex.Pattern; import java.io.ByteArrayInputStream; import java.io.InputStream; @@ -20,6 +21,8 @@ import java.io.IOException; import java.net.URLEncoder; +import org.yaml.snakeyaml.reader.ReaderException; + /** ** Reads and writes {@link TermEntry}s in binary form, for performance. ** @@ -151,6 +154,9 @@ public TermEntry readObject(DataInputStream dis) throws IOException { if (size < 0) { title = dis.readUTF(); size = ~size; + if (!isValid(title)) { + title = clean(title); + } } Map pos = new HashMap(size<<1); for (int i=0; i Date: Wed, 24 Aug 2016 08:13:14 +0200 Subject: [PATCH 102/180] Comment to make it easer to run the move job. --- uploader/src/freenet/library/uploader/DownloadAll.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 267d1d51..f88b0a05 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -1129,6 +1129,11 @@ private void showProgress() { System.out.println("Editions: " + sb.toString()); } + /** + * 1. chdir to the directory with all the files. + * 2. Give parameters --move CHK/filename + * The CHK/filename is of the top file (in library.index.lastpushed.chk). 
+ */ public void doMove() { int count = 0; File toDirectory = new File("../" + UploaderPaths.LIBRARY_CACHE + ".new"); From fd2eb8cfd6a9985fec88779863cc417c8a682f06 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 5 Nov 2016 13:27:15 +0100 Subject: [PATCH 103/180] Removed callback message. --- uploader/src/freenet/library/uploader/FcpArchiver.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index 3ddae28c..c6e846c3 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -213,8 +213,6 @@ public void receivedSimpleProgress(FcpConnection c, } progressCompleted = sp.getSucceeded(); progressTotal = sp.getTotal(); - System.out.println("receivedSimpleProgess for " + token + ": " + - sp.getSucceeded() + "/" + sp.getTotal()); printLeft(); } From a17c1af77ee2e692774d6ab3d041503b9c76e172 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 2 Jul 2017 10:00:23 +0200 Subject: [PATCH 104/180] Added delay. --- loop.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/loop.sh b/loop.sh index 2d41782f..34203a5c 100755 --- a/loop.sh +++ b/loop.sh @@ -5,4 +5,5 @@ do # tail wrapper.log # ls -ltr library.index.* java -jar ../projects/freenet/github/plugin-Library/dist/uploader.jar + sleep 60 done From 6dec3a8d72e2e773c5611c509a20c9637d48234a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 2 Jul 2017 10:01:46 +0200 Subject: [PATCH 105/180] Added yaml sources location. 
--- shared/.classpath | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/shared/.classpath b/shared/.classpath index 76c9471b..d3344ef5 100644 --- a/shared/.classpath +++ b/shared/.classpath @@ -5,6 +5,6 @@ - + From 3672757e9b7e7a55afcbed504b8540d1e72aa270 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 19 Dec 2015 13:31:37 +0100 Subject: [PATCH 106/180] Partial work done to the restore the FreenetURI... --- lib/snakeyaml-1.5-sources.jar | Bin 131686 -> 0 bytes .../library/index/TermEntryReaderWriter.java | 11 +- .../freenet/library/index/TermIndexEntry.java | 10 +- .../freenet/library/index/TermPageEntry.java | 14 +- shared/src/freenet/library/io/FreenetURI.java | 97 ++++++++++++++ .../freenet/library/io/YamlReaderWriter.java | 13 +- .../freenet/library/index/TermEntryTest.java | 7 +- src/plugins/Library/Library.java | 41 +++--- .../Library/index/xml/LibrarianHandler.java | 5 +- src/plugins/Library/index/xml/XMLIndex.java | 8 +- .../Library/ui/ResultNodeGenerator.java | 24 ++-- .../freenet/library/io/FreenetURIForTest.java | 15 +++ test/plugins/Library/Tester.java | 15 ++- test/plugins/Library/index/BIndexTest.java | 6 +- test/plugins/Library/index/TermEntryTest.java | 121 ++++++++++++++++++ test/plugins/Library/util/Generators.java | 4 +- 16 files changed, 320 insertions(+), 71 deletions(-) delete mode 100644 lib/snakeyaml-1.5-sources.jar create mode 100644 shared/src/freenet/library/io/FreenetURI.java create mode 100644 test/freenet/library/io/FreenetURIForTest.java create mode 100644 test/plugins/Library/index/TermEntryTest.java diff --git a/lib/snakeyaml-1.5-sources.jar b/lib/snakeyaml-1.5-sources.jar deleted file mode 100644 index aa6736db099d8983a0b0c8e172f0ffebd9d1737f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 131686 zcma&O1yo$wwly4rySux)ySuwvpm28y?(XjH7ThhkOK^fqaEAoRU+H__yPf{}_WNrL zsA6#T*n6LvOXiwuA0=5(Fqn70US<&nynlT7_b+JRyS%uXFuk;b1f$ZwFN1mq0A6<8 
zKCW*F{I?zO2kp<7$qOq;ONgtgG002Y%1=(n$PX92P z@b5>P*xNa~IJy31Fr>-vQ{(-&!L|(PQQ=)>t?b4envZcQ^23^ z(O;+gFT(&%PQTrpzkY}RWtC0{PB%XrXiYshn zCY+1Uis~jq`Zyj=?d?q}_0P@ifZ+8@9pVlprGPM=42~H&{0L>}9aQ=Z!-+I5S6frS z+OvZU6T^%~A^~m|!ZfKZ*6MYL^(J1@j^?n7*w+Zr?Nzxf=!}J{Ei}wQR2(2e8sGpg zn@h4?2IUN@Cq&lBcOWMQMb1f!Ssr?gKPn1Of*G2?Pa2o~HC12MsFd7pWIX*Pj)-bv z-aNL6Q5~f?r2)Zixad^c<25wA)y|znyheCuB#>6;;R7fgr5Um=(pGW`5qiBC@f5aY zMp`5;L!!y7lch}Q?9OLB+#$@6)x_Ez>C;P`cn>)td#nMjv;G4nys#0317E(M&R(2h z&pS38*^?v7#B%0LnF_VTx7RKr_Jl@Up8mZfNmV97D6#K%%Jf#*k(BMkhuTz_#5PD8 zlF_MiOg^Xa6@G~*Hi8x*SuO{ohjCKFUUu_lSB}P47-zLhciehNJrn^CgCu)Q3L)u# zg`ZF{gnbBph;Z7y@(-5=Y$w!Y%#;XXcp8fo#RJT}lkRfLC*zgNYi|Xb2$3m^B1feT z;&St53~rPWR(etS5}8|!4V-3K$OB%qU$>>eZCxYSVPZLwUkw#*h(_2|7>JR5*+jn=Z!b#{ufie}bi{Xx>HTy@m?pGm` z7W@moZJQlv1l_n6lTxeTSn`1V{pf&a9!X50QaSJ;cHYj zj`(HwoEm~o37jc8nLT7ExATs%v0aa_YrE=N^Ae${Is*sD8tb)vGB#};Z}zQjA3e#o z47&}S#yZ9cfls`iZ@FHv!pPS7^2JlHFP^gY>zDkpAKb@Mz8REPCZBNy`sr=1u<1LM zOer+C1qpTb7_XxOVxyzVKgAUn4(+RaUgX#53hmwUwIpOHmw+Bw@_4RcZ6OilrDh93 zQTG|4|Hw+HhKOmL&{_I%M$hnzK^>9{fw&^p;}6~UGn=&@>I=@MElt&fp0~b@-qRbE z>*qOyg|MDY7 zNPIb1@87*k2YvUB{_p+BzoWFO3otg=irblr|BBjwxD(q%W&1TIw2-Y^+S(hiaEfxp z_2WuFvgMpbI+XrUku*a;NkgM`nI+qO?LFO4c6EPVEAe^Q0PqkynqvsV#a_@CrxLD8 z*dt+?t&lV3-1XYyyEfq5v??~nVNhECweLd?-{*CYoU0RDu`vYS&trpV@u+*wSIrd5 zASNKO-h5Z;}ddcx8-Pvgr z)U({Y_W~LV-`JITYZQUz+pA|6?2ylvNlC=5249WW8S96}Biw`KGBIdvYOdNvlt`D!||sAUq{9!Rv$$LV&$7C#p@e^RW8Ps)6{*baNd*}2&t1xlAOp~MR7 z1C%hFQxJk9;tf`NAU^9VCGE)#l0@LP?LA1=Z!3- zFDK8g`rg&-XVNL(Jt4omO}X%8gd~};Y}$8Va$CW}{8ZCZ?o$S>CR1gO8nv3XiigoE zjbDmxDzzSh$6SSKl9Qi8)5#3BF7i;z$jP>)Q`ujCH;#k44Mr>{3fTj`!-l7w81`*< zOE9Eiv-V;3JosE>OMzSdQ$}~ zvO8Vzr^aI0XwPvI7`zos@G z&TRbr!UxgHISqK^e_RFNnh>KO=+4$#*t%m>h10;$72?#d2xYU5uYB$Z>IGfrEZMa- zqmtLF$E(Jl4W(YdenxyRK96e;k|Q;7a={Yqb~R|QAn>5(Vb9gJqUkeu=M8;bYPD?O z-Xv*0OvbC$ zCp4f#CWd_Xj_w~VM%vcF#?r*nMa|X02JnYZ9#UIZSQkR_k=!MO(5!qPjCH-38YR_U zu3;JIKqaD1xb*N@+xW7h^AxM7n zNPut`JmmAU{loDM#xRMpmME2Tds76%_s9_{HVUOkF!Xfrfi@%N&9zY^fueam7%r^y 
z!9EHP+tGCa7p!JFVsmyV%lt(tBE7+EbEqdQ6JvkaaDahnb_7=9o!WAL7F6S~er$nju2A zWNORL%ONz78q>u;bDE$_fzzo=F_Y5Y$~X>+^MKJGm41N1R~Tqk=A)5U?1-hj@68q3 zWd`}m6-;!u{mKP5P$1e4Yerbw%G!pzwGmC(xJ?rL4L)0*@~EFKUE%l~WjM+E2}^D| z%%2PoePBdnl^~p|weZ6{6&A^Z)FePu1{3R+JFbE^A(c*C>HsArzcgPc#o2bGD&oVg z152*+VD$5Yu61p4Z@aLQpcWO%N3}*iT+4ofS^gcAkU7(sDfr&z{pfdi%jvl3=5)OBNhf{t0BwBlYz0Tru*=Ik)4+nZ{$H<%L_>XUPsi)5|VQ;{zHV;h2Cf>`EXVV z1t=X$bAT}Y$1*gXkX>1osJHMy0IjfoZx|yWo@UurC2J}91ReYCD)cmKGwnP!ME`*g zi~SSfBUE*uzI>!RlCpg*E_T{QzibI{qU2v0i1H4X|S+!V-Pe;0r3zx zF~R-pXC4v$f-2SU4j>iPCyA^HBuyJ=c6lJ=^;n9ap&K|72hkXdcP+S>?^!`Dz0rpk z{eq3xTR@zHK`Ug@|yw6oHyS{Sp`#_X;S%kl9lksa*4cZz|&Dg@j89OSNyLfHfU9m8Ir zw%2A`_wEU+hU41j{i$S|Zr#_BERXKORz#$l-0c~U`>&8>BNl`&%CuGf7EY%WX44&D zjv~gC-ZCA-$(J57^I~7?@rcvB)dJCkV=8yo+|*I~CkK+dc0;i_?+EqXiX0?31a3Sjqg5ikeaDvB$3NVqksHxk}*CYP;Sqj;5V4B)TZktDtkKpW!6|C- z@X>#lN8T>a&AWZBy{g0yh-`!6F~!8Zd!E?j_kJGvYR9Nh-$|SPp`rkyJqP0myiGn9 zg+pxQfa2h=8l@17eQ$i)wuCNWm)C(kgx`;UNtpS{fI|Bq_A)hCHQMHG{r%ABd)S!4 zWZrF8X~8Raa1v!tzF)s7#nh8-^4U{eMi1$&%=@mR!gQH+=)D$msL z)$J2$Yy=~5Q+lz*iZpoEnFQcFT+3Nqcwxs!i){v;v1Wre-?jWj7N{J_+^w#oKI-JH z&`*=RAig)G|JnGC)K%5rQW5%y$pORaWY7P>O+HV4h!qco3ci$MnDK=qg)`;#UG96g za|GXtAMkuzEiT{Yifrp~>lsICJCMt!(B;+NvS0s_Stjo7hvES<%VMB4GykI{i`v`R z0Lz7z_ICddfdQtOAzR7G97V-bHHDOZG#gX&KZqQz>*xTS#-Ih6xfv-5Q;y|Vv!*l6Je6S7jUkohl18q zcco6>hpMSBd&`|0awiy>6D>(i$es(x)c9V4l#XAp>HMkX4F$}0{YN$ALkdHnQ+%co zZVjj7r*@L#>)9Z>Ijq^IR^&_=1zsa|VaHSyt1%rV9Ib+chia$U8Vrrm4kU`+2b^So^E<83oA&VJVEsqKG}gtMa=O(=~-)n7{jDdnEALs@A3xg!Aq4&FgW#BuxYbZ3sEhi zT1le|$8?|!yM943I<%2dhMutb`At;+qOh+heP%i=@PSt&r6=Jm`V}ruNvH9Kk3@Ct zO|!6h{*8aZJP~{_?#C3;3w%)0@XM*G%`XD8Jgt!}e$~kv`nW!J&h!=Rtfj6&CG#eg z4M<=>8l)?V`czIp_Aorgs2ZASWcL-C5>|%`RMIg3W=2}w9=XqD!wZB5s$uZMEVi3A zAg`Hl8(c+uOai3^7G;?k4^CnS}!}#k0)uwJX}s#`G8C!tZdPS7cbpbHp>K8UWt+>AG~y`kpn+>t=`%9P`! 
z=&t=f+Q=C(|D9M!PZeTV`Mo1cs68-Xs4CvzG5XQvre5Lk3rO4o_+*W)p<1K9>vu(C zI!E+~vKjWzc=&RHqm2|J^r-FgB4w;~MrfPs2YQehn|Mpt4yf6?(9~pfX>g0!JxWRN z1j1A-PAh(i=$l+X>sYLAd%i1RG^E0dnBtuwm4K|FiR!~{9<&SdW;GyKoN`-EVL_=Aogzo6*UKa_P?%eOQP^(!L?%u4TU4lB7nMFT5pA9T*uI zk>tshd!ODX$KHMTWqN?C*Z;JtN-^#378^y|;*40enFncF7?V%Zvff_HrCP{1yQw;?nJ{=R2_MQa!-Dk7i&rg>OY!B|w$hbG_*C%K^HR)J*~bdb69Du;<&yJs2}xB46ZWl^ zha7Xth{f-^@7jmSq7&ifZNS{6Q_TuUJ!LorHR5nRWR^}%+og}NtCyH`=`R=LJ9OsH zdC4ZspE4LOMTMNAPqt-?^dZTTcsU!cmH^?GWXUH~V{GZSiARf>)jAr)YA60WDw4DL zwe60o>}2&0Y=O%gET=zz%%;(zL7Luvy^fz#p0Ps49(y=e^boR~VuO;(n1AR+s!53@ zNdJmTC%|yRDY|Qqb;g!zC^;yV^>uHPi?`?Ky6%nJQ!Cmo?lLQV^9O--$(X(L$l8Q} zj7=+>@|wUH*nVJwJ~lj_U&AtjSYSIqr^LNrn?2Bs?IUCBb@Di-^otP7a|a$MadFx%kH-oC3wo>< zU711X8EYi)D4EaMC1!>1ZH0;SIf*ln`ij7t3I|u)$i6je1IZ=h~7x9d^ER zlczckV-|BH>~cl^`wQB1~QR6Ew9J6G!*WQlG>4C3M;p-LXn**Bbz`{>=h*8D4~=;R^qDOOy{s; z!IB}1vWV9`F#!(aWplfBT|ehbe>2%CQ{AAw1A)G(TsJb_MEpd`F~BHo_7+2B5Qo^! zCV?xF!9Ez+;wPv({Y|cKeEX(rOTD8Ux2wvDe3|7*SumJ$`8CQ-0QR$Zl2EBEq;P%isCIch_vTyhc&VHq__i<4277(j;7jM5<3VOKmFt_j zK3~1r4N#4E-^xr@N-jg`+w~!MnL8Z@=U_kk+`Ms6W%phmfzL@B_2sfNJ~FTvwn(%s z9P#tEtd>aaG4vT}mJ8ENt?!3m7}d&5zE*2`}Fu#pi0yus#QRrTQkGTy+(g@qZ)ms@a>6jZnP6PZh_ zqx@DDQ2VRU);|kuL_%3jjU{-m7$L*^LNhEID_vs-^1(|3_IJ z7J&`jm|IBR1J`gS$;Ee|a3qaDEn@iHPM!n`u7N9&6-G(~x-7joY^D%e@JI7z)Xl^u z6?9c5FI!#VV+$usx-oSVL>BL)(dkE!>`bL?N$SrPh+Bnh3T3hT%m!~-d~`d^9lddh z06s-yN5ti*eF}r&s9?n{Vwv1+^Xv^d5N_t$pc!^U*NG#q_b} z$(2kR9AzRjudw13&33M3#SkF@ruH$@Eb!;s1mX%9~ zu<^`{qXriF2T`bm(rty=RO20{a?Q}8JlGYaTE>@#64&KDeO}%6l%2W@>WnR~s?oBw3Z1igL$V9RQ*f~q7;}##Dk$1fQk?6P>#4Wx%%Ihfnl9rH3Zvoq=pIHM)mCdj$qy zGJI8U%UTAdFA98byJ$l3>mxV5LY#>~7fMn{QGu(eEL4WbnzqFt%CZv(A?`V{{iBus#Eoffd#Im%Y zWDIRy-LtDtxY~cpMm4h1_b~@LW!~1ePvE8Z_VVrDT2Lh~ePmIHGEeB~>e(PVPzxes zmB%NmR8g#Uo==gI#u?8~z_zTH3@jf`E;BZV>6OAQAk%|n#o`VSwE4c*PCHitL31WV z0~V;d#|IpZYV0I46Dj+7Wz= zFH$=#Ra&aHPjawiI%g!TR8Bdc$QzjHLLS|zKIor8s~Rbz)%05D_rnP?gkb%nlk3WH z{$M3s^k>w2S}zjIsg2uoIm>6mh$R`w$8tqF*crt8DB~yG;G)u(|A;|c5kN60?n3kJ 
z+_MMzy5$ncrCw9Js(*<=&f7pSh_WHLNvwsoA+uhF&Z^TT`K*XM1Di_{h}+zS_@`En zeUJXOm73Dok?e~@X39Y(xxw66hG6?lPe#vsgYLiN169h%hr~dXl><@6@Q+SW-q^vx z($4%ZG-}T7mn?KdTkG)~U621o*N@X-8{Ts?%2Q?jxa_2Gk!ij5xBYl=&3%;Vz;bQ$ z9#-952Tc-|R3LPH^=uQq`>QA!YYcvIrHc6g_pyvaG6buIX(-Mwy%D?2cqL;1aZ!Q_ z`j_5_@GrfQisy^a5KsI#P;cZ44$w=6dYWu#AtG~7|59Rq!Q@57jE);Wy2|G*fd(l3m$NT+d z05MkozCg)*F%j?Id_BHzaxS?LC3W>}L9k~-xImIQ4-XL>-=&%clynvP89P^5hxL(9 zrL^glQOvp|h<$-FFeEMN$5{LmFN9>wcS%1BAYb?R#n;82y4NL7Q~dm}FGbw8ysMK- zyU69M3?9TkIOoUCpC?s=8L&iJ%j;9j$Y#*zNJY*SPQtyC0U|ck-&TJdcwSh+llh^? zX1g$@TArGz9xsa>3^RWjxJnP*#n%ThWWFW1+3oh|{fmwQn{HY`0t8#-|LxNLO^yD7 zwtl-`r2Q71G|y%Ngtn|jp-5#!T^U~wyDTBj8tZDjUe-JfN$p^ZH%j`T>7eg|#r|uL zCo9-O1*EFhNe8Jm9)0uzwBF3oW$n{Ffk+CnTl}$%AoH~FGK)9>SRDdL8O1>v)2kSS zj>7p8VOOB*Jv(FZ_E|v1P{2ygeS@Hx3z)~i37RB^-~S}mcZ6mQt0G)k)sZYx0#(yA z@|GJg?Beg zQ5vyQ%*6l7oHPPfz~xsMjs*RmFpSr@7GWo?QxS=cm9wsa1VkH-?9J>7XKuI5{x~DZ zS*8*b3~G2H@<)UustoSL5sOFH+xnmphferC{U#Zs#*hPwo+!6Dio*p=s6GCVQM;Yk zM7+a~+s^W}?^CIBveNID$iwZ)_J7a;!|=FXsouZB@DFHdM{y-m_4&419E^1COcjlJ zn03D&Weo1h+YM8cBZCspIps22RE`6_VT$F|+8zY73#LHMKEJ;O%fn2^g?K_axN8rF z2Ynk?RPgUO?3vjAXh-<9PO|mro&u@WCnkmBLHD}l1dvD00eL~Cn1@1MTw>ITV#jr} zU8C0GM6)UxMZ@}%mP}sCaIbircGSL#qxCk8$9w%p=w&^gnLJp6rW^#q4W7X8ogs77 z-@?r*J(Qae2(=}k(9QafzU@Ed$iGBUSG75(EFL7EN6oXRF==YkAB$hvauV5cSme)n z`@3`aV6;qPgbNd)06edcowR)osA-T$YekLbGYn!!N32|7xSMwpsk)F+FK}P=^tycH zTiubW28<9ZYVS#Ru)HXp)i7W!)B_+kFo^BM6ylgs`ur@a7h%2zW7Zq>eT18uVaO<4 zre7?CEWvNWh!E9T8HEAyJ6Pe0hjG&9OhKcOJT6}U!AB}Piib!VHl0n>mJbTw4J;KA zZBS*_)i_THDlfX~Q*~`CWwBI3l*e2oW~~Ffo-@Vb!g{J;YQTd8buG)ct0Cv&%k@mj zno;mAKnvNVn9Hqi2MMMaR(E&Y0SrV1q$pU}xgDhTJODxq-LnwvJ%W#7Y=R+!S~IQX85>t|DC zCQR`j5*G~`EUH_itP~jTkkXhlnzO2u#_&QjOJrSTG4Te959a$ek-c^M*Yr<)`DkOg z;LX?Cn8b_yx-R&6>*A0w^ypdY;}!eTTKraIYI7RDlnv_%Rd}=)krSjR`Dg>KzIr+< zmFqy8E{3cdk6K7b34_DRcmBP8tLTqtu^*FV)Z$eb$CxO`WeHQo`_*=7e^T1c(_HIz({lrKb!E zHC>an`ADZ|axOiRIZ`eZlnmn*@f7q8iP@)$Ac7RRJFvP7>poV=I~eI-iCDhlg9=ja zg5mq_@*RtPMAATp8<%$|&DJs%Q+1rO=iAfj0m1mslK9PDFRLS8zpdFSbGr-V=Q}WN 
zhmbwL*e>i-hPfK?b}lsQ*=C9JkZc}4Z1J)rb*bstSJ;Cdbxu4%sj$OD3HSao^ zJ^`6TQ&H?fy1*CAtWP!H2L*@Dcu@sJy8}J@E)1tE$GWd@Rf&BcJ=Oipe*UF*2@G7V z{#WG-7f7&}{?Xq4olE@1*lbn*NLhW4G_!A#CD1DWDE^lMi~kg=QkwU30YO^K|H>w| z+}*@7Y)IMds_X4MfR;x60u)QEH25|K;ErN@EK(=@+XZE9jaaB7$(K+J*Yk@Ni-E zL~2hY94MTFW7l))SnI`N_dK_ibSI#usR{~Izhgj6A{1_}X|I2b*sDDyIRuxK(QM!n zk?)c+zXfpglFf(ZrxNjz?Uuwd!dQW2vvj zTDq`-b86h$T5ox}B}(P=g2!9S4N#2{O-VSUnOb7-B$m0{n%uVql$mxbq-MF~_FKU> zez_6YEUqHY=@I?(F*NVnjQ|+gNI60e+nTEoOS|(gI}3eM5UGewlndf!^i-%Bs|28n zlpvIoyMn==Wa4tFi@t}IB=wl0Z+u$_>(MLVBFcJ<4U3NT(uf+BYLV<61eLKa6&4y# zZuuEvQR8ygcb98)eO>6wO7<4ouVC`sl`XpI8~@js#yxL>pC@@qVME6cPdyQY%~>f8`!kz<@ygkKh!xu{3u6 z6a82DmsD^|E0O(oS%(o|rKB?oI`U?V7go!4@xoYetIOxs>}7Q_0K3|BlR6g@f?^EK z4l@dolg%ess2GMf96&x3+&BxbsAM5mfJ{xp1TLb~7@T?yKU&}_Ala4;YzFo{C8+B=QJ@5;W)=Q4H@&7CG2V}t90X!Azf{!? z9?VQ)&JAsNG4Erqt!~_XAb*8ZFJAuIR&bRoTAjVr9o8M$FJ<}7uB|XQ4!=HAjQmVW zMRqArt z*Ge|C^Y;8;i8#}5hbi{#fIsL4S!H{NbTMcvR%VYR2Kj_6f#?jZ#lH2Y>8!#@QMB59 zSkuKM?MaFs6HcsU-oaL+Vo%_tlN>$3PRWnXmr!@Kt*VqZva*yha2%BLyH_-LAl)L) zQcMkte&Eqa$;CJeenps}iZG>>{yrz@D zeq!|o_0i8QY};|~E8=O+%*8VvZN;d)@OtGh*B`5a=!hFK$grW*PMrpMoQpv6LB_CdO_+pqKxspnqRI-yaCFd#L}Kb^LZ-LLT5^VQ;GBWbXiQa`F75XQ)Pf%YK~$$@kIF z@k;h!F(pXA79K`Tc796h17_+15!Qo{ta(O^m=zg7UH7&3QXx^F+eyU;i_j*i>-lNZ zU67BP7q<^v@1%=`WtHoy5V?zA@aH#ppBK1*NO%pT6FK{_Sb2NX;PDS|A(3qKDf z;m1|o*n{S58dl);ug+I1Mk@yJl9+JH-`=w*2h1bDLlO{1dcvq#+z8DP(ZDD34N)t!|Rxxi`hKpr!5)F3H7i&$JAUIUn z(`Y^P3#mqa@yIS~p-O1*L84$wu3*DJ2b@^!ecz53Ti@2;XQKuvvx#r6P`BWE)8?8B z2QC6do03pwh*!@pZtM_bL&x|pnYgk37BU*BdpaFR*Wm*?oXIw^33?OZBC68FD8XX( zb5xN~PHY)X+3K=jCZW;*iX4Y~Wkv>H3l^dbLhP{M2ik;^9`#-1Q$rQ*qVM8mQ$t2V zFHMeD;x-|@A_dhy$1J=S)io_hNpIJxt%t~bf=-i(UK9ty;qY<3a}H0?j9}V4FNY*m z_=#$BIew(QK`kThxGYu_3mdlB69jAJTy_;tk}}Q1q^%?fR}q$O7jK=r7LXZF5k=SK z!h2!eE}V+lfRW@*lqsHF4u;Dl4XiuQqKxhD#(a@D7wxhUmi+oQtT;O*rEbp_da+6n z?RqD%F?YVp@$i^>)gu~X?+)LO=uAURYH~KeX@wmmel(?NNF~0w_`)9Q)`@NzKsvzL z(d3}iG}tw9nVX4(3(|%BWmQA4uvO9~-*+-R=y^|(sEx=eM9Pyey?9iK`xLld>#H`m 
z7oM7)CXi<7LJx_LY&^FH@fZFLhFd+#&TocNYVAKkDsa~Xx@y0_7ypQq_Rw2cSQ~|L z3{!*yml?ctygd=EDkl^axH|K0LudlMn=`8!pd zyjKq$GWFtbsB!&KJ(RC*g4JJIWE0C?7gE+TPVP)*4j;-Lc@i#(P#D(yAG;B~rE7T6 z(NZ1Q-gocl>j%BqOM}vzjVEvXKs;vRG=Se>zcZg5d+7K`(@D5$;}>MK!g4aq!BECA31(AXbDSz&F}Ucq6R$X0aWykG?BQu^-s|F zZQ&s%Pe(ASWg~g?mEm})IQGh2$rKCsyhMLKQ>|O(#MSbUVMy}}2t?xG;kj(TQ*hP5 z$)JPkDu*1Al)Oazegq0e!=WWop5`^=$Z)43QBF4;Uxfuf`<{4I?6Y4?8KU5af%_PzMieU65 zIM5Dhfa+}EhzKvl2hxc*(>G^BZil0-2Vr<&&J!xhnky%IMq**kbg~l`YS{hCmQCOk z^wWoINJ7><8l%kliU>z?{qQzn<63JOjzOtuz~_<*GE8WZLQM-aO)%`tlBU|KROn#@ ziWFJtK$-;weTKRGK^8$SJd8EQA(s0Z6hti_X>p0p=;EpR1!Lbeq>ZSQD}Ar6Z9YJm zA2D-GSlx47FtRKe+YRFI0?Xjf&r280eu&gSN#!TQldq?NzF2L{UgO7M`7kQ4lfb~1I&pKa#75tZj&V1{2z^%Ldex+FAlso; zw*g@;d@WVo5k!@?UFOR;JmIo-&TGZ=tK|ScJIgRfQc5bb7MVRur+JA-<%S_)=DAEU z_XF1I@xVYM2Yo6#eptPLPM#;PYZA$-!q+8#xz)9HK)h3tfttm9i>XJTlv2~X%MgZ# z7NyESA!86+{>Ki^%1p;xD`auIU@QL4D})xF{?yv6H6B41&a36q&9(DDMH(y{RUcKd z<^8h#q@FHSTqTzstC%nw=lBrVWzRPT-FT!A@l$qZ&N;E$pVg}_wXd+C6{+Wl7 zg<36INm|FN5Q5^0h!Z|MH)-3q9~E4XzJ*swNnI zAD(>S95Jl@NUd7vQeb~?37d_#K@gReHr-)0Cn~xHC<*&0`@DiDfJ{2&q5lM2eIS2C zx|p2wh@3ydt|;l%p3}iYM3^$#2+wuf8Y`x#N3xw@rbn^P%mLrPAT(*!1FioKC3Ioz zy9Y8)5!}pok%p| zFSKlt^1=ZWX-o8VfQX2ZF;fQVc)!Pi%rcy_OUDg&gh)_Cs|51^^<9M595vG>QgU05 zptH*c8Cxj3AsH|QQ6q;0NAC15;6Gvuvu{!S)1A#oD8gC$A^dQrew06OFiVQ3a=IHp z?O~j{s0sx6(M8q`jHVEAFrwVkE;%` zE$`6Hf8|8>!d1R-A{5&;+g@nNZP|xtTV(27Mw|gVE0jsl3*V%4fv8|Rp>N}`;Cy+G z3xYPG5kJfoO-dCwv%B;JY6Nz9^)Z^-tuM{hhNWFw(Q;SpO!KE%nsWphUxi09zn%WmWyyW^_MK~Cu=TR!wkiT{e1w_UotdpMDjiriB7v=I z%zV*#%lZ$>150F4OLFmd5edq2npZ_;KlU{#68$Cj7jaQAq4C%c?ii4E`&d5}L84qI zn?i#|6A@<-nFjk9G;om%i)nns39x&mLw$aU<|4AV6ml;}6T8dUSs$81uKz49^CoIoo>n4!++E_xy`i_u zyut_=z&+~4)Gros*y$$h&WSA%2T6LzKN|CT1O|2$nn>BtkrvAm^+beOD18=mWOQ}m zUzJn!@;K#hAkBT=_AW>Ksv}8qCN6`%pJtS%nCNWX267;Cq}}L^Ph=qpcMsGJ)`AS< zg z8-#aa5T+^WO*cMt1iKDiAuTqSx5C?~N2Oz;uN|ck5-~4_`5my$dPu%$pkrAFuL0==Um@(Lx)^kJMC#1az820fq*YMnRKhZh5*1k%HhVbo>& zp&hPRrdco+mE5kr3YwmVaku&~&2nLr&xb3zE^VY7hyxWZz 
zIPMGIi@c{iM=kd0F9?T?a+VjqPcaT{Z(5w}9UmJrFH@}7ONeL73j1h{!XU79BYzf)wd9|&=*lBVoZ?RK#>CAq#X-!pE?l^rFQfXg&Hg6 z>=&cz5USM^jt)?oxQElRvI4oiBr1R|h3`+PH!?~e7Ba&=SEGl|9NzC6F%+pQ(8@4y zh62~~n|YElLD#8}WD~xeHw1&h;y?={+75pOe;AA{R!H|wSo`+4BN-i@%k6U0ApSFz z25Mk0)XbRR`<&`Hwc?U4W=aZxMeTGvV8UsBuv$~UO>NFwoRlD7H6%T!PdGg%I@JIE zU2u9QV}ZuBRj_b!Wg=LqVwSTk6$(KRJKKEPzKvE6=f+w%G6!O1W63G1An;g-KmY}P z(2*8&$`bn~|512&iAah(hdj)O1!MLwVHTERy#^kDV<`p$ z59nCHSzh{Ce}Ud$Pl|tLoaEb-X`r0QPPitNP64exm8fv4Q%o@l!kA{sioYaLORi0E zQ}=-|mOb(;MP>?N=&gQ;BR_x-shSodw3WG>s9NWs0{#Nt;NVpPG+H?-p zrrS<@ngf*+z&NNn^`?tnKpMU?i&SWrqKG>cE8bap)M|x^<4XU%>w4&OI_J`C!`@V+ z%@r{x+A=ju$sM%XR%D%6QM1MtAa2ciu6(Ro9NMFRNcTQaa`A)8>eot~sPeVKB5D!2 z$1kH9-a4$yP~`yTxhbOud`Ly3!nQ)koUme?80HKwg*t*>*v@kCQw{Vb1@44|dHNu; zS2~*H9^#YQF>l@spUg?Skw)?71e#^}+9Z1sH|@ZkG$pDwY!@#UU2P*?N6~6$;h99-ATpM9*pty+xtL8&B?7=wQ;%p@b{a(4EE}rBEoI@ALUH8E7`%99H0tys;;vXr6YW z;cBT@0Cde;P5y=WBS`Ql09hWjT$G8g?`yJ6Us*kuOyw9R%E4;Ae5d#rV8Jd4l{LY| zQbnXAC9}3bxN(j4EAIyTh>fZ~rAP^NGiT95%i6_bYctE5ZC$Uu$j`Z`XNf9Rr8x7@Dv58%zubjm`$n{lOI6vRhRoN;G!tJX|dGoXQ_*M)ki zUJt{0GiZ#rz8>X?s{9dK(>YnRG)+a&Lk^MJ>P%(o$~+iJ9WI87?SB^|qimmR-6Sd1 z_%ScuBynr$zJO5&cyb)Kr%$(W=7!d3Ei>|FEmP8ym|D|SqMQXri)W%kBRci=`K}OP zwqfRWT(mb|Pv06dMbSEuK_;x}P(`Ahbu2+u*Scc3Sv|(pw4rY^c-P7lLzQaQld{t_ z_j8=0MFM{SGZf64C-9oV;Gm=Bu(!XW1G(Jam;1+Gy3R(`#e%I=2Oo^?6=Sv?3hsxKcnIK`&gro_{^KuyV`CT%=X(^yS^tOYhd} zr5qE1H&bYdTFoKwG&d7&|3{uz2hq4Vq)0sz*of-=2p0vf5mqz^=W_AbU)>dIbVZte zECG4t@M-T4-_Sx8n2zHhib6!2K7~d$6BUQ75iz#~fHERqaf<>1U`I_$WTwSJXC1uH zPVUgQmVq9b0R=aff)rZjdWR1KOsge&chiGLG$sHJ>mh z-(Xj?EroZMAJic8Qrz(44*PWY$LKP4t? 
zHffOiDp1zH!2p|YE)*<%KcrSW0n5t9oVs18hJ$uJ$EeeN2<$FDq}_Q)i0&p{UEk~= z4BuwoB#sz(1SJjY!ed#wK3g<3WBgR^9KWnIw?EOm4DO&($F^i2;wryLL&hP1c z?0)v!dmsINyZ(T+uDRx%W1ORoKI8EG1L0|R={KKYh6y!wV={DMYP1|oW-np~3khLH zJm2()gSQyT22-7E&@Obt$`QHNJpHJnxXh?OWSYs;kLbEuXj-&gZ0S3f`aL#TtX2Ew zg=0+y=DQbbj|0xgKdvxf;2-R6oG2AlGcR;0G`VrZS(6kc+B;mBQMY5(Df_?Y8>7Zo z2UU^QP(G8Z)04&O{L*y&|8VMvz#`EUa1xzG+Db7%=|MqQ(MT3>2GZuzONbpgH5UXD zXC=#$zsQ$sb5x3fO>V*i*fSIQOe9Jgfj#&$V z2*fv5j^Cd?KAfTaeG8Cd5KQ8nAHPR71Tb@j`Ot$|0vS0rQOGu*nngE@r5{U>Y1IW! zdw^7!MkO*}X&t!9ndBo=r)xSz=%)|?BCR#NC(+g4iemNMXI_9NyraB(XKqt#e$-E# z(9idf3zdjvl<8*U;rY9_DILIe6*-i@(Bwi1|F-L6qTFnbs?zN#5(gYg(xiRTvSRE# zrJDbyyDYU{dOryg3C;M4u{bPT*_&uWBGA~A&1ZHWGDToN!azeI#;`Y?XbkFYZ38Y7`wkftRC(IV~JOsgFAT3pd znM?a#1yke)kS+l#vRYC(5Gk9~81M8di~@llznJ58{o1e6_HDwU$`}2Sr{g zT|YfKt+LrSiG&|J{*+^!(=+Vvw7=9JbY0rAq+`^=yg9qvv5twIWZ3ISSv$ZDBY38SjRaN>^$icW6d+V=g+DZ*dJvllmQ=bl z99IIylxpUV4+dkf@-$$lLyM1M;qp)fD4{xgNo?)-30iDklC5dhTWsvku^=;?p$C#! z_hUvJ(x09aaqw$xyZtw-*$JFS;bagrXtv4q>@+Rll*9neUO+uXFfkVX;biRIZR3~z zSXvQ^(t5C(mI8^yJiQ9?ZZPQF)CK*m#=Nea)bAoilJ6s`_V4EuwVFK2!&*G5RVRlb zTy|Oo5TVMG_F@x>&&+ti%2WyX{Bsp>51^*0|sy+y=wE3MkgQsLv>N08y^ zU3cc2TaPbNn@O^CUtU_=PAnnY-rp~*PYLW*A1+3A)wAZv|4Q_(LRW+4bkVaqTUqM6HMy{4G{0mripM$TOt+49GR zk7B;fPfTdyI)`cgZjnCrD5_pLgC7u#(^nu|jdfBn(p~Y^Bp}O$t=w-brM&Ok0H7&8 z^ko@PAb2_rL;P#Q)D0bgrSS9|uwO(2mY0s@eE0{lZ2-xy?$_EGQ|tBG=9z=7cx=3I z?I|OR7R&`lnd2T<+m1a10tE`da`8nsi}I(;)$i7jP8Hb%JF?TJo<)e~zJ$axtwu^H zLb%(Zd)8{XkuSewVppzhJOUWtuD0PXui%k>!#RW$pMG*I0Dd+cMMF0jU@WL8JzKfg z(#8Nn612`pxSTJ0`rNmrLVe%{ba^CQF4_tUhkzNjQ^g@DHEu9RiLMNYuuvkPSA_Pb z!E3@hqfj#eSv|c!d|Az{<#Go@Lx*ZO4?c40i1)?1SL48X@`<#;V@zA8CZ;7W{3jKZ znxS6*b`+l>%C(RF)6zrhNYqFaCf2CNwNv>N5i(kocGaFio_Ig?rR`{X*ef0K(uwkl|3b2yaBSC+pl~gy5({Fv&=g^ZD6_l+fG` zu;?YcQwASIPQ~t@VM>rab(f!BbqMZ3b& zW87`_PFBpocv*KPJ0{4R$HxhAKiZ(TGjQV3*~Wv&b~yRdbMK+T&PVM~0oLGZv?H#G z*l6=-`Fr-AqM0`RaSb8;YEwt61>g?ySRbKws5mT+|3^}CG2J1wc*wi$O@&nlj%2Mp z#ELyDTvKpR&L|4bCEWJo$-8R%OJ1*CZPiu>meM$P>81b&A79!dNRtF4Q;6U7S_lNK 
z*-c`L#E~UhPKix8sWo_u4Xd-)+t3yxuUjnr5m(5|r)cF2QPf|)X66}i-^ z2YATO@Z2v}(LH4KE2{N;Cd~p_5^pKhIWxgUJo2+~!BEhwcFkAoH;G;0rp1zLkM+&w zt9LZ)LOA_~f3zVXH!#sm_5%vH+cVW+iaxyVCRJR^1D<44m7Dnar+pS7sG~b@Sa}Kv zu3IY-YTHu_7MCh9kI2ic2?|ly$RlWreW-fhTHNf{~+B&3+j&| zSGMXM#AOrDjoB>J^h8xGYraMAcM{OeoY$PXlIzTTQXc@E4K5bP3#T+7Rs_=`z;4Nf za=sl5`q1rMy~WRnv4)G)K!-;2#rT(_!kS{{&@PRmzOdU&KY3unT%1Amh%^;nmoJ1x zM*y{!ff~f8HuLYht7P5*fX&l8&j}Lr31Id2V;x_6`$|Yx!^Hl@IrAF159HeU@0Zfd zm!dBEh|WR7R*aT8jcYQQudmMCBlBvEhg2KJN%V@XxajTlIPd($^@r*Z#hd!K8Or)c zf+;B7k<}#7P1vh#2XNdPvKJu3&R{kk)&?P-vrIV@hC~!?II17u3xHmcDU-18USDjy z&P#(J{d`c{6z+$1oA0u&F8`MIh1>>PpXfttj4Muj&+fExuTLCruUl}-NbJ+}TTA=m zeU}cT%t}vM9CrULHlgbxx0r43L}XxN1VnIhZ@6>Sbtn01Z&}@hER`{Mw1;>uGsrxL zC)tRSUo!$8_@unZZKa#;slK`Nb^iM9JvZu$3P@4(3^$vcFvAR+WBXoDMglnFX*tt& zz&z&Jfh@H;JzY53{xj5L{oSqs5DE?udf13Nn{>_|PrJd-+k-Quv zvnXQ-$x7|E<|VwXP`(!Fq<%QWsEdz_FM2od>nmpIqOW3Gd{+#|%`Ft{hF#COaY@os zD2yqMj7fyEAzW|@0T7sHjGrnL9L%W>>z<5Xb?FhNQwJ#3BT3){`Fo^sR8c7LZ=$z3 z>xqdP%W(xE=N@8i`H*< zcr8hjgHipU{;~V%dR|?a>{u16pf>>L40!Ye>=onJWB z^d7!laVSeM1v7Wf&tH6_M@qjG>!`19dBFlxSAVTSzM##4(1K;}qsx>#zI_N6a|^pN z4gGFJ34hI)xT}NGN}-w4CT7;dBtxWH7W04mQDtChP0j+8t#p`UVkwY^EbD15_04!A z4nFApRg6*I4B?bF!ln)J26IV0!cz?}pJm;luCM;QvSbd`wFZqE`QpMg zk!S090-|QF2$~LYMUX17|7kQQhDmxQty!s&NF5*5ykjC?LN1G}@))e0NJ^!2{HOJ| zr4(S#f+YcV-1>f*@LQLcFUf3&j|hIbOgHUKx{8s7U23e6wn4*x^(d7)_r#B9S>5C} z{^0uYgG!E|e|@#-lr&LdLwJ2J4{o&^P&eD!ec?8#v@q$g43)AHxubKb@LaogEZ$p_ z>FoIHR9-ssC}wv_x2;egD3>|X{ce5A%4Tf4P!6uM2-f#WR}n*-k6CuMgNS<*c$8%X z4ct}XnmWzj%R{%d|YFcu*XdaQ0jhkR<1Rz^X=eR6XHE-i&i)KrCwl&;<6Ea&B%l(q1U0lH+1Li)NsU^)fa+B;^-E#;o8@bs_6iyTkVUI6n$mU8Fx4Sp~VF(=8_vyo=6Z{)_;@XfWZ!FZ!Mj@= zt^AqY=Siy;rS{<`>-B_Re^S)%H1ni!jV?#&R8kxW^|A2bCgYbZ5eqi$73MV^Z~1L} z^3-Dme-UZR(p{;C;9t_(YP4wZ^9V6wbJ-JDF07lw>V0TBK{m3Szc~@mwro_SVAv^B zFT%-og%fP@x4o^)r}ao(bNC%9)+R)cLzZ#CoTbi(rlhh^W+0+M^}`I|yR4dRy-N98 zhtkjrJ-#r;==WuWJSYqvJi`fnZO{f(2~geYXb8x6>TCU>pvL74HG~Iwchf+QNH~VZ>3QM;?G!5z{+0e%SdzPu5mK_0CaAYD52ntdK-EcMJ?1m?oG11vUr1)L-ccIuf+V(n> 
z7Nv6wv@$|%Xi2QugkB1~N=eFGLVK=yFEq*sM%z|y98Eht+p6fLow?T1HD78h4z$ct zu9r%NSV7Vr`IiGvGm+cv+54RYvTQ-Qw zAQhs(;usB4=7t8U(~owvTlA2a3#wC`Z@@3MG^%+2_cZfL30eh=y&}?TU0oi`3HE!M ziEFm8gsaD)e4?B|NRNQdv(i`r*93?=`ODI7rj~?VZk8+gP|R{YSfHW;{yaN<9&!)y zE6WFo!6cSP%7S8{j`bPbY+_B$a4DYIt$4xIwv0`nbK^RD5xLxrOQy0~~!df8=t`(;jK@)NaWH6JWJwnh{WCvJhKlFoifd`q zgrm>#<(}JIjcPayR)fXZvD_;qH`q})9#CaPf)b&dBvz$oz8ZA}Iz{wV=kVe?|I%Xy z4%(;Kf0Z$!Ds$|&5_d`*fs1H8407HGX!Grzh<=}+@p*5*{MIGQW57aSxWR1djbs=a zS71H_+4cSg241ezSd;Yx$p5_^9`18ZuTG?gvL+ej%@prc9~Z7y+}v+I^_WAgIB?&t7&dp&y4n(D`|Oh zdh6dx)ITaGpL`_3HVh!31pEKd!u#KrD7*ilL-Xsot#dS7zoM5B;*>j>{#JCuU$0K$ z>R6+2RVZ%aq@+Yd0+-cQElw@2vba9#xt`bwB7%xfzI0|Jz{>)I2K$(smpF)M$_=29nUN%i={w_r=lVazE$oUF(VJV!h@*Vt;Ovi-Tx(mv)(|rOerb3e(5>@y{nwc z7>lyn%;m}I>*2xP_JJI)5hPF?K?Rc<iWQ(kG>;P4YDZSV+`ZJUS`m|@nl8Z+YFE}*c zqvx02Ajq)Q)6<$6%Sd_g#_#3f^JvM3Z(z`8P`f&~DEYMha<)i}D#V;F>8*9#3L?ZR zVHwGtU`rwfACe_ks8%J8wmpDMN3Uo7B2p*rN_mH-F|b5pFbIBS2@ExbEQeCNmzefY zR?m=qasnOyim?k+Aj#V;BOyL!2mXoCSI4p~W|}JKcLz2zewX+ew0%1(chiIwGtV-O zM4G*mxB}wqv9*RO%)Z?-&f43((S;(OJbW&}sC!9;$q6llrFX!+-Q4MiA7!$2_t%C4 z5+$;Qx}>AItVjRWk=qlQQ6Kp82j6W^uxVrnth?$^#K0>&Q^N5ktv<~3e$TDZYicdO z9fosBP45RUY7}42HSY!5_YW@K5A3s4_&uqwgO$#9LVrKRYCzUo9e>(@c|AV1xIc?G^%b^hENXF_L_j~r5kBnZ~=Dv>S9!h0hH z<;lwIo~|I;(x>M_Dl8s?N+Bp-3>s(kz)SB!jxMe(<6PPe9uzPvV`(1>W&NNU)!ar_ zOyX=ppa9#akj;Ztm|VG~*2W$5`08joz#59H`PaQqV;^YD$IqzfYZ_VubHJa>qE&JVvTF^M0`zoRq4H&@a5wOAEPq0jO%3zf?f~sSm=@-e zGBcHLJ>rgYA@S#Ow#ZivXGN5LH2#R`*HCN2$3}f=S4I~4XcEVrt zWYI;KL#a%Rb*%QdJRx*}TK=ks5_QbXDhf54nbM(!KX7n9a2Qo~1RP~6LBWa+cHJY5 zb4lh*;-#0whHH3m>H{DMuyO1UqC6Bu*~j`v?v(Ccb>FhNItJ4r5=oG{Auw+krZ~Vm z_D3alj2ftpPOK6Qe6&-K7o$rh$zi5o5^Y!DS0i|Ka%B^3xM$@exDufV{7JVx9D`YF zMm^Ztn;WmxhLV{fAY1%{I)jhNDYoQy+4qmY4dByp=gyp&E>cz0T4hb%Qw1%_M6 zhc8F=l)}(lY*9q#%j9*!%7S*N1rJnIymDK45L3lu^s*u}K}$mRn;LoO(1<>}6TPA% zh&NYu+{{d+Sm7VpFuA`M*n}90XAtDU-s`Yr=t4Q90bIYL>|pdbl?ulJvpoyvz`SXe zq*6tzgEm;~CSaZ5H0P^u%;hN*C0N{?Cv%5ySwpD3;$iI}aYlTXbm$5BKJs;YiTh2$hli!kR0^uxiWiBXX*4wb*8t?w&tiualko>epH 
zViK+QLxmz;H4FLNNK^PL>}P>3pA{L=ezk#!c{z!dDd4cLn3Ie_HX`@5`z{%J|aNk%siG@2Z!a+qBbotDpW*T{d#SPa1~a zcyF@R(dplx*CtJ7vgr8Pk13!B1p>?7ao4^fvY zW=K6MiZ$kxr`g=B&bA<66z?8!iUA|SS8uIH~KUWf)< zAz`mOQLkUj42A1$MR@;Us7JLq<$$D;sf^*eC(Td|M)Y66(en@rCazz z5Y;rI?JOEtnH-#~Uz!cl^e9LISsAF8na&;*f7Wo&KCFR`nJmB-3NR6%f_=D6{<$`b z>!DA;o=YvPbKHSU^-}MctHwgq@L zCi>>8A$1)Uj8+!D`z+q@)+@8X2JBAsjOqs!Ku?-*VkF1+^6-efPz2N7Q*PzL!0UFy zw?y5kNuz#j2!hO63C6GD0hftK@wuWZa$34|##|DYR30a(YUsA+--*>cTvuFlr-)Sn`0 zpdWFI{;VWKeCXpAqvNLnu(GkAFZFiT%XAQPa`MJ41pYZ4RDs;(pB=YT_?sihVEfw$H%YTr3h`xh!I*cJ1ditH z;6S<#m!rcm2%U$`Ob}l~=h`68mbjj7xdzNB2&tOLr9Y&|WC>3e5A8^OlV)uGf?uje zr(lvlJH@PW$5}3N(lD0k{LIV)@tL36Y?fa^?p=i+E59$RdI_-g>(czwP|d=J7u0Q! z1I|BPpPYHkqnj7!Lnsz<3a*~F4cGfKYb7jNfW;x}3Wpz)mg3Yl@4%tptEdkAmGs>JW(Y2(*i@K5eZF}DthtOF zEC#Ma7sj5*DY&_SQrOLY^P`ohYN1CHT1}ahDG*F9bPqiE>H!RN{~$wA^qsmj1`9`E z7S0t9hz}hH4Vw@ZEguBP9Gy^REv--Hs*|_V7bi|k!NR$LrR3gjK=me_i!k=V=%HF# z%;Jd{_OZV9egT;WTSjCxacBF9GVVhGlI`LW5V*Wwrt59y9LwjaCoTGES2=CL#?PPM zz1h5_b*a(B-rwkIU$mG4BQ!S}=N(Vy&_sIugqFpoih6!;Gzc3!i3AuM2eO+m zXoK{8rarhN5#!h(NmB|NIqasC9xuMmUZW2*mVLuYhzyvb`Km3Yz(^w(u$nq>QdmZh zNNTe-DAq4*2(%eszaa!Fvm#I>bI5A%JGn3t!wsiMnGySXDwytkm@ERVqBR2=d#b** zL(ymW>KujXRWvwIuk~Uro|N5NiTb1g08mh9^xN`YDi7I}=6l$M`-%9EeM1pASKR`7 zYris3QcVXIYb50q`?p$rZdnl3KqxRcG;WQs-|?mTp}WaY%5eGCW5na^MGl5p<;n-U zshYTA-V~Wp2&iRgqc`H3)JR(bLczLq zXgBDbEa=pV+OU=WyB0#XYMPhL%JCplmz^HDo&~vudDg_#-!BhB>E7yQBgtFlCZH6F z*{I3sz*9@%7bz2Twl5sQH|UM$!AnV3<(`ZEl#qO$HDA3fb@@jN?=|V~Ek>YGmDaL9 zgq1*fVib%3wf*#Jpo|$i8=c=TPcz$X*_yaM2#{d%99+C|!%%g`z1TR+TyFiFj7lvpv* zGF*O9B{;mBowaqZHmkWLbJ;2D7%4PKICb)IO zZtS6VV(K#6h!6N!392iu5KL5zH|piB=5aBDX3H0HSG>=4!=G4vL(kA8O#1l?AVf$yh>y6?4-*@R#T3_SAmT5 z=v-zMA6w8`cs%gK+J>1k!UFq^!hpHNRn~W0F}Qk;#`TPEwu{Pa3A~BwV*svQ$m>GM zfo&Ys`C=_gfKFxDQ?51x<-{%VN9mJRhR$aW1-fw|-cxG8gJTuXG_`zD37Z_xe>QIE zitn_aV_Je#{~V5gM8dpw{2%~BaupiN5B3_mFku(ySp%2|s?}gT!O_Jtnn*MSMi6ql zBMqd<#^RRTR>+%mcQU~Y#}s^-wre%-`R7o%z8QJD-2N>>$Xkj{UzULQjF{j*ZZqQB zQFAE|nB2!}kudOR%2cLMcgYJ3CZv5?`ZU5=i<2QUlSJJ$!d#& 
zelxRACb{#uRfwT7z530RMw5W;XL(;xWh&@}f$)bt4RdoFQ@v35x~ZqGjM=#32mXUc z4NItc)Xvk%tEH}FGoOTu2MJA6FwKd+aT3N_=4d;DYGt5v_|E!?;&5J<`RpHFO%L`i zUzeE2h(VThGN*QttND=}LI zfhsD65%`qn4!)2im=!tm6FNFB|Lnq+EtrbHKfJIjp#0ob#bSBHU`X5SRgAi2k^Lli zjW%l>H)`547RxB?T8(W3ZTpETQLGtW#YW05*b5s32!&<_NK^h27G4Y-`L~NP4pAOK zZv*3wck`gYe|s@%hW{+_JriWCsq2K=?*TW21Z@79AQC6k2B>k5$mNT|;?G*8g*(Dk z)akHW+U0n^<3x9qmxIq|iH!?FRakZPU|6}nOD$tyH{v?qy-%X{s{{T8f$U;77X5k; z!(4vNw&iasbJ35QK_EDs;{AtM%KWX-T@EpU1Yvhg!O)#NR&GfhFalfzrjrXlM8#U{ z!x{-N%y8!(D#&5}1_E?Lp3hTPjLiO;#;xy{VVDIHlL^^}@kwAeB)D!qEf#U>8U6IvOlbOleOx?I{7%Y+_ZvP$e;=@FE+3O<0rZh4i!9ct>ptW?B3U zt*bA~n>?Vg!7-qU)1#v=hkO zGoaJg$%olSl>pxUk{ZOC-$H%1Sf|^_7rg@!B2hr}v1apfE6hFSwL&;(sESk?*~QM1 z(8FA5Eroz1gRXHyv|L|Vc=J`>Y**K z0)75nQT5nmPMSi4tddg6FH^hsWonT=Y4sS&@)sNw{GsbU)w@&Z;2O=-3OQ)oIeiy4 ziq+#Oubz#@59zNL)YJ+wvD7JovB(k8*NCUZ60*QEzHh$e!c2B{iRXs4Zs9eIvk&m4 z@?BE*gGn4N+$0)#E(e+FbgfRliDS*k?=2Ir1-pKt+@L?)(?ukIfA~8*znewB^;#jz zAU^OyVCAJEJEpDT=#|%XEGlzzQbSPw2pgX8PU;4g;QBe2m+XV$uyjqy5XUwbX+zx( zq+JGgiD(gg?_NDvOb{6R;w?}oBuFG8rykZ#y%#8uiK6eJqrlMMmZ#FAmwctIl+knNV7ImLhcdkd02;9UfR0<#|6#N#f9$(;tp~-5S@@`}dtlH?p z7QB+sj~H98%cPHYMxUB1Usu<-N}nru7HhA}V|>EAB&t12zf{Nlc%@UFbd0Gvt`Y_x z=hFL*x&V@e3JA4Lc3wNQ5ig(onaL)pnB+WA$;~1`=qoh{jTPmyF@ht;QkJP02xamP z1yka^j_0)6;V&KXoTvM&L%rxkMukl37mZ~P6}|pOaijuGYcrNZ-WwxLNGn&OGj)UQ z)=7V6@aauFQfiQ|oPK=+PTu%AjMd4vrDVIMHyGEY(lskG9+MbYj5VF6vK@Z%lDyp< zp`F5@(FEHiil?t@Ml_JeFQERy!DUK=ybs9#v#9+?!F+rl6H@xULUQ?DAqoAzS4g5R zPR@2VqSl5^PX9Y8@jrJ-|1lO%`DVBOr<-2 zM*fK(p=o7C5D0ilkYMpYgltPkM;3q7^ixYalWuaTIpplT93QU3VafE0i5&tIy)*oM zya$bvGCDJ*N;vo#3`6aRWP`sZa6SK+>9fPkk+^zi2*0ryu|~C4RD%S`6lL@*VNG=j z;2;9OG?Asi%NZ&~U{Xlf5?gEtrcroeA*TzL$*>r>^7ne=384ZzEq=F%EtZZJ|IAcs z3KX@ulb#j6JmAQj+&A$ZEA*Mkp_%!HVw9$a!s=fOtI^S*Tcv7;LwrLqa1U7L6r7E} z&A)MdXYlNQ@{2~l&A-d+_#RI*3AERsE8j>iE_ouE&>NdfvC)-;4av&us1EcE#kBFC zosmb-bg4`_1Ix2C+5;bMpz|-lzGAnJSl!-5k@O$2M!u{(2&pa5-oo&6>d@9HOuTco%TJkcI9 zvAD>r>B2!IEi*r4S(lJ6d#}6Xt?TH_kx#(%fp1#&y?V!bPSOW7@uxSJVJsM$ea+S3 z2h%jpYZj;jS%O79 
z+R2rVvkT-iXwQuMmAri%?ubfy9j>)3WDZFl-XH0(w;XXp@omA{%i6AaU5PS&7bfvp z#0webcHj{B0q-|eS#gV_%aCJim{5WC0~A2a25MIlcHBi>+R5Qa`pry{=pCtQH-o!3 ze9{Yr)t={Tq6cZsPO}~7+Cbp|(XQ7>f6a^6!f7);*OGIQ#|WaMS2K3;S{VYSEniX* z7AWuZ^j@n0V^Gh3y#y7YbiR~`KtNbZ|G#AXs)lC&w~Qap%4wbB&ddDUVlp`y++ez< z;MKNNtLAVZ@k8fM%h9ps+=&<^FZJ-&1M`C1UW(sGhO2 zsjL3>)!ZZq?|t55CweFwOCyJGbM@5|_OG{SW-fjm_vtI@7XmQ&O}Q~MqX|cb2L~(^ zA?sYW3ao&dd{C#E@W~5-hvgL#D6nNkg~m1}*U{f-xsz;n0m{aBU=&I+(P7hIpSqP~ z)M3(8!Hjl>fsd;yGLG#pi;HWjH4;8GyPfPTU!fB!fYPwf z!H0l->wXLl*+bYzE`;1peNH^&6ktfTFzO}vFD0;$`vPgRSzMDB`(5Fx)iVyrc$Vvz z`g4Qg78@f&W{z*iBIzcqR-z_Eo1f5=0=V$VER}uK-XH0!+%P zN+AD`iX5EJs2Z1PDyuWyNe*Y`EHWwSS1dSzbEKRWlYs4sTs@jBhW zl$>m{-W*kO~kz5}aIQrsP zyW#F1rvlL2;^NJtf`(8LhjU1}iT=ob-SB3#fuXnEQRp5lr(VWagkZ#))XBv&6jmSR`EV2%)GbCVa1dF%JlQ$}RB= z4|Sq^bpt`McyjL?_jDRwiEmfBAlAyR&5_KpSk9y}%L0)_N4SBJGv~26?YSnV4o`oQ z%Admc^Rh0ppOHYR_um(;FYbEmzE|%4bbG^L+nLw+TMIZr*U4zguoX05fGpC}(C}w|cDafw`sC2L($+2ICWA;dp&7>@bz{Rq221C>eAiYvg z1Gh{+TUH@0>KYBA(aO6^uHcYyGsfD>*d?Q3!_oe}Gn*ZUk~!1~8HY|y17(~+Va$q$ z`|aH`0VA%Er}FrI>WdUh4NhLy^`U^(lBVOqm`lM80)Yh1;kux*1+^D6_%98*Rr4Kd zVl03_I1!dWT^$pPre|py8x3@l(3f-KlmqICa#4P%c)eQENwa6TbE!mdK8EPBeE>$O zHV5XJW5q4frENN(4ITE#CA!N7i2x5dB?`_o4IP2{&^F^`8_3Y+pk0OL#7>}iDJJa_ zBE&TZm4|nc)cC{}P*5U@cHq?1qAbQznT^0SaI2`3%|Qg{YGaJiWdZw!Q|8R+jTDfr zXrLGZ2p{iF_fJ7w4xE;An)_Pl3hNfdj?JMnSB@ac!qg93@Gsc#TiA~f<$Y=>W^(Zx zgHWb{(4thjWO4?(;9TD$4+$ey6oWZ?oY6FBh{z$3@!`FSCVuI#L=I~48Ze)!vN}Cz z@8V?(eU$b<5A+9OW<~HYFxK06Y~;Txj}xALhS2$?4*M+fvL2ALj^Zs*+2CNxJWd_~ zS(+V=<|*xX+NHNd+L&ZfR`O@+FQXdy@I)~W=j5sJ*r9R3ZLJMWbO_zHx%wCbePw!7 z>*CT?v900pSy}Je1Z-unJD711wSTN+T%7+bnT#E0u4Jk#bNQQVvCp|OR$+Be!n2wy z6FQCNO#k8te_&V=33(_tf9A?My6c2!&kwrVp>MK5di6j%Rj8oE=1h8I*|i=6X$-F- zLofLDMQywGIe{sd5QysqNXt8Ls?cgcDoYCa=FLQa2}^^`qMI%{R+Dg+$Niwq1TD+Q zFrcd>dr^UfO}rG3gZB}LHSH@-FTffyB9^9Bl;{B&G7pfncT}s8Xy9bZ$hU0@45R^Z z65bWE4+v1c4&MP8sIHsI9gANmG)7=kP(y1MQ$&#(=q%wdH>9`y`NY`!_gS`=Ul7=5 zj_)!hD5!UZ<|J$JpaM>#}{lk#|8l_~yFoQ*pUc37K+jKQtpX2G`#3jPM7t}3guHvb4; 
zpOetv500t24E)@d)<{Dq5`hJB{(x`wOLd$E3xk;xnN|Y|NDJIsYOQ3v6PfT@bQ0JD zmM|+T#0hPnC!$ibo0Fv!jY9loGCsT}=C?@{m&;3|mntRhh}CW(gtPH}Jl}kMMaCM{ z;BSCyzm_=%w*OJlC1)~`JTX5KueS(9kB$lYAjdCZdV9-+pQOUf5DPgphC(g!q>-IJ z7gVL3ZhS=H+=Oh7MJ?;lBK<>{)@5td0|Q~qJ%n5xi)fVtMHX5tndlSaJbdeK0J%2w zX~H3#AZTNZ(ucj1so@5rRt(`Jc{JmZy8pZvvFgh-?~^`xk+Ups!BP&4vqKf>7%VDZ zP^*W{(Att$@%js;qv>lM5BbjVMI(ES5WBNkKhYFUO=U1EiV2LY&c8;7zZCB3JkM<{v??mn64p(?bx%ryRz%%voz#-_(v5Asl5FoH4^%DnmsWGnRN;RUMc; zioya7pU^urQuSa|fK{Yxx?oG-r<2^)hptC0xmEk+iJGgaXJ$q~qT^#VulYrv=TzI- zviP0pL@ZCUDh}QAzxZt)bN1*b5x!2E@vC>GxyuL&RTBEK9cIONesUWLgW=vBh8e`1 zS5Ica<*3~s-ynW{&_6-^YG~#0YEz(=Vo0s9^-edjc@?S(7t57GiekaOoS@Q4hNgU4 zG+p1r7jrR-lM|D1O^>hPPEa2_U&cZJiwA*+b6Tw`6C;dDRL)OHA#3YT=4`CQA;4hK9#_%>e(4F;!$e&qf>1fA0EL%jTR? zCC*fNO0=B{qRF9&TmyNjSrU_OX(8sP$G6O+Gx{_tga^V4{KWR}1C>ua zP*6sDXXOLVH4AfL8C_f6vASCeEX<_O#0jE{TGJRM32X2>JgAjewafuVekxV%y$}pl zI=sYRuqQr&?(AXV3ZdAsa1eNPbCuc-m9?cnt3*CEaMNwlYO<0>;dT02XZcjaDP7-% zkr4?Uut1aup$|W=)#0>icj_4?VE`{#8fm)tU*SSaRp_(L+T=(jGpxj@w{0df!{aBU4`Dt-mmu9s4vn5omA|a8Hjia@J|}}O)U5j$n#A#xAedXSe-KOd^6f!# z^uv1gUd}lIenH}59uK1)x2mOP2;?-QB$&b$of=o2B^$kwRhs=R=MBX%i^H`<1o+G; zFFYd7&KX83?!!mu8j*F-W_P|HIF;gT-%%gS4Gsvf+Me0-OMrfAUlW>;i3ptyg{}47 z){V-LDlc5iGx3UrhNVy-5~fb*1>>>v(KM=h;u8-gb|xwXmcD3!U>v>#6djZ#dE??Q z()~v;&C$J!uTpuO4rVWRNtBYbQ*__ zkfUVPK@Wo%HW0ua&?6S?jrYnJ6*X8;n=PUCmypoqk^qfx&_Ak!r;IYen7AiiypxQu zQd};e;!jNx#mgfWX3tC?#l(I@yJD251(y+BcAek#zVQ2?`VTh$xki{fpcPpUQ%PO- znoe^hLn*w6WxXD%93Mu4?#s!NJ#)D&rCV9D0_7|Z5>M6+jpAW8XTO7z;aWI`xEe{m(ltSwen-Dq3Sz zo`<@kTUxEHcXmyEo>#+bUiJCBVOA}N%PtI$mVruN?T*l5Bx)>~RL8 z{?bsh6`ZqJQ|s}-sAjDOvL0I}h_J`7Sdl$R8iJ~p+3KbqDY7wLRp=OgrVn2HEkHzCB_VK*77R|&u>|sK!`}7z24lJ#Yd+Y32 zm3b$JU(di%?r%!aUtj;9NXk#KIY#sE;gi`nllp&vHyYd7*xNaoIx>p>$H#xQz{IF3 zf79l$yr*kl7D@wUL{3A8U@Wvw#hT_6is5cgcnnl4BXU*|h%;ZV-?KAez!0MrhH9pd zJMP(e_~3SsC@L(QM>sLwK7bTr<9hIBm$3&}=D)GBGLKSRkU7w?a{!3u38kP-q5<^% zhdWx(mzXsu@?A2%&~DI&50z`c1(mkC=&1I0%!SQAF&E8LNH-D2rovJ?6;&{OWRe!a zo6g7XsWT3I?g-5d-!T^)YHT_^5Fm~EO#wSgXCmvX_{Lgn}sAdhG 
zPD4-Lzl>;Rh_{p0YboK*$Zk#N&zz#U)7aK*HH@C!(CX_pZuAxd8e%=#zdPL8yz(S3 z8lpMk~}vKMJ-KyCt+yHT%A{g}w@l+7Rnkih*wmxpAcB+aOuF^Nl`WHp2e zs4PiPs&2jtIu2kMc1&JQoeu;`&HHEVxp;7WaueF{;~GZvT(-(jO4Vpd4rH`Z*iIHY zCpO%H2!zShPUlEJCSnjfwV%^7WoEXMx^`DlJGXeRS_B}c|2?MdpP6a7;bWBccOJ4i z!T(6B{=bgxzohnyTJqmRqJM%dt(`}dB1AkKWvnQxEooyYTC$l_OdNy1$1ZZnbWqw$ zY_HdP-tizkCe+frF$fsq3>bEVPxKfMH={|U?jYuTn@}y9GBloIFsOHtu>E8>T!D+Lj7D0)s4mvAV(m zNx*1&qx&W2hmWv)-mJ(5$cq|e`u?Nd7&&s}LhPGEts!{k4X}+1&Ga}@FzsZ)$D;{{ z96%HdnZWA;X+cF!ll{nF4wO!%gNVOv(?u0d$qF}Bsms#U;G!imI;a8uUxd9?Sf1^Y zts9)+?(QEB!QI{6-Q8V+2X}XOcXto&4#6$BLs(z_xmV9#eRcOffg_Hnr{1bjRpV|D zLpg~CO#|nBHb#>P3~;)2WcGG`@Pi~2Us)+UwP8lsJ;RxU{kknz zatxL?hBYmGrJPS9kS0?vNh+%=U#jnEg#U|@@F9KeR6O|ufc*lmrU7jn7;^p- zft6Oq)aEUC^PWf>gZW^}{ax~>pu`UW;*TI|vh ziCM-`+23-BQ7t2@1`G<`=)pl0IX%(RzKaAF>fm~6fz-JegV&y{1wijj@u1+E{*(zT z553;KrRX7SEl6#+7p?Hy(7Uu>dLg)$9+ek7>{)wA(-86xDM&wSMhhp+`)yo!!K#&e zn~a^WZ3wKywVL#FhkNA0dbk6`j3DXD>Yaq&&PdWvYQ7}GyPG7sA|u`vs7PT;ynJrOUuBo^}v3`rfA!C$G|5*crUuz0n=TJofuHeMC5jwzi@j2p3K zGgEy9qbOi}0iqS=3FAkYz!P)+vn34IAZPeKGtqF=s{odU#^wPg2934DaZaU7P0BUN zC3#|MxqL(|#zQmm$a~B+>o>`2v|*M>yTTS`JpFAPWksawv-Qi1@w@>4H=ESq;$*Xj zc#7n1rmI%TMIUt$O}ncrW}N z)%(OyU>R2~t@pn8ri+xvoc?OdvD!a<0SPx#?oI2Yw(G<`NwT8#6d%ad&+hKUlQ-W< zyiImel(y9soPb+gR|fFShU>Ym47p&j|0W1Z7Yi?AKDHyXondVZ|1Cvepc27FdAcQAl)Sd6xE!Ya%)Tl(o_bu@t4@#c#vDY#ZWvC9$ci;P=6SCd zG8;*Kk=tJ%yGaJW3I2dBj6v%bw2n}L32GdgqRv$WCum0?9OHRdYrIY!MU%;iqfnf) z_ZEd`8s0Bz_G8-!OzC#>7W++wy=2tI+r;X;{iutN$~ryqHg$~m&Sk!N*vARm8fAZ& zp;G>P8?h<_clagFwx77EfFTvbJf5*R$tatfJF>+UFfIhEy!LZ!pqXl#hwT;m@(5hA znj*%{0R8PS^7eRtATN8xd(pZ=MPB{Zl7Z(Ap?`+uF(BkFV2D$diQ> z6f+}txoX)osN=DirGt%?vZ;Tdip%xQKY3m+FpMtwS@5|Qb(kd8S3_42_r$(03454E zE!&+_o!7I~m3Y#5HiMF1<>0fgcG%med>Whj^S=J4q2}MaxX)v*5dd(YIf(M*3*~=5 z)Ku|wFcmR%Hg>Z7_dWHLx~}uO7@F@>)aM|+9w`k`%JuBXjf)z5>2XK1-ysbO3?%z%)Vm4KC6y$=_u z&kwkuv6Z%fa;*w-qZRd9~ zyl&{;;S67*f&@`fepQDE4H71G006a+@_3%~sz8(k0~)qj2tQsKsZdX9aF11F&YdG) zM~bHE4VbK+D*oIckAEcROI&Y@(GKQ>52+t;PlWH2l9V)sFc^`0g(n*V&1(9gkaL%4 
zi}?u&9~$33lq-|(mCr<%=^zckc;!;)ICGWe#aH2lGR3 z=Z}wR$aT`)n261l+?!}HpphMHQH)r#kgz33T#xtcX=I3dK9|54z3XVal zMA8htl7LB?R!gk$72$mS{P1qgQ=wGPX0RjT#lWCfnruAF(3qW1b07xg0l46>*Es#}R~Qt;k3?mW)HIk=zrB+4LHoZc4-Btz(L2MAYM z#$~}Xykqhy_2)?R2Da)-+1nVV-gjb!vMvWimfoR@LHVw}{v*Sa)|)M2nx$O$N}u85 zfT$|x4V%5&N?k0_8sr;j1L_YsyA6i&)Wc2C%WRD%>Dvn|wo~RQmo?5CGESrX;e>!k zonIzu1P^$D+QC+0?huU)>@?)QjODr+Ya{f*3aITektASmD?`oT zy#qCC#{-A2q^oh779diDy@bdF!#Nlf$k1NbICnCXTQo1oyvT%1mCZ|-HP3;3fJwK= z2G<^f1kC#!X1jiUBSo8V-Z)jx%6>b&jAJ06OM?$~ogTGx-gdBq9(`NeGcc zC&78uxiN^ICA?UU@|CO6gzGN+fX=(FmDnEQ{*f#0K2C^AhB6yaB3Q>!U${lJ+2%FD zCH&_o=-a3xRk?Of1uv-ml|t5oFnn=LkM*si*Yw>}vS-HXhDO7gH~G5+vm*P=tV0P;Z{L!{ z{sok5XgBx#U*~bf#>y}@r#>2huUgg|AZk&{Ct)ySmZw_O{vRf}uCDbj$a^(v@&p=k zV?lG4WklNBNen%GZ-V_)S! zfQ|u3tQG}YDN2}@!%T}rC^&ao&zx$4j^4~Vk3|D5EL8F=ntg#3AMZK4iz?OJ!Md9B z@6CZ=<#%-_c@#QBsduK#{KRv5vM;m0_98~K~w=8*nKNh;ix;wHhY1Y-p9>DwLuT38%e!00(m+Ps|->i^E0EldNVr2-Lf)Gz{&ZDKEF zm~0hty$Aq_>pf6{)x`)k>D#$~Fku?QPP~}xwSTA}yRW)FVG76000HZEGf=*SWhHd( zyb5=`i1s+@ylp4j=ER*AL4e{CX%rSZ?b_^GD`DF&mi<&)nakso^+_F~@z@^XDDRIt zl+;s*Kb3ardy=^H>p)>7nNX3-7FlBX6rTX$OQHbm3m} z)A~5Ui8DK$?H6&;Q?tauPFlv1%2u0@pp4T4XoTkcXA<0k!08Z4%JHYC;nL|b7QWIN z*j`9faC)Up(zIz-^Xc3jlbUpp;N`Pqb3zz^?*PKr4I#Q~;`#`YnQ6%6QzuaTNH|Sd zHpytxp!zP9GIV`o@Cr}!QAsWK^WDeDCIqXszAmy+O1gyoH2Y&*vrf$sXmGz5FzdYkk> zx`AErOiCaWMukwBMm8!f@=kc!lx9*Rm1;Fv=YlK)PVh)p97`r>6J)avSCO2EshRWV zrzId003|9DKf4c1A3|x@pv8#v-OHc;^M_zvoN~?w8;ZRZa4RNzf7xjM=_B>$4f8B? 
zAB$m*C`0rK96?|v5E%iLM=a7I$~bA;XfgYZ^fs*5GC$WL3taI>k1fA2!55b2O0heL zJPk2emb+A{D5WL|#(rcn3ltfq0YWj4oB|?|LJPKBNI#B&6JiN_OKv=AT!*e>j)L<= z%3Z#4M#>!uW};hVIhLdimTW>7E}*xlZ7#}Y%&*MpqyUQS#y^r*KC|9ZKaP^OLzJ_~ zs7Cy01Jhidy~5u)iLDOwQI$&nfJGP`>rLDc$waQx?bG}2x8-udaa!JF^m3G(C@O14f` z?-}A7=JCM>;4G(Cy!P$5Rtp+6Cf@Bj%oC26$ON)amZzi&*PtqnF&s;B94|Idl95Cg z)*j;zycqi&Z;bFfDa+^$t%QLJs?aXhtW1z*+$+FW-Z(Ee~qVcnLxm>F+4U$ zOktY9usZ{F{HOV@A zuSAT6MELYa$`@bcv3My1m4bZq3Napn;`twp%`KRnndXRCf1s$IDJGAs<}O)=s5w}Z zMvZV-B7S5JK_*ANGY&B^LjPI*5)#l7P&$X7J}Z+JcD)9{fwv`APjmmYVBsR(rjE)N zo!V?QcB@udK!RGss*jw=Bo*X+p+l_x#kF#g z$-~`UX!y|7?%$cEL55FGV_En{oi!sqfnDJ06246g75CHnAnQu_5sx@WpXg-<_h-Kj zwlcS0kf>*CQWWNKYF|^k6AeF0xBc{j6qEP8G{sVSyCQy7es5DYgxNk;9SFgRgOzX5 z=`jEJ-S_@VnlXy(xHtcWa?IE^x_}X)fS;tC^--#6&Bs!>Jg*)E=cW=rHLOw2AC2ow zk%xGoyh*pyF`~cN7CTPLB}UlFYxN_b?S_(HPp5~>b(PZNnb|xUi(OHaCtI$6JMNj- zTH1@5<21*Wp^<))g9=4ES_w?4G2I2`XgX~oRcoU1kWO;x#JyY%grWA~V{Lf+yfkRL zcjsYR-pfbG^8%1AmU?x#wzMh!3qEr_V{K>_hsh@~allfT8erdHzwHGLWjotEqC58z1WH?&C=MJoTfJ9sey8&kkXlN6zvg+w+UrJ${siCp?I z*^fldA&cN7?fw0aO7*#v_4^mvMqkl|)_JW}u;si|X*cC&=TD_3?l?|`#Y`xT{ z$9RiBmI}CS_q#8N6K$s_4068{d^Hj-p`Msm@l;);z&1b^D{*UKG!>9e9$V!Dn)*X4 zS+KF>ahj#HC-29W(*D7E8&Wj7feNPPD-$JDI!vVUyWVG#`Z%ZwA&}@o0pXLd+7WnW zz8fSjsux@$KfxHKb%g)yk!BPN#ly#xW0MF+aEiBI^JE0@TZs1nAGZKUJj`*_)M#B^v7S?#-&V`y%`Cb1*>N8|EX*A!kM?Tq0nch-E5tY-}XX$dHr$g|{dc z@lC{ad-oZuU*K944qaw6Yq9qweyHB$PtUAEGSqmO{k}z49_n-2ZD|8&QE_ZI-mp8w z7#^zdec)Y?q#)mI4uuOPo3D(}sq`yUy}hhuo&l5C0rZQ8bh=Qi`WTe#5ulHuWZ}Jd zXl%YS%ifNJe26+|rE%H&&qcaQkW;NrKtsxR3`b{)2_XRSetDpHBEG`o+&CS<)?9L| z3+Jd22=x%5l1O5=Ey8NN_z71;$tvO;G_~@yr^*+3gChc=mcZ3($AO~9=d?c2erU>D z#wn&F)?#{a)C0i&O)!&gOw@!$;~GfSRrkf^m1f0tGNym%K8Gsh-z@{(dB?;c0X+2)14&&; z!)uTiys0K&)GZ%3t3k_pXq^nmU%!hwVn#iKl+x>(4^W1eEp{eE5u}?Y$w~$x2?MAI zVRnFyyWbDP6`7t*J9lTDGrGuZqYXaX8-5s$FAYTFW~HR(XAi^uAwa1wk~dvv!B=Oc zxk4W{K(WjzGu6M%I>T@(hcn&sp{{Hm&Z53D6F)N-XDUd@B(wAy)|{BCVlj#M0jo$e zW>z|g)adFuvIRcdie6_lK{!XogDqeCTmsn4V2dYxEuz|vBBBKc{pn_yJXg310^$ov 
zF>BqT@YsYIv(^Y|b|MoM4jXlc-Dj2os@9Ne^i@-{ggRl$6ut6S_k6qO{C7)5d(V>} zXBa|J!5kI5%V&CA9PLvzi%WPtW0;0Hg2t8B;kFY{xE_|8bqx_68pRM$zV9N?#H^Sy za3P8-<>te~iQR7v`D%yyO<=EzG?xkj8f#SrxiN8aJ@8A6iLPefuxzj!c#0>q>AR}@ zq={^q%nL-Dncwt6$xoNU5l$Fj1FBAqr-pL+_8 z1U7aGPRbwsO9X#9Ondd{$nr?h@grAWXsfA$8G@!z2=#F(ZDrLDs&) zgrUZt66X$Ut8w)%f{H{za8X-2K_-`?2D*=IawZeTEN|SqvP-`bnvXiakib;(ljC$tpwB+Xpuw^v3I73rX&S3~G<@JY9-MZM^8#CVgf4WeiW zYKxJQQUS#>RS)lpR}O-Ko!^>QC0E1AugK#i1i$#kvzkHIY;P)=e8v(;NqaW=)SRGs zg~vg!Xm`UBDh_oHap+N?iTdmOg)sSk|t#-E0q3Ex}_X7T34YE_FIA=5`nd?)&$;YIb3Wp*))FXDQR?S zvtagPC8aLNgQ{12aRo}i_~0w8NnEh!4EwLFWYVlkLFnU$cb4yoj#2dk!($|Q$CtjV z0>m*3tl1p&6I@y9$11!r&Z@?VJBCkC-Oux(B5kz@9vCO;vFu=?tu9CEG3;R!5tb-E z3VE{F!8(;qhBIk|!Q<9l_Zr@rC2+TzVj0N3Up_N@HoI=}(>8I|ij)@_x?{ z$3lv9N%>N_sR7l*QlsqeF}NVs<9`vkmSSb?a zZF|BtPickGYb*^&wF8PKb=`EiNjWhYR4Gr#2}O&mN@ZQ{#Oa`}G9(24^x% zj*m@IHOA--eb{7#lZHOJX%OyyPN-3JNx01t+nTn=<}FrjRZ3`)CTAR8i*6n;+pHz*f( zN`Pu3JtX8e*Mxj@yXiUv3)VLQ1?ZOE>wAwrlat_59sq{=>xJ97^7*`yv!mNRgJ1KQf^=;A-b+ z5f_S{$7e6twCW$__;+tzCawzSXGp~aDoye))r2~QDcq}4f_-#_lGI%_j2pRQUMF3s z%@!$eDu$M`EdW$P%J2+3r)bGP`(-r8FF4?Tku9B3v$v$wh`a{t(CM~4s((vH74d|L zNzX21;WqkKw1K&HR(F*MQ$5~ggEMg8f@(z2z^k}9USUwO zAn4K$ts(JKHqgL67Lnb$Ka_0%Qs7cq#(~t53UrW&DSuzx_m`t*r&0W&7^*s3Ip@SgYTrNQbJNCt4OJ|h^z{zvfo=m1B!x<2H^bslD6@rco=QH?y|+jyP2N6Y6qu`ooDmtr5MWQ8|g@p4xpYpKF$gWlFCQifZHwv{-;rHMH9NdnEa z8#&~5HMzd6di5(u@5EC*wU;r@bM4||O#3OQY^TBttLmeEdBQR`VF7F&zj9dWQ=r+G z4=|Jx0t>L*`pYq zx!%=bi9TgD5p56-Q9&!?CGH{fBS3xGH(>5}FwSoyM$S&a@Ly_XHR+CZ*kY8~G zDNe$+BKtl5B^p7r1{+$<3e!{4o^JYjK)+r10ls~D^~5h@!e{}2Ls!onoR44no$yvG*Fi1k#K6(o+!SP4B#Vh5w=K>#Qc{vokUhP`?vi$q0(k#1_`>ak6$6R_%zjQO^71#4<=LmJSOXZVHKR@s> z^%;iokZ^Zlu5E4R+7W}poRaVDP`Mm+_r^XcwQL`~COs)m!WRuYd_dmWUsB>)iJe%{ z8DBohiO=Z4KP9#d=RlV`Siy>30KJmqHNL~@^|Z9MvxfWY@QD)FCGPiMXVOCDS!peI zI?G$2%;eTwiAjB~34FgRn9mDB_LJ+352tb0+>z9?=; zW-cQHNTcy|%&r6sjN;#>s2m~$ZNZqOd+VOlTbHJ(wr9rjO~%IDT;EL85%%8^NmO+u z+!p@^mNfDmD$AFqV>4ypeCa{6Ha-zQi^6>S(Jbu7Xb32iG4n54E)$bReS2Ov4KPmse3iv;g=Rb01i1M7>KO-BOn%g8XfQaw< 
zGA*w3H+VX7RSEUOxxNq&kc~!Js*K00P6`>@R;fv0xSqcgo0_i_sE8DGNfirSJQ`T$ zm=3I&-R(uat$*QHP~laF;SLBLN7yhyoGc{b@Q>e-s8i;cRZ6bmoA;FZlKF$`!J% zfHlFUH~cEqtH3;4?c9ez-}1>kE@9can3PHmP2Gf({8J#78P4K;&OtVG0U?b<5nEc) zYFH}{N25I6N{wr-8VJ8~f)T@LF~z}`1;Vd*Y%d~z{*fR%o&ib1L1lzN{gHs|1CvOV z&Xo{m@nq`U8x-cy3BRw~BxTSTa!An=?lwnJ(mI1Q;{A*>%&sgT6*b*2UYre;J5uugTwy-l$7eJrTcj&Qq1Jo1hUakao*Us^U1>0aY0boy6jl9}uVhHL)` z!S+MKUg@#jf`V*uRUC!b3V{1DVnpuKN0Z6;h|mHYL)0#hfes@$5iKtNYL}n378UNp zzxjz32N|KeHbuHgeo24Z{{M}+DNUvlZ(zJ3!+iNd@!!Xsl%boUkg1{FzXpc?1l^Px z@UUDAsT=p&KkXT1O@WHEw8=7JHYSp>zIZ^z=(&v~Y;ov-zOgfj7VYCIThmX&&52eE zHIUEE%zV4+DP{5E;Nbd;-0mg|xAfuqAQ^Y)N@=OR-pvLB()s2u1SRFnXtK})=?Mky zpZQCiTG!UWZHn0b_wfK9(ep725B%%_5tNwM*}0ny1!HrqvL%G3n^~>{l7qZF7?7)P zHau~mKNn?1rOctVZ_j7T7*InvO+O#P%I(dGEY)W!koGNI=ZS z*B@4$C2UHvxJ?Qz=&MF@zZ#awP4tulm_1ASp52Wy~n*175(0Pqkto z24TvRO|Pv=l4KU7cBe3#S3Mry6+?WKFDJ_Cc{sR^SQw(#I0#nqCMU(THz&8GdeGD% z{4$cm!DC=B;`Lm@MoofB-eKu`MpwZVY+k7P0LhxaTJWF}zw`Axx*_Tl)t*ndSQlIx z(qOJt?OSG1HlmnOisD_8<}5Xu&Yl5wsa0^5l1!45VR5~gFy}F~b_=T&p`eZ`u}YzS zX4W@4Cx*~9rahGaF&KW&mpLBr^W0<}GUpA#pOTYPgzTmB4iD@@(-f2{JN6R@wH!4(?3}H zW#AZ~X>ae~?)J?o9=7RXq27BJDeiKur{u5gtEPL&+_#u%UyO-C9mPi;Hj@YloPe}k z$K0^h_y4+F)lxXu-US?*P69t0WdHs1wX-)db!LzQ{**NN=kb~<3I2ccQbI^R@=y|N z&gN4|M?4h^BCwPNB5^6LNfELH@^UJnxn6hCPQk!iqWI0VwKYwO$wQw|^5yj?H8%0s z$TO6c%B57AKR%)rR??&}Tc;~uvHB1jNDj-yYONZs#f7XyhI^gK2IL#$tU}H`LL$G` zafD%{?tqS#dAk?|&l7Z$Qp6)bt)ZM$lk5|tQ*WiK!Kflbp2p_Src)r0&6uO;di8ts zV~5Iz#Y#Yb()(zMUros-zzrfr5^*iJNUo!EYosANb!4^ciK3F zHq_80m-;&{T^C|W;W&CI@~NAbUJ`M5-Gwp)TrbXh(oAmqH~h|(`~6A!eF_yOvPnRV z)*%x^6B#0<)aKLot!-4fW25+mZv$=hcir}p`x@WIkxz$s2LHZz%8n=zTMX=6X1y|y z1us{kQ+;!K|l(PGJ?|1LA zP`L@8ai=~7H5(6s=*oy(I3^5tv9;4XGuC421+BybzEjVX)%%f;UJXih-34GK;8{_G z$JG!=Ffjx_V_y^{cWWdYX?l;dBFX5>i_M(uWah*YHDX}`Zo&xiGgSpMdx)6 zB)?KsM%KqDiB{KJhM9Du&E|z;d zM8))pnS&WDsup3k$<&dYh0uiNhSP{~bo&Ka@|uDsNDL)lwKa)$pWvjSlmtr&_vA8(tG#>Bubi=d=IZRsq+7Y$KvQ#fXw>pc zt39qH|9C|;oAiG6?s;bfCmSw9^kaaR7hoZ;3~x`R7v(9eTcRV^Hlk={E}UDeLE#S0 
z<||NSK~Azjm~W^|fj)v$9t(0Acque@b}XFxZsKQvhr(q!lrK?*f_UOCb!;&j1cJOU z($DUQCV6NQPb(3<@B*7`5ydtxZb(37ySTmMo??~(4fI>m6YoE5xOus7q9Z&wQFi8k zYx_}h`D8*?;&&g!W)f6z*)PAqT+3#{ViH31q0|8)!R{!TC8-id-Fj~l0s4*pdh`?F zIVM?*AVj!ruVb8$KS4dyK8c%rs!E%kJZ7fB^WbZt%pO%lM1gJF;p`Pbsf+R$0_919 zZ4-T*9js?nB{RFiG~`64TXUU0g{S_HkoMQ2W* zTyts0O)N@#$ZF#Eb87`v#rqqN_U<`WswzXtIe9JWu%qQqkhn6(MU1!BOZ1e|(ln87 zvDhHgf+oq6HNe_1*CAj&Bi_S)^msTsDQU@eBWm>1eB2q#kDBJmM%6VmQ5e2*$(Jvg z4au{$Al1?&##d|8BWDmnw=?ps8?IxbEG}`Cv0r-OA(W_$;pZt_IHBwX-JUp{z4p5N zq_A4qzmbnW02c{I}!ve=76;zFOFG4`CkQ zdR7y#9t-~G#B_}eTufl&!#AW5V`;5PA3XN-CMZ{I>|sn9X8yYA@wSih8}J55tih>*ib zXhNjVDo_<+=?MNr1%F32stGVhieQoFm=p?5IM1G15V?_w8l?zbwGeX6#VV~SRqKmh~@Eu1{fg$^&?jQ+aPi?>%53?3p zMo|S$9AHa6pM??X0%$*4bAyutdgr6arv0FydnuWP%4kL_zvcU`VvNmk_XQ&WO#RLP zIZ8IpKJuY-DV5bHlb`2j>iMwjmrAHMJvx5+<;bOkoIfEix5##wld;AEN>R7Fu{s$r zKPcF(j7*AQkr1hd3QCHc;5>f{Q`2o=*hJQ$dnaM5f^$wJ(JZKnf|H_B>;YC}Hu87Q zn8V+Z$KOqU#m(vs&kreMdkcA|hC?rzu#gxDSv^lb{-j}%@8VV}#N}^5DMp*`G`|fu z@VneWjIhdJUX_e8ms>dPYVcS%OyGC?bbjFI?RBO<|16)Ow>>S7TMUh|Q`VN9>RMGO zB$mNCb(goLL{1S&XIV-Ujc}7mFtE6Gdp;i1piT9eVHs%FWcxUV=vw!oydYPAIHw*R zCk%L^1zfwg&6!&{)kO02LyAdh$V_#NuE39JE%Hx|CF2sKka@=gL>fjde0Wx|qT2FU z64y7QZ7&`hoJAI=`&3bx)yO3I zbK39aB2buV4J8|MA!b-6aJ_957<5YYY~;Z=u(oXtW=_ey2waidZ!@y0GcI)&7&FQ` ze`yN4+%fQap&J&q2BKDUS0m5(FSe?8xF~riwx;MwgV`-(Np9@q|A9|Oq*=dD5MM1d zYhWN*EvUKL^rH{&h|>(ZneT`&hCqPLchHBg4YDwk?_eORgM#R)g~jUVm-9k9!l0qq zNH0~$3~iG=_oCz1Ft|fV>2kR%lh2^`M&P8iWAFg4sJ76sig5B4=ylD?=0Lw{p&$Cl z6*gVDo5lt^=yAJQ-D8SD*}nR4+9h|lKnRnOb>^YJk$ zM_7B6*6>|+fTR;-YU5fdtd~!yKoN}g4^seQaO13aPTp*q5S@~Wfg7~oFp5|EPc*-R zc^#Oxdo#xR@AcBrGc~lLG}cs_utGRxtSC?`aW|kK`lwZK4j|#Qm4V;liC8u-I=Etn z8}V>QiCaUFv*rRK3T&Xk&9W>y-9}XOeR?f%VqF7y%bj{p^4I<0jw7k`bmA=BPc+fW zSgGDsokij8wSiZjQmZLN7*!zU-w$B`=0)gRR6r6`X`ZPWPRZ71e@+m-28c(BZ4f=kHP84scA@V_^n^oV8PuHfhu3D(@IvY^V+ z%GU|FW*zcZuMHvZ$ZK7#JSI&m4Gq^#2HqELLM3IjVx%Y($uF3o$q5tQMpp8Sy}hOS ztd_lIb;@a5QClLFErT1DY(6s|gqIW^UU)l?SJt^(ZgeS1@mql{rllR~8upfXeJw1Y 
zDOeIPVxd!}iUvl4$m3*$@DVD)nC}E@lPUl|1IRtO2~bNTP2$YeCWwS(`klKR6j`H; zZ|Rm;y2u+?7K2Ik+?dMgd|sp8M=%@3);Oa@#@I-0jgOellYi8=^^E1F(fftD5t zoEc%HcFeSuvx`zbGEQ+k5WFVOb%~$K9%TCW*>a&N-Nvpv;R}~BQTB!ZWl_|4NTu2q z7`({9qmuvOpyXfQ&%Yu2pD6Ll{Yt{W5v7Btyl!FHB# zsA!B016Trd?xO5UUhG|NeqWBaeRZQo2-`XIPI%pKl_8Y-_kF;`MOfRAqxXy82%GMmY!Qyxp_uMDS`h-;I6R!dTqAako<4y_CJzrh#K(v30Ud@`?JIR5IEL! zzO4nYEm?F&&*$G{ zju5X+**;l7V|MaBLa1wXde4jopLFtgts!BRh|XiM6dC)}OwdW-Ii=)$>%XL!;Hk zm)J)%v{ZHIDc?mg3X<{-oXMa*AIhwm>D+!gLD!3v1(}YE?(^P&8K<}8l(zyVNvTU4 zov>GIx9z2%bbCf|G@EcWs{9HjSyz2WGRKmY0HY;!Aux0^KhYpsxnilHVqJdC9>dLk z2W#A$M+aL|3fhw-iSG$IWn~MIs&6P>XcO(=stX>zEaFRX!_!S24v3rA0E9hMBt!+7 z?}^dXi@|$_7Vq9OZPIHp3*_z%?~&Vv@9x5>+b$udF%yyCOb}j>G2wLo3aIqJog-%T zs?xObfYk~QHkUBEx}nh%mYX207a$cYy&=zFadvIz;@E zoyZf!xyesiNUO$v6C(7I`J{8NFmv1%^~NV2{6kBfznLp`I>;@2Gz@2oRLaR~Z}A=jZvfcFczWBly<#vpH4MORKz=;QSYB>b1MI zuV*z{)2SI%9K1`?cy_2iuk7u^F4J6>aJ)B6A@JBg9AlS7cSAg0R0_D6;iZau8d>-Y zOqW8R@U?Q<0`yr7pU@XQ-E*z_JC#0bmM#7iUN3r|ef%df?%zH&bRYr`8>lXw02RIcK*UN~Is3tT} z)86j5izT6YTBGR_*d%d?H{86)idthXK3- z>?6cpMkorDQFjFZwI!`nH{To(a+9c05Mlc)B6mR$*6bEzaU?sUDeGgM4{@|+Til2o zhca2@Q2wMR($nhhVM7wAg zC3NRHuX8QKI727P?D)(Pd`lq5FgX@qf;FE=qX?-K%=22@whgoZpeg&)`&4O&6h`|Ju!i%k zJsCT>Bti@gI%K<0Q3ZL?g4(kEU&7La6xZHsW$xK8k-1;9bRnV|suoUp@mE-4 zphP7piRL~G7_mV0!#E1xB;hY5q;>2?sr%N}jSw2Mta~-HWpkJq)#DNyIpf|R{n;Ba zB&%;!3_o6Nq5V+BJ7^{hA5w)@q?{41lop&8?}DIem(TzHZSSQ#dJ{IRZu2WCv-pG7 zO0tKiiA;GS*@|8gyk^&UsRu+Zf}GrJQ54uw-x5j{V~qP?`j6)Yf94rTqps(#vfcvb zSQdS_g57z~8uD*1?o@V7^zkJLW|FDiKR+=~F+;oj6u;YA4EaFtClajQCTOQUdhWgA zs3@lxBh?1a(NWnD6Q5(!p&xc$_R=igTkZzE$&2miyu1z!8YG>(NNhNad7fOvS2Kj# z6nc8pN0QO}@L$F<<75Bk~pTp3*6-b@&f*f@nrzQN{VJ5VNHd@~g z7@rAH$G}p3V2q1~$2Cenj#O+M(ED&1kc%~L-!i8?GJixQb_aa%z^hq{u30I#ZXL8C z{0-$KPTCjAW6s_muz7c&|L>yU@3fasMMk0mSjLV3pV$9d6sQ=QJO4|xP1;e;6hIo? 
z$-ePl-2>D!Ir>;N^Fxx>M}5^GTIV6CBUax-xcWbgy<>RZ>$W}I*l4T`lg4P0hVRp^wa;FAowN4;{q$Vd^LhNn9COS$#|>|IcIR{HZzhZfoj^d) zoO9{zTAwl6+xv!#)1#EkIanX%$IlSBS5OcW;`9vKB!+r{pBU6p4U)0VE-Pmym4`%5 zL5B_6ZUoiYNJ`iuQq+oK$Hn8%|4FY=bm^*Lc4ZVAx(S2;P6E-w$I-WV~g+B6$3rQ%8^N#B&{ zai_H_4#Q*#GQ9ay3rA}N^tFdmQgChpQI4wj8Qf&l1D*(_QUp8^x-``sj!jed(X5q4 zv$$qfyp<((BP zF4qf@s>|7VSSwqwlqy#q;KCvK&z@w;+8~$*&WiM2-}ORQ=xdW@i=D!tbZ-O8k<|wo z0;M(!kd#moOQEYyb5s)s8)iqp^ujStx?W0HDoEcylx{RPqA+`Q7z*<&`EMB9ijSZ$ zOM`o`N@YmqPXU3g`8e@4UKx&`YRwZzY1}!k%IHR&LK9G6>jn|wN#uZ5srtj?` zpWbyp_H})3LjCzcrFRTh>GR@t++5VHDFIcBWlh2L|NNc5&@? zeIq(f&jxOODB5=?Q&tyi@*z4$@oBh)r_Ss&PjaSJrVtf2l}=SI+-mRVA4a^9?Tp*` zSAZmkk}jAzXERn$6!-d|XfiJf^T%o+zF1GEXy0TXDC0R8)3zZi*`_R>oqE(6GccjI zMyku4awb@4RNv&b`n-v}i-HT6_s00>8+0v2ygjmc>fAgjm{AbF6kZ$mF%spVB4C01ULcXz%j%2qBR9@q? zmF)XdbpDJ__(k(?#E7Y7!fQu*Qct7yH(ZzX3U<2XBVm-KE{B^=O;@JeNZLLPAL%XN zpnZ(Biq~*fjYxX05F3ie}~lRZ+X`6IyQfRB;qO zQ25KKzQv~jN31&$rykzR@m%hw)#H;`xEqUO+Fv`<5XMoZQX)KT&#BR!+&{S12H9@z zcEJGL23)R^F%>?BR%hh?IS#yQfj%9Tc&Binty^CSWci`bfnwQmQ00&cN>Iy973rYC z$s~tRtP_c<)!*yFGQn3p8@P;|y7k1@m;FE0t^?k`mBxZA!Rnb#7uuj>BU|5MIaMhV zhHA7P`x(c{8(#VBwb@azgdMVx^bNUeM3jH6Fj28+%4LBZ5^$TL_V^Wtnlssrg-igh zLJA{$ySwKyI4eQv8kx~3gqkL|6@@BTEglLB)zqGQ1N#!!Chx(Us{P z4L1tHk)ndAoJfnv_UD2{zC@c+oef1p!Z-@)=X4_B(n+X9lKylG?K z?Pdi@hwIM3DVr9>{eX_eAUq4DSffV|_JlI41hR)1 zMihm)^D&5do)VkP6BvDylVM0R&6dXcHHJH zQrVTMqXkEnmQ$<)$EDw}97XJj*br$x+uFBqgz7WnK6U~#B$N?3xq$}8q2(yk-uQ<^ zwPWVEg{L&eBH|3=HWKY#?`@4Y|uI5NSP#*cP>*d}CHK616RVMSJl zq$s9p^$r@n=(}-eMqV#h(_w7&UG`sxd?14k@jF(ekcJma#FRf-Cub{a5HTRYAy1kM zrB(}Ey*3s}MoHd{lZY1QmoV;2Ow$QMIa-pPKq?Mv+Hz5-&ufFImb-x1g9bxjB?=Q9 z90h5#Rgxg6Wq{Wq;3jZRhzf^1QVLdlz%jse5n`?0$$R7P)lHjIgNzpun1bWh!aIs9=)a54jM_)~88ykK3sl+IkHLD700(A5BHe@mRgFPL%)_Hv zZVDNgQXIWRN5}kRsE$>+$pqo+v$-SZBiqf?`fZ`))Ym;BG1$Iq=2{N4bYn&;rZZR> zgHQv1$H%x4h(~Pu4uw(NuwuHwHhRps!246kJuK4@RB>AQS+dHOBN#~M*;;( zeS0I-hX!VD|b~K)dGv@_gguy^~QVjM5CIdssn`jjYiR@{}zg!id#vxW(w2GHA zj^-)M?mU_>wdH7S#02k+-&V{}u&FJsla)ly#|Q7LhM{(*tR7+9TgM 
z;VE`4IKx{3dB?GSPaCv{tD0d_6OKPhSr(=W4Mq(^FP)UcIqsu(+LktsCCF#$h2KJ( z)%t=)DRCI;p#!E-J#o1GY;%Qqr*ZYf{1~Dfrj3;+@1E-@3A@#2$ytQMRT)RQPj=V} zA`BQ+L?bw&DiOd6UOlygPYFJwY_j+=3VNJAI~Me{GPFz&u8m=cljpEPYm}N?C3KVv zva)-h>Y@(FOMk;OJvTXkT_AV!ly3eU*V(2nyCvS(K{(b0ugs!|<{*reYF3%W!R4y$ zhi!_M(VJOiDP}FtVA*2H-f;Hg+CJVDud8_tj@!6n7n5jkr3*r*qP6k6OYwSnurt%N zW4E8Y7w`8C`^z;2e-^6;v9%@vb5?q?8{$?5q!wD6|Z()ju4Q~7X;Xie%INPlE z1fUIsg8T4+{=Zf4U;Jon^uKHPq_U+g7CXu-F8TY83y)GP68>b>?PAq z*jP5X=d;3LJ=6)D8ngmX24x{Tb<~ZL{p4T=cx~oMB6$OsBCh2iX_$+K>T%)I_y-GI zhf)F}; zh2dhcM8mEK6pF!-#-tVP)1B=Ck3&;OrIc{;rcKvNz7KHeI|jCPh!U5Qg~|nSv5aFU z43r%}xs5Q--*2dxvncOf`F`^f;4g~QMeKcVY>dum~)evKU4 zsoT$TVaW&<%H$i)%@&PK7vB5M=G7n40iBJgfeP}xU>m_IigCPYGx>gabl(R=i!baf z`e-V5A~mo13sQ%+yIu{9Fi(ZTw-}`->64!JReFITi5iJS2GwIT-ilp;KC$s|va@qq zH+ow(>NLw_lvK;4lfE`dGUYE8o9RMJm$8yW^%#t;Uq(kI5+Mh-8{C$?);2uUbkam; zd+v37$|`G-u+-eOz;?I5G^c0zu`q^IUfd5hmkE-jj72KQe~yF+{v_+4o38rvyqmTR zd@m@zj2q8WpO$(GfdO1IA(^M1;Eh$SxL7((*Fqp9+8JHe;za9Mg{ zDxN2kII?58qZyJ>e_DxphSdJdL+aSgn1Rz&VhMixh#`4$TdO;%y!4vIbW!7ss#3NN;5iJ--3JttU{NnI7oCG%y|OlG`s1@182^Ose$W#L#P3n}W&TH_vTzGfB*+JlCG&fIPXqPMo zoNBhTTo_unx$9 z9&2-rt%mlKec5IZDZxK*_{^ZO2}=;HAxlIeKwq>~*;*{U=&FOi@lXg2^{m9U_1}5uQ_k3NfgHLY{yS_Y-Np1`g%gSqkkxsi>Y$il<{w;YRqZmT zwlXkp$4fW@lSV5%-|DNM>~-&NMfOVV`C^u-+q!?IT}Zn-|7_d1II0v0c;iQ>=}L_c zQKc9JQS5*%3c-wt@LD`+nzH>U00yTPB^p)0`t;V6UrA{jeO6=2J$bgBs(M@i|L z+j6oJrd=X@wNZSegCyV!N+o=i{OjcTkFnkK-PS=5!mEPK6nk2VMv$Me!mC(!Awq!_ zunMgg`kV&~YvkIJ+t!ufGWQbX-(@FaOO8CN%TL4LDZL49Fu}E&kQR+gmzwWp3!W$z-vb zdQ`!y*K#g=Y$L($u#+C49rqou(bWMt4o(~4{i!M!c@(*=%d-QzPNzE(h56d~aQkTo zO^^IQmA+}r)_uN`$dK)>T zG>r7~00RrF2_Wh=Pjjj^MFS{@+6Om^Plx9p>UY}X_33Q|Y19m|A*0!SD<_JG50@my z7$b&V4$r)y5h4VyA72;uLnubU51Z7!WPgPCq0U3c*&BbDuDRv-JW~(%m7}f^ZdJRc z>LNnTkw$dZdxVfn!9D8RP_l-ALKX2my_e5*CTBi!igBCOfEY_>p&_~rD`oWVlS7xJ zQ!)|s?zHYi>!g*)kBt7ISy^42#hFalyIu^0V6}o6En>wft>T8(zV$v5F`q!SHFX*^ z$e5It2hi#(eqf=#EO^ZPBhLOg*AFp=Eq9zRh5lUw5|d^T%*#LX&U`6@kCq)?lTCpp z!E912B!{y7JP)>#vbr1htkw@e7Qjb^)mKoY;Fa$V`hz@uDZSq}sAKyOSQpXTf;iT4 
zPpYt|?Cup&%gk0~xn+n|riQ%~(OFxvkG$w~eQeI{s}b9jy9=!=qY~p>OBdO!zmqh0 z-Jo8`XF8hS)@6qKy#K|2_~nj`8w3S31KcrVz&gV}B)0uW5&cht%v$l%_8T4It4`T_ zKlwATHI`XPrPE@eabnplnHZ>w@^|zEoL&n9)24Y8>E|t1fNSA|1|7Dw(f%-XG_|^V z0+q?Hn@l-aU*xAU7hGY}?8+a54Jll12Ubh9UjgUTEBJdMhZwMslMJapiuAOG&+{Vi z%M(U*$}C9|cW80~ZEXfD zdecA<=h#n{EiWRj8}TZpSNzdtc`o%vvP5y&xhM0Oul#F5b%3)e#t(=BH#`2mjE1xk zc}uy+_T6x+Y1JMHg=D~sYm@453P9s1NG65KI+TEw9M2gCeT(9m>t8E5M@j5v8!Dtf zw-9m<>49`G1W`P$xFZWzT&ZGa@4UG(`V8690t?KULiM@)-rStfr3-2HQA zkEb92o-lzYWzr8mfmUmZ;#2scP>9cH4%8AUsaIgKBUvL9$pl6>s7d&Wa`_Z3O~V!9 z2H)KrbnN;g*E~!olfGwW%eCuLp)zx+)qX~3(4#%xL}8G~pJn4F@KQ*72LqU9V6r>? zcH1U_<*ku*>GS%0@r({^_EORx*jdRG;2_MeLWpK5P<@Pp&os`VCUf-I_v583lrj%8 z=(B}guTb5WQ#MXF4~#FL<`*ZAHl>>%?A<&@kms-E>-4~I2}a||AN0cPMIQKRU=tN? zhvGn%tD;{T?j;@}!rtWJd@71_14^2jn~TrqP_qmyBi1iIcp!jC$^<`HG06$aKGtTD zm{Eoq+*_$XUM_7k(?=!jlH;askg+(t&T-6qoly#}5+YVJXy38fl9`4;m?A&|XH@cw zdw6UjFqcpH*51S7S08`&kT#^^G2E?vwC#5=HpBH5TZqH#Hmw*hn;WJTe3g3I7N;%q z!R6%PFO$Y!pTHo!|3EDO{`3F~?jP3C{`>@#ZOjb;TRne~lUXX(fSg#wx2futCqZy7 ze7dyaGVT);9I;f67Q-|5IVloHG0GN=Hbogyy7T>R*M~cQqrJkiA;W=<&CU90o_5Li z#}=?*yQ+k&xQfQ0rY*l6y{kIWckkdhXxIzAJi|@`PL|GCo^lkB5hY*nGc5f?$Q+*V zESeWTk|d4Z8En1`xajc8f+@`Ql@nNlT96`YBl38JGyyU~(2fWCT71+;4w=v7i7_=v zF6S5I9d2iPUOR%cmH3YQ5J*pGOK|)vz)ZZBmI?FTKJ6RseL(jO>OTtBna*`5rburA z@in;-f~*vMOke>yjOJw92k=4yxt_H|rEx3%Echzm=F=A7oy>C*jo$^z1=~@Wsv$#1i@>B(N9%EEOrUAIIB>Rmdwg+(Yc8m$n0;|)QSUiG&D5RJ zovt!XaeQJ4ntkDZVae#e07f!&Lf9k?Cd5yTUmfU?Z|1Y&gLH-`m!3(>X-mOS(HEnbacqU?MjYSFchr z4ZXe}&K`t(3Hn6Ulj9zcuz59%cGshejJs>%`7=6x=QJtalky8O0&LYMa|XJa1^UyG z!lrqPUScQ>ijVy9254rw5MmT;L8pz;!qClRAW8%xAJfq$Lh&Ny4}knOJ_EtY=sS_> z)6geFFA`P&ub3A|^@tz2fAioWYG~*gVi8-;CMRz9KIo=#evu z15*l2F(_cOXq!yjr^~d9ciME^{e=Z2aL_!&%{r1`=C^tYg*gdD^*3^#kgp~fLy*%L;VY_VSFGB%nPj@ zB}`JEhGroa1g_Me3#q&eImNQtPKQmx)#ui3~}?ALR#&!{V^@%jWHXekzq~ zWgAU?^wyYo%Z&Yb+qWb?K24vP(@oOH_TfO_yo_+-WDk^!O?`$*{tXBLK@!18j4!L7 z&rs%F!|I+~Xs;JLAMcPA9UTG(33f_8J60zUJ9132qpu@pKHjt>F2yC`MkWr?`~Hrh 
zy@flms7Yxfd{HL5lr|>RE1(IsHawkZ;CdlHWqpg7Rr}*;w|c_S=iLtRwToDdQ@816?|Oy+H5mE`vHt@m*54o>ttWtbKW+9?F)Uj zC*1H=_nuoissE!j4*FJD!AbR2b+8R{d3mqm7)Z-o*4?|QfdzhgAGwsbAHU)iGt?nT zE94rELe~s=?_%S~tX$~N*mcZ4e_SZ8erKvXtn06cGkU<#KJ=aaI-2OvXwxcO}&dMv#pXmSTE^6(fX1@X~ zgiOE)9OwUb7uEHx|4epNw{~1>`S_Nd_7d{RCto2*=2Fq4OHx*RJzFCusW#K(=Hq7q zD0HN4pj?#3`S(pgSddtFqS2ADJL95}f#!-=&GVT7I&I2yHn$h>^mZeKi^kU_w0tAb ztxmV=lB3719R1IVldg2Ct>0pEk-I)Zr39DAjU`&vSi&dx6F(A0M(*TjVdoKfqIdg- zg3|QO3?7f^pD97HulXsVN%Vgd!cYs31bI)q>hq6{E$tWqO@Ioitg&R3_YM0r67nG* zEjf=%FbeoxgN4{{^_$((*GlTH?v8bvW_m}FVTo!QMp`Qx+v4hK+oR>_*(;6ntce!- z+@H2ZGspJ^5MTMmk(&5C7@NR~{3@A-=_^KVD$!I%7BM0)hh`JXF!9XXrtTpq3nm!z()32Vn{ry*)l|Y67lG!tsL16Te5Nu;6KLB%txv(SG5-t zy+wUT={81lHl809gf{5fnM;36_tyG^oJaw<eAhL4lE^AG_Cn z6~Xw(pcLAC0L+NXBjw%N3oGiAOtnL=wTI;Y44Bh@F{k>v3huaSP;N~OjU{>7JoNz~ z-U!z58bk*Aedyy0iiTC{;0toR@m28z{V2XOZ`Bi#4N5CW8nn(Q2_+fdO^^A+8+PFM zgY};$L;f*lYWvoys5~V}@9gDgMF>e>#cr(WNU);(-fqI>i|C_b;D{pl>3II=+_9{NsORI$en)C z+0$>78gv+CF7^^5LS8QTLCcFORnO(7;m?Vcn3Tv)T}hLCRF6Oz z2v*CM%q&-=P42G{q5Gu{qK)gLFMVdHE%D*7msWX4)EizfW4|1bGE65zc@;QhvO~o8 zE(DTLOII)-BeCHS<3s$h+T}pJwZ(9rw}=%x&f4Vr&*n^R-_4%G@hnQ$Gt;X)%9QCZoTp7^S3*c>MIWLD(PN9<)6 zmhMQcM)D?H_U?K%NT6S8P5v_0NT>T2!pLQu{DW8l8DSR9lQ(1S<>Mo+q@$+AYGy6( ztN>$vs0bs140wZBKKBrU_Z9I)bIs!0v06DR^`hS0u*td@gNZp7d(RMGP1Iqgk^26= z(8^vW8IQC-+(kIM^{oBc2L7<;$EFJIl&$yaw$ry*9omt)iF4kEV!buhhDZua*#B%G{RrEM6__ka-)K;@E*0S6Hwt>78z96jcUlcB~) z);M5e?`wBNk@l60j6OeJ@b)i1Zar7V`g#$gqDB{p3&t~&sX_#-u>#5lNXotm85$w? 
zvf&d5=jOr>Sb$y>J^N77ch-LpIm4xjo$bw18)XUIaExQ4NqPh6?0vN09pzMcVZkMC ziWq@ZqF(65AHmD0My0XOK4nc-TS!_Ja>y+$O=ljohDXZyY6r%fM8hVt?Y>N9z^d0^LM9MNJ z-2^wg20(N%m&WA1qs+*+$)hz_m&tx;yBILv@|QeovWbv2vn3YO(i5{F%b1Jt_GxUk zC77ARP4hX;M1~jssIs}AUpJjB)Nu^&M{d}Jt=AG0Ca^@UoiA{6UAK1|rmTV8)#*3p zO{NGk7G$ot#TA>qE1r210LfH#)TK_a^Tu0`!N$ZPnX5)o7x@{?+Y(kSpbtxmkqC}> zRGRYU`a^o$*QUn9@%^9RKbHR(44padIi8`Qa!3CD-6P-$->#4-rLQ7a68%6Mq{+ZZ zc^%BLq8p*Yt7Z@pgTFdo06IccS8S;?`iD|`v#7d3u+Um#Png40;fZi!yZm|%#l=fl z$enqpw3i`fI+_Dx@YT8s1W_bA0tXZdZpRbiCnc~Q=XUWQ^RQrzrKz^MpU*EX-5ppu zIN*wpw_1b^!U%-alG7CJmY;`;h^wdrpxVh)D{3J4P8jNUTb)%OxGM8n)*&m2DtS{@ zTM*PaKb?uQ!L%K{Ou5j+#6rExoNdiih=L^AEMPkH^5VN9v$izTIEuW~FLcHxT)$q! zYE`y$3a5$6GhY@^tjne~`UF~m8rn57M_HNdt#-AnGxa76o$l@g@U;l@9;hF~Oh)9X zVCP~SVvE6r%mqVlx|$CJ{75c*}m>Mb%KX@yzKr@+0gq<=}Qq{X=4E} zl>C3hRtPy;+Zj9j9z6iw6j3~7LG&^Wey^EK(DxVoV&OLz{GszCs4yT8HYU6izKt!J zauh1h@!dl;es5%8)Y|k(4j~}xEMyAz(>X_HfB!2t;AuIs) zK<{iZEde4E0~?&mTk05k)l|MTQZoK-{5u%5`N}${9fZX22ppL{L&!+M5(a1ZDXZur zGX0_CSQxk>0f|1;LN39cW(*iOt`N0}@W^^D5jDG41N8E_K$cO*Bq1p#LLZTjU|tel zvU+XcHdW}A5%W5>wQ7MA{_sJW_Y6m0-JAP+e~`8DeBO3mG0e_Wz`vf3m!$Bh164t+ zOO=MS9YVt=swoqyGuMx$C0gdnap4bBqD>XbAl;FWm$_7q203NEuCRg5*TEbQQ{l7*dg5G)+-nE;rNOxJmWnz`)_?W29yA zxhvo)og(*V521$&apAxaNtSVeqQ=tj=`r`<;47H(m4E^=m^sQoYK?qjm zE^XpW&%x}ASew;3-5(2tSek(@diyRHNx5?jr)D!L6!DfN2{nS^>m$1fUqq60KNm=- ztf|O|og$h>f;QB(rSH(*);7m_5PYowqgc zJ+uza-9OP7+VyiJi%_RB7}We%>jzc8g~g<%f)QRSy!ucD^dg&a!ICTz|MS@M1HdC- z_<0&2!(GA)V9Q74190$I+7Lp{uGniei|uAK*#cJ^DywM~V<;30%eE$C_Fl4_s2Cf8 zuCH6KR=UeEIxDEfUniU@`*T%F@J<^UufAz1`ex-6j* z2TwB8BW%Bl!lEoAy*O0-y!Cy&91t>LFz?%;_bs)n~-2lb%)uf!Nm&7(J?sV=YBp~7%l~> zB5~@v$c4QhJ8*t$fb+YZBl^xR?G~1W`e~6IB zZLO;f$CrQ0=W$C~ZQ};G!?%D94Cep4KXS6QG`4Z16|u5){a4O|qLaRZlhUvM zfAgNTg8c83%_G%JH;H_davp0*)uhHUz?z?;n5wde!#2K3kBO;bc=6jqmY+nK3a=jC z+slUQmNr|)7rFp!ZC9d*fFN;>porUBS<{!FuyJFMoMKKzIhaSelxg`4sr&^DL^TkB zrD)`~dy_cc1ej&0TA)XC_8)vYZc1Q8XHUQ>X8q>SumUCs;PwH#_$ersFbe$T6>W(E zMUa&ZAw1I~NNmw1ae|kXdM(=!%%VTZ5d%L7w>#G~y&(l#uYu>I9zv 
zkKRNI$Hd?&g$;e!$q_+9@3`b!prSzaNY)5PWAY<|{5fSo+D2?%L~*F+WTVLsID9jw z8uc>V`f^VaN68CTV=3T=M~q0y;~5dpmc|)B?DpVwTN5yB)*iLg?v~bL0>hf|!@47a z)iU}5UxT+d#?6Zp!f2Qcp=)8ELN9HuHB8-B+y7BUpbA2~cu47x;b2Z_HWWB-W?C$q z(jTiVe#m*Tre|U{u`XU^0`@1TdvDyR=|9khj%uD|2n9*Hu}STw_}xqE z^J3Bl&J~M&?bCu^vob$y(M90iaO}5j+Odk6xTUl2%imuZjO8Szxqy2 z6{9G>UUGuO&DnVO@chD|*U{D8LV#k+th8+1x_YWHshz#|+esvE4uJx@K0;a555_&5 zI~|i!*u$A}9H=vR0t=y`H7CZ9Ein-bF!Wyb37npss+R;w(b-cpi`jrVG>uMWLBporf7x8x&k;1a#G35e=zE`r8Vc47Z_*iI3Vbqe9LJ;YEiGiM`jmR5u4GKw) zbZs?R_)bi%Ov(FAjh9@UV*67uKxgT4+oI$HWe8jOOfpB&`{X+BYCKgtq&lh@Z{m55R|x7jM@c(~)@nO=V~? z(A(#6f>Y-=y!%G!sORfS>It`j9NGCchbWdMvp=TS0> zVK-8}fShMM{LMDndOro-1;8*1a6Iu3$v1z};vdIJ|K`G|L}^998F29BPgG7GVy2a} z>~A;pEqo=}0)5*b`sHg`qe+?ohm}L~JKe6toZC$Lr3kf!ukQDY9KF50AK`xbgn^zhEOQt@`39GASIJ$D(WM5aFY%ddQ5$Q z1y8q~No<{r6q)M${h9F#{Uitvwy~1AM0<`vB|)_KK1VPdNfALZvr?*-jPI`D#Aw5} z(nYRgf+aR+-hc8-ga5`uX*g!fFSpz{jZ;IbjJ97{0biAWJ8tgzMQlcUChK3*GAtdY z9tpr=sK1<}N`O)iLy!^UoWSz0`w4opRwiaev-w}sGW4X?IjaD%tOvkp5=jq?VWw7w*^Vbe2XG0@QnY~pSrT(anXVHFBu-x*FkMvl z36Z4P!r*&h#K&gD84>bMXdyhMyG9OCO*uCFLp#%w5~jSHOKAl*m4pmCQbQ!V_XNh+;OskFuGrS%^DS zqiY1Kf)uVdwxZljI^oi+Y_~waHtcQEPZPu#{VcAt7lLL6njjDJe3K)mY0PWjdD@Cl zN{Lm!L!&5Kchc*Mx>H#FvtlxdR8#gsHG@*DN#<ZvXGPX=a$!<=YQSu(HCSH3XkjhUL)7c`YP(R?3rbHF6qt7+ zU*_W#p~S8cAog=_V|ub59=G6}MdNbU7&wD^-D;`%nr8DWds~3nD`@xbkKe4#^{9|g z@td_xIsu3C5KyhSL}^%xY2%5Kio%4@fdT;5u7b1xcumUy)@Bq{9Fx${f7X3pJU{}l zwx`oC)($I&jYJYXlU2dPY`)@BDIWC(=l?9-+nGz)-={BE4HwakVuwG`{cI?CS~cA6 z(gcVE+S*Wb>b|KCeq}&W^@>U&x!1g|{cenal)>$qSRxySG^bFb?OJ4W**dS~Y5=yH zl`v+ap_JxF;QoV)RdMfBR#z(1fsV$dg#C`*vBhf)mX2n=rab|9o6}@ga5GqG8T$fk z6jm0Ql#6XW3pgavo`s#5qyg@lsq^Vl%69W_3xi@)%Vf*|Fs=c>_z%%Xf6}&!zLm4_ zzghc#D_CiOf=&CaU^V}Z!1P}TEHC=@;%og|H~fh}=f5D3H|zR{@m~nk|345&>*yxL zpg`FIIi7CkzD9!@DAFxX`RPZPu%F%I*sCSO7;^jG?IOe*nF!yEzdqB$VC zVVW>xQ?u8u7l?W5MiC#9Gu-7=&)N&~?*f+Q4*|=MP%Z?J4XT7;Fq^LU7HCP~+>+G7 zFzI|KfiETi*%03VxFx0IXs!gvhBSa|=&Hrz@<a<+nFRg&wn7Fd$wc6*qLzqB}H6Y}a zh2vOA8C)cc@Se0HCQ{Q4mvY|d9pWztw1ti6M*4-o$G;#jXT$Lq0z+OYP?rA*fid}3 
zc1L%$0cgr5dM&T_T&!Y-Bf2XC=AedP>aOX6K1Y1bvh1)!##s0E4#1wnRPG~dJwEmN z%i;j#O1<-OhX=*%`G2stuJs#xc>k$fA5{uScmUXY0$}eS_SpZ#o{+7fv$e5}(|=>Z z3Y2_67xkxhE&Ho>mHw?=6`X%**Prc|UF}yd>j92`XjjctV^h0UHP5!~Y12?8nH9lu z%hx_Oh|UyDiU9To3Q^#8(NL_oYnF|P9oG0bjb7s#-!zcN`6QnvCzMW`JTz`H>pbQW zoH;%MN>Io`A zvo;#UfNefSOcla~z=!Ke?f6aYn&Xq%gz^~-{Fix&UBIg>p~1q&2Ez)JvwkFq!~|RF z=R&STl}ZOVku+AMke{S!qKI29&pD@~ky>vQKeUseJjz4cfA4Q5i<)mG{1VN|yCjS4 z7LAKnV1x|NuHJze&JNhNyUu;HuA%YNFOdmr?D(8RU#=PnRDs45r=%w!P89}Rk)XLH z7~o9mVlh*S$5av_i@f<2v_3M4H8NFI1cMz$7sod?^uc%YEd0TOcK@SYApqJH52N|+ z7Yn{2_zR``L-0cc2kqtRV59jE>&dEl{wPUidG&QYKilUI?tZeqv%9Pw0${KrZ%70A zx$;pBp-wdHJeqTCzC;L2op_~6GpmuAB141UWzVTD0ZrLPFY?Vj8@u%6MD%gO9DL8f zHN78^(Qj?~kzH!M;q3Gb)NOzYVq`7#$bC{9KW)!^DN!`|Sz$MoVal{*Ah3xzyP(1o ziM3q)r|2&0<=?>ABEi4H1pwnY0E`U(f2*>vjnO|cA^?q+s!2srGxHESRskVC!n=Q02rSy`bGx=peCFb<0l@_L{2bhOzUaUUw%RZT6#?}r!rkY zOOL=HY)luRVJxQi04Yzbuo;{}{=)x@5n)Z}96tEC-3+Tn=PxoR=7SYcFZ^P}l~G`z zErqXCfSOEGApoe!Omi3QTp)m&%mNa!squ>uQMX)<05uu$d};IBcTEfyY2;8DyIgXF z2waQ45VtNhk_Z?yZcy$D;3%NJ?nWXSw>AG#H6@BBszyXOGvGMnn~X&nC&4+1B-!5M zENjp|YI1WUpeBodR`rJ0TMh)~mB5$reEdjk&)`f%d;N6u{R0_30g!@mZ;`YLF z1W9u(Raj`L?CAo@bI@zAPt`OjWOplv0ekud0V;{YdWmd$u60j6EtJ!S#_4CKh(vQJ zT+keJRA&%BmTDd?fjs&)Tz$;U7hT+zr$lg#9@{sBVi4Y>yLCf$iz{ju2D~YGlC#cq z3_1^@Z|=zD`7|J#PP=9%30>1qbxN(y1M89i#F*RN>5-jLN^p<-GExTqn-~RLO~IRE z*l~l6EnHljz`69RI7Vt^o!WPKVkfS+tosD}vqooK$oH(AWTeb2cf~oLNQ(>pzI}B9 zU2e++z~V9hi~o?P^d~V&8h`l*0G9uz!{wx+wBT>QOr^g75b#v&F981h9{@(cAkgSl zRU{jE*U(7}+c9T(x9$I;Ly;AMe92#Q=oL#&5rDZsAqecj7fKO#!_pzO{hJPz>4M@w z9{Uq`v)RK{mgU0H5}V|(jo{7oXkkD-rEBZ(Q48C?LX#_n&xm;(-@yK&L-;@F5JJgw zEeC{ug}*@)!m@%^e}zdJpQG2Wn2XB26bf(mr$HJOjXEITp%9u09P)i3zGYh*Cf`85 zY^8QVfGHPBNM^)Vg>x28oKl$wsNkXLV_-B%!chGB;(dTAX_(eec3*8|=oi34vDZ5u zi36hpaa_Y0ITa;vT%xDW4eJ8{09vM2C%ayg=urQ>2gyG`yVyI}00p%2T#p)Ye~LoD z)5ri(=&>QmfLiQ8t4Je#&^YqZ;nlye<#TDQgt?X~9LxlcG=Zdf(7E@kVu~b+yVdRN zbBy(t7uuqt1_W|yfYA=nxAUkgrybTQ&oE?^? 
zgfR>+i#7$r5bwoX)q?i(i)t1I-KcouGA^{WoA#n#o)XFMsX^DAW6p=l4Bx5BC-{6k zZn;=R1bd*E9G-gA_?NOsW;WO-&JE%H_0!V1ak7A9Q`I59T&~)xp0SO>PCF8F;jRbZ zSf?;di5vl#kR&_J_ATsWVm$xLNYYF__)9V7|6Pp7exZ+@A^@zRobQif ze9O`~v4j4XVw~XPazN(3Awcae_f5bumc7s~>nq|F_3)6tQtWA4S)m-e0O`5+2xBd1 zv>~Vc@dR`*47}N66DPQTO@dkT{Su3AU(IP7AFMx!w6R*095T;a{yWrqJ36COA-xeNiqXwIoF^BL>0vh#Z4-?ug?Sl^auL~ z{SnA`wM5OaWpT!*xZQfacKD?npO8ziTvG)7Kwhs*cy>dsM(*+oAY1K=VQb9+sWZ%k z*uru^t&uueA26r^gx1E@%oh;N>(RI}9L*`Mh9l)IRR3}`!ON0=JDPp7N_EL9D?7`) ze>$2t07vsKzY4fu04OA35n@RD%fAI-K`uytpS|2KL5NmEy6*MYV$Az&efPLcJN(Ll zGXB?MY(4bb(e&>?vRbw-y6u-h#U_oDZYYHY**lLpYE@6qmx*O8L5V~)+AxRj&obQb z9*X;ABxL}MB#1)`-;Nk^o24`EQQ#QlVlJ}vD5UdP1{#y$-?cdVbmGLJLGnG|_J6_f zB8qWL1OP(_z&!E4TkOAK_zx|zqO>g_o1^oVQY15T#E-0`NxU*Pro1#v8?z)X#uDQS zdZnmd5;FW|ixWo8VZlz%b*0&MvjeklUJ*fNM}?Jp)WB9hIlE`@=g$;IZ?7OR<3Mhr zwk6s~#~Fy<7JCK2V)qCv{YXwbKZ^5)|EI<7_`!$cMwaH2aJ6QsFan@QhWp(kLj&n5 zTK%fWRT&b1dh9fJ5yFKGsK-snA)D%edW?=0ePamdP{LfMH@_WfVz_9dhw}gGkt;p| z>ha^fciMVR?eWRnAL?-JU+S>vU+NHyAm>kYIEWKDW&KMXlKfVO5YzAgb!hm9Is~@g z?Ux47;rid`@B!-Ul3lw7ph0nK+&_j88mE24#gwd zcTY6Jk}{?N&>?Ci86xy=Iy8&5_!k`#Tbc)b;(1m5O^1#rZY#&R=+3AZ#1c3zDk+ie zzt!O%J#s;Ay#5Yb>2mO(F4PM0;nr(eku>kNHYcb0*p+K{6fWM&V2e(W0sVrSC4dcW zPx^?Yf-wGH*A7;4bAxA8FTW5D?>=u;o{-pVd`kP*#S9fHxk_j1j%y*SDf6<6Z`N8 z7BcjqA!&a{A7Vcakdnuo$W_5aS^*JI3QKI>F$V6Aja-1P@+XhP>A}l>xt@@3XbVGwhT`KE5lzjRu;=s2o(7ym}062g){83J;Oi2MogpZ1`4CIIqJ{OxvB|ZxQp+PP5)uILk)xh40k|&V)Q=yrl&5q^)|ak zb-@9N4pEIpzh@(0@N;c3+UyaUtoitn_L9W+4CAPGIa~{nM8m;KfBIDtaZ7yVR42ZS zF3BU!!JML6I6C$eooEo1KS)VpugyYD5AdA3x&)=w!)bS$R{o4(Moe@_6ugE!nxFh` z=*H-G!abAh;W4Jw7<{opayJwwBefNT@0I%3%fRq~&(&`;bLHn(RasHLMnCPpaGBxV z4L>NYYFPGb^h3qD+Bx7p{F^9!8xXCQz49`CkA7g1tTPqv=kmv3@0z=_k3=jW-^ld# zodRn8zj8|FwlfB;4NHCKXJGc=%-&+6JoW5s_daCN4^QtPQI)tlv^HyC)N`Uo~#kpCtxwd1` z26#a8P*T*hU(&tYDgLK;b4x}6tp*512LP@~_CLe%Url1@hkxw?E6r$CnN)3h6ly4q zLQh+Y8d@bO4oDebv5uHcP*XEZe_JD_9wcH8)#`jGK}Sz6m6^Oii5I(&`9!Os)($rgDskV;~tT@cEpYkq0mQTkRY;P zHrR~`GTFKa^2Zd0*EE0K?@c>r3}5M|Eg`OXo4-y76h*lRpL# 
z9aXy?YXi5C3q6fJsiu@Cj%cQXY$y4#TZmWsEyR8luoM%OB&khd7I9#{MYSq+t|;hU z#uFID`0Dyk?%XDHYi%CUVA13pu1a`6;jdpjoUffBRKC2Kj#lc*Sd|`lfbq|d7rM_f zlmZ3N9}a2S5mjN4dl-&=_b1ghi`j}BRZ?#;K-av@fma=P_5cpdyErD;2dL`75Eo0TDHe7o#39DdT*mCvVGBqh z7{aU(G|M=lQ+{4P_!*IhYREjxFoNi}Lvxm4rJQb7Z`2c&N~c(nx0}L-`2++y&`+c} zQT<=`fTBOqBtSTOo5MZwTs?V@2YmtY&r_<5fwEeT{+V~W6v(B28Yk!I4&J%dU;bql z|8`g*|2hd{^TDXx7`JCtv;`VoPqb~>Gt9%4FVf6p=s}!?zdSB)2_HVLkj9RzT3}7ejJ}m|zkAn2+G-`a&Y`y7) z5=s$*3lAJ*{~8xbqX`)|1=BBH+T~m~TnLwNTg&@XZ+nePL_|mjD9qZQG+C8!&6h|D zv2odiQxmXIVtEd*pGSAYwrmdA50W)d2$P3119@5{@sH06IL(%jZjs68)i_Rq1k@{mm1PD_5yozJ<@9!7`Ovk62O zqcMZcORIq?07;=KU?rtLlyU48=K-PrQe*@}FW1wc?4z8PWzHK}(2grq$qcN3B#gy< z!4uwNVIpkT`?$O~$h`u=$=U5fpeN4M78td7!;1$7I7|>kBdkz)d<`1-TLM>l-UFg=Sc25n+4B``=?tav#F5ZDvT)Kx(zB z@dmwPD;SHb#tnLF6F}0!i)^}*mVt3@XCw@4L2Xl6?kZj=jPAH?EEfx9ty-3$SNfyh zLVj8wFOo1F#y3VWc2^vCqw>(y!66{vfMo3nPU0`UB-*t=C=SDbsZSoK9gosNx6T(3 zGd2W*CvZCTqP8;YSOnA6XsjQ0pU%(BYgSP(X}9W<^u}iC`yY2whbt~QfhBi0mVUQOdROZB&^3qu+92iZDlFvOtVNDx**;v>k~BQ z&>w4#qkj5UzWj`qp={i{Q@3gZS3q-iSP~?#f5JT%=%RP7-xjIT%&*43o+|V zNbyYPqL8?H%H{0F@QU*b;;*_gnN0bjC!k9k0z4o7x2GX&V`=F8ui9vvDC`Vaq#kgiV=3WeTMX7N88NQ3#$GNj)Uq zHN1-n!viqAw_)BE7mS$793*&vqRcWxF#TO-59%S!z*3k-%0+okr35TD#cg%|P}gU_ z=79fZ8YTsHP3g+2ff3Bm+u|NDxX=Y5th>-@5$G%bG2UGTbw!fXedggTI`>&#kwy|= zYq*m~;OcP@$%M)DB`7;92U8$n6Z_XK2`>N?!_I zg6#A)BXp+h@EFJ)PWRQvFi7wTX&xdBC#4Q0%Qdqne;^SZy1|fklvE-?pKexVhfC#2 z*TDd2c%4sGj2cu}4x_mHy~58iR8H$uHQjDzw_&bdf`Jn)YX`}s3WoiW>J2|m>H@f& z3u9pGp1Ho`xOy9{4&HGsJClp@k0FTnYacjcK^^DrG%dE;(G{aG+C;yl_~iLKCk+t~ zgAbx#b+9@Q4nk|Gl?K|~ns1ybOFPlh-Dv|3#*LfPUcq43p}VA-?Jv&X0;)uiSptu!;D=-fwO~(_bbg1$c)Yx}b6^q)xH%v2-RL zK;q?U&H;mEE9}1RaMa~6YV&x`w_1Y{klC@{eJ@_amnbj`C2a?LOuctcf2~3M`*P~{ z0`e=n7HJI-zde8f%>OO_0TxmID|!KU2Njl?(0tD|R(4BG$qzl2)FX#9i^YT8NWepo z5bd1Owd7Am-Ehhx{MQ>2Mapu=5F<|Uvc1PIrL`h5a z6Cs36$&w7A(y-qd%txl0?K?PGEtT`Q7yy2#b92?gSa(`!!gI33>c+;WvTQg43H+*F zk8f@KW8ybQkzt@;MmL08g20d;bx4;*ka%!68S|T?DBl8fb?)C~P?D?3rT?5dp5`R2 
zIM4zV4h}GSjUqD`difuK!twEI@aP@NuqUQa=|859$)oc~gd@r}me$HB0!s^jFQT+NX4G#~n#@n-O!*Xa=l*TTm;NWj>gW*s z=0FDlb>fe9aN)v&tE$wcXh2VgSjA9W=_!R+10k=UZGGkS!=S=}{c;I`%Ka>-<0Z}B zPV~6(F%26tDx#aFXvxnO`FnXF=*wL4)yA}Y*3YQWDs`35V zjYzoqFXZqPcbqm{^u{H%WkQBG+TXH_Gx$7q#^nhm;@ED(E?Td9<|F#0_E4Pg2yFvs5Nbwy1v&Z_a5dGsN{x|>7RYl%D1JG=}OGF$~ z5P2C(6uO~4YU1*4<)$+hae7#qu+@h@LvpCzI*bhpW0sx*_#Ym}(m7GHvb-E%npzK~ zF}z{xZomK-#Xn7K;;9BdfQc#CYc30iKVo8Zn@O_0HwOs<=8fM)48>5xPSP|T(4wY{y=0gAb+ zE)fJkF(>&Z@vfLJKbt!F59Ad~D*lGw+-U(W_U$|T7K#4{{D$^V`0clg{SLpqyV#Wt zgzxa%k3ZqJ_qRBwG76=$TMbDhktkSU`<$ z_B`#%itgV$S)Gpn8c`-dBO2YhL){Co{F;KFg%6CFIs$wpK+&e>C{1` z!~9c4e?Td8Y;_=uyF6RIGlYzrdhty<*>Z(h_HJIJcn5nZe4b&HufW!~ciez~V-=P| ze_$2Y-K}VsnIR%^339<-=}ZO+OKZ`|MM|?j1F#Bs75d*;MN59E34KL%?_aSBab0i# zR)O#vtMK}lik|rn_Bg(SJ^IjGNEq4w6fHfIHsfBYTbCa+wmV_^=ah^qesjT)i}`(S ze{jJ;-vBCl#rP6VvTnK0zFKYEyQAIyj#enBZme&7oq-*IsXeO?*c{)q6WlgPO|e{@ zS%kiTufrp%GEOw0k6^jvdHAGc8-hFf3p+I!QSZ#jpU+tLkBHTdl20{K9g-qkSp#za}yu-0Ja2#U4 zP9&4HA(^qF@H;m$vj3SI*R;Qo_{CQ*1e2*6Q{1y`|6hpu%0CeGe;7LdH=_O)O!T>E zLvQl4;U?2>{Tzl1>8$JNFNBY(i~qhp{Ie-l*$%ej7ef(rvc`MA0KgYt>r@fv$g z9KB>S{n6WlPd#}@z3t;9BWHoT1)cWzPMG}-tUz1SgMH`I&j3OAzvYlW`1Jpt4*y{) z0w}XTOvS&Y!-RjO!#@b0xj#(BKM0@ZjS0#BMfd4Q|F*R8 z0hTs5WIpV>srWkvwY;}}bBAwPZ#8X7(A28+8%4Voq@>89~`7w>?=o`k8@~K zINOqMS9rKX&ohAqq{BZcGuwYphhieRlF~)pav1M1_}f%u{C87v<^PYV82lGgkxt-Q zRW{x4dz+iv6;UtY3x`+p{g-V&{K(zEq{Bb7^7nM8VFfMTq4GFjX@W^#w({NM`P9=c zwWwP4VAK-)H%`&(uy4?m?MU^`_w1iQJ;b))5ZN2Yrn9#xvJ7^ywE87k00>6`UJ8nO z`paKB>B6box=-f?{}F^zYd4@=fFRude+J<@b1QE5KN8_PSMev<^V`|}YY=```CAZb z9u-l3Jfk12Wo_1>6VSbQM;-sD$)@jJ;(H?00wlusE^){sUGL8>kwx>*n%w5}pP1Rm z|H91vttS5`W`=m_lw42_0DHjs%inAA`g=`Y_+69T{-Y+3cmJy%Nk2|MVWj-EpDv-AQ zo{(53oN(@C3-t{d&roMlk9RHRhI2=7d&d6MQ!h8-H?9yMIXVaAN$9*%x#*j~on)AF`*CvZ7a(s==?TxbN2V0`&)Yj+3%{<2;j-`zXv3Q?Tjt#o&NQ(2gp+I z0}`RX2PD98YygMSfP!INj;P7-@eH_V?gpm%QdW}jwhbT7br%(mCm)cc?`D?mvy(O+ zJirDWDn`Og8XK7uVmD51ZmyxRSIu;p@9s8N0bp7oI-pI%46 
zvfeGj@n5P~ynze;SFuIoc~j_UdBBDAY7mQ}IGV$Ffb)KI0Ngu*;fD@PGSTdGD3J}~X;wqS?gCNC@e(F|brm)2m4a1J+rg6?#(EZ$OV^?D z?mJsiN#@u)0wFf66rsst#b7lFMnbv+AbSqd3X*Bk1Ey%TVMWXnWsId6XxqLlB7Ej0 zgy90fwtq}VggT-rW(+&mKQR02e0P4hKpMxGvW2^bY|fgn%M)kIzC7X$sl$B09ScjRY{HBU2!P<6ag9tZJS( zBBEuutnqfc3j$&a2WzmH!euhiV@pJs!TD2Vy4)a$EZz%xi_{&=uHwAoEyn_YK zpTR)Bx%J|Sls;YkJhm>)lcmZ`Dl9jFxJ9C9*e5I2B+f{Z z$(nkMx+wSRwgVbTs^Yn|h4;BHun|9?I`9VqYIFLnb>;RzerKe&Tz^X(WKX8qlg8F7 zFF=;UL8N{WmD2^3bd-Df@r@y^y%#ry!LjjW10ARN)!TahiZL}-h1Z)6IFL)-4y#DS1P0p0hLxp-&HK4G?UV=|$0?L7gjVwc449B) zlYDdfZuS5+vNIXv7!oLnYGE^HrHoO*MD=cLo8&O{rTH{08 zvUhle1=z?>pJ=h$r(-$1-%=^8@l*O$0~(4irN@9Tc-RIgd!TOkE$Y>thMg*#KV4OB4EkL3*y89NR}jAC1y9MaU^RA zx)4?wQ>;tyxS=+euXW@-DmF?*J7^|U3p!sfXjqsVolrXW(k!2~N7&1)0RPx-Uwbj+ zjm_AKM5V$Lf-%(HzGDrO)OeiE8hBXeu|$VFWfpo9*M>3NG&W=yQ;#!3h38UL5biiB zyN3vBK7Sv<)N>_*q=b_50e>Ccr2-0%r$f$ED|EdyZAL>HBel(>*ZTxhIUF5<<>k#J z)F1JBRuR!6cxykGQIllIaAl7PZ0ICAkA2jV{LZnzkueAWOcgWU5z8{Ck@Lrc>v141 zF6aAuQa7_Y7kaWeIhi=?ba(OCU1u7(a1@C}Kl13n3(ktEWa(UHmWGB6YS|oQ-*7AC z3TL#vV4s^_LU+F&>-2TJI0?CqE!VywL&@z)a;8A4Sh1Pt4k`9V%lU4 zAAc$clAgx~X_I}}Fq-RHAND6ij=Mom1)Rva%=LuoKX70%UrCn zX}C~Y$7oDtAF_6DuwS}L8(BDC&hl+o2GZRCyRp*9rSb6^I0qmHHTVU;>*1fK9S`tf zTbl;kwjV$@AH{ksTF2znf%J){Riv*9%Ah34&pl49I-D-SHfU$tG%n1eR5ah+3Tqof zx-1Ra_@z7mc0=IsW?R+u3yht@mgC*=g;f5+`S|fNZ^OPXT|83TYDQdN5 zAzvBQL9|dq#?sdE=Zoswh}GbauBom@>NY%2O0*VF4(0M=9T|+VI;!37MjE9sN5uV? 
z#1@&PTHaSzLt+AK2)LPbJE#()K0=q~_?0EOsC!X-#InRjrH?%<#jou~DBBX83@5mN zfu2>%&VUVh$2`k!W$q)Tral_H0GAs}$Px>n22e+)ixf_Ic*tI!K5ycS?OFJ&*h5Q* zFOLqJaSQ8B;4rfe>n1K&*;pQT4*q+Z2zYLGvD_&}lec4ym5)1fZirM(4O7}aQXU?+xX#=;c6vn0_fu3xjaPYTc?e5#Rg$^NAfL$RZ4{UqF>tS#4Rg9YZG6W~ z%CZbu;4i_j2kBtN2;7t!5P3=91%T7z>}cF55EF*|s9-)(Of%oX#aPBr#>GOacfD-y zOO+uYwhIU1VaKR4m4zkj%iTzrbQ3uiw2n`r_HCP%f$KsbRr>W*3isByuvQg&o}Ec_ zG5I6FuJO=bX_`s8qfj&@3P>7>%Al8n)V>>qqRmXluhCStgzFCN%GiGu0j=9JQs6_!ISJn6Wza_Z!8feUTyXi_qu(dX0W4+659+2UW8(JSqnDjDxa6`}BWGQb}GRByT_Xy|8vvTJ{obi59tG@Ti zfL6SW_$ks&s@4bf?nu}ap(Li-nx&_}@!s<+>=L<4jUxG&qE~w*xkwj7vW8$-YSe<1 zv!a8eoTNl|`%fd#m>$hI!v#{}?yFd0=H0C(3sKu_a};>i|7if=XN`;=ms-58~pK>PsEgkUvB-?v)(~2mY{r@ zca?s53}M3D6*KOGJFgDuio&6D-UAA+vCO)_(|3!H{CJ?wca-Pt{05?s<4fl^du+VR z2CU8$MwwVV@K0wv>JLn(U^Dxzwu5d&^^0Duf{q#jSaaR@sH}tq%VHwKvgME{{ z(iQ!E=La|U{Z>ysuai~n7U24Z83nlPmW1?)8A(|1s-kWCKYz4!Uu8(XL8QwO?Tf_Pc_8HmN(DaFWF zZ6S72_~Is?(ZXhLtfyEj*M%50UBDI- zG~bD;<{zrsg-U?K^K~|Kr)>3Xr6R3maj`7{nAN4WB2)Emoz|jrZZSo~7N&VpV&CSmh4$t|;Smu5T=g~A2^Jugp;T3kH;e5@5&?XXN-)4iO zMSTcdf+FAK;|eC#U?NBv7{o-#Q-xOp@O^Y_5YVDnLt+sDad_6SA|M(cbYUpCMQCj{ zJt3058?GQ?Sy^Kzvo0yksNU5grQ(aVl>_#eAia-)j=~}&jrZ0vvTu@P>C-2ctD^AR zv?c=vk#Puda)&P(LDdtf#v92ROz5fL=@z7{)w!qMmpuA_NSHR7;^52CM9|J303 zPLLYc$52{G4RHiaB1dn^z7L%QuJotdmoLMwi2ois(Ew1^fT<_OZTWVs=^2gil=};Q zJv{}WQschhxCS=_sUnBfzS$-QVIOj0)ebUMQp~R#Ch~jmBgjq-lJvvKvn8~KE2TIn zsXVDFYCSB+#cK~vVw>Sh?ao}Y2RX(cu(5{Sey%%GkRL@&g%?oUZc;AS<<#WCVa&$u zKZ1H!EFnu)JA$Hz`G(oo%iu^nT*VY zvTuDp@M6xo-DWdkUC))VSxK3 z&m#_Lc;M?zDki;!bg=df#x~c-V^J>x4UfzrCb--Gny`o<6%nawibLyl*B>OwTPpbt zqd+sSkV1Q?zRYJ=G#I|qE_6RvXe(#izZ4;N$0A$UFYezR$cO)W z^6+mVE)8gbeuw#~udVj7RPDk2BYKZFMI)*)QJeQT9P}_x-`CABAXt-HXVDZW?Enbv zD;Xy!%HEgL;<6|4mm*GMmc@OYv1^m)S>4}%A9Qs)?k+an*F^1kvsTGA`%U*;C3Md( zpPmrQFDByFh;S&oW8Bdcw-uETqRztdiDZInIO7I{w@kkTa;83#J`lc5U;9BPutk%e zJNmC48w6s!%^nE|BXM4?q(5=$p_#Ao!2?O8dhKtLT?9?!8>Dy|?!;^z+Mqm5QC(E_ zo+K=7Dm_7MYaoycjQCKUiH2hTa;?O;`4vqXGhv!C#<4J8SylBcK)e!{7}Wqxn^t*x 
zW`lSi8&)2`Rt4aEz$}l*ySw%bLGT6IE`MPYv0qI9%|-a1_DAgNlL7mT7>PacK-V#~ z9(Y>%Wa&Oj2N(<(rjc(h^37OKD1~H|g9ljo?KD70u8k{?cr$Y?aUT)KyYmhTo_F!z zt-&K8LV6GK_LADaO)jskd$%~Z*L6YqcU(bucX?I0-|5}815rVof@PaS-vLd+eNO96 zm2yV|!-3gn7NyPv&%fZq6JE%RYVk+*z`QIbipY^%qmwE!jLc612{Hy_g)TPSGr#$9 zgvn8Vhadk4vk8>@%=zjvtw{PtW6&kxhXy{Jc-{#*yG3^GcP;q%;)$W~6ijw$?-od< zcw#U3e7KA2uYR#h_406Lro7E4p#Tv`ag=Y0bv7E6{0??~%ghzb9M z&UK2jQ8a56okir)yN7{OydZ2Vf*12ELO;Dcu9obg7{2v84V%d8&#n^Pdb1Y&U&HNb z=FZHp53xckK1~BR0man@eEIA_5$5(OecO*zQv4A5bk+NpU=nbcVJBu71?g(L4-Q-& zbzUxfTzu-U1ZN$6U3zDD2u*SBe%Nj#{xCc}lw$R=`gcw1o!Dpi`|P{8diVWjuXo3D z`J3zO2=|SjJh|^)$2K+$%+D#W*{hzd+}|?uX*wFMjzT{`<&}gX=XgAn(~-P+@fWADp?IR3c&FogDJijoof8Ku)-R-~Jpxmv>KkYsX@IUjcgD&CBQ8M8~y?bQ37%aih z-H}p0{CP`W z+S4sA#dx>A5s8>XsszD;<6MF)QRlfA^D-*wk@MOfBVyBWerl2>C~Z7J7^iA~e#+q2 zha=oKi&*&LA|Q$(51*9=v1rPtAZ|FE1U+{&5wuZ;YFFXjA?W##ZY9}mK8dR#tcs`m z6e0Kn$%l#iTy*yu0QA&0OtBm6+){WJs!@ZjLXz9bxgKV=L41ZpGb#Eow#5|G#4{y~ zo|YZB2c>1VBf1)aXoXE!kFPW)B?$X7KC(%jQgd6tQEv>Yr;ea}`UQ03H5bCp z@I;T834A)w0iq335D_WJp92SSAM?wmy+)!X+Qe&e!Q!u(+x_%6y>*XY`(3|o1>cT8 z5kfLH>LcjC++2^pd3HRT*}rX^f07)u_xHSy(|Fc_FYWC4-@6~j7t25bBW+(5VgB8 z;z;)6LDGLgx5C=A32qJ$>q8P%DZm1Zqn2Awwr&IO-rG89*k*k$^s?7uHvOo z-b&)xx9!}Q4^sTl1;q!3Vr@R9=@prcf-vctmCYTTXmWvNQte4=IVa6S)AP&R?fiAv z)Lfu^&;a6-pNs&#%43}@Asdep<&>^Fs)-&GydgdAaeU2_DhprMWhD0j{~{s!jw=uo zH=Rdf-!<&s-m?d?NwL}$sRsC|0+Y}nli24q_=0tdEV3LK;cn(%aXM-Q$|GUDkwD>d zF?a)k**rAGUZ*QQUNo4p>XL_#jD9G2O-LQ#sDj3^JxNLYh|RE6z`s7{t}ReDt#rfl zxIMKCESWq=p{KquD-q?qO%@Z1sXo9;LRW{g?$w9|HD)&a1 zm%m)RR@czD77TAn3Q8Xh8iD02PVsG!dUc$67V$9$-AF{z&4|W@GG@yCiR5kG&7H>b zY9exwYdzQ28G^XLW;{5#1^0wwnk$GLe|LA}$u~V@-4T*d_RUN(MDOQyJWA1SD)7LCS6ajoD zszoK|$Ej25(~SHN;CUsVoSa2sIrvBHBC z=UZ2#(G6HA2$W*Sw6JFVoY5KqDe40_d0hiuv0g>HC$hGm85q@Q(r=g0TZpQ_qoO-~ zRAv$aW8OM4c=Vrv<7g8D1`CMT`^*S8fGZNqEBos+e$_{aX2lb@2i#1a6AL?5LEbfuEYecwZRd5K<6*OjHs74?u6j^d z66-^M9hE2Szd4)LxonGM4sIWU1;#AF3HJ=Py25VlN2r zeH^U@SpxS!3s4D&P}A_ypyW8X 
zs_o^S0WG+!s2RPM!)wmr&rV+yJ53H|LKO(H!SKws&Mo|i{h8U~iu6TeABX+Dc!u_D}ILi-A1vGF~x@G#j31UK!7#{v}xXcb_4F*bJ ztBL(JPZY({ovyuZJ_nS@OFS>_kIVP$RbcyQOs->oDK5Y{i~J>m>B%A{^?Y6htj(Zr zWJ1}s9E=sO64%3G=fs+fKDIl0E_QOMt+b6>!P$mBt+m1nW!;ukx-@`N}b)p6m4 zZxkK58Xq-@p*DEL5+6{aVT)3(`7t}kf|s7fp0HPRqWD1?_PEx988bB4eK+H=end@7 zrvmnNvfGN_2ejZTn){(xlr>lxSL@~YN#Tle1rQY)6chn#oTyo7df`RqBw8{{^(PFP z6X^CX;L}WQM&ioN+gjDn%uVr6Hu@Hnrik`xT*S>D{WMDiAp;l3=nT&IFlZkF)ucGT zoQ@MG8pWIQ$+S#c$`v|7Qu%AxRP&7Ma2Tb8@j7NhCw%;OK7`VB|_Uxx3W1m`uCb*_D+<9ghqzas|+p znC1$FZ7*H4(?ML*CO}h<8rI5fI$YF8+%gkq`*$XVaL}aSCIo|B7rtM3Vu$U3IptIA z(^?XRUMF=j$T!-g5Rc$XqaUaks6hApdY)GD(`@F;BV@c$9Sm2EH{plyR)Watq13%N z6zzaekNk2!+T{Qwc*l4o{MvzaMb@XZ2z~S!b)7k(;DqK`68sga$NWpT)>ga#t;8?B ztY|}dO^CHBs9BUL0ZYv7F?Xa__Tp0<%qKba0;AiorVn|T1J`vp6&(deA1W-1aLVqk z*|rC;!@fCQP!Xenibn>6(y1C8U)#f{tw&{p!q~E<@nI02o|&Q6OeY9~*Ggl5gId2; z7Pb~On**$Ju1ctG#MH@4(Wo$Wbbt~@?-AFImRW22V1J8UZ86_ihSUCShVLyDw`pOg zYD>TsJ9*8L84^+aDat}*)yejDD0u2xYyT3DUR-)CC3J$6ZG^zH!Y#@V=cNbHh89da zinZntb#*AkX&FiMgjcthy}s!5VfVSVFe~53IQ1aQn?VpOy9j)gbP!g|v z@Hb^=z925yTk2v$Q%KF?;+c>~&l;mi=6>cmp&)khuNdZRSv2Wf%>hu74Xb47N!2>{ zqIw$W*jyhjeBY)OrwF%F{helxBk!(CT*SmPE4lH+H^EGR;R~~*qA8UY%MBQ_pG}mE z;XvuqxaTt6y zO?@&)#h}G2qAN1tQ@o&CFzLBdZeW5+73TUtDeUrDw-!_aZqoF-LSgQ9cq+0XFndw` zd3kGS&eyt-_K-#L@m#yiOkJl@bgxzyNc>E-sK)@}ni40!Nps%V=C&hwzLcFWTTJ@4 zL3D9gpMjdt^ce)|TQ7`8cfpTkr_`u)jNu3v%i^No8#Xl^crq^C1IYu(Xtg7WOwU`_ z&bz!LkKuhCvvUv1Qdzh9S+_9vMMCKf51IIPyvOXcfwLiGV{vSOWq}4i4ZmffHhXB1 z8hN5c+StMf^nUB{1esU5w8|4$;oLa`D=l=oSUkXVP35Uzf{E+?fIVDTg@RxQ<77~8 zyo2M>R%U&=@Kn8Z7MvdENv`oJ$|K4CQ*$-DW1Z<)P3K~UB+6*-GBaK3B{qdpC3v0J}SDbqC~3i zh=AoNOa!kX=1uX5R6LRgRWgThN^0X+$9xNSw!kUvyT`=XLbIQ0s$x&iK9-cf)9HGF z%_I6{I-9;u5Y3m`rH?bQsyX#%v((&v;cl`sWj|8W*g$K!K4h7N5t?g0^t-y0sxoPE z6iW#P3Z&gNl1CB`g0ZP9)R+$&>z~GmFnyf$Rl(TG1!ewXuXSY-1>?akGNSdVpLYNI zZn;NQ_-X~>ICzzmO;mslvQNcC2Sdeh3c=}8OlIx0i{)x&P`>X>AJV(uNBbZ_ge`mgp;g&ymmu7 z7NS$L6t}rmC=jo`6YJw?3ShFr^2)~{m>!MC5Qt)m9f&4(UOptvl-wB^3644$##qOB 
zJ9q@&!)t_ad*Cy6Q$t8Tp<)VU=w(g%rp$rpScjoi{B!TK3!_jno9y1VYds#qKu_{9 zp`AAQZgRwHr4yPc>1Ah0WylvMws?GAx@Pkw>rHX6sF~E8_|gm1i#{ZRy)`A^JZo}j zOWCxH$kt?9FoPqcgGZUJEr(xAvEuo-5_ngx-<`gMe|p+%Z;POH$x_+lkc6Wma`pE%5Vjn+jk5wnl%XPeNG z-gXTE$nmla@=?#@rl+#eROF7>p*wOw{F>GEZsAhc*^Cl!q6j=! z!@aq_9+V<2%1#1O4{yf-#GQ+L9u-Uq);V9P2;uGh0>CR}`My_)tKn9S*5kW`e1j1b zoD)_@>9j<&8n(Z>fU<6mEjgTcD;6(TKdH2@#?I$Vxl&TzNmR{})|i?7O&}_99q#6~ z3gcKNR_8Q>&$h!n-;^pMj0WT8fOXBa+KzS;m278HgHP{mH!o?gnXL5)meoal{ZhVRKEXIS{$#Gobc0$@*sdGl6H-@mI^Mt^F z&*#?9!0tWcf?T!i?p%^{VbI{2G5%asS55JAj}^)K<29n5){-EDPUUu9&1US}zS6K& zL6AQ-TUu0kxz{cKO8{SX5aE&w-c6$u1jaWXbu&h55C%)&_UuJ-xIF7DEbiA7uItVv zn4x@Ic;5;gj(d#w;d?${vMyzbX8VBBB<3+y$&|P~5#y39>q*GV&?8aOY~&V^p)V1S zlask>XRXftHY63yMP%kMqm|vR&?n5`M$4NywCs1DSjaG$yL2`&kX6*5Z|I^WO=OWR za+xP-?!Ia~^^NIu&G^P!khD6H-Bo1m3pDj~$V1{EJ(FY4!L-N3Ca;{jn`EYR>C{FV zDI^CdV(XMhY=ByBA&;6W4l5gqAp7a^ZNNmcZO(6wDyqoEW0@SB6*F*?={9(1y>Rp= zCUjjbe^!(*+VDnFij|5G6k^$wnszLr`YtCc;84co+j`IVt*-`mxTZ0OW|?WAc<)U2 zYYq*Qw^;F;61S-~b3>mRXsOU+>IoTVa$b*Fdldt>9%2`9#3fHNr*iiM>0!^7sodsr zH5Hkv%u_-9Lkm5r6*w#wFfxO*Q38H8MQ)}9>MaQ8gM_!MZfI>KmZkk1jPK!Lum)+Z z5N%$+z9Y_QMjgJ>*AKM2G(_nJItRD2__9JjLsVor=J@0aqL|kLRW4mIBQbth`N)u^ zD|n}mQgLD5^88ZHQ;pUT0@>E-Z`0H|x;k%f^{Llzd44V1+J_ z;=ycb$;c2@_TgdnXU`0qq|V9VUVy#i$dB`+W=Q_1?3VdkIvAY&wqmn@Dmo2oBTzSi@`6g-w9Kc|X36JeI$c&IHif4;LXldfx7ZC=4j}6`sy=;L2vgZHts$LWYp?qi&P0> z#FZvT<~fn|*EH}U{EYVIP_4#hpg=xqI($J*Z1;Te(|?J*OnBg#U=V|BQ_5~L%$Ylo zCaAQrep~T8Fc9{0hHZlnq%7s)!LpgTWkkmWNxo@f`iAuUEwoU1kQrgTmsh~gK3nW` zMN0y3@szU?td%}s1_hf{OhV9&cs9GuWiQS(#;8Z|JbQ#C^%ha~+r}6W?A}TRGP|A( zt)ylhPNk#^$eAWgCeU~nex^)g`t@mre2U{*5t4m*fj(}Y{#*$cM~WVz|EY!aCFj(6 z6}-~v4E3I4ND#iVjmku+qHU{J7ZUhYH%)%%jI#nog?OBNCV?y&xb+0dfEhY7k86pt zi`alP!k`EPLfA%rrl2un^oO;f+&E8`JT9$5wi=lMJU7>?BA6l3we!+1j5j-T@PrqH zt;3s*tE@CH(E5TG#dVYq+r+PRL2thl&KX&};E%{&pMmP6`D=uH;t1cu-h9;DzVGT{ zois^7$=-7h5v1G*y^u|yu^tDkPn?j*8$NqB-?1wOlPE5E4Evyjr^B*yT=D!$usUVC z6GdUI9cG~7pwopG>bxT*zsvywt)2jJgZwm4Z_xfxNP!pL{#J^hW7v6LZSNa;yKbXb 
zmB9K*$J@p3ogyMZ<*(P10^jMUS9?NjCjx`kW%?c8AGgY75Sp?=nUE*Y7 z7YG_$DOUXJbilD#wTp|c1#U(R2W2za=OBI*pVwoijosUnXz2J!o&7!zw>Y87>n$CI zlbt)_K$%@If+1;>-zEhYmRx17nrziVI*oy>Z#xbx!m;LF?Z7{B*Xq!ed;}%`sgZbPFt8g=wu+OB}>E|UYbhq!pG;V-eUTSK(PhC$ z5?0%_<(Dtd>EXE!b#;lkuU`SLmsBU1%@w)DQ_9k~2n2{ValE+24ESVR+t`;nwG)YM zuDGC@ELWV7W{_ep21!3g!Og{y@)7o?WmU{fJH;;26m&tOE3vgInT3Wjs9q(3R1?Km zNJgbm8ceE9(ZNsh3f;y=RZ99Q9;skuIL^elg-p~@%PJX0gVmLOYO#d=CdK*+iTFAv??-h5bK~V8@<0I^qf&xft8WTMGeVpRg-fEDAyd;Gn z*s@bRQ4JFU$ZfD}Pb2#HVIZf)s5-i?jjw|6jghnPxjeX1E9e1D?&*>U_ZzyM>;<#A z%d%O5+skEUk*q9y%q`LNe8|lWjMOy7iCs@K5jH8B~JZB+CH2#G{^vK!p zk{f+G7Kof8Nr2U7#fp=5u>k|5)qN@V8_tdS+}N^Vnf6p-Ewl?hOSLixmgi?NS$xD| zIon}A##a-!!))*avA(E5p*Go#Vm0YST@g9GMicb~gx3=Tig1SZL!NXbM6~l1^Mn@m z!j%7yu5${~ElL_}*|u%lzGd6CZQHhO+rDMnHg4IsG}YbzOwV*oKYb_Si#SiYckZ1h zbFK8QPOHhnLXVrSx1Omnq0US6#?6t%@B4v0<~*?9oM{$4@g0oNAYY9qF2JPKv3wMc zII^F+2SY8|celK0;>{^+2yj=uA){)0=;f}6rp{}%yKL*fMiV$n0y#O*F{R{~ODPL` zirq12h3^8_HO(?UI)V3nkbw*2Wb6N)BEMI1WyC!;kf}D%T&2zbp?D4cZGH(^9jv4c z0Y-uKY9bLS364E1I-z{~$>0-V6z2F1Z*fal%elDmTyRq#?L4(F??K7HMzThd3 zw9kot4@T|LQxa|gKMSS&W#>$IKn3qHsr@`L`#3;}dIOHkCJrYJw|0iA85{Tk+8F9b zhr}8UaBT-uB04DAz>Kq`jYg00m&z`NeZdh5M25eHuzc@5(n^wm}*B0XFK2%XUk*hT%`yTSUDWhZwhN(-f%$ zpo)Lt4=T1xIe*o=qs&qvw^P;r=n6Y2bSps|EgQ}#;7{+Zv49HQ76G2gOH9c{3#RjL z%MMk)_oC$6I5ioy5j9AnPD_1LRDb1yUS*(WRX z}PM%-pgi1A1jw+dccTisY~>)ESEH`C8?*L3Nm$j@824)^3?s5r~$H*PHK` zPDbL)^QjJR{8(nCLgn1N(bKGZt<+&Bp4~q_?JlE%-jhLvP%mC?F)aMrC5yYH8sYgU z;nJ$`K4zOFD%d2!T7rS(fN-SZz~(0tWM1;^S?3%vPWT&E!T-dt<%L;$T|zf{J_Jx$pCN`6?zLj98^yuHl6_(J+Yewo z!v*al3h{QpIdvNY{0zYXpyjZg6~y!)bVku*pe&b^N10+}K0aGUc&oWYiIC{a4AO9$ zr1ou)SCe@zet)ExX-F;bEg(_V1y1MzWTfc;Cd8=plHJ>SI zTn2ex;&({d-mes+rm5JbPLe9p>#ZQOkeNfnxZ8gDhQEJ_AJC_ILQ4QYR z1F?@I%4Yv{%eoRty>aL2E}I^okC45xe& zLkdeE_RUZ&HC%#>z$wa@w)|~OT4QZ z?T*QE$N|0y^amLSY&7brO9l0W%Cxei9yv4?mKx%f>eF}0u8INPvmAi zoNea)xoCf5(b6uv$JHn>Cqr7CJXs9hLPyinL_!=Tw=(3mKh#kcHPU9MqJ!UqDsG0%OfpI$ zOVeqI9UGy->^Js(l#B2yV~GQViHa*j6n6;hrq{dy)SGPa 
znISi_lrI+lJfbw5RF=}IYL^AYHD(|2MFCXKG6WFzU7@h>Kg!p~ss^IW5|=w;dV?v( z3{uX;o^VKEI{wrI^00XJTlQPOf^brigINolUqKmf?6hHZu-KiA^mRLB6fKJxb#GSfm{sP|^&n-2?{QqwlbfF+ z=4sA;;z?m+Uw(A~%Cb3O_uW}x7_c9%B1reZJkwJn!rDwY&#r|O+I7fP+II9v;>O(G zH!>s7d~>_P#=KsRv*h?RCe_hNWc(X4$`nk4J|R=3B30-}IudV1TH8d1Su6!CLXBWF zw#Ea z$BlQ`eC@e2zzr^rv*(K#&9d1dNq?@|=rC1Ftw3kuGNt~7q8(ZUD?UG{$tKR5BN_s% zz)n&U&y8I(U74-pYylR?v2uU}0nQ*^A)igxWZl})%GT9!SC-_PtzE4XrnZj-7H&71 z#IZqWPwyIx~D|RD0${-bn{Q@S<5sC|Mzf36|QH1Yw1J z2gk}<;8=v+!nk9dZDi-FAPYNgmU`@q8OOav)N?vgJa^A@mYl2~FEvkH2pSR@u_EpiHO=>AN4NAi;IJ8&N#DD{pQ zmMQx^!K+^~xKi43BNI+d6dE@ZgEE=M?JW;GEQ*WhV{)RaBs*3+Hdoy`DOcK=S8Ws!N$*pGDzn}Fl=lvdK->JWE_9eO&eZKd- z(#%R=BcPwUnv7!kj&%|=nkD?Ol=(coe&(d`$Wa>Hw zotl-zl$GY=rv>_2m-^NO_pV`{jG8D#JIH@xmSWWWrQrfg;U89E*;dwTo7cAU%EH?T zvh}v_qoeqI3+wCTiw<(axBKHy|Msp~=|$~EvZO6o5!%c(y?)#bayg8*Yx7GJW7R*) zgq5jzlD#eyl4vJ0M=L*gJb2shef!Go`#+|2N!0zC{(;>H{24d@FYFgb6DK=s*Pm%G z#s7IwcKI3l`e)0tjpDA|1_Od`-O#uF1hO+rKA%k$6>X7plQ6+isi_SXx>RbbD|Dn-=vXsv!H#z?1#!UE~ijw|wvbjj3k^8y;rQ zE2GF!U7!Z4S%5ltC_EeArUbrkj_XHII{NF(Q(!5za7Siu?E-7X5Lyy>2`)pJOMT*p zp6bJGeiycX__8uOAw}Gz|I_*Em)6|UZSqo(NLxT?etQ7&)462}cgk^El-H=#J z^}hF*h-JWdi?WcA5|tS%+V(R3whQd;E+m+29B+!~Ez`(y37$IY{N?kPA#Bu`O^WQn zIeWkvtmyoRE~jh}GOt&V$s-W|{HPXwEXat^n{mZVzi0s-HEJ7#d59QUs)zX)=ERZG zNw#c4^T(~TldpmU1+&rO{N;(OMFS*9x|;ZNo%$AmRt%Z}jeB2Ly6ZozQCDkFI>7vm zowbMxeLFkfp&zz^^nr$rxJu&FGL-gOMqmgH*qAJ-9IqD0JiW2wVe`uM)rdiFbS#Ph z4%Ht$Ix!=W%zipS%M5~x3<6ZHdrOM{BZDjdOYYOt)2jF77mEqfcB2d*q654N7uoyh zNwg)IBJQN%8h1vkI6Y{$RMz8VwZ#LCW%fQh>6hWCNe#_&8v1yBOrzJ_1>EX_JplfF z#A|6XSf(_*>i6%#5+8LVF7Uw_Pr9E741)fzUHAWvLIX1sYZ(J0a|>G&I!Ege)la(( z4vepkl)wDl6Ogn4f*aR-&5e{t;h#+&gwwi zzImsg_m^??zOGO%XG%~nZup%&zUYRj^7=Rwc%7FVu;_AX4 z6R#i*6gh}7Ru(q<>*v|RyJNwD@kuM0Q;Dh*fAzMmqTDZ=xN(JP=p9~FuZRSep6X*J?gCy@I|Xd6%^p}_(BUm1q-{NN`$ zbfOrr!jKY5R>#BLk596t;Dn;)cb`mVVtgt-kL+|i=>|SN%LN7iwUMF2cdG7F#GD=|G)#H1= zZLkKVlT_q%(u%Ubd|pFWg0RQSX{<8mgd4qQytK9H0<g`v%|3s-Z_ zHR5h!-UrLQ5|w{lU*;_0crmfJlW6L_*iDPwfPiZb{xNuIv%jj2hJgf`DceSo_^r(&w 
z_m~Nvxs1sx!8yo4*N`lF_i_~s{Lg_PtP7yk6)f8mSsWu$r4o2#iP6`sH;I1Kj6Y%| zgh-#vVizm91X}gOwo%Sr>DM-{>NZWtMIk`#I6 z75F#;<>83TWi!q!q>cHeS!-n+nR>9@pAJ#~z*Ua6 zdpa;!GI#5zL&IURpjG&Y$p(*D*Kg*Nn+Ec+l>q4SfUw;$2IYxQHPhY5j>40wGX|sf1Ac z-(&C`w{>)JD6c?U_E?~N$aG;Db!3@xvcp7JZJaT;f{Y+$;RySd%3vU+js+Id$&s;| zHS5- zD3@*9{{aufp}i{3gz_jaG2J#-k^ z1+}d*O)QIt+C`(!v=cQS5OM%SY|pkYw5j;}l~G~g^z?6~1oU5@=sOfS3~3Vl95%<@ z*b_~Z`^${)v$ookwr7*NJ3_DA<~4AiwMBU%oq#$eAK!HiQur~zE&qa18j$Z@xDeT1 z^hJj*jEX?e_)R$9vllOWO&V5rMj9wX7^|YYDhF4VQwL9}z8L~kgNr>qeif$ub9uQ{ z_8jM-!1OZob?X)Eq)K5sv{ zMZ1^x=`L<+zY^S?4sr@uhdCZ8YK0jz4)7KZ6A%z0pxqx-|Zi z_ShG;GKc7w`kZdN)__Y`^Ek{I8G%tAOf|IVVz_7Ut1 zUgNPk*i{XETfg33!h1b;Wue$zPH<1UHpI9x3ucmJDsGN`La!Pge+!lfrM3WCPEI1i zgiE5>!{-AyUbD@Oi|WD1faDA6=IIgsO~G*~Bna%qFrIM`wWQ@umy1ta%rR^mJkk7j zNQ?$jnQ(-liU`!&;w)M+1$^FU=RVOwc?n0KQ5eug4!By*uv_5!%)uH*yA&G%^!c04BTvhP}1rm*Tytu7&w4F+(d@sRC(c0ykvLX^yon_hs4K zi>1933>5%O3PYOQQJ*-_lcxT<(EhRoQ>2>xJ)RUeRynItmBv~5uHSTtF(E_R)=QW@eu zUeg@CWI13jlw*ew%zR-N^#v8#n~e1!58|}V;8Bnz-M~KCRYICTk)ROX243~iP}Oe| zNXOnJ&zI_k$54JYm?WZu7|O|T!uW^g$;9ktIv3Kbi-Yi?=acu+#D;P|BYrxDz3V-s zHDt98)MWkl`Ck)xV$}#mbW+h&76HcY8x#w*w&jEja+IlYCe_P=8vem^ixW7j0__Bv z0h!{VN6ie^8b-B|Og+?Srfy3<7RS!yR&@4Q)}0Y74XkBHVF|LZit*Y4-nwdtexWF= zA&-!}f|v}<1`S#Le#)r2%00t~H}5!j9q(opT2#gT`Hku*Szqf{_r6=iPMbk=Z-iP@ zdq&kHqYqyoy;}!+d6Q}p!J@2j58KvZu^h8sA#|j@FS5SV7<#|SvT(5&Hu^Wzn7(r@mW$6QPA7lHIG!5 zMk_|1zk^yJpUBWiwn7Te=N*BydiDYa;-BZRf2F9dM_Wbe}2D# zI;rBrd}4!DIej4KnS~sqiux7^f9^g`wqao7x+RtbFxg3ddzd*l3z47;v2zFQKiy%0 zfpMbKeM4u@_@#2_y4ID4^)}bB-N4WK{fGKOjj>y9^C$1Z6aH7$qmzlFg@Lt&=RY7L z|L5_axp!J)J86>z!LP4R=xE#`#tE2y&dLtL?L7Y6hartxI&5FCAw_tpHJU~bEB$?U zJ;hk;OA;p^*dyG?i{GznLx(a6oqlEv+L5#|=jbwqNSA|5FDK9Mfz;bas?>sHJ(fn8 zDAylO!p?ncAzP$aoR}30v{ok*P0hm)2A-ND=MgJBz7@EXjM%9uki3HNCOEqYm69nwfKw;{gH9iq16GO!j?BZ{0%={DS0nv7?({6t?r*w8T7 zUL?hL=^h$YW(MFSjsn~t?x*M(#8ZNw@it6S#`qmH9@WHLFDowMTO`M_N0h!h>x}1r z7PY}W%vZ5jE1oLdi^^0``PC`U1LvFnc9Lq!Yll`HbqYo_ch{~Jji$hNefy_V9H7R_ z#@E-uKOWFGa$MiE7F;4vz4Bfkikc{I{|0bqm52uEl~t-pNdqlXk~P^PwgT1hN!^$c 
zTL8Y?G+vQB-Go!N(GqbXePUp#egtzV6FHhnL$*m$LNrOK**w&MOA)bwrya>*N=Ukw zyPaRwZU9%5C)*Mawun2EHFPQ1Dg7@K(`b1CTOpd$e_PDB@ zXseSk_jIU^(Io%s(y5<{tc z3c3m?iC?Zk&;*HJVWb9;7AgdF)vv7L=B%*^z%+A8;i>#A0qi+wWwUt$q_HWk0Uj0W z=_C3+!=hH79$CP*2dqZrHeRP5;SI6Qnaf~#c0D6;Iti6`;_lha``bHREEqs+LTqCm zmANuU)@Axr*rvWV-;f4>g%X1>G_8&g0+1Ca8H2!P_~=!Lz}?WTAO4D*aOr&mEQPOq zD$J!uark0q2xgw)$R;Q3AftB)231oY2+|mJ1tw51<*02R?^n2tYpi(wQL7-Esx~` zheb$?S?oF)I>aDx_=R-oSZ?N1hl=ZtR4ci_-*ahLI z?oV@1roeac%j`fn02{g+q$u4vJ2a-U5Rg$0)}T(3k)vTRO#qg3F-}$o?Kar$rN0{V zv{TY{%=uvsc&v@1M`%mF1;}CmL(R(yY{#ynBH0diAtoVB4`R!I5AmMpWrKM72_+Mc zji38_g8Qp=9Peimucd#RiEGY%9SdJozkA(o0=FUq^c5|=PitPTP7TM^6xzN@Km9u5 zjq&c8T8&hZ3lU?Z=vI%ndd%rnE&tJGgf*X;*8Bl3AtwJ+CC0`-hl_8Bp6KOx%Kd zs?%)kI^;_{zr;T1m>uPkx9M{@vxGn$o#CDB<@@JL+C(eHhSN{Ujk_7;I zeYc-$9hiY0C6iV@8qCr zdF{^0n-?S_iONr{otIT?Y@&!t2r6g;)(T*ij-y|<(G~&od)%4XjZ1hTRKg~wjE?+$2rZsFX&ATFOvJvJ1v zPI&anS*^0{zV013&6Of?9KgA|Sfg8l12}7}>w0Xgz)kdCzj=s;-$TS~BGAP*Fb9Sy zO;=vbQgOo@S;GGw{bQI}w&YV#{iiK}(=d#gyeDS3HCijoz+JJdYCeTR(Fw}cS0Cxk zJ4wCglpx41D^@qKl-{$pLgyMI?U}{4iH`rOW8dBx?9a&EV#1oMNnc=Zzs<`izhKUM zT^&^F9iBYc)!KO%rMoxb)-xT5*wd}F?I)x&O@dt*Y2)egHzVVvHxEI>_hA7xJXNUd zmVnnqyNyQd%*K-n=!N&8*kr*6_-13SmLK0a`#N}#&Bj&?B6f_UW z?eUtSK;_Hp#b3AqEBLjuV?ASin4i;O55(4Fog()N7UqC3dUl4a%sE4d04W4kxG^4l zP~$9cl!Ap!J^}Zc8$900?WyVks(5B}Y#w;~I1_ia^9}h+qGc>?j z%p=xch9ULCPojxpI+Y%RU}b-Am7P7%h_#s|bo-~lE^%j}?T4R)ZWGtTcuc-qce!Cy zqc2xEHOpQ8jJ|*6jX5nR@dHKGJ^BYfNykAX)D5;iQFz!|Owma-NV%WsU;HF$+x9Wh zd=pE{(zyi^ZR}yU6wZl1B1D{i(D_U00PNue(b}&_!UimjB=Gwun-{*o@B^N>H0=94 zfy-F7YLi{|$hE1D&!~aNXuV;RU5EsvqAs@;$}525M{F|E8)rmA#E=z#NY0Ura_`ft zBdQHCg~;H`K{x}ER0dkEepDwB|5BY?m3@1!3-$hDq4Hd6R^0>SI$+8Y%rKf^D^4SB;% zvjN93(vVe;tMAD7+aKM`$9~|<_}k;*u`?4dzMif&&6xV+VuX9y2SFhxVQ*ljWCPs$ z;PV`+i}`IMgH<0t_v{S+zpF<7@a_PdlRoGC#N-#me+8ThyV%(O58yOLMdu%YQy=*F zZ~IenPWzG{z-gkXQxXbt=K8N>x*x!)j)t|lA~9vvM*MfLl%yjGw}cc18tIOg`^+an zQBu+&F-L%u3R`9!G_)XB$V*une_v6bo<1FO3@nLdQR~bc_UoM1~}1{BCws-d}`wAfM!YQ{KH z-bw29fd>c$?)*$w8)3?I#k`ORPn_L8}BU^sQo=O;e0?t!z8AXZy>yR-}tY$J-k+ 
z>!`S)bY_zt;bAGLKZobrgJA&TduCBZt(DlgcyF(_A52$*h z(x5%xK^}&Iy<^eUm<6D4Y=UDU}Q-oN^PnI%?Oj3~E}- zNh1Q0o>2!KBVtM9EzDAo!NEslETQ!_ z(L!@X-BE&3$cyPyOO0YEy_uroedHQ~`MK(vxNq~#?ldoJp5GdLoldvV^CR7t_K2d( zCBuC`dwk*I7Y(g&Hz%>zz*^3Le5d_M+FqiL?ack$mcO6DM8SH1PpYk*9P#SPtbHSn z`?Aa~9~XJ&iCYVqy%V*l_%m!)&D^XL<|hNCx9Muv2fcVhCDi&F|$=XHho*>oy0o zD8`KoWp9*j81xL~qzd^iQO?y6)1MCvze-;%Q??5#sYsUY3v1cV1@!R9?N!JZd$Kq= z|F>cV$^$vzf2>x#>&v#5LRumI+y|&Nrk}kkYghdyovdQGcOuil9 zS(c&jX~*6`g!51?zelc)95}D8oO3HM55bMgg=u>nHeDt~P(cc|%l>s?e*zrz4=yGW zso|zOZ)cl_ftj8sx#ke>?;>BAGDQ$El3YAf78=6isX_M^u8cppv~_{X6)`K<5D<>R zbx8xS#1=AMaZRl~3Q#feo>+=3*U7u`@9{~({N3;WcVUy#CtQ>NB%IWr#r4~(eJE#rBBqPKaSUfXy!5IUN6NnO@4&Etjtowv2UqAjBh~{EY6?R zy<^@;5(SBovMFl*$bpGjW#QJ0zVdlE{`Y6`dI${?ND|0`-EvAE7#}F$9Zs(<{|3V= zdg9WwJ@5dkVx3Z*?D{dAr+WVdns@w>5FBrV#K#;8y=q&P1wuifUO{vggM z;m+K~W@#<2V|?QvsciNovV{ex!quF15u>ku8>}|I)NE6~jR=HvkEchOeig@rG^5B~ zUPCHj_p^fmjprF@hqcdXbmj43y6%bg>l%GI%(04Xdn(p4VsXj4@7fmOM-5zg^gs^R zuvaQ-Cy>}t)AuAvB&Hk%cDeehrqnY-^l?rd7E?Iw4lIQdyGm4z^p}kA|EIw%IafnM z=qKRjegf{l@FD*r;QsS$bMph%L-?H0b2w1aEb%W?ky^pWvSNZ|C03G9J1znSZvbLz zlA+A}+jj$FZP|JUX!L%)?R5k9{jn9`)Sl#0=I+42ATg_t2J!&yNlSo|F$jmGF>V&w zRA7}`IF~L+%%rRj2VpR+s5z2F=`B{$fx_?rl)eVw*6lc3OCwB4kO8&KIKN94ITc0&Say_>^wHma-9~J6+MqH{> z<;Tt|EjYQ=EL{*X-p&?zgY9&%ZWmenVFS=H0@7{(q;N1;#t@NM0qPOis>c?mxz!n=VW$)5E<~6HOsZq_ zk95MKz)~$hal}j|7@04s;$YKN8KG}#AgTLbyj@G1Brq`f8go z?P~k&dDXP3YeS04Nq^RK0SaTB#ruS|keFan6|j@nc~l~pV8LMk%#X{h(Q+_56(6t? zn@pxrk0dG4YN4E+a`=6VLAwt|chcm6&czr2Q|=Apbiam5dCm4IBk+4Xiz! 
zES&yXmKnmx`wp7ea`9^-ycN z)@;Z^oDJvb%uLat;NU)a*@LWBmb1>Dpur{n;^5_cbCzzKMVQDK!7@y#N&`FVf8i0d zum>y2E&$^egMdz*D~fTyg>KoG9IWsI0*BR~3k3@^s}-%09MgbR)S!S>M6@M12KtAC zfW{s&Vgh;~LmGj~_VhZxHTot}dV|Q)5_2EZ1m!WsmMCDI_0DvS{bGU&&RiPx`!A=YqO$s=}XS5(Hx6ZYfAw8z;V zVekyToWCW*5-TF~*%su;?td|NKLkv4a7O4w>zo%5_AsKFNrf8;$3?2pOk@Hj<2i<` zNYWT*3YPB4PbGnZTH<7v@q$&z?%)bM-D0|YZY}O83iOTpqsvkbAI+K1lE{I-nH;RNOsb3UAZrKP~_z1S2YUZB zS`&;iO-SXbC*g66d5EIg^SsUNgwU;Bvd58zmK* z&cQ7mvY?W?6AvWjiaVXUAaPhMKRq1kLxUY-O{}&GcT?$85n)LwC=Yot_b^PFI;U4(%$z;ouJNB z@D?Yy&MQB@>TAtUBd_(}CdqY1!tG+2Q&kD>6BFgm7AXb+pX~rxTA05h59r8 zU#zZx_DcYKTdE5ZLWu&;Qn+%`uVc0Z{Fp4kemBTKiu?6kEMC8eBy%8afwCAM-$ zmjWat1!RC|0Hjqd-|smYKKgm?bBPDVBb*Kt`fjeKuCBW8p6=gYJ$t_OS#-5cuOEdF zo=H!8^Y*Xqz+Z1h43ec9&bT-w506L_2J=DgNTePtu&LuL6M+XrP@+-xK;MUYe+Tr6 zXyO)0+*vTdcxmR2^Exl(5QkhkVNUj4;)_Ml|pBpZ`+859nU! z|N1(P8y(1+zg#W)efW8&aC`4k|9)?8_wD|DO!)D{gMs_S`#Slxugf3qZ!{Tepo6iSwyiQ&mn~=`9_pEWOfH4E;W<3?}3tCmZp#jATmHSapDl$lL;KC1i~D3 z>VSy$I8g|V?dcA`_JOw#Pz1iZ@69%rbn7GBM^vsYyuRr50?@)j9q1RFwof1n{!Twi z0s`<-@pc7!i+)*NmnOo4?TvqePCpoOILW6B5w)apP9VD@rsSvcvWcHM&*a$69|<+O zX%r9lk|vQUovMLGI;@{;vj5bi!Q}y8{X8&R_;B`nPbFMiO0tzxy{j$zms<}B+KC)B zlxW(VCm+TD$K>d1m*D=kGg}lHglD$&rWzO3V70lX4Sf#H>1~yWRH0;*Yi25NfVu#D zi$K__dC0T@S4?IqZ)R{4gOxTGiv|c*3n*ZrBsElQBn{N5jb;JV^oF=U@7o>+PaTR4 z786S?z{L@7bXwTdf=z(r)&RayslU8Lp!o^lFVH^{&M{E%F6)@#Hl87VIcEFbc&2jo zhO%Q|a=F&vVxx7VMd;XKqeZKu1IbZ4K6jqp?P7oP`pU4p=-vFh8ToilT;kL3aq9%= zAwRCq^dGriF^;;?_fE4VPzZ^36xZ+I`D5YTMxplJkEP%;)Jmb~MeB60@V_$BZWM}M zXaD-VMXYXkQn-hHe^P=!G@yc^YdE)VAB(y;$Y_fNd7}2k;rDiay`K#Wx`loET^h9i zQZ>yr)xSe}%|9N_;Yl?`SYez--c&_ z!IRRRW(pjK!D(ZXB)CM1f)ZpwL^Mc-rC!Jk0uA;kK_C&VjPF<^Z<{PNz%PMP%@N*z z-bipzpR&?^>^~xOKsyKAfY&@RDDKJW5f?z@GtKDO1%O0YC{>6f!~h6+rY;*v3L;Nu zD79)B#qJTj=gP5Erit1DLM>0!J!uGV;)_t%0Kca9JtW`1%ry+5bwqV#KRRrFr#MKH zHOO~L2);(8&l0s0h?$%2=0UDzIr@IAqw%E1c`9z~cR zB&OvFwhMQgG+2YO=gr{2DYK{nV&Ifo3QB*sq~ZV0pJL(&lTy?tyxNFjr+q+x0ug+5 zM}p1?Zm;csxU{&aPc*@U*uq9~X+SeP_9FNOSYEk37TuhY?aY 
z`J&=e0^sk!#t7EO1MUZK?&?NT%hS!2L9lf0z?}O86GNH-nvx~3@59p6(nYcam(~n2 z`=#XL_AV*l_3Sy(U_MhO)q!9x(TTEv8;SuBv*#CP9h4@Cj3GqmR{XEWL%(34y4-(Ox%+u$d5@cv0JPl0GtMaf^oidlf|0i$sPx)_{_If* zNwNk^9sMlGu`{>cdxM4pldkW$zv@d zN{ib$aVT4RYb2~|fPxZLOq=ur244wqk1tzCP9+-}VpawyrrZufAH68M8E{vJJ zYGv6B9s$^rSysnFvo9WuQ7vo<)D+ApOkeC3VI!5>qs`oxR~us`GzJ5ySrzsu?6dlW ziugWbqBr^P_TMJbM#Jz;DV|NyIQYLXNTQ^s#6;0sZrafiNm>vq84HD&ohXnSV1X;c z&Jp$j3DiS~Xf6=IMmKSHkBCZ!H#5oOFE10!xD8k3QU+8`Q%=Sux2dKqAP9ztL0QcN z>c#qySOmm4Dm)HIaI@W>DY;=IYYD1*(zkTdJlUr`2ouEk*u zww|b62!0QMT~!;ZJIiorZ%+&~VOUfc2?eBHX@+-5AdV-w(Qwn9=(WrQxdEeeon#&}&-aIM-0cPI*sIWIJ@~WML>cb1va@Bj)y$fB z2m;Bmv%!|58EK5y=$`={qH*KOzeS~;6ef;-M~-bJ9f86R(S#7z77rg91ECxvvfm2@ z&%a+`CvegE%?+n_oI;1!wlkYkpzXUdFIy%?kk;d(zbzQB7daFqQCY(ESW~~Z9hSo2 zv)a1vjr?v#K&d~T$U`w`Y-gB+#qKX_ojQw2H5GPZBGN3ZAChGr+^SUB)E%&i<4}u} zgR0=YD{=qh|LBo@98yX-<++1LqKH8;0wAGSH+cpsH!*}0?4kj3YJuo1xF>Tuhybu^ zF8_YW5VA9h^F;e3!W**YIaps_I+ZLqa8G7v_A+0ipbZsDhkVvm&F$T^L1!7$YEU#A z?>g*;)e^F(j>nAi4z{@hg(@unGB-1lsO<QiTbL_xQGGJWTlx)WL`QNGKYe*HA zrG_9}-ZojUm4dk4akcwdX(AZHPaLBjcux@kH@}6?Nzhu7>Z+R|`#fPn%T$7DSX3qi zo`NScTFaMsf0?|;wFvlu^q9ig<=t-(YsOkY;0~$bQeTsGlI_0V#Zoq|k zg*Et0Ioa|q0ODrgJru&0ObL$b4BsQCNgn%{IU>sJD2dZJXGlRZyu=!5m4)#zED;H* z*jQUPE2xBr9to#VunUw(@UQ2l>n~igJiX~thjOJedv&}p^*6kPP4k0Td!U;}B|T8Y zL8*bJB3CoO%bze;2Ptm;7N;<9lDTsLY%s;6b3?LQscETsh~a9LJo9b~2*qsj0*3V) zO<-SW0%S}*gF2JC<2FA@7*}B~Sm2dIr(-A93-oCt(kZvFmFZq9rj+7Z^DdgVl1I4P z=Zf)kmD|L%6;)QBN0*^&NA`9(m&o8lq;`5gnMmsGnKIsnL-E;?UupA)+1DdQT!|*} zQ{W`0)LSkNo5Te=;V`PsztLn-GclcDy`(NfMAzVoxX+f!e(c4TKIu|U`{^dmiwy)wS82{uEIVIP$K)!HI!)otFg0;>niC2Fx}lLAq`T3 zv~+h$N_VFqAR#T?jdXX1ba$tOAOa#GAs`6vWp{POMSRDf{_h9xGiT16nK={pOjNZj zUZBGR*@So(`#ov)IX}hPflN$j>#o*sN(<1{tL__GAV}=w_ZAF%gAuG*3wJPc>O}hD z-?WN*)DwvfxR?&JQA(Mz_-5oj5LN5$L{?ccq^_fW4x}=t$cr;uUWcMd($w8Yqr(tSfKUt zJ0%!i-kTfwUQT(nDqwJ_+BI->ZM`4Auz=K)r>)NwrW|B9e7W~}DBx>_rYmxz2WImd zju$B(-sSMP71CO#D2Jx2Y^QLM9bvDMq+o%-wSDRHZE!3;l#4?xf1@uwUAW)X98p87 zOruy{Uo~E8H?M?0F}}pcH>o~8c28FEj7_Lg)6;^^Xp?23mIRA?%KE@@m$%FKSgRRx 
zAbC<)&7}9ax>}DD>Kt1MIew>0bNs?*ClC6WkY~}dGRM+|T?yWz?DJ<%Y}M{FRi)Fz zi|><>$Z@9kpU@kpPvDrT)H>H_5$hakf?Po%ZbN`xdep6Y7!ivWEFR-x;$dUU|uL&OU^IlQ5l_x% zwetQcQW$JB6?y~V^ZVsNa^>5l9|qJ=*VN3vFbj^fOD$IjnxkUl$GUS5Wy$t`fN)ZHkfalBO%;VgOjMit7dpt$pr{I(=8=ho3gcp*(_K}bU3ZpB? zv0)MPh(whrGu&_kF;4N|v9k`bT9m7N9Mf~PP4&V&`pA;qJ$$9@#7x_{&Iz63{vq8G z$i<6OycPkpjRg?x@nqWj&7W!|*2_Q9L7$pX>#Ct6=B<%gS_MFZP2aOaFc-jLNEYG>E|sLF9Vrk2diS=3}-^y*kqfLuT1u{@jD9I_!&6RDT1V6)EgsB~M zFn5K}h{Y({Y3i*~!fTY|b`v0ArYcoh5@{!Y$vaBLbCDtEu787y0g{tTx8*wDWU6q% zvin@5b;fvqTY>*fSyPJMnU=zVdkWe0IFS6_3?Di?s@5<8|C)(>1WKsPrh!;vKInp6c$BIrZp#?hjJf+!ROeL`<>_1-?;A|$ z=DY(EnZh*xtJmy7*k`7vU-GpmWU82xtQ}E}8TtCGYi!@fiZ^Sntc$$f-3*3Ax`egt zqy6l~+W>w%^dDO*~GQR`-sdXq(+7rnnU}8 zaO++s@kU346+IKq3R7QZ1{>M^=4leeVvkjiKnUeBFTGYc52PRtLT}%`RG7=m`~9G# z=t$czc*mz)0sROv5v)_3wv5R9mLdsMZC6hBhCYgN$k2DF6(y^OV3VzW9wLA6*iAa$ zkUW{xv=5n=UzE$JCz{CidAQ^Y=a8C|4t|w^un1X~8g8ykRsDn}Y-5^y?W4uijEQ4Y zm?M(Nd>*#8eR&9n1#0`E-5hw)7X?(Yv#amLnVdZ3PkQ5bAv50i`qJS`Wg)kBfjx#x zK3q2)u0ommq}WCCc}31FI*y$B4RfI(TmYKo_P9Lz>ICPQ2^IQm2{WhGgTOV94*M24 zSXNzinM+pazKoZZEW{s8H8msCOrQ<>CYlIBIaAH0d16{XdB~tpd>_K`tg@C(pirQM z2qfg%JpU9)g-Ol5JK(AiE0IU4kV8CkPFl;QQZ3(Y)4VR&E=yk!lU$%;5}cO=e36i28qKA;cIrGtMCt_C z)mqb6q~jWNz67|hvNh@;VOS?1*sr^};XN*juC!*i*&ADZ0u{uA+EkB3q`;P}^5Dxm zmfj{x-Z^a);=1Cu8pHy7ii01hj*AAwC-0{vMJEL|V$k&4bQ(Y@Qx`-CV%PY+hK)($ z(%ekl3WvVL?AwY3Kgc3By`PCf~1I(F@G7xjh-n)I3SmIU^~ z8wv9A&B|@IOD^54ta0Hcm3OI3-@-bcMRo7?F|t$3R7CkK%J$JeH@I?o@Wc!0+vWa& zl;@hv;RjA(GyIP{&=*$sU34Qdj^kf^MwRSBR5OK(c|V9eMXANq;lw%QS|aytAk_nPH8^KI!LoNDxMHJ2DDRYa4qMJm$KGZ z-?oy@Q^L3?%_$d`ueLTyKCSopZax_?TTkSF{aCsirD{B1p~Ll% z4!$fjd2|S)cg2O#nS|Xe@!|^0aPt|NpiJfG0oVxb^YoEUjAu*OX0M8e)emUf+&HDu z&&cFi!CTDKTm@%{lB_Q`KfM@wRSAR|^K?L9nYA1W^D}S5g7UN&;JC~=Sq`0fo6hwh zuS`7r886rC9ryE#v-t)p)_X`P*{Qr9&9xsApt_;aQRM16;KER|WEEa-3)Omd#hj!z zh;6CHH7X;*#A&=W6I0qLFhyz-u{g;fO&kR+kxsF1LtHBLQe6G`fakHF$Fj(x_(~2@ zovv(6Ko0`wd{wTCW;0Y(-fL=y2|9six(gx965df$EMad>bVVRxw{# zpj=Nz^4sPMv^F*|s^f{8o)wkOdLL3y+ixDeg6lKVo%SoKipuH3*`GE3@J6edZy4A6 
z%y$$4lov%K2<1SCL=tTuLbHhQXOR);q)(m)t#J8ZEb> z^GVq^@m|tY#uHzJH8=7{7NaV!^*cQ-+qQ+pE{oEzRvv;H?j0d|pBue&A8=B$o2_+h zgzKJXJ@O-yDT>KZ4xV4coHEC;r(u1K*X_pTRZ)sEnQN5##?31N3yb!37#}&K80x~QBdlXCI-ow~1E zI5|~v!tLZD6BGNCo;FFMJCsNq#0B=(wEN{gr*<(>EVFF(w9$MQDj$|seXNr-YWHcw zUUSi^m-N^Yx$ubt_d>mveTGyUs-pu8FLGyXpJFmrMs)EO*8$!^-`>1~*B z!Nyjm?=Q&E_F=f@L@Jp|PGWf)c19v)J20Vg9knvt3H2W)btZniOjQ}8q16xB-oH4o zO3zuWG+qRott~w=_uu@yInvK5R-Wqnl!M`*u#3Rm>Vrx}tR|)9oa9KC89S-CM)&f8 z>P~gTOT_B>&q1t^xv8ws^Ir>^-{b_zs?iWAVyRi8`(jR{csWL}>=>s*s>diBI?{BZ z!`2lP(2otDu1I#2V{C_0o^dT%6#9rHw-LZ930B#5lEFxiL39d2$CgQTrzDgiV)(>5v!ew)%h z8T^UQ8qL}xKGlh@?&o#~o+gs~J~={R0|Sfip9@>G9e>dnD0I+E3bxt<-#lkHRo zSs4pl9rPMn#nHCe2Z?_YJj+P?Oh_IuBYj&U~JcuGN@U zpxO~Yuo%455aQK%YdsxHU-ZgQbkJz53Z1n{@WDB9@hFUOqWw!{$oQrB;_mqcq1d3H z`|nZF4aqpzmC1Fg`w=Vgt;z}ureZ%9rUbJ_l;SZg$dtIm+Ni2MlC2MlA4((0`!xQ= z&9pzMKv!6QtoCYheT(HwV<&L{TynoZBRpkWGjuNH6L*FkIU-hUcQ5konzPR`a>zDg znC+oE^W*JBiJM|ObMKya(L88Z$f}U%U_>o)NO88anAoqgkT!C)uYUT)NVtH%qq&C0 zp4vtmC_d=bTNosyjKbeQG~H+E!&fmh^`SYLtPKNI|Z z$C%9g;4vO2>mg*?HUf=Et`pLS50p=vhhyu^mt`J3@ABP0b<0=7_Z?hN%CLOi8Iye& z;$Z5Nw5MbvWZXXP>gf8Cu5>*m5PWENoafvbdN<){#A=A={G*!tW$>pb4DWfz;Ok+_ z8-yQznWAESC{Y-X@6^53Z|^Op0BpN;Ys)7z`0mtTmFL<){J zu+Pnxpf8MN!%OgL7~HljRXb-V+cg4 zpuGJmUq=U13kH5WJ6#vxSfIZ7Uq=VrqvU0QGci5O5vNGGk0F)DLvt-HqAJEs$K+@u z7&|kQG$&qmezC|UkviYyYRn#GD$D2`{Fj#4#_YNd7_zJJW=1 z-w8f1x0CT82x%yJCCo@js~GYzWHyw3yw4$4T6Xr7en(>3%JKXMO~IW-uxbv?vjvWW=X&)vK&Z9m5{iUv7X|rG_(#F^g4#UdKqm zhw67s^hncQw@J}_N>Rz3V#L_u)92=t`eDnhyU;p(&}v1LlZyz}eqN}FZ2eDp?ih?qA`@6!Xx`ku! 
zKx@y3Gwa^jG9+@=D7@8;uMRbPjN=deNd_0XR_{Uk06UY&5%$}7l<6Jsb0pyhb@3?u z1d6=t+AC~(?SlB~@cv~3hfLeTpeSVcy^7C;#);VNaCuh7?)Qa-!=&-+)rtl?eXI;Q z9pT-t&WPM_!d!Rbw$>KVmyeAIYXSpq*e2Xr$8{)`%rtUw!W6H;?Af+DYr8qhCKG#gS99ok)cz>3hgm89$c~^WPM2-cL z0nX+Nn;b#cco($6pT@j zGRvLun|V*uhKyK^UY1bvl=#bVlxRDSFt~mjpn*zFaE~Mj(;vMsCuPam8SxUvQEPPz zW;1vwhc~NgAB`&Er}%zVqoOVmX3g%QQw-68}=`h zbQKtls}4XT(f}RonJ;hULt&OWB7AkGd#))@<&o}j@zL?r(>)U-c;~HHAvJ-DyDlFb z(K9`>v`QbHb{M1)+o1$e^|udSst8n|==DD;Q?CW*_K`=I;Xg=N;IKdrO;Ik0^W#ek zcdnDKw{4outcdGF1B?FT

                                    IW>s`oeQr)7hmNzcCYt?mm2@>;ih>`B)*L; zh_~9(^5XI$Sw62svmzTCn^`f#WKqnnrFk?j+gsLtNqoa+iA#k;DHV>YRq$8Is-Eh- z>$6fenR!@C#jl$RhTMeh#r$_dKuR8ids8dMW1^U|R&+`)-_z^|8R0<69DqF)N}Ih-58Cf5LXdN_gG5bmV|!ulwLOwprI;!}hPq z#>y>;B3r|8vEwb{!yv3XwAwui>&2^lhBNA^l|;@>N%Ql&20v|^E)XIYA>}-z%3Z{+ zsCX3kL{l%p*UjQ%fvVPP%3Ot-0@qPL_k9{U|7oTarI>gsrj;Vg);e%he57&^jxvm1 z5}phtm`Y&zgILNh*5**h)b71WpaaZ{@K@SG969B-@7ave8=MF7-1Oip6tWQtbhjO7 zN%7vsjXL9mgu+h-z&s>#j$-r5#GFY;I4K>BMl1j|Ha1?t>`N+HLG)>pNK58x1Y2gD z%3(Eq5@f+`JmFAM9J5ixZSJUQGL<35Z9eflO|eyGv7i?m&1Cxd; z$yx#2+EaO+XK^Tr(a?$Km< zBUs>*bH`Ln!%+5UX}B9yRR^A)w)dm1L>4EQu)a#i_j4O;54}gJNH6`Rf;tGx$8{D? zoX%@R_&L{dr5r+Kh`N?dg(b(hK*$tR)9`w6M= z3*;iQIz>8(L@!Oz@K8y3)A<`!3NZGugl(bN*BBmHj*ifMFS%tFeJs zejJ|lvYh2L`=xMClafΠi-t=C0|0>Heeyw-CePw*&8=rZ>g(X_(s;!F}azI}E7L z%@P3h?8U)xoYO=BHv$oGBOct|i`yC606}LAt*$>a zQ274*mos6hsC(^9d|T4C{n(756zRROaiLD(N!=E>l%rhA;a`*l z3MK7O9zg_ldFQ4x`|bf3cH<3a;7UO--ji6%luVyZz`}d$^$1j5b}*g0NLiY11ab~U zLQoe^gLV>u5?& z(9jGB#Fdc|a za%Gpzwar+Zsx4fUrA0jXa*x-Whh1ho1mn;biea;30Gr{3Yv|Pre|Z9lW-svyDHSpk zpAn%}{Vu`Y)o{sCc%;l|$WvWWsQo}m@<^PgUm05Od!-V7JU$5tk&bMOv?P{cF$l^; zP1m6yMY_;vRYWB9BUa0a(6MdReQ?w+WiWKw-HUBbmSE<0fWWG7@yMPWSR-NQfiIqT z((&&3n=c8VJCt^hgY9%#x$%k8Qe|b*?(Zw?=o8px^BuH2BpKV;f>I6Y1$!jigsLG( z>NP~(!<2$@P>rG9c?gFr0e6)kA~Kx06U^jHMSVmO)aVm9-|hk*Mt)uC`8dS`6Q%A%`)+! 
z*3aCi;N6-i&aOGLFX#!I1@0M*(ahT>ahX%+qWaDY=L;Bq3Wbg*HG0ir7oWTG`{1ft zw220*0xlw=E$sc;%<e-bzYgJ+rDnF@DAT)n*!P2J4I+ww|;{LCVwq!VY@NH9*U7>w%~J z+}k%?c&R$|cJD(qx-jFsg;%-a@)VmAv+>lOn&>LfbEI_TS>YZ0z)SgXbyRnq zhC#<817qabuAb7;Rf9vp_Zj>136B9$JzcOdKFc-{m6+PQ%3nxqn(xgB;Be{;q zTAqK+6C5)$pqJIMlIZ{D1$|V% z(4pt7F@+u$DE(<06E2|=2|HM>3MPTT;EYaT7WF6Gh8{{}avoQqa1TAT)=KtQ8OB0x zRq%|XNzvzZGp&go-o@+@TdW)T@6aM0bLoqzQb`Fw_< z&<=11NOuywj|CuJ!KJoWO9yO9k`H2lw=($nEr1p(_`Ih#clc8FPMh?(?b$=~aEbDE zzP8Wa^3AW;9^pXhPVzU2M+U6jGw{`X{87M_>01ylqNmxgbTlIo)oZH=bFE@fHny-M z)AdlBmfVq(c)U%)^*8rdaj4}!+*diFp7PO@hrwiB z+$)V5d2U#2TeYkEK#JE1oR|bUGv4b&Xi&eX6V=IUHhKMX!5q^hGX!{_Ty|N$GB$~Z zrLCl%8Nrlm^Jlz(M07`bBSYI^$5+|katQluw*x}5i^q~a79OhGkK4SH63gCxmvP#v z+Z>H}M1Sxg^x#>pA=}a;&m4CH$y9662Zk$~_$8!md&`z!au7qM)o#V5X+@>U^429P zG3xYFMAJ14l~3z0V_QPVn;(d=wcda0++Y`#;eC!_R@eYhoEz8UMdA5G+WW-zoZsqj z{$WKx2*V`1N@A!ap|72WK_a@Mvsli9mmV|{yAxVq6sal7@~>z!r_(~+>NHkRg^KFT zVw=@;gTpu|9el=Y>gFWOEX!veN)$YXlo^noVD1o@_Y{cj{D6k6{KnF_(Plxq8q&fZ z^Q{3}_TtCZZz@x>rm*L-#U`%1dtg&k?fzX8{c=NYv?ui?JaoJ^&gk4RYi&;+a*r*X zV7_9uI4o3=kT+M-z)_bvG^ZqAkhed3S9_$Vfi#IjfL_gscX!3>91>&!p4?pIDVjdo|YM<2EjtFcB; zWUv*bA^5mU-Az`vxD@%t$uzNK_)vmSx0Wtzh9>3Rmg0WXN7t6*Oj!e6PuFi1Me}E) zj<((5X-*F-18Yax%Q~m6O9c*IYsL(ZKiZj{Zi7vAfSC}i)SJ;3XPev`Zjjb6WPE|E zJU@`GUOzTx)i@7jI5kHb$xk>+NIR;DyF=5V*;*M@6`f(9w62nIc<51tVtbyuJIXmZ zht7$YX5j!wIB!y%y~9n_!B<&@c|f0{mzoocJJ$xvvcN;-$#=mFWuvo>v|fiE8?g9^ZcfWQIkO8>_Q ztf*`BpAmRS{p#5~JKEKoyxyg3@?W$%uBd^l!e#Bx9P^5fJ1_IpfIze?fG9&~=XT8U6(WDjo7 z1=%cAXH*Ro%49#}S$U^jn>a|OLoqaGI#RuMF?|uO(@YJs_fN38xDk5G-NooW$&E?J zd<^VV^?^+ohlCa2kBkR}3F`|u4=x*_Pl9a+U-g?v7%^R8X5P>vg*3&{@P2C=ZbLJvn6+e9HSxHP$ac=k3YvWie-t)Y~&=UAq3?jvV0IB_$96aFb(sCeh4 zlrzM{J$jfF(1&qIUZt`dWWg|mq6qUs0fQT;*xWcXVg#X`$s$s#&fHk2L=<|`j?}X8 z9yQ=pVNhA)Hew6;;jjEAoXpVH-Cg%fo^cWH;eUIP$z@VYFuomSRO&i+ugVqphEEu8 zk?TOR(N*m}0tU4A?1q|;JxB&YOh0!vzkfL^TBck?kn!O9{oVFY-nIfg!NDESSdfg= z%=%-|J#|e?az|vLBe|M#B+HmN6v9*5%^;4;_c5s6J{`@s?g4#cd_b+&8wTkutV6`0 z4bsPm_8$2w-wrh<`nh-5!YF6#&fK(+&9apcs#a%CEXA?fiu_uhBu)I*H!eKclC8nX 
z$#-o$hmV{Be7D@ipZVjuBA-To!-5iVLZy0F=`IMlNz+R+@Oql~NY&!myB8I27Cb}O zLKd@5;qfY4TCCb(sudgw`M++#MXJh8)`?!iCI@a$;1EcDiKwk*nte-69u*KWOfyet z3{MTJWjnm*^3BM0rfyehcxriqlP4~R1M$91G|0W&@!2X}aoo=I`_tNUsIyI;*AHjYaValSt`2$h(nk8hI`$l%P2JT0BmfhJO5YE0&!e;?`5r(iH6LkO=E zE`2^9#=`iWVQ^>~`%9bCv3n0mEEfwt5U1r>1gDA^&6YispcTsz!LU#jRr{LeL`3LY z%egyVdGAT+2$5&jE*~EWh`A}j)~0rIR!3lu2u7Y@7;DRi`(iN)A~pC>@!p*+WxnM8 zPi8pRFqtBR+RU)(kQ%IwBEx*n%8-+KO7aXu-&35_K;t`h^28$B39c{tgn`3kBK8&$ z(`!g*bY^;Eeeh@K!!^NQRh)AZiY_BfVucue)k=qi_R!Lo2;RgLJl({ntbMwG);qcA znW)9wgx`)kLOn?tH=Sqf#Gw2TcTT}D>P+|kv3a^Mw6tQ>uxd~if;(3_?qxr6J|nB9 zU)gzCO4(T4!Q?Wh~Z`$R@QZY2I~H{I&whW4!8{=vH) zgc_gxgVgnbK|_#a**dNW1(I_|%5ZaLhA`-23YK6Hb`*9*+*p)UgW=Xw?1B2+YEK#C zhb#Cl#Z+0-;5aC8_nhN96=;qSwPePMW3$CMWZZeQy`vgJ<;HGNjC%&miX=Z$G{z9yXuDjr^ z#e=B`a+sv{QYpdMCKZei#bo^!v{fwXhB)-l?DWT`2I3Wq3o^MPGZIo`*R%wrZeTKX7w#IF=7u z%)@)rKa!ao0}JY&k7hhN0@-OO1AVB?XIMD+u@eTkuMfiYHKT@)X}dAZ#CCs&8B)-0 zZzPB^K3JGBCYz*bGi?dnTGlMIo(M!zcQ`bQI?lW@c&kA_Rubf9N)YI7d%Q{acD=~+ z?1{FZOA=74$vK&O8t4Kx?NDmZmzv92D zF|6c8|JcI}&sb_(unf0564TTjTt7NrhT4kVvvUud{`s5G>63dwD#BfruAiWgS?Ksk zctN)xkwXqc3|!uxXBN$hys;EZB!fu)w z4>Ll2E%62v@dKl3ixs4E@CwDt-mp4I8($gMfCU?E8sbZ@H`u<+@$R98YZRG&(EVx5 zm8MVk%G{q>$^kR=wWm7Q$;b&E1YP{dIEKDotcl@M+#zi9ndo;s8sTj0W!#35Q`kqe z^M)cj%Lbq_=5ks_8LlO?sSuA99r9+<%^ay;a5`i`L_3nVz;A-nQ%#;lyTgKud z1n;N>BM5(i-mlF2^?~h#f8{!tlB|aDBbBeVP}(^oD(g<8MHmtR82zEjPb5*6L9|C01)0>4y3=rN zS8?xSlt3hvXwA2oOwxWbsf@LmZ1%n9weApLO=c1wPOEiAxW!&m(55Lgx^kI3Ai4aa zhzcyI1&C4`qR7DWCRq8z+Y#wqL7}C{aQQN2Z({eEKqPF)eUtYhThLoCkf{F(iLcOM zxvz#|u8c&JL#mO~O3lHX=KX;W37$AC$>)PJjqW*WjL{c&lZ-q|GH8*=`JfhLX>tqv zF}4qeGcOKOrnSTu!puH~E&CV5s=O|Urda&i^!_D(jwJOHaP9N5Ue$`HQyRFFl&w|* zi>Mt4<(9B;g<+rI@fPc7VwT}DfKY#Zng*lIRva(8SwCc{w-%_r<>G29C0*XB!u)Vl zNraS-+(IlZe>gDo1z$P4VE(OX-{T(tt!F333d8dW#b+ieR~nui^7GEAl*mEm~HVt8K6RD-97vI93>~EwQ#CApQ^a*6GqJDTciOj91)~2Um#1tiDiJP#_ z;_Wfj>vPd>&?YRh;{DUSdqPCBA44rc^_n) z1WVnizLRIrOYy-s(oMO*Z>T7!%+3Q$HN?n(MI-!7PiScf>#z$V!7Y)&qv_mG{PEE4 
zo^kEK*_8L&#i)VCg_Fbei(RZ2T2XU5#s`NNU%vS;mf`g3db~$_0%B%YMe(Y73oDpa zNfka*A)+#%)CEFv;1T&EV+CrMlu;RJ~CMVo| zjVK#rXX=uNmbCA1m`?LSUZsqup!1L#8WBzlDpPy_pnk*OL~)) z1m^abb=JfB8R;1;*c#z3Zl%irrVe2s8XSt9w)GQF%&v+$V)zE5%af~Q;z1RIB|gRy zzkU9q6E+Xj57}@P%;FSe`i+UhH0Z%hrmcq#dZ&`C2NspR_-mf@2Y6?wRb^;E2PYoHPrEqo%XBQ=A$pwPuI;i_1MAca8%QG zt8PA84L(Sv!NV~_>u9uo?1p#2oxSwP2r=)a%K2AJCDGw`e$+e+IBbFDIgFLn7G!WN zaagF@QLU5^hn^Y>^V`JX=c%QHZN!_I=Zmxzl156M@#BY-OLE9N=z)1bHDsHzY(B@P zrBBLY^l}%M`MN!iVL5Owvw6ZA(PC-2(zUlAGewLV&?nFs=JaiQQF>HvJ_sF=1WQUK z)9c8bsEkQ|Iy+KYCgF+mK1dAoINsY-E($~Hi7sSRGQ|b$r(G<}nV8r~NCe3HmGNzU zc8v;^^6W5>Pv2%_8$<OQ{2Rbrp&dszan< z4tvu_eo6DIE0C{aldrhBR(NDYVeEX`uy*3i>ii>(ea9V{rY&)JL#u+I2^HnS!8Yc- zY+23c!Nw?#>U#0p|BdOaC@ma31Vr-+g!y8 z84wJ(YT5l!u;3>s#2TP}U6mbN>Ze=dV=hwx!eA`%C%A%6dPN*e@#>Pril!Fa`e+5C zdy?-UWLT`jY{-SYKiBH{s++`&fW}E8i!(NZ;X@q1U_U0QPD#`p%INxrVBBMmHmTiH zOlO>sA#AN{&WaEui)l$M`9$hcy(>y~J$an8YG7ep4{Ae9iqbq0qbR9D9W(HuJ{dE& zI#yXgYZULUB^y1mH) z#>YM8i5PLB2o9=|q3szjSBQ0>2W38I;7rvLOL{!BYdx7RwWP4hv84je3-!4ZT9_V# z8@vKq=*jA{;7$QHL1qhb=f|q$0f|uJDPc&q4WA^HPnV)fp5NOTMJWPl#Wta8N^mF0 zYe?nP*2qTEg{@(4)x3nNDT9A!8xym|wy)o^`fB~#>F7PP&N2~V_W*ENZ@&*bbSw^p z=zaLH{3X7Jkz#0yB2I{i*``M;2SKmW#ou_s;x09On+NAk-^FVW`6#Gd8+OE_I%{}E zy1Be|um7d4H&ip1cX??H5B$;dJ%+99N*%E^_ExejGgx!Dk?0pR`7SFBua&Y5GfGG_ z@aw@v1XXb4t@QlIv30$_aUfZhznOQW`D#VzF{1D}fLue^HF&_)@QkrJdw6VkhFA(8AR8SLR|_>+Yd2ihyror4pCzt#zEOJOL1)ycq(mLtF%9cS*^k~e?T^EoofWUiGirmu`ZB_1(i-kJ=p5( zo>A1H4C4eUS3GW-Ndvm?^c(>~*sQ-^5L0SsHR%arH)63AT>a zL4!}{ca3maii?dBd0LDPzlG-~;iPk|LE@U^ zxu)W5GOFy9el)8{MPt#hv;o7}OpjFAC}=)8c2&JBX5z#<8?3C(yU-|I%G_JgZA7&h zF|n{>*p5=&d(n3I9z}N%zEb{Cg){fZ5d6|tW+TEwv`V@9u2($O57ZY$i$5>b7|OWE znTxc#nUE_fdF+_l+OWrGO=;EdzIn$@R))QqLQ7edP3R+C*;>F0M-awzb|||V!azO* zhlKJ`dW(Yg)4R__ixS&Rs%tr%qw`2UMku}@=?WOcLhXZXnNl-8haKMKIl2PDOuD>4s`3G3+`zN7*?C@VdN((FT zKMsC1QkK{CDYPH2N%PByiwG+!(MyZ`O8|m6 znG17%f8+WW*meFVU1wYeJX_lt|CHx{-?%C2{6BBF=vrFbcx@7_ z|IX;I*X*ry%?nYeu=U@;EN!fTFt<0v`jvE3 zSwz73g6nGiMcX#IcE56dRTl1dfk3V$dU89*3OHJJQ|G^ma}yJKJI2t??iXeMYC|_m 
z)pa|@&QKRP>2^aqcXPho&T+6dH?+FDSok-z!wSUgyG6_P`npzDzX)`F)zFW3frpzF zZ*@E9$HhZGx94ArcJBxFFMGV51l0QXs}z1_{aR}`i}(I^*7xvOcY$zj2t^3=Ik)KP zufwuGQ?94oKVAeLZZ|XZk&5d08Ue zMHIKRu`ty)b@=z1$6wX(O}&2C8z653WSZ|-;5W-_063lcds+Vu8u)vOBiG*h5db>K z0@zG9xG|V_aRsd{EP&42)Y|G!6<<`m36%!$>_Giruj+hHSzaK3&Hi3&A!~g{OQ7EF zga_O0Ny!56YkR*bdm_-0{T{ApXzOTbrEdtF8Mkxzy=q9M(cTmR_;lb1@J&2B;a&Le zvi||!R-SKa3gEA=0lhw5{5@rPJtV#hF9Qq)zZd#qq6rfhpk7DIzNzq4(z~dCHE#H@M^&A`g3L82{()P z;~)aS(?CnV-tYaMvb@NE#`JsmzgGSS%|JjZm0g=6K2Vw0VP(IkEU##w)B1e||7hk9 z^io(z&uiTj0!5(yjs||SyqJK@eoyDOFx9oc-Bzyskhb^AuRQ_eG0>6T)Jh`rt=Jn* zo3x>WiM4^8oiz}4-NEHQ9qe@m;?dR45I_cdi-W(fRIR4E4t?1W7SrFF~-b}y~Mn`!{ z;H) z>pcIb7Oz!tW+Gk~0%%zd&@%fEMO<4Sk@6oi_z8Y9kn*qEallYT#|JbM4%o`gx&%}G z|8u#u0EMrgf_wxDFa{KW^+o|qH2#R@*8^r2y7~@33i6*Ut}WgDH40HZ76?d$&9B{V z_R~LR@dIuBUl-`QhBppFd~*T$9Rd0IZpdHg_-DAIy@R!-poOlz{oh{JpK>bzr+Yz( z3<829_p3dXIsY-Y>yt8fWpHgz--N~)jDg}90e$7oUV$p$HyQjhArrLz`{DPNbgiy@ z-Q!p4fSY6txav21%jKZ|kN=-4z83pxgR`3&sDTc&zduL&UJ+Sdo{_g=m0WBLg$(WW z?M(mb6mEse&?OpJ0lF{)ba8V;-Hp2y`h80I*Ua+wdbk!iBg`SL7wB)k0Q{SJ$V^~x3t5?oZ8G9>oNR27S6~LkXXF77P>Q?0U>4@B4qTb4^$sDV10+c!ch)Q-t%*EPU znSamSubnn=gZ~h(^|@=N5_UZyef=NLexd#j;&&(ZntUr4e%o1)8!)RXp!7GZKjO_D z+oYzAfp0}3~>2=q%5zo&O6B0 zyxT{|>yG(V4}y#nAnFqY5D?NI$ZNBD+4BeJty=YK4Y zw&;(Qq{IqQqLfNBi^E4|rczh3_{^7q=k zW_K}pca{TOCP2G4+g9f0U2J75Q+=Qn+#$8*DrV|8KoQA6rTx1WcHIn8cW))D0n5#| zmcMOY$t4@GfJvanC~lb6)R$XH-#gWxE@=AI3Y=r+8h8Mm9s-td(?M%Kzm@sV(xHfj z^|SB&;9pZHrRycdTg6sAfzaXvvN!^=xH&JDxx6cjpXSBChc91kL#zVu6rlM6^T{76 z%WDb*^tO4S3_xL&q{@Lru06PHwrUw}V ze;58Iy#UJvzvpkk6p)%J9B@r(?t=f*ZXE3H zXp1-#g>l1*#Y$0f6(>}ccHIaj4-e{dWY1jAF|S} z&2Ipx(woE07pA+={8suV)^@)a*%+0l*%z?AZopdt=FUG-mRBvyt=v1dZ4xVSg}{NL z^8oGnW)pGdyp{ER7f;a?m?0VhJ?X8(c*jxLxi?Vs>xnq9-t{A8dA0N0js1I*@NWOB zMbwTs6`&*irvXOt{Xg{ohZTR0io-9Bw{SJmf)ZXbS54{%=s3yU`=5ZiWtVCxziT1e~ao0wV| z(%V`5?CM=_%0i>|_g;VdOppDmUFbdg1Na}I|D)t<&-vHu?Y|pFme>4?Td_aw&?^4e zti2We>z%m&qCx!sfArmk_v`(p|K- z!Q#(#cdg`K@6P*|yc+iZknh&PuQ#FnOSg->lm53CedAuVpC!L;gTLNF^)Fi?=1%rM 
zI{A-bTtk12PxUV}r21CqZPBWJmh@W3zs4N;7m8H(d+4tbh<>I18Uy2B>g1bSsXy=X z|2S%Ys}_C@X7DfCsP&KN+d>=sEc>;ZfBjbT-wZZ;{+PjSUvB=K!EIk2{F{O6*dH_a cp`0Jb7a#$D69mK#_~Qf2j|Ufk#X^w(0nQTU4*&oF diff --git a/shared/src/freenet/library/index/TermEntryReaderWriter.java b/shared/src/freenet/library/index/TermEntryReaderWriter.java index 08d06f89..cf01b736 100644 --- a/shared/src/freenet/library/index/TermEntryReaderWriter.java +++ b/shared/src/freenet/library/index/TermEntryReaderWriter.java @@ -7,6 +7,7 @@ import freenet.library.io.DataFormatException; import freenet.library.io.ObjectStreamReader; import freenet.library.io.ObjectStreamWriter; +import freenet.library.io.FreenetURI; import freenet.copied.Base64; import java.util.Map; @@ -146,9 +147,9 @@ public TermEntry readObject(DataInputStream dis) throws IOException { case TERM: return new TermTermEntry(subj, rel, dis.readUTF()); case INDEX: - return new TermIndexEntry(subj, rel, readFreenetURI(dis)); + return new TermIndexEntry(subj, rel, FreenetURI.readFullBinaryKeyWithLength(dis)); case PAGE: - String page = readFreenetURI(dis); + FreenetURI page = FreenetURI.readFullBinaryKeyWithLength(dis); int size = dis.readInt(); String title = null; if (size < 0) { @@ -196,13 +197,11 @@ public void writeObject(TermEntry en, DataOutputStream dos) throws IOException { dos.writeUTF(((TermTermEntry)en).term); return; case INDEX: - dos.writeShort(0); - dos.writeUTF(((TermIndexEntry)en).index); + ((TermIndexEntry)en).index.writeFullBinaryKeyWithLength(dos); return; case PAGE: TermPageEntry enn = (TermPageEntry)en; - dos.writeShort(0); - dos.writeUTF(enn.page); + enn.page.writeFullBinaryKeyWithLength(dos); int size = enn.hasPositions() ? 
enn.positionsSize() : 0; if(enn.title == null) dos.writeInt(size); diff --git a/shared/src/freenet/library/index/TermIndexEntry.java b/shared/src/freenet/library/index/TermIndexEntry.java index 907879a0..561ac637 100644 --- a/shared/src/freenet/library/index/TermIndexEntry.java +++ b/shared/src/freenet/library/index/TermIndexEntry.java @@ -3,6 +3,8 @@ * http://www.gnu.org/ for further details of the GPL. */ package freenet.library.index; +import freenet.library.io.FreenetURI; + /** ** A {@link TermEntry} that associates a subject term with another index. ** @@ -13,14 +15,14 @@ public class TermIndexEntry extends TermEntry { /** ** Index target of this entry. */ - final public String index; + final public FreenetURI index; - public TermIndexEntry(String s, float r, String i) { + public TermIndexEntry(String s, float r, FreenetURI i) { super(s, r); if (i == null) { throw new IllegalArgumentException("can't have a null index"); } - index = i; + index = i.intern(); } /*======================================================================== @@ -36,7 +38,7 @@ abstract public class TermEntry int a = super.compareTo(o); if (a != 0) { return a; } // OPT NORM make a more efficient way of comparing these - return index.compareTo(((TermIndexEntry)o).index); + return index.toString().compareTo(((TermIndexEntry)o).index.toString()); } @Override public boolean equals(Object o) { diff --git a/shared/src/freenet/library/index/TermPageEntry.java b/shared/src/freenet/library/index/TermPageEntry.java index a090aaa1..98befe87 100644 --- a/shared/src/freenet/library/index/TermPageEntry.java +++ b/shared/src/freenet/library/index/TermPageEntry.java @@ -12,6 +12,8 @@ import java.util.SortedSet; import java.util.TreeSet; +import freenet.library.io.FreenetURI; + /** ** A {@link TermEntry} that associates a subject term with a final target ** {@link FreenetURI} that satisfies the term. 
@@ -23,7 +25,7 @@ public class TermPageEntry extends TermEntry { /** ** URI of the target */ - final public String page; + final public FreenetURI page; /** * Positions where the term occurs. May be null if we don't have that data. @@ -54,7 +56,7 @@ public class TermPageEntry extends TermEntry { ** @param p Map of positions (where the term appears) to context (fragment ** surrounding it). */ - public TermPageEntry(String s, float r, String u, Map p) { + public TermPageEntry(String s, float r, FreenetURI u, Map p) { this(s, r, u, (String)null, p); } @@ -69,12 +71,12 @@ public TermPageEntry(String s, float r, String u, Map p) { ** @param p Map of positions (where the term appears) to context (fragment ** surrounding it). */ - public TermPageEntry(String s, float r, String u, String t, Map p) { + public TermPageEntry(String s, float r, FreenetURI u, String t, Map p) { super(s, r); if (u == null) { throw new IllegalArgumentException("can't have a null page"); } - page = u; + page = u.intern(); // OPT LOW make the translator use the same URI object as from the URI table? title = t == null ? null : t.intern(); if(p == null) { posFragments = null; @@ -88,12 +90,12 @@ public TermPageEntry(String s, float r, String u, String t, Map /** ** For serialisation. */ - public TermPageEntry(String s, float r, String u, String t, Set pos, Map frags) { + public TermPageEntry(String s, float r, FreenetURI u, String t, Set pos, Map frags) { super(s, r); if (u == null) { throw new IllegalArgumentException("can't have a null page"); } - page = u; + page = u.intern(); // OPT LOW make the translator use the same URI object as from the URI table? 
title = t; if(pos != null) { positions = new TreeSet(pos); diff --git a/shared/src/freenet/library/io/FreenetURI.java b/shared/src/freenet/library/io/FreenetURI.java new file mode 100644 index 00000000..76f55fe5 --- /dev/null +++ b/shared/src/freenet/library/io/FreenetURI.java @@ -0,0 +1,97 @@ +package freenet.library.io; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.net.MalformedURLException; + +public class FreenetURI { + + public FreenetURI(String uri) { + throw new RuntimeException("Not implemented yet."); + } + + public static FreenetURI readFullBinaryKeyWithLength( + DataInputStream dis) { + throw new RuntimeException("Not implemented yet."); + // return null; + } + + public FreenetURI intern() { + throw new RuntimeException("Not implemented yet."); + // return null; + } + + public void writeFullBinaryKeyWithLength(DataOutputStream dos) { + // TODO Auto-generated method stub + throw new RuntimeException("Not implemented yet."); + // + } + + public boolean isUSK() + throws MalformedURLException { + // TODO Auto-generated method stub + throw new RuntimeException("Not implemented yet."); + // return false; + } + + public FreenetURI sskForUSK() { + // TODO Auto-generated method stub + throw new RuntimeException("Not implemented yet."); + // return null; + } + + public boolean isSSKForUSK() { + // TODO Auto-generated method stub + throw new RuntimeException("Not implemented yet."); + // return false; + } + + public FreenetURI uskForSSK() { + // TODO Auto-generated method stub + throw new RuntimeException("Not implemented yet."); + // return null; + } + + public long getEdition() { + // TODO Auto-generated method stub + throw new RuntimeException("Not implemented yet."); + // return 0; + } + + public FreenetURI setMetaString(Object object) { + // TODO Auto-generated method stub + throw new RuntimeException("Not implemented yet."); + // return this; + } + + public FreenetURI setSuggestedEdition(int i) { + // TODO Auto-generated 
method stub + throw new RuntimeException("Not implemented yet."); + // return this; + } + + public String[] getAllMetaStrings() { + // TODO Auto-generated method stub + throw new RuntimeException("Not implemented yet."); + // return null; + } + + public Object lastMetaString() { + // TODO Auto-generated method stub + throw new RuntimeException("Not implemented yet."); + // return null; + } + + public Object getDocName() { + // TODO Auto-generated method stub + throw new RuntimeException("Not implemented yet."); + // return null; + } + + public FreenetURI pushMetaString(String string) { + // TODO Auto-generated method stub + throw new RuntimeException("Not implemented yet."); + // return null; + } + +} diff --git a/shared/src/freenet/library/io/YamlReaderWriter.java b/shared/src/freenet/library/io/YamlReaderWriter.java index d5537b37..b120a576 100644 --- a/shared/src/freenet/library/io/YamlReaderWriter.java +++ b/shared/src/freenet/library/io/YamlReaderWriter.java @@ -34,6 +34,7 @@ import freenet.library.index.TermPageEntry; import freenet.library.index.TermTermEntry; import freenet.library.io.serial.Packer; +import freenet.library.io.FreenetURI; /** @@ -116,11 +117,11 @@ private Yaml makeYAML() { public static class ExtendedRepresenter extends Representer { public ExtendedRepresenter() { - // this.representers.put(FreenetURI.class, new Represent() { - // /*@Override**/ public Node representData(Object data) { - // return representScalar("!FreenetURI", data.toString()); - // } - // }); + this.representers.put(FreenetURI.class, new Represent() { + /*@Override**/ public Node representData(Object data) { + return representScalar("!FreenetURI", data.toString()); + } + }); this.representers.put(Packer.BinInfo.class, new Represent() { /*@Override**/ public Node representData(Object data) { Packer.BinInfo inf = (Packer.BinInfo)data; @@ -160,7 +161,7 @@ public ExtendedConstructor() { this.yamlConstructors.put("!FreenetURI", new AbstractConstruct() { /*@Override**/ public 
Object construct(Node node) { String uri = (String) constructScalar((ScalarNode)node); - return uri; + return new FreenetURI(uri); } }); this.yamlConstructors.put("!BinInfo", new AbstractConstruct() { diff --git a/shared/test/freenet/library/index/TermEntryTest.java b/shared/test/freenet/library/index/TermEntryTest.java index c16de44d..c93647ba 100644 --- a/shared/test/freenet/library/index/TermEntryTest.java +++ b/shared/test/freenet/library/index/TermEntryTest.java @@ -13,6 +13,7 @@ import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.FileArchiver; import freenet.library.io.serial.Packer; +import freenet.library.io.FreenetURI; import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.TaskAbortException; @@ -32,9 +33,9 @@ public class TermEntryTest extends TestCase { final static TermPageEntry z; final static TermPageEntry v; static { - x = new TermIndexEntry("test", 0.8f, "CHK@MIh5-viJQrPkde5gmRZzqjBrqOuh~Wbjg02uuXJUzgM,rKDavdwyVF9Z0sf5BMRZsXj7yiWPFUuewoe0CPesvXE,AAIC--8"); - z = new TermPageEntry("lol", 0.8f, "CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8", null); - v = new TermPageEntry("lol", 0.8f, "CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8", "title", null); + x = new TermIndexEntry("test", 0.8f, new FreenetURI("CHK@MIh5-viJQrPkde5gmRZzqjBrqOuh~Wbjg02uuXJUzgM,rKDavdwyVF9Z0sf5BMRZsXj7yiWPFUuewoe0CPesvXE,AAIC--8")); + z = new TermPageEntry("lol", 0.8f, new FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), null); + v = new TermPageEntry("lol", 0.8f, new FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), "title", null); } final static TermTermEntry y = new TermTermEntry("test", 0.8f, "lol2"); diff --git a/src/plugins/Library/Library.java b/src/plugins/Library/Library.java index 
c168c893..985a30ae 100644 --- a/src/plugins/Library/Library.java +++ b/src/plugins/Library/Library.java @@ -39,13 +39,13 @@ import freenet.client.events.ClientEvent; import freenet.client.events.ClientEventListener; import freenet.client.events.ExpectedMIMEEvent; -import freenet.keys.FreenetURI; import freenet.keys.USK; import freenet.library.ArchiverFactory; import freenet.library.FactoryRegister; import freenet.library.index.Index; import freenet.library.index.ProtoIndex; import freenet.library.index.ProtoIndexSerialiser; +import freenet.library.io.FreenetURI; import freenet.library.io.ObjectStreamReader; import freenet.library.io.ObjectStreamWriter; import freenet.library.io.serial.LiveArchiver; @@ -189,18 +189,23 @@ private Library(PluginRespirator pr) { Logger.error(this, "Invalid bookmark URI: "+target+" for "+name, e); continue; } - if(uri.isUSK()) { - BookmarkCallback callback = new BookmarkCallback(name, uri.getAllMetaStrings(), edition); - bookmarkCallbacks.put(name, callback); - USK u; - try { - u = USK.create(uri); - } catch (MalformedURLException e) { - Logger.error(this, "Invalid bookmark USK: "+target+" for "+name, e); - continue; + try { + if(uri.isUSK()) { + BookmarkCallback callback = new BookmarkCallback(name, uri.getAllMetaStrings(), edition); + bookmarkCallbacks.put(name, callback); + USK u; + try { + u = USK.create(new freenet.keys.FreenetURI(uri.toString())); + } catch (MalformedURLException e) { + Logger.error(this, "Invalid bookmark USK: "+target+" for "+name, e); + continue; + } + uskManager.subscribe(u, callback, false, rcBulk); + callback.ret = uskManager.subscribeContent(u, callback, false, pr.getHLSimpleClient().getFetchContext(), RequestStarter.IMMEDIATE_SPLITFILE_PRIORITY_CLASS, rcBulk); } - uskManager.subscribe(u, callback, false, rcBulk); - callback.ret = uskManager.subscribeContent(u, callback, false, pr.getHLSimpleClient().getFetchContext(), RequestStarter.IMMEDIATE_SPLITFILE_PRIORITY_CLASS, rcBulk); + } catch 
(MalformedURLException e) { + Logger.error(this, "Invalid URI: " + target + " for " + name, e); + continue; } } if (!bookmarks.containsKey("debbies-library-development-index")) { @@ -270,8 +275,9 @@ public synchronized void saveState(){ /** ** Get the index type giving a {@code FreenetURI}. This must not contain ** a metastring (end with "/") or be a USK. + * @throws MalformedURLException */ - public Class getIndexType(FreenetURI indexuri) throws FetchException { + public Class getIndexType(FreenetURI indexuri) throws FetchException, MalformedURLException { if(indexuri.lastMetaString()!=null && indexuri.lastMetaString().equals(XMLIndex.DEFAULT_FILE)) return XMLIndex.class; if(indexuri.lastMetaString()!=null && indexuri.lastMetaString().equals(ProtoIndex.DEFAULT_FILE)) @@ -293,7 +299,7 @@ public Class getIndexType(FreenetURI indexuri) throws FetchException { ClientContext cctx = core.clientContext; FetchContext fctx = hlsc.getFetchContext(); FetchWaiter fw = new FetchWaiter(REQUEST_CLIENT); - final ClientGetter gu = hlsc.fetch(uri, 0x10000, fw, fctx); + final ClientGetter gu = hlsc.fetch(new freenet.keys.FreenetURI(uri.toString()), 0x10000, fw, fctx); gu.setPriorityClass(RequestStarter.INTERACTIVE_PRIORITY_CLASS, cctx); final Class[] c = new Class[1]; @@ -326,7 +332,7 @@ public Class getIndexType(FreenetURI indexuri) throws FetchException { throw new UnsupportedOperationException("Unable to get mime type or got an invalid mime type for index"); } } else if(e.newURI != null) { - uri = e.newURI; + uri = new FreenetURI(e.newURI.toASCIIString()); continue; } } @@ -390,7 +396,7 @@ public String addBookmark(String name, String uri) { try { u = new FreenetURI(uri); if(u.isUSK()) { - uskNew = USK.create(u); + uskNew = USK.create(new freenet.keys.FreenetURI(u.toString())); edition = uskNew.suggestedEdition; } } catch (MalformedURLException e) { @@ -413,7 +419,7 @@ public String addBookmark(String name, String uri) { try { FreenetURI uold = new FreenetURI(old); 
if(uold.isUSK()) { - USK usk = USK.create(uold); + USK usk = USK.create(new freenet.keys.FreenetURI(uold.toString())); if(!(uskNew != null && usk.equals(uskNew, false))) { uskManager.unsubscribe(usk, callback); uskManager.unsubscribeContent(usk, callback.ret, true); @@ -567,7 +573,6 @@ public final Index getIndex(String indexuri, String origIndexName) throws Invali if(uri.isUSK()) edition = uri.getEdition(); indextype = getIndexType(uri); - indexkey = uri.toASCIIString(); } else { throw new AssertionError(); } diff --git a/src/plugins/Library/index/xml/LibrarianHandler.java b/src/plugins/Library/index/xml/LibrarianHandler.java index 1ad680ec..25f45eea 100644 --- a/src/plugins/Library/index/xml/LibrarianHandler.java +++ b/src/plugins/Library/index/xml/LibrarianHandler.java @@ -5,7 +5,7 @@ import freenet.support.Logger; -import freenet.keys.FreenetURI; +import freenet.library.io.FreenetURI; import freenet.library.index.TermPageEntry; import org.xml.sax.Attributes; @@ -228,8 +228,7 @@ public void endElement(String namespaceURI, String localName, String qName) { //if(logMINOR) Logger.minor(this, "Set relevance of "+pageEntry.getTitle()+" to "+pageEntry.rel+" - "+pageEntry.toString()); } - TermPageEntry pageEntry = new TermPageEntry(match.getSubject(), relevance, - inFileURI.intern().toString(), inFileTitle, termpositions); + TermPageEntry pageEntry = new TermPageEntry(match.getSubject(), relevance, inFileURI, inFileTitle, termpositions); result.add(pageEntry); //if(logMINOR) Logger.minor(this, "added "+inFileURI+ " to "+ match); } diff --git a/src/plugins/Library/index/xml/XMLIndex.java b/src/plugins/Library/index/xml/XMLIndex.java index fe59788d..3f9af956 100644 --- a/src/plugins/Library/index/xml/XMLIndex.java +++ b/src/plugins/Library/index/xml/XMLIndex.java @@ -21,11 +21,11 @@ import freenet.client.FetchResult; import freenet.node.RequestStarter; import freenet.node.RequestClient; -import freenet.keys.FreenetURI; import freenet.library.index.Index; import 
freenet.library.index.TermEntry; import freenet.library.index.TermPageEntry; import freenet.library.index.URIEntry; +import freenet.library.io.FreenetURI; import freenet.library.util.exec.Execution; import freenet.library.util.exec.TaskAbortException; @@ -234,12 +234,12 @@ private synchronized void startFetch(boolean retry) throws FetchException, Malfo FreenetURI u = new FreenetURI(uri); while (true) { try { - rootGetter = hlsc.fetch(u, -1, this, hlsc.getFetchContext().clone()); + rootGetter = hlsc.fetch(new freenet.keys.FreenetURI(u.toString()), -1, this, hlsc.getFetchContext().clone()); Logger.normal(this, "Fetch started : "+toString()); break; } catch (FetchException e) { if (e.newURI != null) { - u = e.newURI; + u = new FreenetURI(e.newURI.toASCIIString()); if(logMINOR) Logger.minor(this, "New URI: "+uri); continue; } else @@ -829,7 +829,7 @@ class StageThreeHandler extends DefaultHandler { if(logDEBUG) Logger.debug(this, "Set relevance of "+title+" to "+relevance+" - "+key); } - TermPageEntry pageEntry = new TermPageEntry(req.getSubject(), relevance, key, title, match.termpositions); + TermPageEntry pageEntry = new TermPageEntry(req.getSubject(), relevance, new FreenetURI(key), title, match.termpositions); result.add(pageEntry); //Logger.minor(this, "added "+inFileURI+ " to "+ match); } diff --git a/src/plugins/Library/ui/ResultNodeGenerator.java b/src/plugins/Library/ui/ResultNodeGenerator.java index 7acc8895..64cbfdfa 100644 --- a/src/plugins/Library/ui/ResultNodeGenerator.java +++ b/src/plugins/Library/ui/ResultNodeGenerator.java @@ -4,15 +4,14 @@ package plugins.Library.ui; -import freenet.keys.FreenetURI; import freenet.library.index.TermEntry; import freenet.library.index.TermIndexEntry; import freenet.library.index.TermPageEntry; import freenet.library.index.TermTermEntry; +import freenet.library.io.FreenetURI; import freenet.support.HTMLNode; import freenet.support.Logger; -import java.net.MalformedURLException; import java.util.Iterator; import 
java.util.Map.Entry; import java.util.Set; @@ -64,10 +63,7 @@ public synchronized void run(){ generatePageEntryNode(); }catch(RuntimeException e){ exception = e; // Exeptions thrown here are stored in case this is being run in a thread, in this case it is thrown in isDone() or iterator() - throw exception; - } catch (MalformedURLException e) { - exception = new RuntimeException(e); - throw exception; + throw e; } done = true; result = null; @@ -95,9 +91,8 @@ public boolean isDone(){ /** * Parse result into generator - * @throws MalformedURLException */ - private void parseResult() throws MalformedURLException{ + private void parseResult(){ groupmap = new TreeMap(); if(!groupusk) pageset = new TreeMap(RelevanceComparator.comparator); @@ -113,7 +108,8 @@ private void parseResult() throws MalformedURLException{ String sitebase; long uskEdition = Long.MIN_VALUE; // Get the key and name - FreenetURI uri = new FreenetURI(pageEntry.page); + FreenetURI uri; + uri = pageEntry.page; // convert usk's if(uri.isSSKForUSK()){ uri = uri.uskForSSK(); @@ -168,9 +164,8 @@ private HTMLNode generateTermEntryNode(){ /** * Generate node of page results from this generator - * @throws MalformedURLException */ - private void generatePageEntryNode() throws MalformedURLException{ + private void generatePageEntryNode(){ pageEntryNode = new HTMLNode("div", "id", "results"); int results = 0; @@ -243,12 +238,11 @@ private void generatePageEntryNode() throws MalformedURLException{ * Returns an {@link HTMLNode} representation of a {@link TermPageEntry} for display in a browser * @param entry * @param newestVersion if set, the result is shown in full brightness, if unset the result is greyed out - * @throws MalformedURLException */ - private HTMLNode termPageEntryNode(TermPageEntry entry,boolean newestVersion) throws MalformedURLException { - FreenetURI uri = new FreenetURI(entry.page); + private HTMLNode termPageEntryNode(TermPageEntry entry,boolean newestVersion) { + FreenetURI uri = entry.page; 
String showtitle = entry.title; - String showurl = uri.toShortString(); + String showurl = uri.toString(); if (showtitle == null || showtitle.trim().length() == 0) { showtitle = showurl; } diff --git a/test/freenet/library/io/FreenetURIForTest.java b/test/freenet/library/io/FreenetURIForTest.java new file mode 100644 index 00000000..f255b28d --- /dev/null +++ b/test/freenet/library/io/FreenetURIForTest.java @@ -0,0 +1,15 @@ +package freenet.library.io; + +import java.util.Random; + +public class FreenetURIForTest extends FreenetURI { + + public FreenetURIForTest(String uri) { + super(uri); + throw new RuntimeException("Cannot create for test."); + } + + public static FreenetURI generateRandomCHK(Random rand) { + throw new RuntimeException("Not implemented yet."); + } +} diff --git a/test/plugins/Library/Tester.java b/test/plugins/Library/Tester.java index 6be7cfc5..7f7bc62f 100644 --- a/test/plugins/Library/Tester.java +++ b/test/plugins/Library/Tester.java @@ -8,12 +8,12 @@ import plugins.Library.util.*; import plugins.Library.*; -import freenet.keys.FreenetURI; import freenet.library.Priority; import freenet.library.index.ProtoIndex; import freenet.library.index.ProtoIndexComponentSerialiser; import freenet.library.index.ProtoIndexSerialiser; import freenet.library.index.TermEntry; +import freenet.library.io.FreenetURI; import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.Serialiser.*; @@ -158,7 +158,11 @@ public static String testPushIndex() { Random rand = new Random(); @Override public void run() { - idx = new ProtoIndex("CHK@", "test", null, null, 0); + try { + idx = new ProtoIndex(new FreenetURI("CHK@"), "test", null, null, 0); + } catch (java.net.MalformedURLException e) { + throw new AssertionError(e); + } ProtoIndexComponentSerialiser.get().setSerialiserFor(idx); for (String key: push_index_words) { @@ -238,7 +242,12 @@ public static String testPushAndMergeIndex() { Random rand = new 
Random(); @Override public void run() { - idx = new ProtoIndex("CHK@", "test", null, null, 0); + + try { + idx = new ProtoIndex(new FreenetURI("CHK@"), "test", null, null, 0); + } catch (java.net.MalformedURLException e) { + throw new AssertionError(e); + } ProtoIndexComponentSerialiser.get().setSerialiserFor(idx); try { diff --git a/test/plugins/Library/index/BIndexTest.java b/test/plugins/Library/index/BIndexTest.java index dc7f0091..789ab051 100644 --- a/test/plugins/Library/index/BIndexTest.java +++ b/test/plugins/Library/index/BIndexTest.java @@ -90,7 +90,11 @@ public BIndexTest() { )); protected void newTestSkeleton() { - idx = new ProtoIndex("CHK@yeah", "test", null, null, 0); + try { + idx = new ProtoIndex(new FreenetURI("CHK@yeah"), "test", null, null, 0); + } catch (java.net.MalformedURLException e) { + assertTrue(false); + } csrl.setSerialiserFor(idx); timeDiff(); } diff --git a/test/plugins/Library/index/TermEntryTest.java b/test/plugins/Library/index/TermEntryTest.java new file mode 100644 index 00000000..f1540ea0 --- /dev/null +++ b/test/plugins/Library/index/TermEntryTest.java @@ -0,0 +1,121 @@ +/* This code is part of Freenet. It is distributed under the GNU General + * Public License, version 2 (or at your option any later version). See + * http://www.gnu.org/ for further details of the GPL. 
*/ +package plugins.Library.index; + +import junit.framework.TestCase; + + +import plugins.Library.io.YamlReaderWriter; + +import freenet.keys.FreenetURI; +import freenet.library.io.serial.FileArchiver; +import freenet.library.io.serial.Packer; +import freenet.library.io.serial.Serialiser.*; +import freenet.library.util.exec.TaskAbortException; + +import java.util.Arrays; +import java.util.List; +import java.util.ArrayList; +import java.util.Map; +import java.util.HashMap; +import java.util.Iterator; +import java.util.UUID; +import java.net.MalformedURLException; +import java.io.*; + +/** +** @author infinity0 +*/ +public class TermEntryTest extends TestCase { + + final static TermTermEntry w = new TermTermEntry("test", 0.8f, "lol"); + final static TermIndexEntry x; + final static TermPageEntry z; + final static TermPageEntry v; + static { + try { + x = new TermIndexEntry("test", 0.8f, new FreenetURI("CHK@MIh5-viJQrPkde5gmRZzqjBrqOuh~Wbjg02uuXJUzgM,rKDavdwyVF9Z0sf5BMRZsXj7yiWPFUuewoe0CPesvXE,AAIC--8")); + z = new TermPageEntry("lol", 0.8f, new FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), null); + v = new TermPageEntry("lol", 0.8f, new FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), "title", null); + } catch (MalformedURLException e) { + throw new AssertionError(); + } + } + final static TermTermEntry y = new TermTermEntry("test", 0.8f, "lol2"); + + public void testBasic() throws TaskAbortException { + File f = new File("TermEntryTest"); + f.mkdir(); + FileArchiver> ym = new FileArchiver>(new YamlReaderWriter(), "test", null, ".yml", f); + + Map map = new HashMap(); + + List l = new ArrayList(); + l.add(w); + l.add(w); + l.add(x); + l.add(y); + l.add(z); + map.put("test", l); + try { + map.put("test2", new Packer.BinInfo(new 
FreenetURI("http://127.0.0.1:8888/CHK@WtWIvOZXLVZkmDrY5929RxOZ-woRpRoMgE8rdZaQ0VU,rxH~D9VvOOuA7bCnVuzq~eux77i9RR3lsdwVHUgXoOY,AAIC--8/Library.jar"), 123)); + } catch (java.net.MalformedURLException e) { + assert(false); + } + + ym.push(new PushTask>(map)); + PullTask> pt = new PullTask>(""); + try{ + ym.pull(pt); + } catch (Exception e) { + e.printStackTrace(); + } + + assertTrue(pt.data instanceof Map); + Map m = pt.data; + assertTrue(m.get("test") instanceof List); + List ll = (List)m.get("test"); + assertTrue(ll.get(0) instanceof TermTermEntry); + assertTrue(ll.get(1) == ll.get(0)); + // NOTE these tests fail in snakeYAML 1.2 and below, fixed in hg + assertTrue(ll.get(2) instanceof TermIndexEntry); + assertTrue(ll.get(3) instanceof TermTermEntry); + + assertTrue(m.get("test2") instanceof Packer.BinInfo); + Packer.BinInfo inf = (Packer.BinInfo)m.get("test2"); + assertTrue(inf.getID() instanceof FreenetURI); + } + + public void testBinaryReadWrite() throws IOException, TaskAbortException { + TermEntryReaderWriter rw = TermEntryReaderWriter.getInstance(); + ByteArrayOutputStream bo = new ByteArrayOutputStream(); + DataOutputStream oo = new DataOutputStream(bo); + rw.writeObject(v, oo); + rw.writeObject(w, oo); + rw.writeObject(x, oo); + rw.writeObject(y, oo); + rw.writeObject(z, oo); + oo.close(); + ByteArrayInputStream bi = new ByteArrayInputStream(bo.toByteArray()); + DataInputStream oi = new DataInputStream(bi); + TermEntry v1 = rw.readObject(oi); + TermEntry w1 = rw.readObject(oi); + TermEntry x1 = rw.readObject(oi); + TermEntry y1 = rw.readObject(oi); + TermEntry z1 = rw.readObject(oi); + oi.close(); + assertEqualButNotIdentical(v, v1); + assertEqualButNotIdentical(w, w1); + assertEqualButNotIdentical(x, x1); // this will fail before fred@a6e73dbbaa7840bd20d5e3fb95cd2c678a106e85 + assertEqualButNotIdentical(y, y1); + assertEqualButNotIdentical(z, z1); + } + + public static void assertEqualButNotIdentical(Object a, Object b) { + assertTrue(a != b); + 
assertTrue(a.equals(b)); + assertTrue(a.hashCode() == b.hashCode()); + } + +} diff --git a/test/plugins/Library/util/Generators.java b/test/plugins/Library/util/Generators.java index d1bc5d84..7e9ab3e3 100644 --- a/test/plugins/Library/util/Generators.java +++ b/test/plugins/Library/util/Generators.java @@ -7,8 +7,8 @@ import plugins.Library.index.*; -import freenet.keys.FreenetURI; import freenet.library.index.TermPageEntry; +import freenet.library.io.FreenetURIForTest; import java.util.UUID; import java.util.Random; @@ -38,7 +38,7 @@ public static String rndKey() { } public static TermPageEntry rndEntry(String key) { - return new TermPageEntry(key, (float)Math.random(), FreenetURI.generateRandomCHK(rand).toString(), null); + return new TermPageEntry(key, (float)Math.random(), FreenetURIForTest.generateRandomCHK(rand), null); } } From faa221d0f04176b70059af80894bd141b8a0d6ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Wed, 16 Mar 2016 21:22:17 +0100 Subject: [PATCH 107/180] Fixes to get rid of Eclipse warnings. I consider this a step on the way. Next step is to make it work. 
--- .../src/freenet/library/index/ProtoIndex.java | 7 ++-- .../library/index/ProtoIndexSerialiser.java | 14 +++++++- shared/src/freenet/library/io/FreenetURI.java | 2 +- .../freenet/library/io/YamlReaderWriter.java | 7 +++- .../freenet/library/index/TermEntryTest.java | 11 ++++-- src/plugins/Library/Library.java | 35 ++++++++----------- .../freenet/library/io/FreenetURIForTest.java | 3 +- test/plugins/Library/Tester.java | 9 ++--- test/plugins/Library/index/BIndexTest.java | 2 +- test/plugins/Library/index/TermEntryTest.java | 14 +++++--- .../library/uploader/DirectoryCreator.java | 8 ++++- .../library/uploader/DirectoryUploader.java | 8 ++++- 12 files changed, 78 insertions(+), 42 deletions(-) diff --git a/shared/src/freenet/library/index/ProtoIndex.java b/shared/src/freenet/library/index/ProtoIndex.java index ed35dad8..a1e6a782 100644 --- a/shared/src/freenet/library/index/ProtoIndex.java +++ b/shared/src/freenet/library/index/ProtoIndex.java @@ -13,6 +13,7 @@ import java.util.Set; import java.util.concurrent.Executor; +import freenet.library.io.FreenetURI; import freenet.library.io.serial.ProgressTracker; import freenet.library.io.serial.Serialiser; import freenet.library.util.DataNotLoadedException; @@ -55,7 +56,7 @@ final public class ProtoIndex implements Index { /** ** Request ID for this index */ - protected String reqID; + protected FreenetURI reqID; /** ** Insert ID for this index @@ -93,7 +94,7 @@ final public class ProtoIndex implements Index { final protected SkeletonBTreeMap> utab; - public ProtoIndex(String id, String n, String owner, String ownerEmail, long pages) { + public ProtoIndex(FreenetURI id, String n, String owner, String ownerEmail, long pages) { this(id, n, owner, ownerEmail, pages, new Date(), new HashMap(), new SkeletonBTreeMap>(BTREE_NODE_MIN), new SkeletonBTreeMap>(BTREE_NODE_MIN)/*, @@ -101,7 +102,7 @@ public ProtoIndex(String id, String n, String owner, String ownerEmail, long pag ); } - protected ProtoIndex(String id, String n, 
String owner, String ownerEmail, long pages, Date m, Map x, + protected ProtoIndex(FreenetURI id, String n, String owner, String ownerEmail, long pages, Date m, Map x, SkeletonBTreeMap> u, SkeletonBTreeMap> t/*, SkeletonMap f*/ diff --git a/shared/src/freenet/library/index/ProtoIndexSerialiser.java b/shared/src/freenet/library/index/ProtoIndexSerialiser.java index 41f1bab6..8ef8c9d4 100644 --- a/shared/src/freenet/library/index/ProtoIndexSerialiser.java +++ b/shared/src/freenet/library/index/ProtoIndexSerialiser.java @@ -4,12 +4,14 @@ package freenet.library.index; import java.io.File; +import java.net.MalformedURLException; import java.util.Date; import java.util.LinkedHashMap; import java.util.Map; import freenet.library.FactoryRegister; import freenet.library.io.DataFormatException; +import freenet.library.io.FreenetURI; import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.Archiver; import freenet.library.io.serial.FileArchiver; @@ -183,7 +185,17 @@ public IndexTranslator(LiveArchiver, SimpleProgress> subsrl) try { // FIXME yet more hacks related to the lack of proper asynchronous FreenetArchiver... 
ProtoIndexComponentSerialiser cmpsrl = ProtoIndexComponentSerialiser.get((Integer)map.get("serialFormatUID"), subsrl); - String reqID = (String)map.get("reqID"); + Object reqIDObject = map.get("reqID"); + FreenetURI reqID; + if (reqIDObject instanceof FreenetURI) { + reqID = (FreenetURI) reqIDObject; + } else { + try { + reqID = new FreenetURI((String) reqIDObject); + } catch (MalformedURLException e) { + throw new DataFormatException("Badly formatted URI", e, null); + } + } String name = (String)map.get("name"); String ownerName = (String)map.get("ownerName"); String ownerEmail = (String)map.get("ownerEmail"); diff --git a/shared/src/freenet/library/io/FreenetURI.java b/shared/src/freenet/library/io/FreenetURI.java index 76f55fe5..72c4bd6d 100644 --- a/shared/src/freenet/library/io/FreenetURI.java +++ b/shared/src/freenet/library/io/FreenetURI.java @@ -6,7 +6,7 @@ public class FreenetURI { - public FreenetURI(String uri) { + public FreenetURI(String uri) throws MalformedURLException { throw new RuntimeException("Not implemented yet."); } diff --git a/shared/src/freenet/library/io/YamlReaderWriter.java b/shared/src/freenet/library/io/YamlReaderWriter.java index b120a576..4b183c4f 100644 --- a/shared/src/freenet/library/io/YamlReaderWriter.java +++ b/shared/src/freenet/library/io/YamlReaderWriter.java @@ -27,6 +27,7 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.io.IOException; +import java.net.MalformedURLException; /* class definitions added to the extended Yaml processor */ import freenet.library.index.TermEntry; @@ -161,7 +162,11 @@ public ExtendedConstructor() { this.yamlConstructors.put("!FreenetURI", new AbstractConstruct() { /*@Override**/ public Object construct(Node node) { String uri = (String) constructScalar((ScalarNode)node); - return new FreenetURI(uri); + try { + return new FreenetURI(uri); + } catch (java.net.MalformedURLException e) { + throw new ConstructorException("while constructing a FreenetURI", 
node.getStartMark(), "found malformed URI " + uri, null); + } } }); this.yamlConstructors.put("!BinInfo", new AbstractConstruct() { diff --git a/shared/test/freenet/library/index/TermEntryTest.java b/shared/test/freenet/library/index/TermEntryTest.java index c93647ba..d584579f 100644 --- a/shared/test/freenet/library/index/TermEntryTest.java +++ b/shared/test/freenet/library/index/TermEntryTest.java @@ -22,6 +22,7 @@ import java.util.Map; import java.util.HashMap; import java.io.*; +import java.net.MalformedURLException; /** ** @author infinity0 @@ -33,9 +34,13 @@ public class TermEntryTest extends TestCase { final static TermPageEntry z; final static TermPageEntry v; static { - x = new TermIndexEntry("test", 0.8f, new FreenetURI("CHK@MIh5-viJQrPkde5gmRZzqjBrqOuh~Wbjg02uuXJUzgM,rKDavdwyVF9Z0sf5BMRZsXj7yiWPFUuewoe0CPesvXE,AAIC--8")); - z = new TermPageEntry("lol", 0.8f, new FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), null); - v = new TermPageEntry("lol", 0.8f, new FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), "title", null); + try { + x = new TermIndexEntry("test", 0.8f, new FreenetURI("CHK@MIh5-viJQrPkde5gmRZzqjBrqOuh~Wbjg02uuXJUzgM,rKDavdwyVF9Z0sf5BMRZsXj7yiWPFUuewoe0CPesvXE,AAIC--8")); + z = new TermPageEntry("lol", 0.8f, new FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), null); + v = new TermPageEntry("lol", 0.8f, new FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), "title", null); + } catch (MalformedURLException e) { + throw new AssertionError(); + } } final static TermTermEntry y = new TermTermEntry("test", 0.8f, "lol2"); diff --git a/src/plugins/Library/Library.java b/src/plugins/Library/Library.java index 985a30ae..382926ec 100644 --- a/src/plugins/Library/Library.java +++ 
b/src/plugins/Library/Library.java @@ -39,13 +39,13 @@ import freenet.client.events.ClientEvent; import freenet.client.events.ClientEventListener; import freenet.client.events.ExpectedMIMEEvent; +import freenet.keys.FreenetURI; import freenet.keys.USK; import freenet.library.ArchiverFactory; import freenet.library.FactoryRegister; import freenet.library.index.Index; import freenet.library.index.ProtoIndex; import freenet.library.index.ProtoIndexSerialiser; -import freenet.library.io.FreenetURI; import freenet.library.io.ObjectStreamReader; import freenet.library.io.ObjectStreamWriter; import freenet.library.io.serial.LiveArchiver; @@ -189,23 +189,18 @@ private Library(PluginRespirator pr) { Logger.error(this, "Invalid bookmark URI: "+target+" for "+name, e); continue; } - try { - if(uri.isUSK()) { - BookmarkCallback callback = new BookmarkCallback(name, uri.getAllMetaStrings(), edition); - bookmarkCallbacks.put(name, callback); - USK u; - try { - u = USK.create(new freenet.keys.FreenetURI(uri.toString())); - } catch (MalformedURLException e) { - Logger.error(this, "Invalid bookmark USK: "+target+" for "+name, e); - continue; - } - uskManager.subscribe(u, callback, false, rcBulk); - callback.ret = uskManager.subscribeContent(u, callback, false, pr.getHLSimpleClient().getFetchContext(), RequestStarter.IMMEDIATE_SPLITFILE_PRIORITY_CLASS, rcBulk); + if(uri.isUSK()) { + BookmarkCallback callback = new BookmarkCallback(name, uri.getAllMetaStrings(), edition); + bookmarkCallbacks.put(name, callback); + USK u; + try { + u = USK.create(new freenet.keys.FreenetURI(uri.toString())); + } catch (MalformedURLException e) { + Logger.error(this, "Invalid bookmark USK: "+target+" for "+name, e); + continue; } - } catch (MalformedURLException e) { - Logger.error(this, "Invalid URI: " + target + " for " + name, e); - continue; + uskManager.subscribe(u, callback, false, rcBulk); + callback.ret = uskManager.subscribeContent(u, callback, false, pr.getHLSimpleClient().getFetchContext(), 
RequestStarter.IMMEDIATE_SPLITFILE_PRIORITY_CLASS, rcBulk); } } if (!bookmarks.containsKey("debbies-library-development-index")) { @@ -277,7 +272,7 @@ public synchronized void saveState(){ ** a metastring (end with "/") or be a USK. * @throws MalformedURLException */ - public Class getIndexType(FreenetURI indexuri) throws FetchException, MalformedURLException { + public Class getIndexType(FreenetURI indexuri) throws FetchException { if(indexuri.lastMetaString()!=null && indexuri.lastMetaString().equals(XMLIndex.DEFAULT_FILE)) return XMLIndex.class; if(indexuri.lastMetaString()!=null && indexuri.lastMetaString().equals(ProtoIndex.DEFAULT_FILE)) @@ -299,7 +294,7 @@ public Class getIndexType(FreenetURI indexuri) throws FetchException, Malform ClientContext cctx = core.clientContext; FetchContext fctx = hlsc.getFetchContext(); FetchWaiter fw = new FetchWaiter(REQUEST_CLIENT); - final ClientGetter gu = hlsc.fetch(new freenet.keys.FreenetURI(uri.toString()), 0x10000, fw, fctx); + final ClientGetter gu = hlsc.fetch(uri, 0x10000, fw, fctx); gu.setPriorityClass(RequestStarter.INTERACTIVE_PRIORITY_CLASS, cctx); final Class[] c = new Class[1]; @@ -332,7 +327,7 @@ public Class getIndexType(FreenetURI indexuri) throws FetchException, Malform throw new UnsupportedOperationException("Unable to get mime type or got an invalid mime type for index"); } } else if(e.newURI != null) { - uri = new FreenetURI(e.newURI.toASCIIString()); + uri = e.newURI; continue; } } diff --git a/test/freenet/library/io/FreenetURIForTest.java b/test/freenet/library/io/FreenetURIForTest.java index f255b28d..f459dc25 100644 --- a/test/freenet/library/io/FreenetURIForTest.java +++ b/test/freenet/library/io/FreenetURIForTest.java @@ -1,10 +1,11 @@ package freenet.library.io; +import java.net.MalformedURLException; import java.util.Random; public class FreenetURIForTest extends FreenetURI { - public FreenetURIForTest(String uri) { + public FreenetURIForTest(String uri) throws MalformedURLException { 
super(uri); throw new RuntimeException("Cannot create for test."); } diff --git a/test/plugins/Library/Tester.java b/test/plugins/Library/Tester.java index 7f7bc62f..e8742f56 100644 --- a/test/plugins/Library/Tester.java +++ b/test/plugins/Library/Tester.java @@ -8,12 +8,14 @@ import plugins.Library.util.*; import plugins.Library.*; +import freenet.keys.FreenetURI; +import freenet.node.RequestStarter; + import freenet.library.Priority; import freenet.library.index.ProtoIndex; import freenet.library.index.ProtoIndexComponentSerialiser; import freenet.library.index.ProtoIndexSerialiser; import freenet.library.index.TermEntry; -import freenet.library.io.FreenetURI; import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.Serialiser.*; @@ -23,7 +25,6 @@ import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.func.Closure; -import freenet.node.RequestStarter; import java.util.*; import java.io.*; @@ -159,7 +160,7 @@ public static String testPushIndex() { @Override public void run() { try { - idx = new ProtoIndex(new FreenetURI("CHK@"), "test", null, null, 0); + idx = new ProtoIndex(new freenet.library.io.FreenetURI("CHK@"), "test", null, null, 0); } catch (java.net.MalformedURLException e) { throw new AssertionError(e); } @@ -244,7 +245,7 @@ public static String testPushAndMergeIndex() { @Override public void run() { try { - idx = new ProtoIndex(new FreenetURI("CHK@"), "test", null, null, 0); + idx = new ProtoIndex(new freenet.library.io.FreenetURI("CHK@"), "test", null, null, 0); } catch (java.net.MalformedURLException e) { throw new AssertionError(e); } diff --git a/test/plugins/Library/index/BIndexTest.java b/test/plugins/Library/index/BIndexTest.java index 789ab051..0c895fc3 100644 --- a/test/plugins/Library/index/BIndexTest.java +++ b/test/plugins/Library/index/BIndexTest.java @@ -91,7 +91,7 @@ public BIndexTest() { protected 
void newTestSkeleton() { try { - idx = new ProtoIndex(new FreenetURI("CHK@yeah"), "test", null, null, 0); + idx = new ProtoIndex(new freenet.library.io.FreenetURI("CHK@yeah"), "test", null, null, 0); } catch (java.net.MalformedURLException e) { assertTrue(false); } diff --git a/test/plugins/Library/index/TermEntryTest.java b/test/plugins/Library/index/TermEntryTest.java index f1540ea0..eeaf94dc 100644 --- a/test/plugins/Library/index/TermEntryTest.java +++ b/test/plugins/Library/index/TermEntryTest.java @@ -6,9 +6,13 @@ import junit.framework.TestCase; -import plugins.Library.io.YamlReaderWriter; - import freenet.keys.FreenetURI; +import freenet.library.index.TermEntry; +import freenet.library.index.TermEntryReaderWriter; +import freenet.library.index.TermIndexEntry; +import freenet.library.index.TermPageEntry; +import freenet.library.index.TermTermEntry; +import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.FileArchiver; import freenet.library.io.serial.Packer; import freenet.library.io.serial.Serialiser.*; @@ -35,9 +39,9 @@ public class TermEntryTest extends TestCase { final static TermPageEntry v; static { try { - x = new TermIndexEntry("test", 0.8f, new FreenetURI("CHK@MIh5-viJQrPkde5gmRZzqjBrqOuh~Wbjg02uuXJUzgM,rKDavdwyVF9Z0sf5BMRZsXj7yiWPFUuewoe0CPesvXE,AAIC--8")); - z = new TermPageEntry("lol", 0.8f, new FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), null); - v = new TermPageEntry("lol", 0.8f, new FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), "title", null); + x = new TermIndexEntry("test", 0.8f, new freenet.library.io.FreenetURI("CHK@MIh5-viJQrPkde5gmRZzqjBrqOuh~Wbjg02uuXJUzgM,rKDavdwyVF9Z0sf5BMRZsXj7yiWPFUuewoe0CPesvXE,AAIC--8")); + z = new TermPageEntry("lol", 0.8f, new 
freenet.library.io.FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), null); + v = new TermPageEntry("lol", 0.8f, new freenet.library.io.FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), "title", null); } catch (MalformedURLException e) { throw new AssertionError(); } diff --git a/uploader/src/freenet/library/uploader/DirectoryCreator.java b/uploader/src/freenet/library/uploader/DirectoryCreator.java index cf11f2b2..18a524dd 100644 --- a/uploader/src/freenet/library/uploader/DirectoryCreator.java +++ b/uploader/src/freenet/library/uploader/DirectoryCreator.java @@ -4,6 +4,7 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; +import java.net.MalformedURLException; import java.util.Map; import java.util.Map.Entry; @@ -11,6 +12,7 @@ import freenet.library.index.ProtoIndexComponentSerialiser; import freenet.library.index.ProtoIndexSerialiser; import freenet.library.index.TermEntry; +import freenet.library.io.FreenetURI; import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.Serialiser.PushTask; import freenet.library.util.SkeletonBTreeSet; @@ -38,7 +40,11 @@ class DirectoryCreator { LiveArchiver, SimpleProgress> archiver = (LiveArchiver, SimpleProgress>) srlDisk.getChildSerialiser(); leafsrlDisk = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_FILE_LOCAL, archiver); - idxDisk = new ProtoIndex("CHK@", "test", null, null, 0L); + try { + idxDisk = new ProtoIndex(new FreenetURI("CHK@"), "test", null, null, 0L); + } catch (MalformedURLException e) { + throw new AssertionError(e); + } leafsrlDisk.setSerialiserFor(idxDisk); countTerms = 0; diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index 020d46cf..2b660ede 100644 --- 
a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -10,6 +10,7 @@ import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.UnsupportedEncodingException; +import java.net.MalformedURLException; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; @@ -21,6 +22,7 @@ import freenet.library.index.ProtoIndexComponentSerialiser; import freenet.library.index.ProtoIndexSerialiser; import freenet.library.index.TermEntry; +import freenet.library.io.FreenetURI; import freenet.library.io.serial.LiveArchiver; import freenet.library.io.serial.Serialiser.PullTask; import freenet.library.io.serial.Serialiser.PushTask; @@ -521,7 +523,11 @@ private void makeFreenetSerialisers() { (LiveArchiver,SimpleProgress>)(srl.getChildSerialiser()); leafsrl = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_DEFAULT, archiver); if(lastUploadURI == null) { - idxFreenet = new ProtoIndex("CHK@", "test", null, null, 0L); + try { + idxFreenet = new ProtoIndex(new FreenetURI("CHK@"), "test", null, null, 0L); + } catch (MalformedURLException e) { + throw new AssertionError(e); + } // FIXME more hacks: It's essential that we use the // same FreenetArchiver instance here. leafsrl.setSerialiserFor(idxFreenet); From 454363ff7da04ef6636a34ca3098af003ae8d022 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 17 Apr 2016 00:42:40 +0200 Subject: [PATCH 108/180] Fix types for the Downloader. 
--- .../freenet/library/uploader/DownloadAll.java | 46 +++++++++++++------ 1 file changed, 31 insertions(+), 15 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index f88b0a05..50d733da 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -436,7 +436,7 @@ Date getStarted() { return started; } - String getFilename() { + String getKey() { return filename; } @@ -491,6 +491,7 @@ void printLeft() { } interface UriProcessor { + public FetchedPage getPage(); public boolean processUri(String uri); } @@ -515,13 +516,15 @@ private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro } } catch (ClassCastException e) { - throw new RuntimeException("Cannot process BinInfo value " + value.getID() + " for " + page_uri, e); + throw new RuntimeException("Cannot process BinInfo value " + value.getID() + + " for " + uriProcessor.getPage().getURI(), + e); } } Map subnodes = (Map) map2.get("subnodes"); logger.log(Level.FINER, "Contains ttab.entries (level {0}) with {1} subnodes", new Object[] { - page_level, + uriProcessor.getPage().level, subnodes.size(), }); for (String key : subnodes.keySet()) { @@ -538,7 +541,7 @@ private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro // Must separate map and array! 
if (map.containsKey("subnodes")) { throw new RuntimeException("This parsing is not complex enough to handle subnodes for terms for " + - page_uri); + uriProcessor.getPage().getURI()); } if (map.get("entries") instanceof Map) { Map entries = @@ -546,7 +549,7 @@ private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro logger.log(Level.FINE, "Contains from {1} to {2} (level {0}) with {3} entries.", new Object[] { - page_level, + uriProcessor.getPage().level, map.get("lkey"), map.get("rkey"), entries.size() @@ -558,7 +561,8 @@ private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro foundChildren ++; } } catch (ClassCastException e) { - throw new RuntimeException("Cannot process BinInfo (2) " + value.getID() + " for " + page_uri); + throw new RuntimeException("Cannot process BinInfo (2) " + value.getID() + + " for " + uriProcessor.getPage().getURI()); } } return; @@ -568,7 +572,7 @@ private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro logger.log(Level.FINE, "Contains from {1} to {2} (level {0}) with page entries.", new Object[] { - page_level, + uriProcessor.getPage().level, map.get("lkey"), map.get("rkey") }); @@ -582,7 +586,7 @@ private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro && map2.containsKey("size") && map2.containsKey("entries")) { logger.log(Level.FINER, "Starts with entry for {1} (level {0}). 
Searching for subnodes.", new Object[] { - page_level, + uriProcessor.getPage().level, entry.getKey(), }); String first = null; @@ -598,7 +602,7 @@ private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro Map subnodes = (Map) map3.get("subnodes"); logger.log(Level.FINER, "Entry for {1} (level {0}) contains {2} subnodes.", new Object[] { - page_level, + uriProcessor.getPage().level, contents.getKey(), subnodes.size(), }); @@ -611,10 +615,11 @@ private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro } continue; } - throw new RuntimeException("Cannot process entries. Entry for " + contents.getKey() + " is not String=Map for " + page_uri); + throw new RuntimeException("Cannot process entries. Entry for " + contents.getKey() + " is not String=Map for " + + uriProcessor.getPage().getURI()); } logger.log(Level.FINER, "Starts with entry for {1} and ended with entry {2} (level {0}).", new Object[] { - page_level, + uriProcessor.getPage().level, first, last, }); @@ -720,7 +725,12 @@ public void receivedAllData(FcpConnection c, AllData ad) { "(max " + maxObjectQueueSize + ")."); page.didSucceed(); UriProcessor uriProcessor = new UriProcessor() { - @Override + @Override + public FetchedPage getPage() { + return page; + } + + @Override public boolean processUri(String uri) { return processAnUri(uri); } @@ -797,7 +807,7 @@ public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { assert c == connection; assert uriGenerated != null; String identifier = uriGenerated.getIdentifier(); - String chk = ongoingUploads.get(identifier).getFilename(); + String chk = ongoingUploads.get(identifier).getKey(); if (!uriGenerated.getURI().equals(chk)) { logger.severe("Were supposed to resurrect " + chk + " but the URI calculated to " + uriGenerated.getURI() + ". 
" + @@ -814,7 +824,7 @@ public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) assert putSuccessful != null; String identifier = putSuccessful.getIdentifier(); final OngoingUpload foundUpload = ongoingUploads.get(identifier); - String chk = foundUpload.getFilename(); + String chk = foundUpload.getKey(); if (!putSuccessful.getURI().equals(chk)) { logger.severe("Uploaded " + putSuccessful.getURI() + " while supposed to upload " + chk + @@ -834,7 +844,7 @@ public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { assert putFailed != null; String identifier = putFailed.getIdentifier(); final OngoingUpload foundUpload = ongoingUploads.get(identifier); - String chk = foundUpload.getFilename(); + String chk = foundUpload.getKey(); logger.severe("Uploaded " + chk + " failed."); failedRecreated++; ongoingUploads.remove(identifier); @@ -849,6 +859,7 @@ public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { } uploadStarter.execute(new Runnable() { public void run() { + logger.fine("Ressurrecting " + filename); uploadCounter++; final String identifier = "Upload" + uploadCounter; ongoingUploads.put(identifier, new OngoingUpload(filename, callback)); @@ -1170,6 +1181,11 @@ public void doMove() { try { readAndProcessYamlData(inputStream, new UriProcessor() { + @Override + public FetchedPage getPage() { + return finalPage; + } + Set seen = new HashSet(); @Override public boolean processUri(String uri) { From b45428214c26953ef85962a07e3a4583605b542d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 17 Apr 2016 17:39:35 +0200 Subject: [PATCH 109/180] Fixes that make all the junit tests work. 
--- shared/src/freenet/library/io/FreenetURI.java | 53 +++++++++++++++---- .../freenet/library/index/TermEntryTest.java | 13 ++--- ...ForTest.java => FreenetURIForTesting.java} | 4 +- test/plugins/Library/index/TermEntryTest.java | 14 ++--- test/plugins/Library/util/Generators.java | 4 +- 5 files changed, 56 insertions(+), 32 deletions(-) rename test/freenet/library/io/{FreenetURIForTest.java => FreenetURIForTesting.java} (69%) diff --git a/shared/src/freenet/library/io/FreenetURI.java b/shared/src/freenet/library/io/FreenetURI.java index 72c4bd6d..02a0a596 100644 --- a/shared/src/freenet/library/io/FreenetURI.java +++ b/shared/src/freenet/library/io/FreenetURI.java @@ -1,30 +1,46 @@ +/* This code is part of Freenet. It is distributed under the GNU General + * Public License, version 2 (or at your option any later version). See + * http://www.gnu.org/ for further details of the GPL. */ package freenet.library.io; import java.io.DataInputStream; import java.io.DataOutputStream; +import java.io.IOException; import java.net.MalformedURLException; +/** + * This is a simpler implementation of the FreenetURI than {link freenet.keys.FreenetURI}. + * + * It has part of the interface in the same way but it is simpler and local to the Library. 
+ */ public class FreenetURI { + private String contents; public FreenetURI(String uri) throws MalformedURLException { - throw new RuntimeException("Not implemented yet."); + contents = uri; + if (!contents.startsWith("CHK@")) { + throw new MalformedURLException("Unhandled keytype"); + } + if (!contents.matches("^.*@.*,.*,AA.*$")) { + throw new MalformedURLException("Cannot find cryptoKey and routingKey structure"); + } } public static FreenetURI readFullBinaryKeyWithLength( - DataInputStream dis) { - throw new RuntimeException("Not implemented yet."); - // return null; + DataInputStream dis) throws IOException { + int len = dis.readShort(); + byte[] buf = new byte[len]; + dis.readFully(buf); + return new FreenetURI(new String(buf)); } public FreenetURI intern() { - throw new RuntimeException("Not implemented yet."); - // return null; + return this; } - public void writeFullBinaryKeyWithLength(DataOutputStream dos) { - // TODO Auto-generated method stub - throw new RuntimeException("Not implemented yet."); - // + public void writeFullBinaryKeyWithLength(DataOutputStream dos) throws IOException { + dos.writeShort(contents.length()); + dos.writeBytes(contents); } public boolean isUSK() @@ -94,4 +110,21 @@ public FreenetURI pushMetaString(String string) { // return null; } + @Override + public int hashCode() { + return this.getClass().hashCode() ^ contents.hashCode(); + } + + @Override + public String toString() { + return contents; + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof FreenetURI)) { + return false; + } + return contents.equals(((FreenetURI) o).toString()); + } } diff --git a/shared/test/freenet/library/index/TermEntryTest.java b/shared/test/freenet/library/index/TermEntryTest.java index d584579f..eb3a2ed9 100644 --- a/shared/test/freenet/library/index/TermEntryTest.java +++ b/shared/test/freenet/library/index/TermEntryTest.java @@ -39,7 +39,7 @@ public class TermEntryTest extends TestCase { z = new TermPageEntry("lol", 0.8f, new 
FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), null); v = new TermPageEntry("lol", 0.8f, new FreenetURI("CHK@9eDo5QWLQcgSuDh1meTm96R4oE7zpoMBuV15jLiZTps,3HJaHbdW~-MtC6YsSkKn6I0DTG9Z1gKDGgtENhHx82I,AAIC--8"), "title", null); } catch (MalformedURLException e) { - throw new AssertionError(); + throw new AssertionError(e); } } final static TermTermEntry y = new TermTermEntry("test", 0.8f, "lol2"); @@ -62,11 +62,8 @@ public void testBasic() throws TaskAbortException { ym.push(new PushTask>(map)); PullTask> pt = new PullTask>(""); - try{ + ym.pull(pt); - } catch (Exception e) { - e.printStackTrace(); - } assertTrue(pt.data instanceof Map); Map m = pt.data; @@ -109,9 +106,9 @@ public void testBinaryReadWrite() throws IOException, TaskAbortException { } public static void assertEqualButNotIdentical(Object a, Object b) { - assertTrue(a != b); - assertTrue(a.equals(b)); - assertTrue(a.hashCode() == b.hashCode()); + assertTrue(a + " and " + b + " are identical.", a != b); + assertTrue(a + " and " + b + " not equal.", a.equals(b)); + assertTrue(a + " and " + b + " not same hashCode.", a.hashCode() == b.hashCode()); } } diff --git a/test/freenet/library/io/FreenetURIForTest.java b/test/freenet/library/io/FreenetURIForTesting.java similarity index 69% rename from test/freenet/library/io/FreenetURIForTest.java rename to test/freenet/library/io/FreenetURIForTesting.java index f459dc25..248cee28 100644 --- a/test/freenet/library/io/FreenetURIForTest.java +++ b/test/freenet/library/io/FreenetURIForTesting.java @@ -3,9 +3,9 @@ import java.net.MalformedURLException; import java.util.Random; -public class FreenetURIForTest extends FreenetURI { +public class FreenetURIForTesting extends FreenetURI { - public FreenetURIForTest(String uri) throws MalformedURLException { + public FreenetURIForTesting(String uri) throws MalformedURLException { super(uri); throw new RuntimeException("Cannot create for test."); } diff --git 
a/test/plugins/Library/index/TermEntryTest.java b/test/plugins/Library/index/TermEntryTest.java index eeaf94dc..f46d5e3a 100644 --- a/test/plugins/Library/index/TermEntryTest.java +++ b/test/plugins/Library/index/TermEntryTest.java @@ -6,7 +6,7 @@ import junit.framework.TestCase; -import freenet.keys.FreenetURI; +import freenet.library.io.FreenetURI; import freenet.library.index.TermEntry; import freenet.library.index.TermEntryReaderWriter; import freenet.library.index.TermIndexEntry; @@ -18,13 +18,10 @@ import freenet.library.io.serial.Serialiser.*; import freenet.library.util.exec.TaskAbortException; -import java.util.Arrays; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; -import java.util.Iterator; -import java.util.UUID; import java.net.MalformedURLException; import java.io.*; @@ -48,7 +45,7 @@ public class TermEntryTest extends TestCase { } final static TermTermEntry y = new TermTermEntry("test", 0.8f, "lol2"); - public void testBasic() throws TaskAbortException { + public void testBasic() throws TaskAbortException, MalformedURLException { File f = new File("TermEntryTest"); f.mkdir(); FileArchiver> ym = new FileArchiver>(new YamlReaderWriter(), "test", null, ".yml", f); @@ -62,11 +59,8 @@ public void testBasic() throws TaskAbortException { l.add(y); l.add(z); map.put("test", l); - try { - map.put("test2", new Packer.BinInfo(new FreenetURI("http://127.0.0.1:8888/CHK@WtWIvOZXLVZkmDrY5929RxOZ-woRpRoMgE8rdZaQ0VU,rxH~D9VvOOuA7bCnVuzq~eux77i9RR3lsdwVHUgXoOY,AAIC--8/Library.jar"), 123)); - } catch (java.net.MalformedURLException e) { - assert(false); - } + + map.put("test2", new Packer.BinInfo(new FreenetURI("CHK@WtWIvOZXLVZkmDrY5929RxOZ-woRpRoMgE8rdZaQ0VU,rxH~D9VvOOuA7bCnVuzq~eux77i9RR3lsdwVHUgXoOY,AAIC--8/Library.jar"), 123)); ym.push(new PushTask>(map)); PullTask> pt = new PullTask>(""); diff --git a/test/plugins/Library/util/Generators.java b/test/plugins/Library/util/Generators.java index 7e9ab3e3..b19743d4 
100644 --- a/test/plugins/Library/util/Generators.java +++ b/test/plugins/Library/util/Generators.java @@ -8,7 +8,7 @@ import plugins.Library.index.*; import freenet.library.index.TermPageEntry; -import freenet.library.io.FreenetURIForTest; +import freenet.library.io.FreenetURIForTesting; import java.util.UUID; import java.util.Random; @@ -38,7 +38,7 @@ public static String rndKey() { } public static TermPageEntry rndEntry(String key) { - return new TermPageEntry(key, (float)Math.random(), FreenetURIForTest.generateRandomCHK(rand), null); + return new TermPageEntry(key, (float)Math.random(), FreenetURIForTesting.generateRandomCHK(rand), null); } } From 2a181c191bbc420e9afbb78cc925deca476ca3a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 17 Apr 2016 19:18:39 +0200 Subject: [PATCH 110/180] Fixed the file reading and disk index creation. --- shared/src/freenet/library/io/FreenetURI.java | 124 ++++++++++++++++-- .../uploader/TermEntryReaderIterator.java | 5 + 2 files changed, 118 insertions(+), 11 deletions(-) diff --git a/shared/src/freenet/library/io/FreenetURI.java b/shared/src/freenet/library/io/FreenetURI.java index 02a0a596..06bca54f 100644 --- a/shared/src/freenet/library/io/FreenetURI.java +++ b/shared/src/freenet/library/io/FreenetURI.java @@ -3,10 +3,14 @@ * http://www.gnu.org/ for further details of the GPL. */ package freenet.library.io; +import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.net.MalformedURLException; +import java.net.URLEncoder; + +import freenet.copied.Base64; /** * This is a simpler implementation of the FreenetURI than {link freenet.keys.FreenetURI}. 
@@ -18,29 +22,127 @@ public class FreenetURI { public FreenetURI(String uri) throws MalformedURLException { contents = uri; - if (!contents.startsWith("CHK@")) { - throw new MalformedURLException("Unhandled keytype"); + if (contents.matches("^[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]-" + + "[0-9a-f][0-9a-f][0-9a-f][0-9a-f]-" + + "[0-9a-f][0-9a-f][0-9a-f][0-9a-f]-" + + "[0-9a-f][0-9a-f][0-9a-f][0-9a-f]-" + + "[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]$")) { + return; + } + if (!contents.startsWith("CHK@") && + !contents.startsWith("SSK@") && + !contents.startsWith("KSK@") && + !contents.startsWith("USK@") + ) { + throw new MalformedURLException("Unhandled keytype: " + uri); } - if (!contents.matches("^.*@.*,.*,AA.*$")) { - throw new MalformedURLException("Cannot find cryptoKey and routingKey structure"); + if (!contents.matches("^.*@(.*,.*,A.*|)$")) { + throw new MalformedURLException("Cannot find cryptoKey and routingKey structure: " + uri); } } + static final byte CHK = 1; + static final byte SSK = 2; + static final byte KSK = 3; + static final byte USK = 4; + static final short ClientCHK_EXTRA_LENGTH = 5; + static final short ClientSSK_EXTRA_LENGTH = 5; + + /** + * This method can read the traditional BinaryKey-coded data, from Spider + * while mostly reading the simpler UTF-string key encoded with length 0. + * + * @param dis + * @return a FreenetURI with the read key. + * @throws IOException + */ public static FreenetURI readFullBinaryKeyWithLength( - DataInputStream dis) throws IOException { - int len = dis.readShort(); - byte[] buf = new byte[len]; - dis.readFully(buf); - return new FreenetURI(new String(buf)); + DataInputStream dis1) throws IOException { + int len = dis1.readShort(); + if (len != 0) { + /** + * This is to be able to read the data created by Spider. 
+ */ + byte[] buf = new byte[len]; + dis1.readFully(buf); + ByteArrayInputStream bais = new ByteArrayInputStream(buf); + DataInputStream dis = new DataInputStream(bais); + byte type = dis.readByte(); + String keyType; + if(type == CHK) + keyType = "CHK"; + else if(type == SSK) + keyType = "SSK"; + else if(type == KSK) + keyType = "KSK"; + else + throw new IOException("Unrecognized FreenetURI type " + type); + byte[] routingKey = null; + byte[] cryptoKey = null; + byte[] extra = null; + if((type == CHK) || (type == SSK)) { + // routingKey is a hash, so is exactly 32 bytes + routingKey = new byte[32]; + dis.readFully(routingKey); + // cryptoKey is a 256 bit AES key, so likewise + cryptoKey = new byte[32]; + dis.readFully(cryptoKey); + // Number of bytes of extra depends on key type + int extraLen; + extraLen = (type == CHK ? ClientCHK_EXTRA_LENGTH : ClientSSK_EXTRA_LENGTH); + extra = new byte[extraLen]; + dis.readFully(extra); + } + + String docName = null; + if(type != CHK) + docName = dis.readUTF(); + int count = dis.readInt(); + String[] metaStrings = new String[count]; + for(int i = 0; i < metaStrings.length; i++) + metaStrings[i] = dis.readUTF(); + + StringBuilder b = new StringBuilder(); + + b.append(keyType).append('@'); + if(!"KSK".equals(keyType)) { + if(routingKey != null) + b.append(Base64.encode(routingKey)); + if(cryptoKey != null) + b.append(',').append(Base64.encode(cryptoKey)); + if(extra != null) + b.append(',').append(Base64.encode(extra)); + if(docName != null) + b.append('/'); + } + + if(docName != null) + b.append(URLEncoder.encode(docName, "UTF-8")); + + for(int i = 0; i < metaStrings.length; i++) { + b.append('/').append(URLEncoder.encode(metaStrings[i], "UTF-8")); + } + + return new FreenetURI(b.toString()); + } + + return new FreenetURI(dis1.readUTF()); } public FreenetURI intern() { return this; } + /** + * This is not the real thing. Coded with the length 0, we use a UTF string + * with the key instead. 
+ * + * @param dos + * @throws IOException + */ public void writeFullBinaryKeyWithLength(DataOutputStream dos) throws IOException { - dos.writeShort(contents.length()); - dos.writeBytes(contents); + dos.writeShort(0); + dos.writeUTF(contents); } public boolean isUSK() diff --git a/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java b/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java index 9ddd894f..c9bd2c6e 100644 --- a/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java +++ b/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java @@ -1,6 +1,7 @@ package freenet.library.uploader; import java.io.DataInputStream; +import java.io.EOFException; import java.io.IOException; import java.util.Collections; import java.util.HashMap; @@ -60,7 +61,11 @@ public TermEntry next() { } try { return TermEntryReaderWriter.getInstance().readObject(is); + } catch (EOFException e) { + return null; } catch (IOException e) { + System.out.println("Cannot understand read file:"); + e.printStackTrace(); return null; } } From 77313237eb338c18bc885fadf043f67833350fd9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 2 Jul 2017 10:15:46 +0200 Subject: [PATCH 111/180] Next step in the work to restore the FreenetURI. 
--- .../src/freenet/library/index/ProtoIndex.java | 8 ++-- .../index/ProtoIndexComponentSerialiser.java | 31 ++++++------- .../library/index/ProtoIndexSerialiser.java | 28 ++++-------- .../library/index/TermEntryReaderWriter.java | 2 +- .../freenet/library/index/TermPageEntry.java | 31 +++++++++++-- .../src/freenet/library/index/URIEntry.java | 10 +++-- src/plugins/Library/search/ResultSet.java | 8 ++-- .../Library/ui/ResultNodeGenerator.java | 4 +- .../library/uploader/DirectoryUploader.java | 44 +++++++++++-------- .../freenet/library/uploader/FcpArchiver.java | 5 ++- 10 files changed, 98 insertions(+), 73 deletions(-) diff --git a/shared/src/freenet/library/index/ProtoIndex.java b/shared/src/freenet/library/index/ProtoIndex.java index a1e6a782..d35d8bdf 100644 --- a/shared/src/freenet/library/index/ProtoIndex.java +++ b/shared/src/freenet/library/index/ProtoIndex.java @@ -61,7 +61,7 @@ final public class ProtoIndex implements Index { /** ** Insert ID for this index */ - protected String insID; // TODO HIGH maybe move this to WriteableProtoIndex? + protected FreenetURI insID; // TODO HIGH maybe move this to WriteableProtoIndex? /** ** Name for this index. 
@@ -91,19 +91,19 @@ final public class ProtoIndex implements Index { final public /* DEBUG protected*/ SkeletonBTreeMap> ttab; - final protected SkeletonBTreeMap> utab; + final protected SkeletonBTreeMap> utab; public ProtoIndex(FreenetURI id, String n, String owner, String ownerEmail, long pages) { this(id, n, owner, ownerEmail, pages, new Date(), new HashMap(), - new SkeletonBTreeMap>(BTREE_NODE_MIN), + new SkeletonBTreeMap>(BTREE_NODE_MIN), new SkeletonBTreeMap>(BTREE_NODE_MIN)/*, //filtab = new SkeletonPrefixTreeMap(new Token(), TKTAB_MAX)*/ ); } protected ProtoIndex(FreenetURI id, String n, String owner, String ownerEmail, long pages, Date m, Map x, - SkeletonBTreeMap> u, + SkeletonBTreeMap> u, SkeletonBTreeMap> t/*, SkeletonMap f*/ ) { diff --git a/shared/src/freenet/library/index/ProtoIndexComponentSerialiser.java b/shared/src/freenet/library/index/ProtoIndexComponentSerialiser.java index 7eeaddbe..7900e999 100644 --- a/shared/src/freenet/library/index/ProtoIndexComponentSerialiser.java +++ b/shared/src/freenet/library/index/ProtoIndexComponentSerialiser.java @@ -31,6 +31,7 @@ import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; import freenet.library.util.exec.TaskInProgressException; +import freenet.library.io.FreenetURI; /** ** Serialiser for the components of a ProtoIndex. @@ -79,8 +80,8 @@ public class ProtoIndexComponentSerialiser { /** ** Translator for the local entries of a node of the ''uri table''. */ - final protected static Translator>, Map> - utab_keys_mtr = new TreeMapTranslator>(utab_keys_ktr); + final protected static Translator>, Map> + utab_keys_mtr = new TreeMapTranslator>(utab_keys_ktr); /** ** Serialiser for the ''targets'' of the values stored in a node of the @@ -94,7 +95,7 @@ public class ProtoIndexComponentSerialiser { ** ''B-tree'' for a ''urikey''. In this case, the values are the actual ** targets and are stored inside the node, so we use a dummy. 
*/ - final protected static MapSerialiser uri_dummy = new DummySerialiser(); + final protected static MapSerialiser uri_dummy = new DummySerialiser(); /** ** {@link Scale} for the root node of the ''B-tree'' that holds @@ -111,9 +112,9 @@ public class ProtoIndexComponentSerialiser { ** {@link Scale} for the root node of the ''B-tree'' that holds ** ''uri-entry mappings'' for a ''urikey''. */ - final protected static Packer.Scale> - uri_data_scale = new Packer.Scale>() { - @Override public int weigh(SkeletonBTreeMap element) { + final protected static Packer.Scale> + uri_data_scale = new Packer.Scale>() { + @Override public int weigh(SkeletonBTreeMap element) { return element.sizeRoot(); } }; @@ -176,7 +177,7 @@ public LiveArchiver, SimpleProgress> getLeafSerialiser() { ** ''B-tree'' that holds ''uri-entry mappings'' for the ''urikey'' mapping ** to the value. */ - final protected BTreePacker, EntryGroupSerialiser>> + final protected BTreePacker, EntryGroupSerialiser>> utab_data; /** @@ -223,12 +224,12 @@ protected ProtoIndexComponentSerialiser(int fmtid, LiveArchiver, EntryGroupSerialiser>>( - new EntryGroupSerialiser>( + utab_data = new BTreePacker, EntryGroupSerialiser>>( + new EntryGroupSerialiser>( leaf_arx, null, - new SkeletonBTreeMap.TreeTranslator(null, null) { - @Override public SkeletonBTreeMap rev(Map tree) throws DataFormatException { + new SkeletonBTreeMap.TreeTranslator(null, null) { + @Override public SkeletonBTreeMap rev(Map tree) throws DataFormatException { return setSerialiserFor(super.rev(tree)); } } @@ -252,7 +253,7 @@ public ProtoIndex setSerialiserFor(ProtoIndex index) { index.ttab.setSerialiser(ttab_keys, ttab_data); // set serialisers on the utab - BTreeNodeSerialiser> utab_keys = new BTreeNodeSerialiser>( + BTreeNodeSerialiser> utab_keys = new BTreeNodeSerialiser>( "uri listings", leaf_arx, index.utab.makeNodeTranslator(utab_keys_ktr, utab_keys_mtr) @@ -268,11 +269,11 @@ public ProtoIndex setSerialiserFor(ProtoIndex index) { ** Set the 
serialiser for the ''B-tree'' that holds the ''uri-entry ** mappings'' for a ''urikey''. */ - public SkeletonBTreeMap setSerialiserFor(SkeletonBTreeMap entries) { - BTreeNodeSerialiser uri_keys = new BTreeNodeSerialiser( + public SkeletonBTreeMap setSerialiserFor(SkeletonBTreeMap entries) { + BTreeNodeSerialiser uri_keys = new BTreeNodeSerialiser( "uri entries", leaf_arx, - entries.makeNodeTranslator(null, null) // no translator needed as String and URIEntry are both directly serialisable by YamlReaderWriter + entries.makeNodeTranslator(null, null) // no translator needed as FreenetURI and URIEntry are both directly serialisable by YamlReaderWriter ); entries.setSerialiser(uri_keys, uri_dummy); return entries; diff --git a/shared/src/freenet/library/index/ProtoIndexSerialiser.java b/shared/src/freenet/library/index/ProtoIndexSerialiser.java index 8ef8c9d4..54a0be40 100644 --- a/shared/src/freenet/library/index/ProtoIndexSerialiser.java +++ b/shared/src/freenet/library/index/ProtoIndexSerialiser.java @@ -60,8 +60,8 @@ public ProtoIndexSerialiser(LiveArchiver, SimpleProgress> s) // srl_cls = new HashMap, ProtoIndexSerialiser>(); public static ProtoIndexSerialiser forIndex(Object o, Priority priorityLevel) { - if (o instanceof String) { - return forIndex((String)o, priorityLevel); + if (o instanceof FreenetURI) { + return forIndex((FreenetURI)o, priorityLevel); } else if (o instanceof File) { return forIndex((File)o); } else { @@ -69,7 +69,7 @@ public static ProtoIndexSerialiser forIndex(Object o, Priority priorityLevel) { } } - public static ProtoIndexSerialiser forIndex(String uri, Priority priorityLevel) { + public static ProtoIndexSerialiser forIndex(FreenetURI uri, Priority priorityLevel) { // ProtoIndexSerialiser srl = srl_cls.get(FreenetURI.class); // if (srl == null) { // // java's type-inference isn't that smart, see @@ -107,7 +107,7 @@ public static ProtoIndexSerialiser forIndex(File prefix) { PullTask> serialisable = new PullTask>(task.meta); 
subsrl.pull(serialisable); task.meta = serialisable.meta; - if (task.meta instanceof String) { // if not FreenetURI, skip this silently so we can test on local files + if (task.meta instanceof FreenetURI) { // if not FreenetURI, skip this silently so we can test on local files serialisable.data.put("reqID", task.meta); } try { @@ -137,9 +137,9 @@ public static class IndexTranslator /** ** URI-table translator */ - Translator>, Map> utrans = new - SkeletonBTreeMap.TreeTranslator>(null, new - ProtoIndexComponentSerialiser.TreeMapTranslator>(null)); + Translator>, Map> utrans = new + SkeletonBTreeMap.TreeTranslator>(null, new + ProtoIndexComponentSerialiser.TreeMapTranslator>(null)); private LiveArchiver, SimpleProgress> subsrl; @@ -185,17 +185,7 @@ public IndexTranslator(LiveArchiver, SimpleProgress> subsrl) try { // FIXME yet more hacks related to the lack of proper asynchronous FreenetArchiver... ProtoIndexComponentSerialiser cmpsrl = ProtoIndexComponentSerialiser.get((Integer)map.get("serialFormatUID"), subsrl); - Object reqIDObject = map.get("reqID"); - FreenetURI reqID; - if (reqIDObject instanceof FreenetURI) { - reqID = (FreenetURI) reqIDObject; - } else { - try { - reqID = new FreenetURI((String) reqIDObject); - } catch (MalformedURLException e) { - throw new DataFormatException("Badly formatted URI", e, null); - } - } + FreenetURI reqID = (FreenetURI) map.get("reqID"); String name = (String)map.get("name"); String ownerName = (String)map.get("ownerName"); String ownerEmail = (String)map.get("ownerEmail"); @@ -208,7 +198,7 @@ public IndexTranslator(LiveArchiver, SimpleProgress> subsrl) totalPages = (Integer)o; Date modified = (Date)map.get("modified"); Map extra = (Map)map.get("extra"); - SkeletonBTreeMap> utab = utrans.rev((Map)map.get("utab")); + SkeletonBTreeMap> utab = utrans.rev((Map)map.get("utab")); SkeletonBTreeMap> ttab = ttrans.rev((Map)map.get("ttab")); return cmpsrl.setSerialiserFor(new ProtoIndex(reqID, name, ownerName, ownerEmail, totalPages, 
modified, extra, utab, ttab)); diff --git a/shared/src/freenet/library/index/TermEntryReaderWriter.java b/shared/src/freenet/library/index/TermEntryReaderWriter.java index cf01b736..267b7b1e 100644 --- a/shared/src/freenet/library/index/TermEntryReaderWriter.java +++ b/shared/src/freenet/library/index/TermEntryReaderWriter.java @@ -201,7 +201,7 @@ public void writeObject(TermEntry en, DataOutputStream dos) throws IOException { return; case PAGE: TermPageEntry enn = (TermPageEntry)en; - enn.page.writeFullBinaryKeyWithLength(dos); + enn.getPage().writeFullBinaryKeyWithLength(dos); int size = enn.hasPositions() ? enn.positionsSize() : 0; if(enn.title == null) dos.writeInt(size); diff --git a/shared/src/freenet/library/index/TermPageEntry.java b/shared/src/freenet/library/index/TermPageEntry.java index 98befe87..f2ab9886 100644 --- a/shared/src/freenet/library/index/TermPageEntry.java +++ b/shared/src/freenet/library/index/TermPageEntry.java @@ -3,6 +3,7 @@ * http://www.gnu.org/ for further details of the GPL. */ package freenet.library.index; +import java.net.MalformedURLException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -23,9 +24,23 @@ public class TermPageEntry extends TermEntry { /** - ** URI of the target + ** URI of the target. + * + * This should be a FreenetURI but there are cases where this is a String. + * Lets, be flexible here but attempt to convert them all. */ - final public FreenetURI page; + public Object page; + + public FreenetURI getPage() { + if (page instanceof String) { + try { + page = new FreenetURI((String) page); + } catch (MalformedURLException e) { + throw new RuntimeException(e); + } + } + return (FreenetURI) page; + } /** * Positions where the term occurs. May be null if we don't have that data. 
@@ -90,12 +105,20 @@ public TermPageEntry(String s, float r, FreenetURI u, String t, Map pos, Map frags) { + public TermPageEntry(String s, float r, Object u, String t, Set pos, Map frags) { super(s, r); if (u == null) { throw new IllegalArgumentException("can't have a null page"); } - page = u.intern(); // OPT LOW make the translator use the same URI object as from the URI table? + if (u instanceof String) { + try { + page = new FreenetURI((String) u); + } catch (MalformedURLException e) { + throw new RuntimeException(e); + } + } else { + page = ((FreenetURI) u).intern(); // OPT LOW make the translator use the same URI object as from the URI table? + } title = t; if(pos != null) { positions = new TreeSet(pos); diff --git a/shared/src/freenet/library/index/URIEntry.java b/shared/src/freenet/library/index/URIEntry.java index abd87f07..73162a31 100644 --- a/shared/src/freenet/library/index/URIEntry.java +++ b/shared/src/freenet/library/index/URIEntry.java @@ -7,6 +7,8 @@ import java.util.Set; import java.util.HashSet; +import freenet.library.io.FreenetURI; + /** ** Data associated with a FreenetURI. DOCUMENT expand this... ** @@ -19,7 +21,7 @@ public class URIEntry { /** ** Subject URI of this entry. */ - protected String subject; + protected FreenetURI subject; /** ** Quality rating. Must be in the closed interval [0,1]. @@ -37,17 +39,17 @@ public class URIEntry { */ protected Set terms; - public URIEntry(String u) { + public URIEntry(FreenetURI u) { subject = u; date_checked = new Date(); terms = new HashSet(); } - public String getSubject() { + public FreenetURI getSubject() { return subject; } - public void setSubject(String u) { + public void setSubject(FreenetURI u) { subject = u; } diff --git a/src/plugins/Library/search/ResultSet.java b/src/plugins/Library/search/ResultSet.java index ddd0a176..9e82950b 100644 --- a/src/plugins/Library/search/ResultSet.java +++ b/src/plugins/Library/search/ResultSet.java @@ -330,7 +330,7 @@ private void phrase(Collection... 
collections) { } // if this termentry has any positions remaining, add it if(positions != null && positions.size() > 0) - addInternal(new TermPageEntry(subject, termPageEntry.rel, termPageEntry.page, termPageEntry.title, positions)); + addInternal(new TermPageEntry(subject, termPageEntry.rel, termPageEntry.getPage(), termPageEntry.title, positions)); } } @@ -343,7 +343,7 @@ private TermEntry convertEntry(TermEntry termEntry, float rel) { if (termEntry instanceof TermTermEntry) entry = new TermTermEntry(subject, rel, ((TermTermEntry)termEntry).term ); else if (termEntry instanceof TermPageEntry) - entry = new TermPageEntry(subject, rel, ((TermPageEntry)termEntry).page, ((TermPageEntry)termEntry).title, ((TermPageEntry)termEntry).positionsMap() ); + entry = new TermPageEntry(subject, rel, ((TermPageEntry)termEntry).getPage(), ((TermPageEntry)termEntry).title, ((TermPageEntry)termEntry).positionsMap() ); else if (termEntry instanceof TermIndexEntry) entry = new TermIndexEntry(subject, rel, ((TermIndexEntry)termEntry).index ); else @@ -373,7 +373,7 @@ private TermEntry mergeEntries(TermEntry... 
entries) { if(combination instanceof TermIndexEntry){ combination = new TermIndexEntry(subject, entries[0].rel, ((TermIndexEntry)combination).index); } else if(combination instanceof TermPageEntry){ - combination = new TermPageEntry(subject, entries[0].rel, ((TermPageEntry)combination).page, ((TermPageEntry)combination).title, ((TermPageEntry)combination).positionsMap()); + combination = new TermPageEntry(subject, entries[0].rel, ((TermPageEntry)combination).getPage(), ((TermPageEntry)combination).title, ((TermPageEntry)combination).positionsMap()); } else if(combination instanceof TermTermEntry){ combination = new TermTermEntry(subject, entries[0].rel, ((TermTermEntry)combination).term); } else @@ -405,7 +405,7 @@ else if(pageentry1.hasPositions()){ if(pageentry2.hasPositions()) newPos.putAll(pageentry2.positionsMap()); } - return new TermPageEntry(pageentry1.subj, newRel, pageentry1.page, (pageentry1.title!=null)?pageentry1.title:pageentry2.title, newPos); + return new TermPageEntry(pageentry1.subj, newRel, pageentry1.getPage(), (pageentry1.title!=null)?pageentry1.title:pageentry2.title, newPos); } else if(entry1 instanceof TermIndexEntry){ TermIndexEntry castEntry = (TermIndexEntry) entry1; diff --git a/src/plugins/Library/ui/ResultNodeGenerator.java b/src/plugins/Library/ui/ResultNodeGenerator.java index 64cbfdfa..8a358ed8 100644 --- a/src/plugins/Library/ui/ResultNodeGenerator.java +++ b/src/plugins/Library/ui/ResultNodeGenerator.java @@ -109,7 +109,7 @@ private void parseResult(){ long uskEdition = Long.MIN_VALUE; // Get the key and name FreenetURI uri; - uri = pageEntry.page; + uri = pageEntry.getPage(); // convert usk's if(uri.isSSKForUSK()){ uri = uri.uskForSSK(); @@ -240,7 +240,7 @@ private void generatePageEntryNode(){ * @param newestVersion if set, the result is shown in full brightness, if unset the result is greyed out */ private HTMLNode termPageEntryNode(TermPageEntry entry,boolean newestVersion) { - FreenetURI uri = entry.page; + FreenetURI uri = 
entry.getPage(); String showtitle = entry.title; String showurl = uri.toString(); if (showtitle == null || showtitle.trim().length() == 0) { diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index 2b660ede..945ea04e 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -55,7 +55,7 @@ public void run() { mergeToFreenet(directory); } - private String lastUploadURI; + private FreenetURI lastUploadURI; private boolean uskUploadDone; static final int MAX_HANDLING_COUNT = 5; @@ -243,7 +243,11 @@ private static boolean removeAll(File wd) { */ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { if (lastUploadURI == null) { - lastUploadURI = readStringFrom(new File(LAST_URL_FILENAME)); + try { + lastUploadURI = new FreenetURI(readStringFrom(new File(LAST_URL_FILENAME))); + } catch (MalformedURLException e) { + throw new RuntimeException("File contents of " + LAST_URL_FILENAME + " invalid.", e); + } } setupFreenetCacheDir(); @@ -296,9 +300,16 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { long mergeEndTime = System.currentTimeMillis(); System.out.println(entriesAdded + " entries merged in " + (mergeEndTime-mergeStartTime) + " ms, root at " + task4.meta); - String uri = (String) task4.meta; + FreenetURI uri; + if (task4.meta instanceof FreenetURI) { + uri = (FreenetURI) task4.meta; + } else if (task4.meta instanceof String){ + uri = new FreenetURI((String) task4.meta); + } else { + throw new RuntimeException("Unknown uri " + task4.meta); + } lastUploadURI = uri; - if(writeStringTo(new File(LAST_URL_FILENAME), uri)) { + if(writeStringTo(new File(LAST_URL_FILENAME), uri.toString())) { newtrees.deflate(); diskToMerge = null; terms = null; @@ -310,9 +321,10 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { uploadUSKForFreenetIndex(uri); } 
catch (TaskAbortException e) { - System.err.println("Failed to upload index for spider: "+e); - e.printStackTrace(); - } + throw new RuntimeException(e); + } catch (MalformedURLException e) { + throw new RuntimeException(e); + } } static String readFileLine(final String filename) { @@ -344,8 +356,7 @@ static String readFileLine(final String filename) { br.close(); } } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); + throw new RuntimeException(e); } } return line; @@ -366,33 +377,28 @@ protected void writeFileLine(String filename, String string) { bw = new BufferedWriter(new OutputStreamWriter(fos, "UTF-8")); bw.write(string); } catch (UnsupportedEncodingException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - throw new RuntimeException(); + throw new RuntimeException(e); } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - throw new RuntimeException(); + throw new RuntimeException(e); } finally { try { if (bw != null) { bw.close(); } } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); + throw new RuntimeException(e); } } } - private void uploadUSKForFreenetIndex(String uri) { + private void uploadUSKForFreenetIndex(FreenetURI uri) { String insertURI = readFileLine(PRIV_URI_FILENAME); String keyPart = insertURI.substring("freenet:SSK@".length()); int lastEdition = Integer.parseInt(readFileLine(EDITION_FILENAME)); final ClientPut usk = new ClientPut("USK@" + keyPart + "/" + (lastEdition + 1), "USKupload", UploadFrom.redirect); - usk.setTargetURI(uri); + usk.setTargetURI(uri.toString()); uskUploadDone = false; FcpAdapter fcpListener = new FcpAdapter() { public void receivedPutFailed(FcpConnection fcpConnection, PutFailed result) { diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index c6e846c3..b1cea819 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java 
+++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -22,6 +22,7 @@ import freenet.copied.Base64; import freenet.copied.SHA256; import freenet.library.Priority; +import freenet.library.io.FreenetURI; import freenet.library.io.ObjectStreamReader; import freenet.library.io.ObjectStreamWriter; import freenet.library.io.serial.LiveArchiver; @@ -90,7 +91,9 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, SimpleProgress progress) throws TaskAbortException { if (cacheDir.exists()) { String cacheKey = null; - if (task.meta instanceof String) { + if (task.meta instanceof FreenetURI) { + cacheKey = task.meta.toString(); + } else if (task.meta instanceof String) { cacheKey = (String) task.meta; } else if (task.meta instanceof byte[]) { cacheKey = Base64.encode(SHA256.digest((byte[]) task.meta)); From 448a71464db432af8b47149b20db249a4cfb5e94 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Tue, 11 Jul 2017 17:23:08 +0200 Subject: [PATCH 112/180] Fixes to allow moving/linking using DownloadAll.java. 
--- shared/src/freenet/library/io/FreenetURI.java | 28 ++- .../freenet/library/uploader/DownloadAll.java | 226 +++++++++++------- 2 files changed, 167 insertions(+), 87 deletions(-) diff --git a/shared/src/freenet/library/io/FreenetURI.java b/shared/src/freenet/library/io/FreenetURI.java index 06bca54f..d0b1b293 100644 --- a/shared/src/freenet/library/io/FreenetURI.java +++ b/shared/src/freenet/library/io/FreenetURI.java @@ -9,6 +9,8 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URLEncoder; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import freenet.copied.Base64; @@ -36,6 +38,10 @@ public FreenetURI(String uri) throws MalformedURLException { ) { throw new MalformedURLException("Unhandled keytype: " + uri); } + if (contents.startsWith("KSK@")) { + return; + } + if (!contents.matches("^.*@(.*,.*,A.*|)$")) { throw new MalformedURLException("Cannot find cryptoKey and routingKey structure: " + uri); } @@ -146,10 +152,8 @@ public void writeFullBinaryKeyWithLength(DataOutputStream dos) throws IOExceptio } public boolean isUSK() - throws MalformedURLException { - // TODO Auto-generated method stub - throw new RuntimeException("Not implemented yet."); - // return false; + throws MalformedURLException { + return contents.startsWith("USK@"); } public FreenetURI sskForUSK() { @@ -170,9 +174,21 @@ public FreenetURI uskForSSK() { // return null; } + private static final Pattern FIND_EDITION_PATTERN = Pattern.compile("[^/]*/[^/]*/([---0-9]*)(/.*)?$"); public long getEdition() { - // TODO Auto-generated method stub - throw new RuntimeException("Not implemented yet."); + try { + if (isUSK()) { + Matcher m = FIND_EDITION_PATTERN.matcher(contents); + if (m.matches()) { + return Long.parseLong(m.group(1)); + } else { + throw new RuntimeException("Edition not found in " + contents + "."); + } + } + } catch (MalformedURLException e) { + throw new RuntimeException("Malformed key " + contents + "."); + } + throw new 
RuntimeException("Not an USK."); // return 0; } diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 50d733da..c83e18e9 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -15,6 +15,7 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; +import java.net.MalformedURLException; import java.nio.file.Files; import java.nio.file.Paths; import java.text.MessageFormat; @@ -53,6 +54,7 @@ import net.pterodactylus.fcp.SubscribedUSKUpdate; import net.pterodactylus.fcp.URIGenerated; import net.pterodactylus.fcp.Verbosity; +import freenet.library.io.FreenetURI; import freenet.library.io.YamlReaderWriter; import freenet.library.io.serial.Packer; import freenet.library.io.serial.Packer.BinInfo; @@ -67,8 +69,8 @@ public class DownloadAll { private static final Logger logger = Logger.getLogger(DownloadAll.class.getName()); public final Map stillRunning = new HashMap(); - private String uri; - private String newUri; + private FreenetURI uri; + private FreenetURI newUri; private int edition; private FcpConnection connection; private static int getterCounter = 0; @@ -94,7 +96,7 @@ public class DownloadAll { private Random rand = new Random(); private Date started = new Date(); - public DownloadAll(String u) { + public DownloadAll(FreenetURI u) { uri = u; } @@ -216,16 +218,16 @@ private static class FetchedPage { private Set parents = Collections.synchronizedSet(new WeakHashSet()); private Set children = Collections.synchronizedSet(new HashSet()); - private String uri; + private FreenetURI uri; int level; private boolean succeeded; private boolean failed; - FetchedPage(String u) { + FetchedPage(FreenetURI u) { this(u, 0); } - FetchedPage(String u, int l) { + FetchedPage(FreenetURI u, int l) { uri = u; level = l; } @@ -238,14 +240,14 @@ void addChild(FetchedPage fp) { children.add(fp); } - 
FetchedPage newChild(String u) { + FetchedPage newChild(FreenetURI u) { FetchedPage child = new FetchedPage(u, level + 1); child.addParent(this); addChild(child); return child; } - String getURI() { + FreenetURI getURI() { return uri; } @@ -328,7 +330,7 @@ void didSucceed() { succeeded = true; } - public FetchedPage findUri(String u) { + public FetchedPage findUri(FreenetURI u) { if (u.equals(uri)) { return this; } @@ -357,7 +359,11 @@ public void receivedSubscribedUSKUpdate(FcpConnection fcpConnection, SubscribedU if (subscribedUSKUpdate.isNewKnownGood() && subscribedUSKUpdate.getEdition() > edition) { updated = true; - newUri = subscribedUSKUpdate.getURI(); + try { + newUri = new FreenetURI(subscribedUSKUpdate.getURI()); + } catch (MalformedURLException e) { + throw new RuntimeException(e); + } edition = subscribedUSKUpdate.getEdition(); synchronized (subscriber) { subscriber.notify(); @@ -424,11 +430,11 @@ private double getEstimatedPagesLeft(FetchedPage page) { private static class OngoingUpload { private final Date started = new Date(); - private final String filename; + private final FreenetURI freenetURI; private final Runnable callback; - public OngoingUpload(String fname, Runnable cback) { - filename = fname; + public OngoingUpload(FreenetURI fname, Runnable cback) { + freenetURI = fname; callback = cback; } @@ -436,8 +442,8 @@ Date getStarted() { return started; } - String getKey() { - return filename; + FreenetURI getKey() { + return freenetURI; } void complete() { @@ -490,12 +496,72 @@ void printLeft() { } } + /** + * Convert an object from the yaml to a FreenetURI. + * + * The object can be a FreenetURI already (new style) or a string. 
+ * + * @param obj + * @return a FreenetURI + * @throws MalformedURLException + */ + private static FreenetURI getFreenetURI(Object obj) throws MalformedURLException { + FreenetURI u; + if (obj instanceof FreenetURI) { + u = (FreenetURI) obj; + } else { + u = new FreenetURI((String) obj); + logger.finest("String URI found: " + (String) obj); + } + return u; + } + interface UriProcessor { public FetchedPage getPage(); - public boolean processUri(String uri); + public boolean processUri(FreenetURI uri); } - private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriProcessor, String page_uri, int page_level) + private int processBinInfoValues(Map entries, UriProcessor uriProcessor) + throws MalformedURLException { + int foundChildren = 0; + for (BinInfo value : entries.values()) { + try { + if (uriProcessor.processUri(getFreenetURI(value.getID()))) { + foundChildren ++; + } + + } catch (ClassCastException e) { + throw new RuntimeException("Cannot process BinInfo value " + value.getID() + " for " + uriProcessor.getPage().getURI(), e); + } + } + return foundChildren; + } + + private int processSubnodes(Map map, UriProcessor uriProcessor) + throws MalformedURLException { + int foundChildren = 0; + Map subnodes1 = + (Map) map.get("subnodes"); + Map subnodes; + if (subnodes1.keySet().iterator().next() instanceof FreenetURI) { + subnodes = (Map) map.get("subnodes"); + } else { + subnodes = new LinkedHashMap(); + for (Map.Entry entry : subnodes1.entrySet()) { + subnodes.put(new FreenetURI((String) entry.getKey()), entry.getValue()); + } + logger.finest("String URIs found in subnodes of: " + uriProcessor.getPage()); + } + + for (FreenetURI key : subnodes.keySet()) { + if (uriProcessor.processUri(key)) { + foundChildren ++; + } + } + return foundChildren; + } + + private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriProcessor, int page_level) throws IOException { int foundChildren = 0; try { @@ -508,30 +574,14 @@ private void 
readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro if (map2.containsKey("entries")) { Map entries = (Map) map2.get("entries"); - for (BinInfo value : entries.values()) { - try { - String u = (String) value.getID(); - if (uriProcessor.processUri(u)) { - foundChildren ++; - } - - } catch (ClassCastException e) { - throw new RuntimeException("Cannot process BinInfo value " + value.getID() + - " for " + uriProcessor.getPage().getURI(), - e); - } - } + foundChildren += processBinInfoValues(entries, uriProcessor); Map subnodes = (Map) map2.get("subnodes"); logger.log(Level.FINER, "Contains ttab.entries (level {0}) with {1} subnodes", new Object[] { uriProcessor.getPage().level, subnodes.size(), }); - for (String key : subnodes.keySet()) { - if (uriProcessor.processUri(key)) { - foundChildren ++; - } - } + foundChildren += processSubnodes(map2, uriProcessor); return; } } @@ -554,17 +604,7 @@ private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro map.get("rkey"), entries.size() }); - for (BinInfo value : entries.values()) { - try { - String u = (String) value.getID(); - if (uriProcessor.processUri(u)) { - foundChildren ++; - } - } catch (ClassCastException e) { - throw new RuntimeException("Cannot process BinInfo (2) " + value.getID() + - " for " + uriProcessor.getPage().getURI()); - } - } + foundChildren += processBinInfoValues(entries, uriProcessor); return; } if (map.get("entries") instanceof ArrayList) { @@ -606,12 +646,7 @@ private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro contents.getKey(), subnodes.size(), }); - - for (String key : subnodes.keySet()) { - if (uriProcessor.processUri(key)) { - foundChildren ++; - } - } + foundChildren += processSubnodes(map3, uriProcessor); } continue; } @@ -652,7 +687,7 @@ public GetAdapter(FetchedPage u) { getterCounter ++; token = "Getter" + getterCounter; waitingLaps = 0; - getter = new ClientGet(page.getURI(), token); + getter = new 
ClientGet(page.getURI().toString(), token); getter.setPriority(Priority.prefetch); getter.setVerbosity(Verbosity.ALL); @@ -692,7 +727,7 @@ public void hasBeenWaiting(FetchedPage key) { } - private boolean processAnUri(String uri) { + private boolean processAnUri(FreenetURI uri) { synchronized (roots) { for (FetchedPage root : roots) { FetchedPage foundChild = root.findUri(uri); @@ -731,13 +766,13 @@ public FetchedPage getPage() { } @Override - public boolean processUri(String uri) { + public boolean processUri(FreenetURI uri) { return processAnUri(uri); } }; final InputStream inputStream = ad.getPayloadInputStream(); try { - readAndProcessYamlData(inputStream, uriProcessor, page.getURI(), page.level); + readAndProcessYamlData(inputStream, uriProcessor, page.level); } catch (IOException e) { logger.log(Level.SEVERE, "Cannot unpack.", e); e.printStackTrace(); @@ -784,15 +819,15 @@ public void run() { * If we are running on a host where this CHK is actually cached, * lets upload it from the cache in an attempt to repair the index. * - * @param filename of the file to upload. + * @param freenetURI of the file to upload. * @param callback when the file is successfully uploaded. 
*/ - public boolean upload(final String filename, final Runnable callback) { + public boolean upload(final FreenetURI freenetURI, final Runnable callback) { final File dir = new File(".", UploaderPaths.LIBRARY_CACHE); if (!dir.canRead()) { return false; } - final File file = new File(dir, filename); + final File file = new File(dir, freenetURI.toString()); if (!file.canRead()) { logger.warning("Cannot find " + file + " in the cache."); return false; @@ -807,8 +842,18 @@ public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { assert c == connection; assert uriGenerated != null; String identifier = uriGenerated.getIdentifier(); - String chk = ongoingUploads.get(identifier).getKey(); - if (!uriGenerated.getURI().equals(chk)) { + FreenetURI chk = ongoingUploads.get(identifier).getKey(); + FreenetURI generatedURI; + try { + generatedURI = new FreenetURI(uriGenerated.getURI()); + } catch (MalformedURLException e) { + logger.severe("Were supposed to resurrect " + chk + + " but the URI calculated to " + uriGenerated.getURI() + + " that is not possible to convert to an URI. Will upload anyway."); + wrongChkCounterForUpload++; + return; + } + if (!generatedURI.equals(chk)) { logger.severe("Were supposed to resurrect " + chk + " but the URI calculated to " + uriGenerated.getURI() + ". " + "Will upload anyway."); @@ -824,14 +869,23 @@ public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) assert putSuccessful != null; String identifier = putSuccessful.getIdentifier(); final OngoingUpload foundUpload = ongoingUploads.get(identifier); - String chk = foundUpload.getKey(); - if (!putSuccessful.getURI().equals(chk)) { - logger.severe("Uploaded " + putSuccessful.getURI() + - " while supposed to upload " + chk + - ". 
"); - } else { - foundUpload.complete(); - } + FreenetURI chk = foundUpload.getKey(); + FreenetURI generatedURI = null; + try { + generatedURI = new FreenetURI(putSuccessful.getURI()); + } catch (MalformedURLException e) { + logger.severe("Uploaded " + putSuccessful.getURI() + + " that is not possible to convert to an URI."); + } + if (generatedURI != null) { + if (!generatedURI.equals(chk)) { + logger.severe("Uploaded " + putSuccessful.getURI() + + " while supposed to upload " + chk + + ". "); + } else { + foundUpload.complete(); + } + } ongoingUploads.remove(identifier); synchronized (stillRunning) { stillRunning.notifyAll(); @@ -844,7 +898,7 @@ public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { assert putFailed != null; String identifier = putFailed.getIdentifier(); final OngoingUpload foundUpload = ongoingUploads.get(identifier); - String chk = foundUpload.getKey(); + FreenetURI chk = foundUpload.getKey(); logger.severe("Uploaded " + chk + " failed."); failedRecreated++; ongoingUploads.remove(identifier); @@ -859,10 +913,10 @@ public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { } uploadStarter.execute(new Runnable() { public void run() { - logger.fine("Ressurrecting " + filename); + logger.fine("Ressurrecting " + freenetURI.toString()); uploadCounter++; final String identifier = "Upload" + uploadCounter; - ongoingUploads.put(identifier, new OngoingUpload(filename, callback)); + ongoingUploads.put(identifier, new OngoingUpload(freenetURI, callback)); final ClientPut putter = new ClientPut("CHK@", identifier); putter.setEarlyEncode(true); putter.setPriority(net.pterodactylus.fcp.Priority.bulkSplitfile); @@ -1102,7 +1156,7 @@ private void showProgress() { if (sb.length() > 0) { sb.append(", "); } - String edition = root.getURI().substring(uri.length()); + long edition = root.getURI().getEdition(); sb.append(edition); int succeeded = root.getTreeSizeSucceeded(); int failed = root.getTreeSizeFailed(); @@ -1147,7 +1201,7 @@ 
private void showProgress() { */ public void doMove() { int count = 0; - File toDirectory = new File("../" + UploaderPaths.LIBRARY_CACHE + ".new"); + File toDirectory = new File("../" + UploaderPaths.LIBRARY_CACHE + ".new2"); if (!toDirectory.mkdir()) { System.err.println("Could not create the directory " + toDirectory); System.exit(1); @@ -1168,8 +1222,8 @@ public void doMove() { final FetchedPage finalPage = page; FileInputStream inputStream; try { - Files.createLink(Paths.get(toDirectory.getPath(), page.uri), Paths.get(page.uri)); - inputStream = new FileInputStream(page.uri); + Files.createLink(Paths.get(toDirectory.getPath(), page.uri.toString()), Paths.get(page.uri.toString())); + inputStream = new FileInputStream(page.uri.toString()); count++; System.out.println("Read file " + count + " in " + page.uri + " level " + page.level + " left: " + objectQueue.size()); } catch (IOException e) { @@ -1186,9 +1240,9 @@ public FetchedPage getPage() { return finalPage; } - Set seen = new HashSet(); + Set seen = new HashSet(); @Override - public boolean processUri(String uri) { + public boolean processUri(FreenetURI uri) { if (seen.contains(uri)) { return false; } @@ -1197,7 +1251,7 @@ public boolean processUri(String uri) { return true; } - }, page.getURI(), page.level); + }, page.level); } catch (IOException e) { System.out.println("Cannot read file " + page.uri); e.printStackTrace(); @@ -1210,9 +1264,19 @@ public boolean processUri(String uri) { public static void main(String[] argv) { if (argv.length > 1 && argv[0].equals("--move")) { - new DownloadAll(argv[1]).doMove(); + try { + new DownloadAll(new FreenetURI(argv[1])).doMove(); + } catch (MalformedURLException e) { + e.printStackTrace(); + System.exit(2); + } } else { - new DownloadAll(argv[0]).doDownload(); + try { + new DownloadAll(new FreenetURI(argv[0])).doDownload(); + } catch (MalformedURLException e) { + e.printStackTrace(); + System.exit(2); + } } } From 1fd4803e8ed21bf3e09a3ef9df11bf5fee01dda4 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Tue, 11 Jul 2017 17:41:53 +0200 Subject: [PATCH 113/180] Count the amount of URIs of different kinds. --- .../src/freenet/library/uploader/DownloadAll.java | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index c83e18e9..01f7f876 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -87,6 +87,8 @@ public class DownloadAll { private int successfulBlocks = 0; private long successfulBytes = 0; private int failed = 0; + private long uriUrisSeen = 0; + private long stringUrisSeen = 0; private int recreated = 0; private int failedRecreated = 0; private int avoidFetching = 0; @@ -505,13 +507,14 @@ void printLeft() { * @return a FreenetURI * @throws MalformedURLException */ - private static FreenetURI getFreenetURI(Object obj) throws MalformedURLException { + private FreenetURI getFreenetURI(Object obj) throws MalformedURLException { FreenetURI u; if (obj instanceof FreenetURI) { u = (FreenetURI) obj; + uriUrisSeen ++; } else { u = new FreenetURI((String) obj); - logger.finest("String URI found: " + (String) obj); + stringUrisSeen ++; } return u; } @@ -1133,6 +1136,10 @@ private void showProgress() { if (failedRecreated > 0) { recreatedMessage += " Recreation failed: " + failedRecreated; } + String urisSeenMessage = ""; + if (uriUrisSeen > 0 || stringUrisSeen > 0) { + urisSeenMessage = " URIUrisSeen: " + uriUrisSeen + "/" + (uriUrisSeen + stringUrisSeen); + } String wrongChkCounterForUploadMessage = ""; if (wrongChkCounterForUpload > 0) { wrongChkCounterForUploadMessage = " WrongChkUploaded: " + wrongChkCounterForUpload; @@ -1141,6 +1148,7 @@ private void showProgress() { " blocks: " + successfulBlocks + " bytes: " + successfulBytes + " Failed: " + failed + + urisSeenMessage + recreatedMessage + 
wrongChkCounterForUploadMessage + " Avoided: " + avoidFetching + "."); From a6423db0844edef22583d3a367f34a260451b472 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Wed, 12 Jul 2017 22:01:12 +0200 Subject: [PATCH 114/180] Start from wherever. --- loop.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/loop.sh b/loop.sh index 34203a5c..c038d14a 100755 --- a/loop.sh +++ b/loop.sh @@ -2,8 +2,6 @@ while test -f library.continue.loop do - # tail wrapper.log - # ls -ltr library.index.* - java -jar ../projects/freenet/github/plugin-Library/dist/uploader.jar + java -jar `dirname $0`/dist/uploader.jar sleep 60 done From f9e0652c12bdc81589a923c6ef408e406f43ddad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Fri, 14 Jul 2017 07:40:13 +0200 Subject: [PATCH 115/180] Progressively slower start of new fetches. --- uploader/src/freenet/library/uploader/DownloadAll.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 01f7f876..208ceaac 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -1302,7 +1302,9 @@ public void waitForSlot() { startCleanupThread(); synchronized (stillRunning) { try { - stillRunning.wait(TimeUnit.SECONDS.toMillis(3)); + stillRunning.wait(TimeUnit.SECONDS.toMillis(1)); + stillRunning.wait(1 + TimeUnit.SECONDS.toMillis(ongoingUploadsSize() * ongoingUploadsSize())); + stillRunning.wait(1 + TimeUnit.SECONDS.toMillis(stillRunning.size() * stillRunning.size())); while (stillRunning.size() + ongoingUploadsSize() * ongoingUploadsSize() >= PARALLEL_JOBS) { stillRunning.wait(); } From 95b55b67979f787eaeab8ac5f08974f762edd49e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Fri, 14 Jul 2017 09:14:14 +0200 Subject: [PATCH 116/180] Cleaned out old commented-out logic. 
--- .../src/freenet/library/uploader/Merger.java | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 1a13911a..8c587d53 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -283,14 +283,6 @@ class ProcessedFilenames implements Iterator { doProcessed = true; doNew = true; } - - private boolean addAnotherSelectedFile() { - return false; -// return nextSelected < 20 && -// creator.size() < 10000 && -// movedTerms < 200000 && -// nextSelected * 2.687 + movedTerms * 0.001097 + creator.size() * 0.0 - 1.6463 < 90; - } @Override public boolean hasNext() { @@ -298,10 +290,6 @@ public boolean hasNext() { nextSelected < selectedFilesToMerge.length) { return true; } - if (addAnotherSelectedFile() && - nextSelected < selectedFilesToMerge.length) { - return true; - } if (doAllSelected && nextSelected < selectedFilesToMerge.length) { return true; } @@ -325,10 +313,6 @@ public String next() { processingSelectedFile = true; doSelected = false; return selectedFilesToMerge[nextSelected++]; - } else if (addAnotherSelectedFile() && - nextSelected < selectedFilesToMerge.length) { - processingSelectedFile = true; - return selectedFilesToMerge[nextSelected++]; } else if (doAllSelected && nextSelected < selectedFilesToMerge.length) { return selectedFilesToMerge[nextSelected++]; } else if (doFiltered && nextFiltered < filteredFilesToMerge.length) { From 9050a4a0c0a87ee94719236ee3c8b34e684132a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Fri, 14 Jul 2017 09:24:31 +0200 Subject: [PATCH 117/180] Removed not used constructor. 
--- uploader/src/freenet/library/uploader/IndexPeeker.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/uploader/src/freenet/library/uploader/IndexPeeker.java b/uploader/src/freenet/library/uploader/IndexPeeker.java index 32811977..c5d6c670 100644 --- a/uploader/src/freenet/library/uploader/IndexPeeker.java +++ b/uploader/src/freenet/library/uploader/IndexPeeker.java @@ -25,9 +25,6 @@ class IndexPeeker { new SkeletonBTreeMap>(12); IndexPeeker(File dir) { - this(dir, 1); - } - IndexPeeker(File dir, int sections) { directory = dir; String lastCHK = DirectoryUploader.readStringFrom(new File(directory, UploaderPaths.LAST_URL_FILENAME)); String rootFilename = directory + "/" + UploaderPaths.LIBRARY_CACHE + "/" + lastCHK; @@ -41,7 +38,7 @@ class IndexPeeker { } topElements = new HashSet(topTtab.keySet()); activeSections = new LinkedList(); - maxSections = sections; + maxSections = 1; } private static int compare(String a, String b) { From 756d3046aff1c6c155a8334714c99102be63ca24 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Fri, 14 Jul 2017 09:36:47 +0200 Subject: [PATCH 118/180] Removed the code that caused an empty directory first time. 
--- .../src/freenet/library/uploader/IndexPeeker.java | 12 +----------- uploader/src/freenet/library/uploader/Merger.java | 5 ----- 2 files changed, 1 insertion(+), 16 deletions(-) diff --git a/uploader/src/freenet/library/uploader/IndexPeeker.java b/uploader/src/freenet/library/uploader/IndexPeeker.java index c5d6c670..7e86ee9c 100644 --- a/uploader/src/freenet/library/uploader/IndexPeeker.java +++ b/uploader/src/freenet/library/uploader/IndexPeeker.java @@ -19,7 +19,6 @@ class IndexPeeker { private LinkedHashMap topTtab; private Set topElements; private List activeSections = null; - private int maxSections; private static final SkeletonBTreeMap> newtrees = new SkeletonBTreeMap>(12); @@ -38,7 +37,6 @@ class IndexPeeker { } topElements = new HashSet(topTtab.keySet()); activeSections = new LinkedList(); - maxSections = 1; } private static int compare(String a, String b) { @@ -93,18 +91,10 @@ boolean include(String subj) { return true; } } - if (activeSections.size() < maxSections) { + if (activeSections.size() < 1) { activeSections.add(new ChoosenSection(subj)); return true; } return false; } - - void roomForOne() { - maxSections = activeSections.size() + 1; - } - - void roomForNone() { - maxSections = activeSections.size(); - } } diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 8c587d53..a1e05b33 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -356,11 +356,6 @@ public Iterator iterator() { while (iterator.hasNext()) { TermEntry tt = iterator.next(); totalTerms ++; - if (processedFilenames.processingSelectedFile) { - creatorPeeker.roomForOne(); - } else { - creatorPeeker.roomForNone(); - } if (creatorPeeker.include(tt.subj)) { creator.putEntry(tt); processedFilenames.movedTerms ++; From 456ac3ff5c78c6f84e2dbb5b8676277211e3b4ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Fri, 14 Jul 2017 10:15:05 
+0200 Subject: [PATCH 119/180] Added code to abort when a second level is added to the top term list. --- .../src/freenet/library/uploader/IndexPeeker.java | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/uploader/src/freenet/library/uploader/IndexPeeker.java b/uploader/src/freenet/library/uploader/IndexPeeker.java index 7e86ee9c..09d4f7bb 100644 --- a/uploader/src/freenet/library/uploader/IndexPeeker.java +++ b/uploader/src/freenet/library/uploader/IndexPeeker.java @@ -35,6 +35,20 @@ class IndexPeeker { e.printStackTrace(); System.exit(1); } + if (topTtab.size() < 1000) { + // So far the growth of the index and the growth of the elements + // in the top node has gone hand in hand keeping the amount of + // pages to update for each merger low. When the amount of terms + // will exceed 1500 x 2048 the B-tree index will suddenly be + // rebuilt with just two entries on top that will share all the + // terms between them. This means that this logic of splitting + // on the top level only will split into two piles instead of + // over a thousand and there is a risk that way too much will be + // included in each update. This code needs to be improved to + // handle this. + throw new IllegalArgumentException("This version of the script does not handle multi-level tree."); + } + topElements = new HashSet(topTtab.keySet()); activeSections = new LinkedList(); } From e417450e8cc5b9fe264525a59844dd8f76bb407c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Fri, 14 Jul 2017 17:46:47 +0200 Subject: [PATCH 120/180] Removed pointless print. 
--- uploader/src/freenet/library/uploader/DirectoryUploader.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index 945ea04e..6fbacb6e 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -191,8 +191,6 @@ protected void mergeToFreenet(File diskDir) { // Ignore System.err.println("Unable to merge old data "+diskDir); return; - } else { - System.out.println("Continuing old bucket: "+f); } ProtoIndex idxDisk = null; From 2d0129061daafab3da75895e5a723a706fea5548 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Mon, 17 Jul 2017 17:26:26 +0200 Subject: [PATCH 121/180] Avoid race condition for ongoingUploads. --- .../freenet/library/uploader/DownloadAll.java | 41 ++++++++++++++----- 1 file changed, 30 insertions(+), 11 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 208ceaac..e0dc8b4d 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -480,12 +480,14 @@ void printLeft() { String ongoingUploadsMessage = ""; if (logger.isLoggable(Level.FINEST) && ongoingUploadsSize() > 0) { Date oldest = null; - for (Map.Entry entry : ongoingUploads.entrySet()) { - if (oldest == null || oldest.compareTo(entry.getValue().getStarted()) > 0) { - oldest = entry.getValue().getStarted(); - } + synchronized (ongoingUploads) { + for (Map.Entry entry : ongoingUploads.entrySet()) { + if (oldest == null || oldest.compareTo(entry.getValue().getStarted()) > 0) { + oldest = entry.getValue().getStarted(); + } + } } - ongoingUploadsMessage = " and " + ongoingUploads.size() + " uploads"; + ongoingUploadsMessage = " and " + ongoingUploadsSize() + " uploads"; if (oldest != null && new 
Date().getTime() - oldest.getTime() > TimeUnit.HOURS.toMillis(5)) { ongoingUploadsMessage += new MessageFormat(", oldest from {0,date,long}").format(new Object[] { oldest }); } @@ -845,7 +847,10 @@ public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { assert c == connection; assert uriGenerated != null; String identifier = uriGenerated.getIdentifier(); - FreenetURI chk = ongoingUploads.get(identifier).getKey(); + FreenetURI chk; + synchronized (ongoingUploads) { + chk = ongoingUploads.get(identifier).getKey(); + } FreenetURI generatedURI; try { generatedURI = new FreenetURI(uriGenerated.getURI()); @@ -871,7 +876,11 @@ public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) assert c == connection; assert putSuccessful != null; String identifier = putSuccessful.getIdentifier(); - final OngoingUpload foundUpload = ongoingUploads.get(identifier); + OngoingUpload ongoingUpload; + synchronized (ongoingUploads) { + ongoingUpload = ongoingUploads.get(identifier); + } + final OngoingUpload foundUpload = ongoingUpload; FreenetURI chk = foundUpload.getKey(); FreenetURI generatedURI = null; try { @@ -889,7 +898,9 @@ public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) foundUpload.complete(); } } - ongoingUploads.remove(identifier); + synchronized (ongoingUploads) { + ongoingUploads.remove(identifier); + } synchronized (stillRunning) { stillRunning.notifyAll(); } @@ -900,11 +911,17 @@ public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { assert c == connection; assert putFailed != null; String identifier = putFailed.getIdentifier(); - final OngoingUpload foundUpload = ongoingUploads.get(identifier); + OngoingUpload ongoingUpload; + synchronized (ongoingUploads) { + ongoingUpload = ongoingUploads.get(identifier); + } + final OngoingUpload foundUpload = ongoingUpload; FreenetURI chk = foundUpload.getKey(); logger.severe("Uploaded " + chk + " failed."); failedRecreated++; - 
ongoingUploads.remove(identifier); + synchronized (ongoingUploads) { + ongoingUploads.remove(identifier); + } synchronized (stillRunning) { stillRunning.notifyAll(); } @@ -919,7 +936,9 @@ public void run() { logger.fine("Ressurrecting " + freenetURI.toString()); uploadCounter++; final String identifier = "Upload" + uploadCounter; - ongoingUploads.put(identifier, new OngoingUpload(freenetURI, callback)); + synchronized (ongoingUploads) { + ongoingUploads.put(identifier, new OngoingUpload(freenetURI, callback)); + } final ClientPut putter = new ClientPut("CHK@", identifier); putter.setEarlyEncode(true); putter.setPriority(net.pterodactylus.fcp.Priority.bulkSplitfile); From c8bfad68df7bbe3f220340a49490e68f548cd256 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Tue, 18 Jul 2017 09:38:16 +0200 Subject: [PATCH 122/180] Put the cast back in the parsing. This will probably not change anything. --- shared/src/freenet/library/io/YamlReaderWriter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/shared/src/freenet/library/io/YamlReaderWriter.java b/shared/src/freenet/library/io/YamlReaderWriter.java index 4b183c4f..b70a10a6 100644 --- a/shared/src/freenet/library/io/YamlReaderWriter.java +++ b/shared/src/freenet/library/io/YamlReaderWriter.java @@ -120,7 +120,7 @@ public static class ExtendedRepresenter extends Representer { public ExtendedRepresenter() { this.representers.put(FreenetURI.class, new Represent() { /*@Override**/ public Node representData(Object data) { - return representScalar("!FreenetURI", data.toString()); + return representScalar("!FreenetURI", ((FreenetURI) data).toString()); } }); this.representers.put(Packer.BinInfo.class, new Represent() { From 156d16563c2215a6cd600b0e914a544cfb2de23c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Tue, 18 Jul 2017 10:20:42 +0200 Subject: [PATCH 123/180] Simplified the logic when calculating next URI to download. 
--- .../freenet/library/uploader/DownloadAll.java | 75 ++++++++----------- 1 file changed, 33 insertions(+), 42 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index e0dc8b4d..283f99e4 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -1078,49 +1078,40 @@ public void doDownload() { lastRoot = roots.get(roots.size() - 1); } - if (!empty) { - try { - FetchedPage taken = objectQueue.take(); - while (!taken.hasParent()) { - logger.finer("Avoid fetching " + taken.getURI()); - taken = null; - avoidFetching++; - if (objectQueue.isEmpty()) { - break; - } - taken = objectQueue.take(); - } - // Randomize the order by rotating the queue - int maxLaps = objectQueue.size(); - if (maxLaps == 0) { - maxLaps = 1; - } - int toRotate = rand.nextInt(maxLaps); - int rotated = 0; - assert taken.level > 0; - for (int i = 0; i < toRotate; i += taken.hasParent(lastRoot) ? 
taken.level * taken.level * taken.level : 1) { - objectQueue.offer(taken); - taken = objectQueue.take(); - while (!taken.hasParent()) { - taken = null; - avoidFetching++; - if (objectQueue.isEmpty()) { - break; - } - taken = objectQueue.take(); - assert taken.level > 0; - } - rotated++; - } - logger.finest("Rotated " + rotated + " (count to " + toRotate + ")."); - if (taken == null) { - break; - } - new GetAdapter(taken); - } catch (InterruptedException e) { - e.printStackTrace(); - System.exit(1); + // Randomize the order by rotating the queue + int maxLaps = objectQueue.size(); + if (maxLaps == 0) { + maxLaps = 1; + } + int toRotate = rand.nextInt(maxLaps); + int rotated = 0; + int counted = 0; + + while (!objectQueue.isEmpty()) { + FetchedPage taken; + try { + taken = objectQueue.take(); + } catch (InterruptedException e) { + e.printStackTrace(); + System.exit(1); + continue; + } + if (!taken.hasParent()) { + logger.finer("Avoid fetching " + taken.getURI()); + taken = null; + avoidFetching++; + continue; + } + + counted += taken.level * taken.level * taken.level; + if (counted < toRotate) { + rotated++; + objectQueue.offer(taken); + continue; } + logger.finest("Rotated " + rotated + " (count to " + toRotate + ")."); + new GetAdapter(taken); + break; } subscriberListener.restart(); empty = objectQueue.isEmpty(); From 1d5b1787e90a21a00b90a20859daaceee87212d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 23 Jul 2017 07:24:11 +0200 Subject: [PATCH 124/180] Made Comparable and Serializable. 
--- shared/src/freenet/library/io/FreenetURI.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/shared/src/freenet/library/io/FreenetURI.java b/shared/src/freenet/library/io/FreenetURI.java index d0b1b293..f8a05eda 100644 --- a/shared/src/freenet/library/io/FreenetURI.java +++ b/shared/src/freenet/library/io/FreenetURI.java @@ -7,6 +7,7 @@ import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; +import java.io.Serializable; import java.net.MalformedURLException; import java.net.URLEncoder; import java.util.regex.Matcher; @@ -19,7 +20,12 @@ * * It has part of the interface in the same way but it is simpler and local to the Library. */ -public class FreenetURI { +public class FreenetURI implements Cloneable, Serializable { + /** + * For Serializable. + */ + private static transient final long serialVersionUID = 1L; + private String contents; public FreenetURI(String uri) throws MalformedURLException { From 88186a67bd2cd14bcf1edd9eb15c645a116dafec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Mon, 24 Jul 2017 08:57:56 +0200 Subject: [PATCH 125/180] Abort if upload fails. Show times. 
--- .../freenet/library/uploader/FcpArchiver.java | 31 +++++++++++++++---- 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index b1cea819..bee97f08 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -127,6 +127,8 @@ public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, " in the cache."); } + private static long lastUriMillis = 0; + private class PushAdapter extends FcpAdapter { private ClientPut putter; private String identifier; @@ -135,6 +137,7 @@ private class PushAdapter extends FcpAdapter { private int progressTotal; private int progressCompleted; private boolean done; + private long started; public PushAdapter(ClientPut p, String i, String t) { putter = p; @@ -147,6 +150,7 @@ public PushAdapter(ClientPut p, String i, String t) { stillRunning.put(token, this); printLeft(); } + started = System.currentTimeMillis(); } /** @@ -165,14 +169,18 @@ void printLeft() { "(" + completed + "/" + total + ")"); } } - + + private String at() { + return " took " + (System.currentTimeMillis() - started) + "ms"; + } + @Override public void receivedPutSuccessful(FcpConnection c, PutSuccessful ps) { assert c == connection; assert ps != null; if (!identifier.equals(ps.getIdentifier())) return; - System.out.println("receivedPutSuccessful for " + token); + System.out.println("receivedPutSuccessful for " + token + at()); markDone(); } @@ -182,7 +190,7 @@ public void receivedPutFetchable(FcpConnection c, PutFetchable pf) { assert pf != null; if (!identifier.equals(pf.getIdentifier())) return; - System.out.println("receivedPutFetchable for " + token); + System.out.println("receivedPutFetchable for " + token + at()); synchronized (this) { this.notifyAll(); } @@ -198,8 +206,9 @@ public void receivedPutFailed(FcpConnection c, PutFailed pf) { synchronized (putter) { 
putter.notify(); } - System.out.println("receivedPutFailed for " + token); + System.err.println("receivedPutFailed for " + token + at() + " aborting."); markDone(); + System.exit(1); } @Override @@ -224,11 +233,16 @@ public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { assert uriGenerated != null; if (!identifier.equals(uriGenerated.getIdentifier())) return; - System.out.println("receivedURIGenerated for " + token); + String sinceTime = ""; + if (lastUriMillis != 0) { + sinceTime = " (" + (System.currentTimeMillis() - lastUriMillis) + "ms since last URI)"; + } + System.out.println("receivedURIGenerated for " + token + at() + sinceTime); uri = uriGenerated.getURI(); synchronized (this) { this.notifyAll(); } + lastUriMillis = System.currentTimeMillis(); } private void markDone() { @@ -269,7 +283,12 @@ public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, SimpleProgress progress) throws TaskAbortException { // Slow down the build up of the queue. try { - Thread.sleep(1 + totalBlocksStillUploading * totalBlocksStillUploading); + int stillRunningSize; + synchronized (stillRunning) { + stillRunningSize = stillRunning.size(); + } + final int uploading = totalBlocksStillUploading + stillRunningSize; + Thread.sleep(1 + uploading * uploading); } catch (InterruptedException e1) { throw new RuntimeException("Unexpected interrupt"); } From d3cf20e18e8a92af342778af70e287f4acbc36ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Mon, 24 Jul 2017 13:20:33 +0200 Subject: [PATCH 126/180] Use FreenetURI objects within the trees. Also added tests of the ProtoIndexSerialser. Buggfix for DownloadAll. 
--- .../index/ProtoIndexSerialiserTest.java | 305 ++++++++++++++++++ .../freenet/library/uploader/DownloadAll.java | 2 +- .../freenet/library/uploader/FcpArchiver.java | 16 +- 3 files changed, 318 insertions(+), 5 deletions(-) create mode 100644 shared/test/freenet/library/index/ProtoIndexSerialiserTest.java diff --git a/shared/test/freenet/library/index/ProtoIndexSerialiserTest.java b/shared/test/freenet/library/index/ProtoIndexSerialiserTest.java new file mode 100644 index 00000000..0a4e96c3 --- /dev/null +++ b/shared/test/freenet/library/index/ProtoIndexSerialiserTest.java @@ -0,0 +1,305 @@ +package freenet.library.index; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.MalformedURLException; +import java.util.HashMap; +import java.util.Map; + +import freenet.library.index.TermEntry.EntryType; +import freenet.library.io.FreenetURI; +import freenet.library.io.ObjectStreamReader; +import freenet.library.io.ObjectStreamWriter; +import freenet.library.io.serial.FileArchiver; +import freenet.library.io.serial.LiveArchiver; +import freenet.library.io.serial.Serialiser.PullTask; +import freenet.library.io.serial.Serialiser.PushTask; +import freenet.library.io.serial.Translator; +import freenet.library.ArchiverFactory; +import freenet.library.FactoryRegister; +import freenet.library.Priority; +import freenet.library.util.SkeletonBTreeMap; +import freenet.library.util.SkeletonBTreeSet; +import freenet.library.util.exec.SimpleProgress; +import freenet.library.util.exec.TaskAbortException; + +import junit.framework.TestCase; + +public class ProtoIndexSerialiserTest extends TestCase { + private MockLiveArchiver mockLiveArchiver; + private MockArchiverFactory mockArchiverFactory; + private ProtoIndexSerialiser tested_object; + private ProtoIndex mockProtoIndex; + + /** + * For pull, the meta is a String containing the 
contents of the stream. + */ + static class MockLiveArchiver implements LiveArchiver, SimpleProgress> { + ObjectStreamReader> reader; + ObjectStreamWriter> writer; + MockLiveArchiver(ObjectStreamReader> r, + ObjectStreamWriter> w) { + reader = r; + writer = w; + } + + byte[] bytesToParse; + String createdOutput; + int archiverResultNumber = 1; + + @Override + public void pull( + freenet.library.io.serial.Serialiser.PullTask> task) + throws TaskAbortException { + assertNotNull(bytesToParse); + InputStream is = new ByteArrayInputStream(bytesToParse); + try { + task.data = reader.readObject(is); + } catch (IOException e) { + throw new TaskAbortException("byte array unparseable", e); + } + } + + @Override + public void push( + freenet.library.io.serial.Serialiser.PushTask> task) + throws TaskAbortException { + ByteArrayOutputStream os = new ByteArrayOutputStream(); + try { + writer.writeObject(task.data, os); + } catch (IOException e) { + throw new TaskAbortException("Could not write", e); + } + createdOutput = os.toString(); + } + + @Override + public void pullLive( + freenet.library.io.serial.Serialiser.PullTask> task, + SimpleProgress p) throws TaskAbortException { + fail("Not yet implemented"); // TODO + } + + @Override + public void pushLive( + freenet.library.io.serial.Serialiser.PushTask> task, + SimpleProgress p) throws TaskAbortException { + if (p != null) { + p.addPartKnown(1, true); + } + push(task); + try { + task.meta = new FreenetURI("CHK@" + (archiverResultNumber++) + ",7,A6"); + } catch (MalformedURLException e) { + throw new TaskAbortException("URL problem", e); + } + if (p != null) { + p.addPartDone(); + } + } + + @Override + public void waitForAsyncInserts() throws TaskAbortException { + // Do nothing + } + } + + class MockArchiverFactory implements ArchiverFactory { + + @Override + public LiveArchiver newArchiver( + S rw, String mime, int size, Priority priorityLevel) { + assertNotNull(rw); + assertEquals(ProtoIndexComponentSerialiser.yamlrw, rw); + 
assertNotNull(mime); + assertNotSame(0, size); + assertEquals(Priority.Bulk, priorityLevel); + return (LiveArchiver) new MockLiveArchiver(rw, rw); + } + + @Override + public LiveArchiver newArchiver( + S rw, String mime, int size, + LiveArchiver archiver) { + fail("Not called by the tests."); + return null; + } + } + + protected void setUp() throws Exception { + super.setUp(); + mockArchiverFactory = new MockArchiverFactory(); + FactoryRegister.register(mockArchiverFactory); + mockLiveArchiver = new MockLiveArchiver( + ProtoIndexComponentSerialiser.yamlrw, + ProtoIndexComponentSerialiser.yamlrw); + + tested_object = new ProtoIndexSerialiser(mockLiveArchiver); + + FreenetURI reqID = null; + mockProtoIndex = new ProtoIndex(reqID, "name", "owner", "email", 0); + } + + protected void tearDown() throws Exception { + super.tearDown(); + } + + public void testProtoIndexSerialiser() { + assertNotNull(tested_object); + } + + private void assertProtoIndexSerialiserURI(ProtoIndexSerialiser result) { + assertNotNull(result); + assertNotNull(result.getChildSerialiser()); + assertTrue(result.getChildSerialiser() instanceof MockLiveArchiver); + } + + public void testForIndexURIAsObjectPriority() throws MalformedURLException { + FreenetURI uri = new FreenetURI("CHK@"); + Object obj = uri; + Priority priority = Priority.Bulk; + + ProtoIndexSerialiser result = ProtoIndexSerialiser.forIndex(obj, priority); + + assertProtoIndexSerialiserURI(result); + } + + private void assertProtoIndexSerialiserFile(ProtoIndexSerialiser result) { + assertNotNull(result); + assertNotNull(result.getChildSerialiser()); + assertTrue(result.getChildSerialiser() instanceof FileArchiver); + } + + public void testForIndexFileAsObjectPriority() { + File file = new File("file"); + Object obj = file; + Priority priority = Priority.Bulk; + + ProtoIndexSerialiser result = ProtoIndexSerialiser.forIndex(obj, priority); + + assertProtoIndexSerialiserFile(result); + } + + public void 
testForIndexUnmatchedObjectPriority() throws MalformedURLException { + Object obj = new Object(); + Priority priority = Priority.Bulk; + + try { + ProtoIndexSerialiser.forIndex(obj, priority); + fail("Should have thrown."); + } catch (UnsupportedOperationException e) { + // OK. + } + } + + public void testForIndexFreenetURIPriority() throws MalformedURLException { + FreenetURI uri = new FreenetURI("CHK@"); + Priority priority = Priority.Bulk; + + ProtoIndexSerialiser result = ProtoIndexSerialiser.forIndex(uri, priority); + + assertProtoIndexSerialiserURI(result); + } + + public void testForIndexFile() { + final File prefix = new File("prefix"); + ProtoIndexSerialiser result = ProtoIndexSerialiser.forIndex(prefix); + + assertProtoIndexSerialiserFile(result); + } + + public void testGetChildSerialiser() { + final LiveArchiver, SimpleProgress> result = tested_object.getChildSerialiser(); + + assertNotNull(result); + } + + public void testGetTranslator() { + final Translator> translator = tested_object.getTranslator(); + assertNotNull(translator); + + translator.app(mockProtoIndex); + } + + public void testPull() throws TaskAbortException, MalformedURLException { + final FreenetURI req_id = new FreenetURI("CHK@"); + PullTask task = new PullTask(req_id); + final String name = "New Spider index."; + final long totalPages = 17; + mockLiveArchiver.bytesToParse = ( + "serialVersionUID: " + ProtoIndex.serialVersionUID + "\n" + + "serialFormatUID: " + ProtoIndexComponentSerialiser.FMT_DEFAULT + "\n" + + "totalPages: " + totalPages + "\n" + + "name: " + name + "\n" + + "utab:\n" + + " node_min: 1024\n" + + " size: 0\n" + + " entries: {}\n" + + "ttab:\n" + + " node_min: 1024\n" + + " size: 2470\n" + + " entries:\n" + + " adam: !BinInfo {? 
&id001 !!binary \"abcdef==\" : 1}\n" + + " subnodes:\n" + + " !FreenetURI 'CHK@123,456,A789': 1234\n" + + " !FreenetURI 'CHK@456,678,A890': 1235\n" + + "").getBytes(); + task.data = mockProtoIndex; + + tested_object.pull(task); + + assertEquals(req_id, task.data.reqID); + assertEquals(name, task.data.name); + assertEquals(totalPages, task.data.totalPages); + assertEquals(new SkeletonBTreeMap>(1024), + task.data.utab); + SkeletonBTreeMap> x = task.data.ttab; + } + + public void testPushEmpty() throws TaskAbortException { + PushTask task = new PushTask(mockProtoIndex); + + tested_object.push(task); + + assertTrue(mockLiveArchiver.createdOutput.contains("serialVersionUID: " + ProtoIndex.serialVersionUID)); + final String emptyBTree = "\n node_min: 1024\n size: 0\n entries: {}\n"; + assertTrue(mockLiveArchiver.createdOutput.contains("\nutab:" + emptyBTree)); + assertTrue(mockLiveArchiver.createdOutput.contains("\nttab:" + emptyBTree)); + } + + public void testPushContents() throws TaskAbortException, MalformedURLException { + ProtoIndexSerialiser srl = ProtoIndexSerialiser.forIndex(new FreenetURI("CHK@"), Priority.Bulk); + LiveArchiver,SimpleProgress> archiver = + (LiveArchiver,SimpleProgress>)(srl.getChildSerialiser()); + ProtoIndexComponentSerialiser leafsrl = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_DEFAULT, archiver); + + PushTask task = new PushTask(mockProtoIndex); + + final int ENTRIES = 10000; + for (int i = 0; i < ENTRIES; i++) { + final SkeletonBTreeSet value = new SkeletonBTreeSet(100); + value.add(new TermPageEntry("a", 1, new FreenetURI("CHK@1,2,A3"), "title", null)); + leafsrl.setSerialiserFor(value); + value.deflate(); + + mockProtoIndex.ttab.put("a" + i, value); + } + + leafsrl.setSerialiserFor(mockProtoIndex); + mockProtoIndex.ttab.deflate(); + + mockLiveArchiver.waitForAsyncInserts(); + + tested_object.push(task); + + assertTrue(mockLiveArchiver.createdOutput.contains("serialVersionUID: " + ProtoIndex.serialVersionUID)); + 
final String emptyBTree = "\n node_min: 1024\n size: 0\n entries: {}\n"; + assertTrue(mockLiveArchiver.createdOutput.contains("\nutab:" + emptyBTree)); + final String countBTreeProlog = "\n node_min: 1024\n size: " + ENTRIES + "\n entries:\n"; + assertTrue(mockLiveArchiver.createdOutput.contains("\nttab:" + countBTreeProlog)); + } +} diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 283f99e4..07ce740f 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -553,7 +553,7 @@ private int processSubnodes(Map map, UriProcessor uriProcessor) } else { subnodes = new LinkedHashMap(); for (Map.Entry entry : subnodes1.entrySet()) { - subnodes.put(new FreenetURI((String) entry.getKey()), entry.getValue()); + subnodes.put(getFreenetURI(entry.getKey()), entry.getValue()); } logger.finest("String URIs found in subnodes of: " + uriProcessor.getPage()); } diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index bee97f08..c5ea31ba 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -6,6 +6,7 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; +import java.net.MalformedURLException; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -133,7 +134,7 @@ private class PushAdapter extends FcpAdapter { private ClientPut putter; private String identifier; private String token; - private String uri; + private FreenetURI uri; private int progressTotal; private int progressCompleted; private boolean done; @@ -238,7 +239,14 @@ public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { sinceTime = " (" + (System.currentTimeMillis() - lastUriMillis) + "ms since last URI)"; } 
System.out.println("receivedURIGenerated for " + token + at() + sinceTime); - uri = uriGenerated.getURI(); + try { + uri = new FreenetURI(uriGenerated.getURI()); + } catch (MalformedURLException e) { + System.err.println("receivedURIGenerated failed with URI: " + uriGenerated.getURI() + + " for " + token + at() + " aborting."); + markDone(); + System.exit(1); + } synchronized (this) { this.notifyAll(); } @@ -270,7 +278,7 @@ boolean isDone() { return done; } - String getURI() { + FreenetURI getURI() { return uri; } }; @@ -360,7 +368,7 @@ public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, task.meta = putterListener.getURI(); // Moving file. - file.renameTo(new File(cacheDir, putterListener.getURI())); + file.renameTo(new File(cacheDir, putterListener.getURI().toString())); startCleanupThread(); } From 325b4547fd48f940b5a3955e0ce497e186129cc5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Mon, 24 Jul 2017 13:41:32 +0200 Subject: [PATCH 127/180] Changed to logging of String URIs to show problems. 
--- uploader/src/freenet/library/uploader/DownloadAll.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 07ce740f..be139071 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -1148,7 +1148,8 @@ private void showProgress() { } String urisSeenMessage = ""; if (uriUrisSeen > 0 || stringUrisSeen > 0) { - urisSeenMessage = " URIUrisSeen: " + uriUrisSeen + "/" + (uriUrisSeen + stringUrisSeen); + urisSeenMessage = " StringUrisSeen: " + stringUrisSeen + "/" + (uriUrisSeen + stringUrisSeen); + urisSeenMessage += new Formatter().format(" (%.1f%%)", 100.0 * stringUrisSeen / (uriUrisSeen + stringUrisSeen)); } String wrongChkCounterForUploadMessage = ""; if (wrongChkCounterForUpload > 0) { From 493cc5c6b6f3d28cbbd76bc601bdcf8e0fca0b75 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Wed, 26 Jul 2017 11:24:33 +0200 Subject: [PATCH 128/180] Retry indefinately, if failed. --- loop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loop.sh b/loop.sh index c038d14a..c8b87772 100755 --- a/loop.sh +++ b/loop.sh @@ -1,4 +1,4 @@ -#!/bin/sh -ex +#!/bin/sh -x while test -f library.continue.loop do From 6adf32951859f22eb30926909ff12a7ca98cc43a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Thu, 27 Jul 2017 09:40:11 +0200 Subject: [PATCH 129/180] Fixed cast problem when processing subnodes. The problem occured when having a set of keys mixing FreenetURI and String. 
--- .../freenet/library/uploader/DownloadAll.java | 47 ++++++++----------- 1 file changed, 20 insertions(+), 27 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index be139071..6a670cc2 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -545,21 +545,10 @@ private int processBinInfoValues(Map entries, UriProcessor uriP private int processSubnodes(Map map, UriProcessor uriProcessor) throws MalformedURLException { int foundChildren = 0; - Map subnodes1 = + Map subnodes = (Map) map.get("subnodes"); - Map subnodes; - if (subnodes1.keySet().iterator().next() instanceof FreenetURI) { - subnodes = (Map) map.get("subnodes"); - } else { - subnodes = new LinkedHashMap(); - for (Map.Entry entry : subnodes1.entrySet()) { - subnodes.put(getFreenetURI(entry.getKey()), entry.getValue()); - } - logger.finest("String URIs found in subnodes of: " + uriProcessor.getPage()); - } - - for (FreenetURI key : subnodes.keySet()) { - if (uriProcessor.processUri(key)) { + for (Object key : subnodes.keySet()) { + if (uriProcessor.processUri(getFreenetURI(key))) { foundChildren ++; } } @@ -580,12 +569,14 @@ private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro Map entries = (Map) map2.get("entries"); foundChildren += processBinInfoValues(entries, uriProcessor); - Map subnodes = - (Map) map2.get("subnodes"); - logger.log(Level.FINER, "Contains ttab.entries (level {0}) with {1} subnodes", new Object[] { - uriProcessor.getPage().level, - subnodes.size(), - }); + if (logger.isLoggable(Level.FINER)) { + Map subnodes = + (Map) map2.get("subnodes"); + logger.log(Level.FINER, "Contains ttab.entries (level {0}) with {1} subnodes", new Object[] { + uriProcessor.getPage().level, + subnodes.size(), + }); + } foundChildren += processSubnodes(map2, uriProcessor); return; } @@ -644,13 +635,15 @@ private void 
readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro last = contents.getKey(); Map map3 = (Map) contents.getValue(); if (map3.containsKey("subnodes")) { - Map subnodes = - (Map) map3.get("subnodes"); - logger.log(Level.FINER, "Entry for {1} (level {0}) contains {2} subnodes.", new Object[] { - uriProcessor.getPage().level, - contents.getKey(), - subnodes.size(), - }); + if (logger.isLoggable(Level.FINER)) { + Map subnodes = + (Map) map3.get("subnodes"); + logger.log(Level.FINER, "Entry for {1} (level {0}) contains {2} subnodes.", new Object[] { + uriProcessor.getPage().level, + contents.getKey(), + subnodes.size(), + }); + } foundChildren += processSubnodes(map3, uriProcessor); } continue; From 91cd50f7442bebce3e33346bdd6a64635bd79fdd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Fri, 28 Jul 2017 07:30:54 +0200 Subject: [PATCH 130/180] Reduce waiting time factor based on download jobs. Also enforced the waiting time based on upload jobs better. --- uploader/src/freenet/library/uploader/DownloadAll.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 6a670cc2..3f674cfa 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -1307,8 +1307,10 @@ public void waitForSlot() { synchronized (stillRunning) { try { stillRunning.wait(TimeUnit.SECONDS.toMillis(1)); - stillRunning.wait(1 + TimeUnit.SECONDS.toMillis(ongoingUploadsSize() * ongoingUploadsSize())); - stillRunning.wait(1 + TimeUnit.SECONDS.toMillis(stillRunning.size() * stillRunning.size())); + for (int i = 0; i < ongoingUploadsSize(); i++) { + stillRunning.wait(1 + TimeUnit.SECONDS.toMillis(ongoingUploadsSize())); + } + stillRunning.wait(1 + TimeUnit.SECONDS.toMillis(stillRunning.size())); while (stillRunning.size() + ongoingUploadsSize() * ongoingUploadsSize() 
>= PARALLEL_JOBS) { stillRunning.wait(); } From 0836c56512b1d9ab4dc45f7d7c4fb50ce8ebc91b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Fri, 28 Jul 2017 15:35:50 +0200 Subject: [PATCH 131/180] Use the library implementation of the FreenetURI also in the plugin. The shared libraries require the library implementation, To solve this, the not used implementation of uploading things using the fred implementation of the FreenetURI was removed instead of fixed. --- .../library/index/ProtoIndexSerialiser.java | 3 +- shared/src/freenet/library/io/FreenetURI.java | 48 ++- .../freenet/library/io/FreenetURITest.java | 13 + src/plugins/Library/Library.java | 12 +- .../Library/client/FreenetArchiver.java | 342 ++---------------- .../Library/ui/ResultNodeGenerator.java | 2 +- 6 files changed, 90 insertions(+), 330 deletions(-) create mode 100644 shared/test/freenet/library/io/FreenetURITest.java diff --git a/shared/src/freenet/library/index/ProtoIndexSerialiser.java b/shared/src/freenet/library/index/ProtoIndexSerialiser.java index 54a0be40..08a77d60 100644 --- a/shared/src/freenet/library/index/ProtoIndexSerialiser.java +++ b/shared/src/freenet/library/index/ProtoIndexSerialiser.java @@ -65,7 +65,8 @@ public static ProtoIndexSerialiser forIndex(Object o, Priority priorityLevel) { } else if (o instanceof File) { return forIndex((File)o); } else { - throw new UnsupportedOperationException("Don't know how to retrieve index for object " + o); + throw new UnsupportedOperationException("Don't know how to retrieve index for object " + o + + " (of type " + o.getClass().getName() + ")"); } } diff --git a/shared/src/freenet/library/io/FreenetURI.java b/shared/src/freenet/library/io/FreenetURI.java index f8a05eda..55246ff7 100644 --- a/shared/src/freenet/library/io/FreenetURI.java +++ b/shared/src/freenet/library/io/FreenetURI.java @@ -53,6 +53,10 @@ public FreenetURI(String uri) throws MalformedURLException { } } + public FreenetURI(boolean chk) { + contents = 
"CHK@"; + } + static final byte CHK = 1; static final byte SSK = 2; static final byte KSK = 3; @@ -60,6 +64,8 @@ public FreenetURI(String uri) throws MalformedURLException { static final short ClientCHK_EXTRA_LENGTH = 5; static final short ClientSSK_EXTRA_LENGTH = 5; + public static final FreenetURI EMPTY_CHK_URI = new FreenetURI(true); + /** * This method can read the traditional BinaryKey-coded data, from Spider * while mostly reading the simpler UTF-string key encoded with length 0. @@ -168,25 +174,31 @@ public FreenetURI sskForUSK() { // return null; } + private static final Pattern SSK_FOR_USK_PATTERN = Pattern.compile("^SSK@([^/]*/[^-/]*)-([0-9]*)(/.*)?$"); public boolean isSSKForUSK() { - // TODO Auto-generated method stub - throw new RuntimeException("Not implemented yet."); - // return false; + Matcher m = SSK_FOR_USK_PATTERN.matcher(contents); + return m.matches(); } public FreenetURI uskForSSK() { - // TODO Auto-generated method stub - throw new RuntimeException("Not implemented yet."); - // return null; + Matcher m = SSK_FOR_USK_PATTERN.matcher(contents); + if (m.matches()) { + try { + return new FreenetURI("USK@" + m.group(1) + "/" + m.group(2) + m.group(3)); + } catch (MalformedURLException e) { + // FALLTHRU + } + } + throw new RuntimeException("Cannot convert " + contents + " to USK."); } - private static final Pattern FIND_EDITION_PATTERN = Pattern.compile("[^/]*/[^/]*/([---0-9]*)(/.*)?$"); + private static final Pattern USK_PATTERN = Pattern.compile("^([^/]*)/([^/]*)/([---0-9]*)(/.*)?$"); public long getEdition() { try { if (isUSK()) { - Matcher m = FIND_EDITION_PATTERN.matcher(contents); + Matcher m = USK_PATTERN.matcher(contents); if (m.matches()) { - return Long.parseLong(m.group(1)); + return Long.parseLong(m.group(3)); } else { throw new RuntimeException("Edition not found in " + contents + "."); } @@ -194,10 +206,26 @@ public long getEdition() { } catch (MalformedURLException e) { throw new RuntimeException("Malformed key " + contents + 
"."); } - throw new RuntimeException("Not an USK."); + throw new RuntimeException("Cannot find edition in assumed USK: " + contents); // return 0; } + private static final Pattern ROOT_PATTERN = Pattern.compile("^([^/]*)((/[^/]*).*)?$"); + /** + * Calculate the root of the key. I.e. the key and the document name. + * + * The key could be of any kind except an SSK that is really an USK. + * + * @return A String. + */ + public String getRoot() { + Matcher m = ROOT_PATTERN.matcher(contents); + if (m.matches()) { + return m.group(1) + m.group(3); + } + throw new RuntimeException("Cannot determine root: " + contents); + } + public FreenetURI setMetaString(Object object) { // TODO Auto-generated method stub throw new RuntimeException("Not implemented yet."); diff --git a/shared/test/freenet/library/io/FreenetURITest.java b/shared/test/freenet/library/io/FreenetURITest.java new file mode 100644 index 00000000..a4586ca3 --- /dev/null +++ b/shared/test/freenet/library/io/FreenetURITest.java @@ -0,0 +1,13 @@ +package freenet.library.io; + +import java.net.MalformedURLException; + +import junit.framework.TestCase; + +public class FreenetURITest extends TestCase { + public void testGetUSKRoot() throws MalformedURLException { + FreenetURI to = new FreenetURI("USK@aa,bb,Acc/file/12345/meta"); + + assertEquals("USK@aa,bb,Acc/file", to.getRoot()); + } +} diff --git a/src/plugins/Library/Library.java b/src/plugins/Library/Library.java index 382926ec..8009eb44 100644 --- a/src/plugins/Library/Library.java +++ b/src/plugins/Library/Library.java @@ -364,7 +364,7 @@ public Object getAddressTypeFromString(String indexuri) { // return KeyExplorerUtils.sanitizeURI(new ArrayList(), indexuri); KEYEXPLORER // OPT HIGH if it already ends with eg. 
*Index.DEFAULT_FILE, don't strip // the MetaString, and have getIndexType behave accordingly - FreenetURI tempURI = new FreenetURI(indexuri); + freenet.library.io.FreenetURI tempURI = new freenet.library.io.FreenetURI(indexuri); return tempURI; } catch (MalformedURLException e) { File file = new File(indexuri); @@ -562,9 +562,9 @@ public final Index getIndex(String indexuri, String origIndexName) throws Invali try { if (indexkey instanceof File) { indextype = getIndexType((File)indexkey); - } else if (indexkey instanceof FreenetURI) { + } else if (indexkey instanceof freenet.library.io.FreenetURI) { // TODO HIGH make this non-blocking - FreenetURI uri = (FreenetURI)indexkey; + FreenetURI uri = new FreenetURI(indexkey.toString()); if(uri.isUSK()) edition = uri.getEdition(); indextype = getIndexType(uri); @@ -589,11 +589,17 @@ public final Index getIndex(String indexuri, String origIndexName) throws Invali return index; + } catch (MalformedURLException e) { + Logger.warning(this, "Failed to find index type", e); + throw new TaskAbortException("Failed to find index type " + indexuri+" : "+e, e, true); } catch (FetchException e) { + Logger.warning(this, "Failed to find fetch index", e); throw new TaskAbortException("Failed to fetch index " + indexuri+" : "+e, e, true); } catch (UnsupportedOperationException e) { + Logger.warning(this, "Failed to find parse index", e); throw new TaskAbortException("Failed to parse index " + indexuri+" : "+e, e); } catch (RuntimeException e) { + Logger.warning(this, "Failed to find load index", e); throw new TaskAbortException("Failed to load index " + indexuri+" : "+e, e); } } diff --git a/src/plugins/Library/client/FreenetArchiver.java b/src/plugins/Library/client/FreenetArchiver.java index 91124f43..8a496c13 100644 --- a/src/plugins/Library/client/FreenetArchiver.java +++ b/src/plugins/Library/client/FreenetArchiver.java @@ -6,33 +6,20 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; -import 
java.io.OutputStream; import java.net.MalformedURLException; import java.util.ArrayList; -import java.util.HashSet; - -import plugins.Library.Library; - -import freenet.client.ClientMetadata; import freenet.client.FetchException; import freenet.client.FetchException.FetchExceptionMode; import freenet.client.FetchResult; import freenet.client.HighLevelSimpleClient; -import freenet.client.InsertBlock; -import freenet.client.InsertContext; import freenet.client.InsertException; -import freenet.client.async.BaseClientPutter; import freenet.client.async.ClientContext; -import freenet.client.async.ClientPutCallback; -import freenet.client.async.ClientPutter; -import freenet.client.async.PersistenceDisabledException; import freenet.client.events.ClientEvent; import freenet.client.events.ClientEventListener; import freenet.client.events.SplitfileProgressEvent; import freenet.crypt.SHA256; -import freenet.keys.CHKBlock; -import freenet.keys.FreenetURI; +import freenet.library.io.FreenetURI; import freenet.library.io.ObjectStreamReader; import freenet.library.io.ObjectStreamWriter; import freenet.library.io.serial.LiveArchiver; @@ -40,18 +27,13 @@ import freenet.library.util.exec.SimpleProgress; import freenet.library.util.exec.TaskAbortException; import freenet.node.NodeClientCore; -import freenet.node.RequestClient; import freenet.node.RequestStarter; import freenet.support.Base64; import freenet.support.Logger; import freenet.support.SimpleReadOnlyArrayBucket; -import freenet.support.SizeUtil; import freenet.support.api.Bucket; -import freenet.support.api.RandomAccessBucket; -import freenet.support.io.BucketTools; import freenet.support.io.Closer; import freenet.support.io.FileBucket; -import freenet.support.io.ResumeFailedException; /** ** Converts between a map of {@link String} to {@link Object}, and a freenet @@ -92,10 +74,6 @@ public class FreenetArchiver * long time. 
*/ static final boolean SEMI_ASYNC_PUSH = true; - private final HashSet semiAsyncPushes = new HashSet(); - private final ArrayList pushesFailed = new ArrayList(); - private long totalBytesPushing; - public static void setCacheDir(File dir) { cacheDir = dir; } @@ -120,7 +98,16 @@ public FreenetArchiver(NodeClientCore c, ObjectStreamReader r, ObjectStreamWrite default_mime = mime; expected_bytes = size; } - + + private freenet.keys.FreenetURI toFreenetURI(FreenetURI u) { + try { + return new freenet.keys.FreenetURI(u.toString()); + } catch (MalformedURLException e) { + Logger.error(this, "Failed to create URI", e); + throw new RuntimeException("Failed to complete task: ", e); + } + } + public FreenetArchiver(NodeClientCore c, S rw, String mime, int size, short priority) { this(c, rw, rw, mime, size, priority); } @@ -159,7 +146,7 @@ public FreenetArchiver(NodeC if (task.meta instanceof FreenetURI) { u = (FreenetURI) task.meta; initialMetadata = null; - cacheKey = u.toString(false, true); + cacheKey = u.toString(); } else { initialMetadata = (byte[]) task.meta; u = FreenetURI.EMPTY_CHK_URI; @@ -210,7 +197,7 @@ public FreenetArchiver(NodeC if(initialMetadata != null) res = hlsc.fetchFromMetadata(new SimpleReadOnlyArrayBucket(initialMetadata)); else - res = hlsc.fetch(u); + res = hlsc.fetch(toFreenetURI(u)); ProgressParts prog_new; if (progress != null) { @@ -238,8 +225,12 @@ public FreenetArchiver(NodeC } catch (FetchException e) { if(e.mode == FetchExceptionMode.PERMANENT_REDIRECT && e.newURI != null) { - u = e.newURI; - continue; + try { + u = new FreenetURI(e.newURI.toString()); + continue; + } catch (MalformedURLException e1) { + System.out.println("Cannot convert " + e.newURI + "."); + } } System.out.println("FetchException:"); e.printStackTrace(); @@ -268,281 +259,12 @@ public FreenetArchiver(NodeC } } - /** - ** {@inheritDoc} - ** - ** This implementation produces metdata of type {@link FreenetURI}. 
- ** - ** If the input metadata is an insert URI (SSK or USK), it will be replaced - ** by its corresponding request URI. Otherwise, the data will be inserted - ** as a CHK. Note that since {@link FreenetURI} is immutable, the {@link - ** FreenetURI#suggestedEdition} of a USK is '''not''' automatically - ** incremented. - */ - /*@Override**/ public void pushLive(PushTask task, final SimpleProgress progress) throws TaskAbortException { - HighLevelSimpleClient hlsc = core.makeClient(priorityClass, false, false); - RandomAccessBucket tempB = null; OutputStream os = null; - - - try { - ClientPutter putter = null; - PushCallback cb = null; - try { - tempB = core.tempBucketFactory.makeBucket(expected_bytes, 2); - os = tempB.getOutputStream(); - writer.writeObject(task.data, os); - os.close(); os = null; - tempB.setReadOnly(); - - boolean insertAsMetadata; - FreenetURI target; - - if(task.meta instanceof FreenetURI) { - insertAsMetadata = false; - target = (FreenetURI) task.meta; - } else { - insertAsMetadata = true; - target = FreenetURI.EMPTY_CHK_URI; - } - InsertBlock ib = new InsertBlock(tempB, new ClientMetadata(default_mime), target); - long startTime = System.currentTimeMillis(); - - // bookkeeping. detects bugs in the SplitfileProgressEvent handler - ProgressParts prog_old = null; - if(progress != null) - prog_old = progress.getParts(); - - // FIXME make retry count configgable by client metadata somehow - // unlimited for push/merge - InsertContext ctx = hlsc.getInsertContext(false); - ctx.maxInsertRetries = -1; - // Early encode is normally a security risk. - // Hopefully it isn't here. - ctx.earlyEncode = true; - - String cacheKey = null; - -// if(!SEMI_ASYNC_PUSH) { -// // Actually report progress. -// if (progress != null) { -// hlsc.addEventHook(new SimpleProgressUpdater(progress)); -// } -// uri = hlsc.insert(ib, false, null, priorityClass, ctx); -// if (progress != null) -// progress.addPartKnown(0, true); -// } else { - // Do NOT report progress. 
Pretend we are done as soon as - // we have the URI. This allows us to minimise memory usage - // without yet splitting up IterableSerialiser.push() and - // doing it properly. FIXME - if(progress != null) - progress.addPartKnown(1, true); - cb = new PushCallback(progress, ib); - putter = new ClientPutter(cb, ib.getData(), FreenetURI.EMPTY_CHK_URI, ib.clientMetadata, - ctx, priorityClass, - false, null, false, core.clientContext, null, insertAsMetadata ? CHKBlock.DATA_LENGTH : -1); - cb.setPutter(putter); - long tStart = System.currentTimeMillis(); - try { - core.clientContext.start(putter); - } catch (PersistenceDisabledException e) { - // Impossible - } - WAIT_STATUS status = cb.waitFor(); - if(status == WAIT_STATUS.FAILED) { - cb.throwError(); - } else if(status == WAIT_STATUS.GENERATED_URI) { - FreenetURI uri = cb.getURI(); - task.meta = uri; - cacheKey = uri.toString(false, true); - System.out.println("Got URI for asynchronous insert: "+uri+" size "+tempB.size()+" in "+(System.currentTimeMillis() - cb.startTime)); - } else { - Bucket data = cb.getGeneratedMetadata(); - byte[] buf = BucketTools.toByteArray(data); - data.free(); - task.meta = buf; - cacheKey = Base64.encode(SHA256.digest(buf)); - System.out.println("Got generated metadata ("+buf.length+" bytes) for asynchronous insert size "+tempB.size()+" in "+(System.currentTimeMillis() - cb.startTime)); - } - if(progress != null) - progress.addPartDone(); -// } - - if(progress != null) { - ProgressParts prog_new = progress.getParts(); - if (prog_old.known - prog_old.done != prog_new.known - prog_new.done) { - Logger.error(this, "Inconsistency when tracking split file progress (pushing): "+prog_old.known+" of "+prog_old.done+" -> "+prog_new.known+" of "+prog_new.done); - System.err.println("Inconsistency when tracking split file progress (pushing): "+prog_old.known+" of "+prog_old.done+" -> "+prog_new.known+" of "+prog_new.done); - } - } - - task.data = null; - - if(cacheKey != null && cacheDir != null && 
cacheDir.exists() && cacheDir.canRead()) { - File cached = new File(cacheDir, cacheKey); - Bucket cachedBucket = new FileBucket(cached, false, false, false, false); - BucketTools.copy(tempB, cachedBucket); - } - - if(SEMI_ASYNC_PUSH) - tempB = null; // Don't free it here. - - } catch (InsertException e) { - if(cb != null) { - synchronized(this) { - if(semiAsyncPushes.remove(cb)) - totalBytesPushing -= cb.size(); - } - } - throw new TaskAbortException("Failed to insert content", e, true); - - } catch (IOException e) { - throw new TaskAbortException("Failed to write content to local tempbucket", e, true); - - } catch (RuntimeException e) { - throw new TaskAbortException("Failed to complete task: ", e); - - } - } catch (TaskAbortException e) { - if (progress != null) { progress.abort(e); } - throw e; - - } finally { - Closer.close(os); - Closer.close(tempB); - } - } - enum WAIT_STATUS { FAILED, GENERATED_URI, GENERATED_METADATA; } - - public class PushCallback implements ClientPutCallback { - - public final long startTime = System.currentTimeMillis(); - private ClientPutter putter; - private FreenetURI generatedURI; - private Bucket generatedMetadata; - private InsertException failed; - // See FIXME's in push(), IterableSerialiser. - // We don't do real progress, we pretend we're done when push() returns. 
-// private final SimpleProgress progress; - private final long size; - private final InsertBlock ib; - - public PushCallback(SimpleProgress progress, InsertBlock ib) { -// this.progress = progress; - this.ib = ib; - size = ib.getData().size(); - } - - public long size() { - return size; - } - - public synchronized void setPutter(ClientPutter put) { - putter = put; - synchronized(FreenetArchiver.this) { - if(semiAsyncPushes.add(this)) - totalBytesPushing += size; - System.out.println("Added insert of " + size + " bytes, now pushing: " + - semiAsyncPushes.size() + - " (" + SizeUtil.formatSize(totalBytesPushing) + ")."); - } - } - - public synchronized WAIT_STATUS waitFor() { - while(generatedURI == null && generatedMetadata == null && failed == null) { - try { - wait(); - } catch (InterruptedException e) { - // Ignore - } - } - if(failed != null) return WAIT_STATUS.FAILED; - if(generatedURI != null) return WAIT_STATUS.GENERATED_URI; - return WAIT_STATUS.GENERATED_METADATA; - } - - public synchronized void throwError() throws InsertException { - if(failed != null) throw failed; - } - - public synchronized FreenetURI getURI() { - return generatedURI; - } - - public synchronized Bucket getGeneratedMetadata() { - return generatedMetadata; - } - - @Override - public void onFailure(InsertException e, BaseClientPutter state) { - System.out.println("Failed background insert (" + generatedURI + "), now pushing: " + - semiAsyncPushes.size() + - " (" + SizeUtil.formatSize(totalBytesPushing) + ")."); - synchronized(this) { - failed = e; - notifyAll(); - } - synchronized(FreenetArchiver.this) { - if(semiAsyncPushes.remove(this)) - totalBytesPushing -= size; - pushesFailed.add(e); - FreenetArchiver.this.notifyAll(); - } - if(ib != null) - ib.free(); - } - - @Override - public void onFetchable(BaseClientPutter state) { - // Ignore - } - - @Override - public synchronized void onGeneratedURI(FreenetURI uri, BaseClientPutter state) { - generatedURI = uri; - notifyAll(); - } - - 
@Override - public void onSuccess(BaseClientPutter state) { - synchronized(FreenetArchiver.this) { - if(semiAsyncPushes.remove(this)) - totalBytesPushing -= size; - System.out.println("Completed background insert (" + generatedURI + ") in " + - (System.currentTimeMillis()-startTime) + "ms, now pushing: " + - semiAsyncPushes.size() + - " (" + SizeUtil.formatSize(totalBytesPushing) + ")."); - FreenetArchiver.this.notifyAll(); - } - if(ib != null) - ib.free(); - - } - - @Override - public synchronized void onGeneratedMetadata(Bucket metadata, - BaseClientPutter state) { - generatedMetadata = metadata; - notifyAll(); - } - - @Override - public void onResume(ClientContext context) throws ResumeFailedException { - // Ignore. - } - - @Override - public RequestClient getRequestClient() { - return Library.REQUEST_CLIENT; - } - - } @@ -599,24 +321,14 @@ public SimpleProgressUpdater(SimpleProgress prog) { } } - public void waitForAsyncInserts() throws TaskAbortException { - synchronized(this) { - while(true) { - if(!pushesFailed.isEmpty()) { - throw new TaskAbortException("Failed to insert content", pushesFailed.remove(0), true); - } - if(semiAsyncPushes.isEmpty()) { - System.out.println("Asynchronous inserts completed."); - return; // Completed all pushes. 
- } + @Override + public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, + SimpleProgress p) throws TaskAbortException { + throw new RuntimeException("Not implemented."); + } - try { - wait(); - } catch (InterruptedException e) { - // Ignore - } - } - } + @Override + public void waitForAsyncInserts() throws TaskAbortException { + throw new RuntimeException("Not implemented."); } - } diff --git a/src/plugins/Library/ui/ResultNodeGenerator.java b/src/plugins/Library/ui/ResultNodeGenerator.java index 8a358ed8..c5425250 100644 --- a/src/plugins/Library/ui/ResultNodeGenerator.java +++ b/src/plugins/Library/ui/ResultNodeGenerator.java @@ -117,7 +117,7 @@ private void parseResult(){ uskEdition = uri.getEdition(); } // Get the site base name, key + documentname - uskversion - sitebase = uri.setMetaString(null).setSuggestedEdition(0).toString().replaceFirst("/0", ""); + sitebase = uri.getRoot(); Logger.minor(this, sitebase); // Add site From 58f6141e1245bf43e8a3b10e80879687430f349f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 20 Aug 2017 14:28:06 +0200 Subject: [PATCH 132/180] Avoid creating USK for every new index entry. Only create if it has gone a certain amount of time since the last USK was created. 
--- .../library/uploader/DirectoryUploader.java | 33 +++++++++++++++++-- .../src/freenet/library/uploader/Merger.java | 2 +- 2 files changed, 32 insertions(+), 3 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index 6fbacb6e..1865bb55 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -11,11 +11,13 @@ import java.io.OutputStreamWriter; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; +import java.util.Date; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.SortedSet; import java.util.TreeSet; +import java.util.concurrent.TimeUnit; import freenet.library.Priority; import freenet.library.index.ProtoIndex; @@ -45,10 +47,12 @@ class DirectoryUploader implements Runnable { FcpConnection connection; File directory; + boolean forceCreateUSK; - DirectoryUploader(FcpConnection c, File d) { + DirectoryUploader(FcpConnection c, File d, boolean fcu) { connection = c; directory = d; + forceCreateUSK = fcu; } public void run() { @@ -96,6 +100,15 @@ public void run() { * demand. SCALABILITY */ static final int MAX_DISK_ENTRY_SIZE = 10000; + /** Time in hours without creating a new USK for the + * index. Creating the USK is in fact publishing the new version + * of the index. While not creating a new USK, the index could be + * updated with new CHKs several times without publishing. This is + * to avoid too many USKs created (saving time for the creation + * and for the clients). + */ + private static final int MAX_TIME_WITHOUT_NEW_USK = 8; + static final String DISK_DIR_PREFIX = "library-temp-index-"; /** Directory the current idxDisk is saved in. 
*/ File idxDiskDir; @@ -233,6 +246,20 @@ private static boolean removeAll(File wd) { return true; } + /** + * Create a new USK automatically if the old one is older than a + * specific time. + */ + private static boolean createUSK() { + File editionFile = new File(EDITION_FILENAME); + long fileChanged = editionFile.lastModified(); + if (new Date().getTime() - fileChanged > + TimeUnit.HOURS.toMillis(MAX_TIME_WITHOUT_NEW_USK)) { + return true; + } + return false; + } + private final Object inflateSync = new Object(); /** Merge from an on-disk index to an on-Freenet index. @@ -316,7 +343,9 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { } // Create the USK to redirect to the CHK at the top of the index. - uploadUSKForFreenetIndex(uri); + if (forceCreateUSK || createUSK()) { + uploadUSKForFreenetIndex(uri); + } } catch (TaskAbortException e) { throw new RuntimeException(e); diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index a1e05b33..3a7e6254 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -182,7 +182,7 @@ public boolean accept(File arg0, String arg1) { FactoryRegister.register(UploaderLibrary.getInstance()); File directoryToMerge = new File(directory, dirsToMerge[0]); - new DirectoryUploader(connection, directoryToMerge).run(); + new DirectoryUploader(connection, directoryToMerge, false).run(); System.out.println("Upload completed."); return; } From ba6a0776298d8feb371333b29a721ca28475f3c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 27 Aug 2017 08:44:34 +0200 Subject: [PATCH 133/180] Enable tests for uploader. 
--- build.xml | 32 +++++++++++++++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/build.xml b/build.xml index dbc7fb16..8442f30c 100644 --- a/build.xml +++ b/build.xml @@ -183,6 +183,18 @@ + + + + + + + + + + + @@ -193,7 +205,6 @@ - @@ -219,6 +230,25 @@ + + + + + + + + + + + + + + + + + + From c3734a01c6db75ef4a23f1ababde373afc954110 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 27 Aug 2017 09:51:19 +0200 Subject: [PATCH 134/180] Drop terms that are parts of keys and torrent references. --- .../src/freenet/library/index/TermEntry.java | 20 ++++++++++++++++ .../freenet/library/index/TermEntryTest.java | 24 +++++++++++++++++++ .../src/freenet/library/uploader/Merger.java | 4 ++++ 3 files changed, 48 insertions(+) diff --git a/shared/src/freenet/library/index/TermEntry.java b/shared/src/freenet/library/index/TermEntry.java index 0a9ef220..366e8bcd 100644 --- a/shared/src/freenet/library/index/TermEntry.java +++ b/shared/src/freenet/library/index/TermEntry.java @@ -80,6 +80,26 @@ public TermEntry(TermEntry t, float newRel) { return a.compareTo(b); } + /** + * Is this term a part of a key. + * + * Spider creates a lot of terms like this. It looks like whenever + * it finds a key in the text of a page it creates terms for the parts + * of the key. For now, we would like to avoid changing spider and + * remove them here instead. This also drops torrent references. + * + * @return true if likely part of a key. 
+ */ + public boolean toBeDropped() { + if (subj.matches(".*[0-9][^0-9][^0-9][^0-9]*[0-9].*")) { + return true; + } + if (subj.matches(".*[0-9][^0-9][^0-9]*[0-9][0-9]*[^0-9][^0-9]*[0-9].*")) { + return true; + } + return false; + } + /** ** {@inheritDoc} ** diff --git a/shared/test/freenet/library/index/TermEntryTest.java b/shared/test/freenet/library/index/TermEntryTest.java index eb3a2ed9..8ee0bdfe 100644 --- a/shared/test/freenet/library/index/TermEntryTest.java +++ b/shared/test/freenet/library/index/TermEntryTest.java @@ -111,4 +111,28 @@ public static void assertEqualButNotIdentical(Object a, Object b) { assertTrue(a + " and " + b + " not same hashCode.", a.hashCode() == b.hashCode()); } + private TermEntry TE(String s) { + return new TermEntry(s, 0) { + @Override + public boolean equalsTarget(TermEntry entry) { + return false; + } + + @Override + public EntryType entryType() { + return null; + } + }; + } + + public void testToBeDropped() { + assertFalse(TE("").toBeDropped()); + assertFalse(TE("1h1").toBeDropped()); + assertTrue(TE("1hh1").toBeDropped()); + assertFalse(TE("r2d2").toBeDropped()); + assertFalse(TE("c3po").toBeDropped()); + assertTrue(TE("a1b2c3d4e5").toBeDropped()); + assertFalse(TE("conventional").toBeDropped()); + assertTrue(TE("abcdef12345fedcba54321aabbee").toBeDropped()); + } } diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 3a7e6254..87a26bc2 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -355,6 +355,10 @@ public Iterator iterator() { Iterator iterator = teri.iterator(); while (iterator.hasNext()) { TermEntry tt = iterator.next(); + if (tt.toBeDropped()) { + System.out.println("Ignoring term " + tt); + continue; + } totalTerms ++; if (creatorPeeker.include(tt.subj)) { creator.putEntry(tt); From 02cb7546c761a96eab420a10d52e7642837d190f Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 3 Sep 2017 07:10:01 +0200 Subject: [PATCH 135/180] Report the current edition to Spider. --- src/plugins/Library/SpiderIndexURIs.java | 41 ++++++++++-------------- 1 file changed, 17 insertions(+), 24 deletions(-) diff --git a/src/plugins/Library/SpiderIndexURIs.java b/src/plugins/Library/SpiderIndexURIs.java index 16487ca0..82c9cab9 100644 --- a/src/plugins/Library/SpiderIndexURIs.java +++ b/src/plugins/Library/SpiderIndexURIs.java @@ -24,11 +24,6 @@ class SpiderIndexURIs { this.pr = pr; } - synchronized long setEdition(long newEdition) { - if(newEdition < edition) return edition; - else return edition = newEdition; - } - synchronized FreenetURI loadSSKURIs() { if(privURI == null) { File f = new File(SpiderIndexUploader.PRIV_URI_FILENAME); @@ -84,29 +79,12 @@ synchronized FreenetURI loadSSKURIs() { } finally { Closer.close(fos); } - try { - fis = new FileInputStream(new File(SpiderIndexUploader.EDITION_FILENAME)); - BufferedReader br = new BufferedReader(new InputStreamReader(fis, "UTF-8")); - try { - edition = Long.parseLong(br.readLine()); - } catch (NumberFormatException e) { - edition = 0; - } - System.out.println("Edition: "+edition); - fis.close(); - fis = null; - } catch (IOException e) { - // Ignore - edition = 0; - } finally { - Closer.close(fis); - } } return privURI; } synchronized FreenetURI getPrivateUSK() { - return loadSSKURIs().setKeyType("USK").setDocName(SpiderIndexUploader.INDEX_DOCNAME).setSuggestedEdition(edition); + return loadSSKURIs().setKeyType("USK").setDocName(SpiderIndexUploader.INDEX_DOCNAME).setSuggestedEdition(getLastUploadedEdition()); } /** Will return edition -1 if no successful uploads so far, otherwise the correct edition. */ @@ -116,7 +94,22 @@ synchronized FreenetURI getPublicUSK() { } private synchronized long getLastUploadedEdition() { - /** If none uploaded, return -1, otherwise return the last uploaded version. 
*/ + FileInputStream fis = null; + try { + fis = new FileInputStream(new File(SpiderIndexUploader.EDITION_FILENAME)); + BufferedReader br = new BufferedReader(new InputStreamReader(fis, "UTF-8")); + try { + edition = Long.parseLong(br.readLine()); + } catch (NumberFormatException e) { + Logger.error(this, "Failed to parse edition", e); + } + fis.close(); + fis = null; + } catch (IOException e) { + Logger.error(this, "Failed to read edition", e); + } finally { + Closer.close(fis); + } return edition; } From 1bcd91d4a11237ed8fd13991ae405f9c40f02c38 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 9 Sep 2017 16:23:55 +0200 Subject: [PATCH 136/180] Removed duplicate logging. --- .../src/freenet/library/uploader/DownloadAll.java | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 3f674cfa..35892483 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -621,10 +621,6 @@ private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro if (map2.containsKey("node_min") && map2.containsKey("size") && map2.containsKey("entries")) { - logger.log(Level.FINER, "Starts with entry for {1} (level {0}). 
Searching for subnodes.", new Object[] { - uriProcessor.getPage().level, - entry.getKey(), - }); String first = null; String last = null; for (Entry contents : map.entrySet()) { @@ -635,15 +631,6 @@ private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriPro last = contents.getKey(); Map map3 = (Map) contents.getValue(); if (map3.containsKey("subnodes")) { - if (logger.isLoggable(Level.FINER)) { - Map subnodes = - (Map) map3.get("subnodes"); - logger.log(Level.FINER, "Entry for {1} (level {0}) contains {2} subnodes.", new Object[] { - uriProcessor.getPage().level, - contents.getKey(), - subnodes.size(), - }); - } foundChildren += processSubnodes(map3, uriProcessor); } continue; From ae9dc1fe7b537b00186d5282372416f3ea0786f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 9 Sep 2017 16:42:04 +0200 Subject: [PATCH 137/180] Removed duplicate logging. --- uploader/src/freenet/library/uploader/DownloadAll.java | 1 - 1 file changed, 1 deletion(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 35892483..15d0f966 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -913,7 +913,6 @@ public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { } uploadStarter.execute(new Runnable() { public void run() { - logger.fine("Ressurrecting " + freenetURI.toString()); uploadCounter++; final String identifier = "Upload" + uploadCounter; synchronized (ongoingUploads) { From 6acc6b6066461ca2ace36e8569206075d770834c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 15 Oct 2017 18:24:44 +0200 Subject: [PATCH 138/180] Queue pages to be uploaded. This means that the downloads can be done quicker. 
--- .../freenet/library/uploader/DownloadAll.java | 41 +++++++++++++++---- 1 file changed, 32 insertions(+), 9 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 15d0f966..b3d3ae61 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -64,6 +64,7 @@ */ public class DownloadAll { private static final int PARALLEL_JOBS = 10; + private static final int PARALLEL_UPLOADS = 1; /** Logger. */ private static final Logger logger = Logger.getLogger(DownloadAll.class.getName()); @@ -92,6 +93,7 @@ public class DownloadAll { private int recreated = 0; private int failedRecreated = 0; private int avoidFetching = 0; + private int avoidRecreate = 0; private int wrongChkCounterForUpload = 0; private int maxObjectQueueSize = 0; @@ -790,7 +792,7 @@ public void receivedGetFailed(FcpConnection c, GetFailed gf) { markDone(); failed ++; showProgress(); - upload(page.getURI(), new Runnable() { + upload(page, new Runnable() { public void run() { objectQueue.offer(page); recreated ++; @@ -801,18 +803,18 @@ public void run() { /** * We have detected that we cannot download a certain CHK. * - * If we are running on a host where this CHK is actually cached, - * lets upload it from the cache in an attempt to repair the index. + * If this CHK is actually cached, lets upload it from + * the cache in an attempt to repair the index. * - * @param freenetURI of the file to upload. + * @param page the URI to upload. * @param callback when the file is successfully uploaded. 
*/ - public boolean upload(final FreenetURI freenetURI, final Runnable callback) { + public boolean upload(final FetchedPage page, final Runnable callback) { final File dir = new File(".", UploaderPaths.LIBRARY_CACHE); if (!dir.canRead()) { return false; } - final File file = new File(dir, freenetURI.toString()); + final File file = new File(dir, page.getURI().toString()); if (!file.canRead()) { logger.warning("Cannot find " + file + " in the cache."); return false; @@ -880,6 +882,7 @@ public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) } synchronized (ongoingUploads) { ongoingUploads.remove(identifier); + ongoingUploads.notifyAll(); } synchronized (stillRunning) { stillRunning.notifyAll(); @@ -901,6 +904,7 @@ public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { failedRecreated++; synchronized (ongoingUploads) { ongoingUploads.remove(identifier); + ongoingUploads.notifyAll(); } synchronized (stillRunning) { stillRunning.notifyAll(); @@ -913,10 +917,15 @@ public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { } uploadStarter.execute(new Runnable() { public void run() { + if (!page.hasParent()) { + avoidRecreate++; + return; + } uploadCounter++; final String identifier = "Upload" + uploadCounter; synchronized (ongoingUploads) { - ongoingUploads.put(identifier, new OngoingUpload(freenetURI, callback)); + ongoingUploads.put(identifier, new OngoingUpload(page.getURI(), callback)); + ongoingUploads.notifyAll(); } final ClientPut putter = new ClientPut("CHK@", identifier); putter.setEarlyEncode(true); @@ -935,6 +944,18 @@ public void run() { e.printStackTrace(); logger.warning("Upload failed for " + file); } + while (true) { + synchronized (ongoingUploads) { + if (ongoingUploads.size() < PARALLEL_UPLOADS) { + break; + } + try { + ongoingUploads.wait(TimeUnit.SECONDS.toMillis(3)); + } catch (InterruptedException e) { + throw new RuntimeException("Waiting for upload slot terminated."); + } + } + } } }); return 
true; @@ -1125,6 +1146,9 @@ private void showProgress() { if (failedRecreated > 0) { recreatedMessage += " Recreation failed: " + failedRecreated; } + if (avoidRecreate > 0) { + recreatedMessage += " Recreation avoided: " + avoidRecreate; + } String urisSeenMessage = ""; if (uriUrisSeen > 0 || stringUrisSeen > 0) { urisSeenMessage = " StringUrisSeen: " + stringUrisSeen + "/" + (uriUrisSeen + stringUrisSeen); @@ -1296,9 +1320,8 @@ public void waitForSlot() { for (int i = 0; i < ongoingUploadsSize(); i++) { stillRunning.wait(1 + TimeUnit.SECONDS.toMillis(ongoingUploadsSize())); } - stillRunning.wait(1 + TimeUnit.SECONDS.toMillis(stillRunning.size())); while (stillRunning.size() + ongoingUploadsSize() * ongoingUploadsSize() >= PARALLEL_JOBS) { - stillRunning.wait(); + stillRunning.wait(1 + TimeUnit.MINUTES.toMillis(2)); } } catch (InterruptedException e) { e.printStackTrace(); From c6b13fb44207195545dc70cd0f2df652745b1549 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 11 Nov 2017 12:26:05 +0100 Subject: [PATCH 139/180] Handle the case when there are no filtered or processed files. 
--- uploader/src/freenet/library/uploader/Merger.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index 87a26bc2..a2758b0e 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -378,7 +378,8 @@ public Iterator iterator() { } if (found) { continue; - } else if (writers.size() < 10 * (filteredFilesToMerge.length + processedFilesToMerge.length)) { + } else if (writers.size() < 10 || + writers.size() < 10 * (filteredFilesToMerge.length + processedFilesToMerge.length)) { lastSelected ++; String selectedFilename = SELECTED + lastSelected; IndexPeeker p = new IndexPeeker(directory); From 6688518034244f40e066b57ec06014fb9b3dca1b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Wed, 15 Nov 2017 23:52:41 +0100 Subject: [PATCH 140/180] Tuned the upload speed to work with less intensity. Both the creation of uploads and the creation of USKs are slowed. 
--- .../src/freenet/library/uploader/DirectoryUploader.java | 8 +++++--- uploader/src/freenet/library/uploader/FcpArchiver.java | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index 1865bb55..436f9cba 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -10,11 +10,13 @@ import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.UnsupportedEncodingException; +import java.lang.Math; import java.net.MalformedURLException; import java.util.Date; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; +import java.util.Random; import java.util.SortedSet; import java.util.TreeSet; import java.util.concurrent.TimeUnit; @@ -100,14 +102,14 @@ public void run() { * demand. SCALABILITY */ static final int MAX_DISK_ENTRY_SIZE = 10000; - /** Time in hours without creating a new USK for the + /** Max time without creating a new USK for the * index. Creating the USK is in fact publishing the new version * of the index. While not creating a new USK, the index could be * updated with new CHKs several times without publishing. This is * to avoid too many USKs created (saving time for the creation * and for the clients). */ - private static final int MAX_TIME_WITHOUT_NEW_USK = 8; + private static final int MAX_DAYS_WITHOUT_NEW_USK = 8; static final String DISK_DIR_PREFIX = "library-temp-index-"; /** Directory the current idxDisk is saved in. 
*/ @@ -254,7 +256,7 @@ private static boolean createUSK() { File editionFile = new File(EDITION_FILENAME); long fileChanged = editionFile.lastModified(); if (new Date().getTime() - fileChanged > - TimeUnit.HOURS.toMillis(MAX_TIME_WITHOUT_NEW_USK)) { + Math.abs(new Random().nextLong()) % TimeUnit.DAYS.toMillis(MAX_DAYS_WITHOUT_NEW_USK)) { return true; } return false; diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index c5ea31ba..f907e4fd 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -296,7 +296,7 @@ public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, stillRunningSize = stillRunning.size(); } final int uploading = totalBlocksStillUploading + stillRunningSize; - Thread.sleep(1 + uploading * uploading); + Thread.sleep(1 + 10 * uploading * uploading); } catch (InterruptedException e1) { throw new RuntimeException("Unexpected interrupt"); } From 84fdc1e09e6e0f7c4c42ee5e687e376472a0b48f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Tue, 21 Nov 2017 20:33:30 +0100 Subject: [PATCH 141/180] Simplified the adjustments of the download speed. --- .../src/freenet/library/uploader/DownloadAll.java | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index b3d3ae61..1d26e918 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -64,7 +64,7 @@ */ public class DownloadAll { private static final int PARALLEL_JOBS = 10; - private static final int PARALLEL_UPLOADS = 1; + private static final int PARALLEL_UPLOADS = 3; /** Logger. 
*/ private static final Logger logger = Logger.getLogger(DownloadAll.class.getName()); @@ -93,6 +93,7 @@ public class DownloadAll { private int recreated = 0; private int failedRecreated = 0; private int avoidFetching = 0; + private int uploadsStarted = 0; private int avoidRecreate = 0; private int wrongChkCounterForUpload = 0; private int maxObjectQueueSize = 0; @@ -915,6 +916,7 @@ public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { }); ongoingUploads = new HashMap(); } + uploadsStarted++; uploadStarter.execute(new Runnable() { public void run() { if (!page.hasParent()) { @@ -1003,6 +1005,10 @@ boolean isDone() { } }; + private int uploadsWaiting() { + return uploadsStarted - uploadCounter - avoidRecreate; + } + private void ageRunning() { final HashSet> stillRunningCopy; synchronized (stillRunning) { @@ -1316,11 +1322,10 @@ public void waitForSlot() { startCleanupThread(); synchronized (stillRunning) { try { - stillRunning.wait(TimeUnit.SECONDS.toMillis(1)); - for (int i = 0; i < ongoingUploadsSize(); i++) { - stillRunning.wait(1 + TimeUnit.SECONDS.toMillis(ongoingUploadsSize())); + for (int i = 0; i < uploadsWaiting() + ongoingUploadsSize() + stillRunning.size(); i++) { + stillRunning.wait(TimeUnit.SECONDS.toMillis(1 + uploadsWaiting() + uploadsWaiting())); } - while (stillRunning.size() + ongoingUploadsSize() * ongoingUploadsSize() >= PARALLEL_JOBS) { + while (stillRunning.size() >= PARALLEL_JOBS) { stillRunning.wait(1 + TimeUnit.MINUTES.toMillis(2)); } } catch (InterruptedException e) { From b65c04af79b521a007de5890a37a820f2605e14a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 25 Nov 2017 11:56:48 +0100 Subject: [PATCH 142/180] Made the distribution of new USK versions more even. 
--- .../library/uploader/DirectoryUploader.java | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index 436f9cba..f55a9d6d 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -253,13 +253,14 @@ private static boolean removeAll(File wd) { * specific time. */ private static boolean createUSK() { - File editionFile = new File(EDITION_FILENAME); - long fileChanged = editionFile.lastModified(); - if (new Date().getTime() - fileChanged > - Math.abs(new Random().nextLong()) % TimeUnit.DAYS.toMillis(MAX_DAYS_WITHOUT_NEW_USK)) { - return true; - } - return false; + File editionFile = new File(EDITION_FILENAME); + long fileChanged = editionFile.lastModified(); + if (Long.valueOf(new Date().getTime() - fileChanged).doubleValue() / + Long.valueOf(TimeUnit.DAYS.toMillis(MAX_DAYS_WITHOUT_NEW_USK)).doubleValue() + > new Random(fileChanged).nextDouble()) { + return true; + } + return false; } private final Object inflateSync = new Object(); From a4d29148bfa8fabab6e4bcee2a34b7fefd1d4a61 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Mon, 18 Dec 2017 08:22:58 +0100 Subject: [PATCH 143/180] Refactored the DownloadAll to prepare for a one-shot. 
--- .../library/uploader/AdHocDataReader.java | 184 +++ .../freenet/library/uploader/DownloadAll.java | 1353 +---------------- .../uploader/DownloadAllPerpetually.java | 1249 +++++++++++++++ 3 files changed, 1435 insertions(+), 1351 deletions(-) create mode 100644 uploader/src/freenet/library/uploader/AdHocDataReader.java create mode 100644 uploader/src/freenet/library/uploader/DownloadAllPerpetually.java diff --git a/uploader/src/freenet/library/uploader/AdHocDataReader.java b/uploader/src/freenet/library/uploader/AdHocDataReader.java new file mode 100644 index 00000000..ec1f61db --- /dev/null +++ b/uploader/src/freenet/library/uploader/AdHocDataReader.java @@ -0,0 +1,184 @@ +package freenet.library.uploader; + +import java.io.IOException; +import java.io.InputStream; +import java.net.MalformedURLException; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.logging.Level; +import java.util.logging.Logger; + +import freenet.library.io.FreenetURI; +import freenet.library.io.YamlReaderWriter; +import freenet.library.io.serial.Packer; +import freenet.library.io.serial.Packer.BinInfo; + + +class AdHocDataReader { + /** Logger. */ + private static final Logger logger = Logger.getLogger(AdHocDataReader.class.getName()); + + interface UriProcessor { + public FreenetURI getURI(); + public int getLevel(); + public boolean processUri(FreenetURI uri); + public void childrenSeen(int level, int foundChildren); + public void uriSeen(); + public void stringSeen(); + } + + /** + * Convert an object from the yaml to a FreenetURI. + * + * The object can be a FreenetURI already (new style) or a string. 
+ * + * @param obj + * @return a FreenetURI + * @throws MalformedURLException + */ + private FreenetURI getFreenetURI(Object obj, UriProcessor uriProcessor) throws MalformedURLException { + FreenetURI u; + if (obj instanceof FreenetURI) { + u = (FreenetURI) obj; + uriProcessor.uriSeen(); + } else { + u = new FreenetURI((String) obj); + uriProcessor.stringSeen(); + } + return u; + } + + + + private int processBinInfoValues(Map entries, UriProcessor uriProcessor) + throws MalformedURLException { + int foundChildren = 0; + for (BinInfo value : entries.values()) { + try { + if (uriProcessor.processUri(getFreenetURI(value.getID(), uriProcessor))) { + foundChildren ++; + } + + } catch (ClassCastException e) { + throw new RuntimeException("Cannot process BinInfo value " + value.getID() + " for " + uriProcessor.getURI(), e); + } + } + return foundChildren; + } + + private int processSubnodes(Map map, UriProcessor uriProcessor) + throws MalformedURLException { + int foundChildren = 0; + Map subnodes = + (Map) map.get("subnodes"); + for (Object key : subnodes.keySet()) { + if (uriProcessor.processUri(getFreenetURI(key, uriProcessor))) { + foundChildren ++; + } + } + return foundChildren; + } + + void readAndProcessYamlData(InputStream inputStream, UriProcessor uriProcessor, int page_level) + throws IOException { + int foundChildren = 0; + try { + Object readObject = new YamlReaderWriter().readObject(inputStream); + Map map = ((LinkedHashMap) readObject); + if (map.containsKey("ttab") && + map.containsKey("utab") && + map.containsKey("totalPages")) { + Map map2 = (Map) map.get("ttab"); + if (map2.containsKey("entries")) { + Map entries = + (Map) map2.get("entries"); + foundChildren += processBinInfoValues(entries, uriProcessor); + if (logger.isLoggable(Level.FINER)) { + Map subnodes = + (Map) map2.get("subnodes"); + logger.log(Level.FINER, "Contains ttab.entries (level {0}) with {1} subnodes", new Object[] { + uriProcessor.getLevel(), + subnodes.size(), + }); + } + 
foundChildren += processSubnodes(map2, uriProcessor); + return; + } + } + if (map.containsKey("lkey") && + map.containsKey("rkey") && + map.containsKey("entries")) { + // Must separate map and array! + if (map.containsKey("subnodes")) { + throw new RuntimeException("This parsing is not complex enough to handle subnodes for terms for " + + uriProcessor.getURI()); + } + if (map.get("entries") instanceof Map) { + Map entries = + (Map) map.get("entries"); + logger.log(Level.FINE, + "Contains from {1} to {2} (level {0}) with {3} entries.", + new Object[] { + uriProcessor.getLevel(), + map.get("lkey"), + map.get("rkey"), + entries.size() + }); + foundChildren += processBinInfoValues(entries, uriProcessor); + return; + } + if (map.get("entries") instanceof ArrayList) { + // Assuming this is a list of TermPageEntries. + logger.log(Level.FINE, + "Contains from {1} to {2} (level {0}) with page entries.", + new Object[] { + uriProcessor.getLevel(), + map.get("lkey"), + map.get("rkey") + }); + return; + } + } + Entry entry = map.entrySet().iterator().next(); + if (entry.getValue() instanceof Map) { + Map map2 = (Map) entry.getValue(); + if (map2.containsKey("node_min") + && map2.containsKey("size") + && map2.containsKey("entries")) { + String first = null; + String last = null; + for (Entry contents : map.entrySet()) { + if (contents.getValue() instanceof Map) { + if (first == null) { + first = contents.getKey(); + } + last = contents.getKey(); + Map map3 = (Map) contents.getValue(); + if (map3.containsKey("subnodes")) { + foundChildren += processSubnodes(map3, uriProcessor); + } + continue; + } + throw new RuntimeException("Cannot process entries. 
Entry for " + contents.getKey() + " is not String=Map for " + + uriProcessor.getURI()); + } + logger.log(Level.FINER, "Starts with entry for {1} and ended with entry {2} (level {0}).", new Object[] { + uriProcessor.getLevel(), + first, + last, + }); + return; + } + } + logger.severe("Cannot understand contents: " + map); + System.exit(1); + } finally { + uriProcessor.childrenSeen(page_level, foundChildren); + logger.exiting(AdHocDataReader.class.toString(), + "receivedAllData added " + foundChildren + " to the queue."); + } + + } +} diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 1d26e918..8a14e23c 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -11,1378 +11,29 @@ package freenet.library.uploader; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; import java.net.MalformedURLException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.Formatter; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Random; -import java.util.Set; -import java.util.WeakHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.TimeUnit; -import java.util.logging.Level; -import java.util.logging.Logger; -import net.pterodactylus.fcp.AllData; -import net.pterodactylus.fcp.ClientGet; -import net.pterodactylus.fcp.ClientPut; -import net.pterodactylus.fcp.FcpAdapter; -import net.pterodactylus.fcp.FcpConnection; -import 
net.pterodactylus.fcp.GetFailed; -import net.pterodactylus.fcp.Priority; -import net.pterodactylus.fcp.PutFailed; -import net.pterodactylus.fcp.PutSuccessful; -import net.pterodactylus.fcp.SubscribeUSK; -import net.pterodactylus.fcp.SubscribedUSKUpdate; -import net.pterodactylus.fcp.URIGenerated; -import net.pterodactylus.fcp.Verbosity; import freenet.library.io.FreenetURI; -import freenet.library.io.YamlReaderWriter; -import freenet.library.io.serial.Packer; -import freenet.library.io.serial.Packer.BinInfo; /** * Class to download the entire index. */ public class DownloadAll { - private static final int PARALLEL_JOBS = 10; - private static final int PARALLEL_UPLOADS = 3; - - /** Logger. */ - private static final Logger logger = Logger.getLogger(DownloadAll.class.getName()); - - public final Map stillRunning = new HashMap(); - private FreenetURI uri; - private FreenetURI newUri; - private int edition; - private FcpConnection connection; - private static int getterCounter = 0; - private static int uploadCounter = 0; - private LinkedBlockingQueue objectQueue = - new LinkedBlockingQueue(); - private Thread cleanupThread; - private List roots = new ArrayList(); - - private ExecutorService uploadStarter = null; - private Map ongoingUploads = null; - - private int successful = 0; - private int successfulBlocks = 0; - private long successfulBytes = 0; - private int failed = 0; - private long uriUrisSeen = 0; - private long stringUrisSeen = 0; - private int recreated = 0; - private int failedRecreated = 0; - private int avoidFetching = 0; - private int uploadsStarted = 0; - private int avoidRecreate = 0; - private int wrongChkCounterForUpload = 0; - private int maxObjectQueueSize = 0; - - private Random rand = new Random(); - private Date started = new Date(); - - public DownloadAll(FreenetURI u) { - uri = u; - } - - public static class WeakHashSet - implements Set { - /** - * We just use the keys and let all values be TOKEN. 
- */ - private Map map = new WeakHashMap(); - private static Object TOKEN = new Object(); - - @Override - public boolean add(T arg0) { - if (map.containsKey(arg0)) { - return false; - } else { - map.put(arg0, TOKEN); - return true; - } - } - - @Override - public boolean addAll(Collection arg0) { - boolean retval = false; - for (T ele : arg0) { - if (add(ele)) { - retval = true; - } - } - return retval; - } - - @Override - public void clear() { - map.clear(); - } - - @Override - public boolean contains(Object arg0) { - return map.containsKey(arg0); - } - - @Override - public boolean containsAll(Collection arg0) { - for (Object ele : arg0) { - if (!contains(ele)) { - return false; - } - } - return true; - } - - @Override - public boolean isEmpty() { - return map.isEmpty(); - } - - @Override - public Iterator iterator() { - return map.keySet().iterator(); - } - - @Override - public boolean remove(Object arg0) { - return map.remove(arg0) != null; - } - - @Override - public boolean removeAll(Collection arg0) { - boolean retval = true; - for (Object ele : arg0) { - if (!remove(ele)) { - retval = false; - } - } - return retval; - } - - @Override - public boolean retainAll(Collection arg0) { - boolean retval = false; - for (T ele : map.keySet()) { - if (!arg0.contains(ele)) { - if (map.remove(ele) != null) { - retval = true; - } - } - } - return retval; - } - - @Override - public int size() { - return map.size(); - } - - @Override - public Object[] toArray() { - return map.keySet().toArray(); - } - - @Override - public T[] toArray(T[] arg0) { - return map.keySet().toArray(arg0); - } - - } - - /** - * A class to keep track of what pages are fetched and how they are related - * to other fetched pages. The purpose of this is to avoid fetching stuff - * related only to "old" editions. - */ - private static class FetchedPage { - /** - * This is really a Set but there is no WeakSet so we use the keys - * and let all values be TOKEN. 
- */ - private Set parents = Collections.synchronizedSet(new WeakHashSet()); - private Set children = Collections.synchronizedSet(new HashSet()); - - private FreenetURI uri; - int level; - private boolean succeeded; - private boolean failed; - - FetchedPage(FreenetURI u) { - this(u, 0); - } - - FetchedPage(FreenetURI u, int l) { - uri = u; - level = l; - } - - void addParent(FetchedPage fp) { - parents.add(fp); - } - - void addChild(FetchedPage fp) { - children.add(fp); - } - - FetchedPage newChild(FreenetURI u) { - FetchedPage child = new FetchedPage(u, level + 1); - child.addParent(this); - addChild(child); - return child; - } - - FreenetURI getURI() { - return uri; - } - - boolean hasParent() { - return !parents.isEmpty(); - } - - private FetchedPage[] getParents() { - // Even though parents and children are synchronized we - // encountered some ConcurrentModificationException when - // fetching them through iterators so we avoid that. - return parents.toArray(new FetchedPage[0]); - } - - private FetchedPage[] getChildren() { - return children.toArray(new FetchedPage[0]); - } - - /** - * fetchedPage is an ancestor, any number of levels, to this - * page. - * - * @param fetchedPage the ancestor to search for. - * @return - */ - public boolean hasParent(FetchedPage fetchedPage) { - if (parents.contains(fetchedPage)) { - return true; - } - for (FetchedPage parent : getParents()) { - if (parent != null && parent.hasParent(fetchedPage)) { - return true; - } - } - return false; - } - - int getTreeSize() { - int size = 1; - for (FetchedPage child : getChildren()) { - size += child.getTreeSize(); - } - return size; - } - - void addPerLevel(Map result) { - if (!result.containsKey(level)) { - result.put(level, 0); - } - if (!succeeded && !failed) { - result.put(level, result.get(level) + 1); - } - for (FetchedPage child : children) { - child.addPerLevel(result); - } - } - - int getTreeSizeSucceeded() { - int size = succeeded ? 
1 : 0; - for (FetchedPage child : getChildren()) { - size += child.getTreeSizeSucceeded(); - } - return size; - } - - int getTreeSizeFailed() { - int size = failed ? 1 : 0; - for (FetchedPage child : getChildren()) { - size += child.getTreeSizeFailed(); - } - return size; - } - - void didFail() { - failed = true; - } - - void didSucceed() { - failed = false; - succeeded = true; - } - - public FetchedPage findUri(FreenetURI u) { - if (u.equals(uri)) { - return this; - } - for (FetchedPage child : getChildren()) { - FetchedPage found = child.findUri(u); - if (found != null) { - return found; - } - } - return null; - } - } - - private class USKUpdateAdapter extends FcpAdapter { - - private boolean updated = false; - private Object subscriber; - - public USKUpdateAdapter(Object s) { - subscriber = s; - } - - @Override - public void receivedSubscribedUSKUpdate(FcpConnection fcpConnection, SubscribedUSKUpdate subscribedUSKUpdate) { - assert fcpConnection == connection; - if (subscribedUSKUpdate.isNewKnownGood() && - subscribedUSKUpdate.getEdition() > edition) { - updated = true; - try { - newUri = new FreenetURI(subscribedUSKUpdate.getURI()); - } catch (MalformedURLException e) { - throw new RuntimeException(e); - } - edition = subscribedUSKUpdate.getEdition(); - synchronized (subscriber) { - subscriber.notify(); - } - } - } - - public void restart() { - if (updated) { - updated = false; - logger.info("Found: " + newUri + " Edition: " + edition); - FetchedPage rootPage = new FetchedPage(newUri); - synchronized (roots) { - roots.add(rootPage); - } - new GetAdapter(rootPage.newChild(newUri)); - } - } - } - - - class StatisticsAccumulator { - private int count = 0; - private int sum = 0; - - void addSample(int found) { - count++; - sum += found; - } - - double getMean() { - return 1.0 * sum / count; - } - - public String toString() { - return "" + getMean() + " (" + count + ")"; - } - } - - private Map statistics = new HashMap(); - private void addFoundChildren(int level, 
int foundChildren) { - if (!statistics.containsKey(level)) { - statistics.put(level, new StatisticsAccumulator()); - } - statistics.get(level).addSample(foundChildren); - } - - private double getEstimatedPagesLeft(FetchedPage page) { - double estimate = 0.0; - double extra = 0.0; - Map pagesPerLevel = new HashMap(); - page.addPerLevel(pagesPerLevel); - for (int level = 1; pagesPerLevel.containsKey(level); level++) { - if (!statistics.containsKey(level)) { - return Double.POSITIVE_INFINITY; - } - extra += pagesPerLevel.get(level); - estimate += extra; - extra = extra * statistics.get(level).getMean(); - } - return estimate; - } - - - private static class OngoingUpload { - private final Date started = new Date(); - private final FreenetURI freenetURI; - private final Runnable callback; - - public OngoingUpload(FreenetURI fname, Runnable cback) { - freenetURI = fname; - callback = cback; - } - - Date getStarted() { - return started; - } - - FreenetURI getKey() { - return freenetURI; - } - - void complete() { - final long millis = new Date().getTime() - started.getTime(); - final long seconds = millis / 1000; - final long minutes = seconds / 60; - final long hours = minutes / 60; - logger.log(Level.FINE, "Upload completed after {0,number}:{1,number,00}:{2,number,00}.", - new Object[] { - hours, - minutes % 60, - seconds % 60, - }); - callback.run(); - } - } - - /** - * Show the amount of outstanding work. 
- */ - void printLeft() { - if (logger.isLoggable(Level.FINEST)) { - int total = 0; - int required = 0; - int completed = 0; - synchronized (stillRunning) { - for (GetAdapter value : stillRunning.values()) { - total += value.progressTotal; - required += value.progressRequired; - completed += value.progressCompleted; - } - String ongoingUploadsMessage = ""; - if (logger.isLoggable(Level.FINEST) && ongoingUploadsSize() > 0) { - Date oldest = null; - synchronized (ongoingUploads) { - for (Map.Entry entry : ongoingUploads.entrySet()) { - if (oldest == null || oldest.compareTo(entry.getValue().getStarted()) > 0) { - oldest = entry.getValue().getStarted(); - } - } - } - ongoingUploadsMessage = " and " + ongoingUploadsSize() + " uploads"; - if (oldest != null && new Date().getTime() - oldest.getTime() > TimeUnit.HOURS.toMillis(5)) { - ongoingUploadsMessage += new MessageFormat(", oldest from {0,date,long}").format(new Object[] { oldest }); - } - ongoingUploadsMessage += "."; - } - logger.finest("Outstanding " + stillRunning.size() + " ClientGet jobs " + - "(" + completed + "/" + required + "/" + total + ")" + - ongoingUploadsMessage); - } - } - } - - /** - * Convert an object from the yaml to a FreenetURI. - * - * The object can be a FreenetURI already (new style) or a string. 
- * - * @param obj - * @return a FreenetURI - * @throws MalformedURLException - */ - private FreenetURI getFreenetURI(Object obj) throws MalformedURLException { - FreenetURI u; - if (obj instanceof FreenetURI) { - u = (FreenetURI) obj; - uriUrisSeen ++; - } else { - u = new FreenetURI((String) obj); - stringUrisSeen ++; - } - return u; - } - - interface UriProcessor { - public FetchedPage getPage(); - public boolean processUri(FreenetURI uri); - } - - private int processBinInfoValues(Map entries, UriProcessor uriProcessor) - throws MalformedURLException { - int foundChildren = 0; - for (BinInfo value : entries.values()) { - try { - if (uriProcessor.processUri(getFreenetURI(value.getID()))) { - foundChildren ++; - } - - } catch (ClassCastException e) { - throw new RuntimeException("Cannot process BinInfo value " + value.getID() + " for " + uriProcessor.getPage().getURI(), e); - } - } - return foundChildren; - } - - private int processSubnodes(Map map, UriProcessor uriProcessor) - throws MalformedURLException { - int foundChildren = 0; - Map subnodes = - (Map) map.get("subnodes"); - for (Object key : subnodes.keySet()) { - if (uriProcessor.processUri(getFreenetURI(key))) { - foundChildren ++; - } - } - return foundChildren; - } - - private void readAndProcessYamlData(InputStream inputStream, UriProcessor uriProcessor, int page_level) - throws IOException { - int foundChildren = 0; - try { - Object readObject = new YamlReaderWriter().readObject(inputStream); - Map map = ((LinkedHashMap) readObject); - if (map.containsKey("ttab") && - map.containsKey("utab") && - map.containsKey("totalPages")) { - Map map2 = (Map) map.get("ttab"); - if (map2.containsKey("entries")) { - Map entries = - (Map) map2.get("entries"); - foundChildren += processBinInfoValues(entries, uriProcessor); - if (logger.isLoggable(Level.FINER)) { - Map subnodes = - (Map) map2.get("subnodes"); - logger.log(Level.FINER, "Contains ttab.entries (level {0}) with {1} subnodes", new Object[] { - 
uriProcessor.getPage().level, - subnodes.size(), - }); - } - foundChildren += processSubnodes(map2, uriProcessor); - return; - } - } - if (map.containsKey("lkey") && - map.containsKey("rkey") && - map.containsKey("entries")) { - // Must separate map and array! - if (map.containsKey("subnodes")) { - throw new RuntimeException("This parsing is not complex enough to handle subnodes for terms for " + - uriProcessor.getPage().getURI()); - } - if (map.get("entries") instanceof Map) { - Map entries = - (Map) map.get("entries"); - logger.log(Level.FINE, - "Contains from {1} to {2} (level {0}) with {3} entries.", - new Object[] { - uriProcessor.getPage().level, - map.get("lkey"), - map.get("rkey"), - entries.size() - }); - foundChildren += processBinInfoValues(entries, uriProcessor); - return; - } - if (map.get("entries") instanceof ArrayList) { - // Assuming this is a list of TermPageEntries. - logger.log(Level.FINE, - "Contains from {1} to {2} (level {0}) with page entries.", - new Object[] { - uriProcessor.getPage().level, - map.get("lkey"), - map.get("rkey") - }); - return; - } - } - Entry entry = map.entrySet().iterator().next(); - if (entry.getValue() instanceof Map) { - Map map2 = (Map) entry.getValue(); - if (map2.containsKey("node_min") - && map2.containsKey("size") - && map2.containsKey("entries")) { - String first = null; - String last = null; - for (Entry contents : map.entrySet()) { - if (contents.getValue() instanceof Map) { - if (first == null) { - first = contents.getKey(); - } - last = contents.getKey(); - Map map3 = (Map) contents.getValue(); - if (map3.containsKey("subnodes")) { - foundChildren += processSubnodes(map3, uriProcessor); - } - continue; - } - throw new RuntimeException("Cannot process entries. 
Entry for " + contents.getKey() + " is not String=Map for " + - uriProcessor.getPage().getURI()); - } - logger.log(Level.FINER, "Starts with entry for {1} and ended with entry {2} (level {0}).", new Object[] { - uriProcessor.getPage().level, - first, - last, - }); - return; - } - } - logger.severe("Cannot understand contents: " + map); - System.exit(1); - } finally { - addFoundChildren(page_level, foundChildren); - logger.exiting(GetAdapter.class.toString(), - "receivedAllData added " + foundChildren + " to the queue."); - } - - } - - private class GetAdapter extends FcpAdapter { - private ClientGet getter; - private String token; - private FetchedPage page; - private int progressTotal; - private int progressRequired; - private int progressCompleted; - private boolean done; - int waitingLaps; - public static final int WAITING_FACTOR = 50; - - public GetAdapter(FetchedPage u) { - page = u; - getterCounter ++; - token = "Getter" + getterCounter; - waitingLaps = 0; - getter = new ClientGet(page.getURI().toString(), token); - getter.setPriority(Priority.prefetch); - getter.setVerbosity(Verbosity.ALL); - - waitForSlot(); - connection.addFcpListener(this); - try { - connection.sendMessage(getter); - } catch (IOException e) { - e.printStackTrace(); - System.exit(1); - } - synchronized (stillRunning) { - stillRunning.put(page, this); - stillRunning.notifyAll(); - } - } - - /** - * Called when nothing has happened for a while with this request. - * @param key The page. 
- */ - public void hasBeenWaiting(FetchedPage key) { - waitingLaps++; - if (waitingLaps > WAITING_FACTOR * PARALLEL_JOBS) { - connection.removeFcpListener(this); - getter = null; - synchronized (stillRunning) { - stillRunning.remove(key); - } - if (key.hasParent()) { - logger.warning("Restarting fetch for " + key.getURI()); - new GetAdapter(key); - } else { - logger.finer("Avoid refetching " + key.getURI()); - } - } - } - - - private boolean processAnUri(FreenetURI uri) { - synchronized (roots) { - for (FetchedPage root : roots) { - FetchedPage foundChild = root.findUri(uri); - if (foundChild != null) { - page.addChild(foundChild); - foundChild.addParent(page); - return false; - } - } - } - objectQueue.offer(page.newChild(uri)); - return true; - } - - @Override - public void receivedAllData(FcpConnection c, AllData ad) { - assert c == connection; - assert ad != null; - if (!token.equals(ad.getIdentifier())) { - return; - } - final int objectQueueSize = objectQueue.size(); - if (objectQueueSize > maxObjectQueueSize) { - maxObjectQueueSize = objectQueueSize; - } - logger.entering(GetAdapter.class.toString(), - "receivedAllData", - "receivedAllData for " + token + - " adding to the " + objectQueueSize + " elements in the queue " + - "(max " + maxObjectQueueSize + ")."); - page.didSucceed(); - UriProcessor uriProcessor = new UriProcessor() { - @Override - public FetchedPage getPage() { - return page; - } - - @Override - public boolean processUri(FreenetURI uri) { - return processAnUri(uri); - } - }; - final InputStream inputStream = ad.getPayloadInputStream(); - try { - readAndProcessYamlData(inputStream, uriProcessor, page.level); - } catch (IOException e) { - logger.log(Level.SEVERE, "Cannot unpack.", e); - e.printStackTrace(); - System.exit(1); - } catch (ClassCastException cce) { - logger.log(Level.SEVERE, "Cannot unpack.", cce); - cce.printStackTrace(); - System.exit(1); - } finally { - markDone(); - successful ++; - successfulBlocks += progressCompleted; - 
successfulBytes += ad.getDataLength(); - showProgress(); - } - } - - @Override - public void receivedGetFailed(FcpConnection c, GetFailed gf) { - assert c == connection; - assert gf != null; - if (!token.equals(gf.getIdentifier())) { - return; - } - synchronized (getter) { - getter.notify(); - } - logger.warning("receivedGetFailed for " + token + " (" + page.getURI() + ")."); - page.didFail(); - markDone(); - failed ++; - showProgress(); - upload(page, new Runnable() { - public void run() { - objectQueue.offer(page); - recreated ++; - } - }); - } - - /** - * We have detected that we cannot download a certain CHK. - * - * If this CHK is actually cached, lets upload it from - * the cache in an attempt to repair the index. - * - * @param page the URI to upload. - * @param callback when the file is successfully uploaded. - */ - public boolean upload(final FetchedPage page, final Runnable callback) { - final File dir = new File(".", UploaderPaths.LIBRARY_CACHE); - if (!dir.canRead()) { - return false; - } - final File file = new File(dir, page.getURI().toString()); - if (!file.canRead()) { - logger.warning("Cannot find " + file + " in the cache."); - return false; - } - if (uploadStarter == null) { - uploadStarter = Executors.newSingleThreadExecutor(); - uploadStarter.execute(new Runnable() { - public void run() { - connection.addFcpListener(new FcpAdapter() { - @Override - public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { - assert c == connection; - assert uriGenerated != null; - String identifier = uriGenerated.getIdentifier(); - FreenetURI chk; - synchronized (ongoingUploads) { - chk = ongoingUploads.get(identifier).getKey(); - } - FreenetURI generatedURI; - try { - generatedURI = new FreenetURI(uriGenerated.getURI()); - } catch (MalformedURLException e) { - logger.severe("Were supposed to resurrect " + chk + - " but the URI calculated to " + uriGenerated.getURI() + - " that is not possible to convert to an URI. 
Will upload anyway."); - wrongChkCounterForUpload++; - return; - } - if (!generatedURI.equals(chk)) { - logger.severe("Were supposed to resurrect " + chk + - " but the URI calculated to " + uriGenerated.getURI() + ". " + - "Will upload anyway."); - wrongChkCounterForUpload++; - } else { - logger.finest("Resurrecting " + chk); - } - } - - @Override - public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) { - assert c == connection; - assert putSuccessful != null; - String identifier = putSuccessful.getIdentifier(); - OngoingUpload ongoingUpload; - synchronized (ongoingUploads) { - ongoingUpload = ongoingUploads.get(identifier); - } - final OngoingUpload foundUpload = ongoingUpload; - FreenetURI chk = foundUpload.getKey(); - FreenetURI generatedURI = null; - try { - generatedURI = new FreenetURI(putSuccessful.getURI()); - } catch (MalformedURLException e) { - logger.severe("Uploaded " + putSuccessful.getURI() + - " that is not possible to convert to an URI."); - } - if (generatedURI != null) { - if (!generatedURI.equals(chk)) { - logger.severe("Uploaded " + putSuccessful.getURI() + - " while supposed to upload " + chk + - ". 
"); - } else { - foundUpload.complete(); - } - } - synchronized (ongoingUploads) { - ongoingUploads.remove(identifier); - ongoingUploads.notifyAll(); - } - synchronized (stillRunning) { - stillRunning.notifyAll(); - } - }; - - @Override - public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { - assert c == connection; - assert putFailed != null; - String identifier = putFailed.getIdentifier(); - OngoingUpload ongoingUpload; - synchronized (ongoingUploads) { - ongoingUpload = ongoingUploads.get(identifier); - } - final OngoingUpload foundUpload = ongoingUpload; - FreenetURI chk = foundUpload.getKey(); - logger.severe("Uploaded " + chk + " failed."); - failedRecreated++; - synchronized (ongoingUploads) { - ongoingUploads.remove(identifier); - ongoingUploads.notifyAll(); - } - synchronized (stillRunning) { - stillRunning.notifyAll(); - } - } - }); - } - }); - ongoingUploads = new HashMap(); - } - uploadsStarted++; - uploadStarter.execute(new Runnable() { - public void run() { - if (!page.hasParent()) { - avoidRecreate++; - return; - } - uploadCounter++; - final String identifier = "Upload" + uploadCounter; - synchronized (ongoingUploads) { - ongoingUploads.put(identifier, new OngoingUpload(page.getURI(), callback)); - ongoingUploads.notifyAll(); - } - final ClientPut putter = new ClientPut("CHK@", identifier); - putter.setEarlyEncode(true); - putter.setPriority(net.pterodactylus.fcp.Priority.bulkSplitfile); - putter.setVerbosity(Verbosity.NONE); - final long dataLength = file.length(); - putter.setDataLength(dataLength); - FileInputStream in; - try { - in = new FileInputStream(file); - putter.setPayloadInputStream(in); - connection.sendMessage(putter); - in.close(); - in = null; - } catch (IOException | NullPointerException e) { - e.printStackTrace(); - logger.warning("Upload failed for " + file); - } - while (true) { - synchronized (ongoingUploads) { - if (ongoingUploads.size() < PARALLEL_UPLOADS) { - break; - } - try { - 
ongoingUploads.wait(TimeUnit.SECONDS.toMillis(3)); - } catch (InterruptedException e) { - throw new RuntimeException("Waiting for upload slot terminated."); - } - } - } - } - }); - return true; - } - - @Override - public void receivedSimpleProgress(FcpConnection c, - net.pterodactylus.fcp.SimpleProgress sp) { - assert c == connection; - assert sp != null; - if (!token.equals(sp.getIdentifier())) { - return; - } - progressTotal = sp.getTotal(); - progressRequired = sp.getRequired(); - progressCompleted = sp.getSucceeded(); - printLeft(); - } - - - private void markDone() { - done = true; - synchronized (this) { - this.notifyAll(); - } - // Signal to the cleanup thread: - synchronized (stillRunning) { - stillRunning.notifyAll(); - } - } - - private void forgetAboutThis() { - assert done; - connection.removeFcpListener(this); - synchronized (stillRunning) { - stillRunning.remove(page); - // Signal to the - stillRunning.notifyAll(); - printLeft(); - } - } - - boolean isDone() { - return done; - } - }; - - private int uploadsWaiting() { - return uploadsStarted - uploadCounter - avoidRecreate; - } - - private void ageRunning() { - final HashSet> stillRunningCopy; - synchronized (stillRunning) { - stillRunningCopy = new HashSet>(stillRunning.entrySet()); - } - for (Entry entry : stillRunningCopy) { - entry.getValue().hasBeenWaiting(entry.getKey()); - } - } - - public void doDownload() { - FcpSession session; - try { - session = new FcpSession("DownloaderFor" + uri); - } catch (IllegalStateException | IOException e1) { - e1.printStackTrace(); - return; - } - try { - connection = session.getConnection(); - if (connection == null) { - throw new IllegalArgumentException("No connection."); - } - final SubscribeUSK subscriber = new SubscribeUSK(uri + "-1", "USK"); - subscriber.setActive(true); - - final USKUpdateAdapter subscriberListener = new USKUpdateAdapter(subscriber); - connection.addFcpListener(subscriberListener); - - synchronized (subscriber) { - try { - 
connection.sendMessage(subscriber); - subscriber.wait(); - } catch (InterruptedException e) { - throw new RuntimeException("Waiting for connection interrupted."); - } catch (IOException e) { - throw new RuntimeException("Hello cannot write."); - } - } - subscriberListener.restart(); - - boolean moreJobs = false; - do { - if (moreJobs) { - synchronized (stillRunning) { - try { - logger.fine("Queue empty. " + - "Still running " + - stillRunning.size() + "."); - stillRunning.wait(20000); - } catch (InterruptedException e) { - e.printStackTrace(); - System.exit(1); - } - } - } - - boolean empty = true; - do { - ageRunning(); - synchronized (roots) { - final int roots_size = roots.size(); - if (roots_size > 1) { - int roots_distance = roots_size - 1; - if (roots.get(1).getTreeSizeSucceeded() >= roots.get(0).getTreeSizeSucceeded() - roots_distance * roots_distance * roots_distance) { - roots.remove(0); - } - } - } - - FetchedPage lastRoot; - synchronized (roots) { - lastRoot = roots.get(roots.size() - 1); - } - - // Randomize the order by rotating the queue - int maxLaps = objectQueue.size(); - if (maxLaps == 0) { - maxLaps = 1; - } - int toRotate = rand.nextInt(maxLaps); - int rotated = 0; - int counted = 0; - - while (!objectQueue.isEmpty()) { - FetchedPage taken; - try { - taken = objectQueue.take(); - } catch (InterruptedException e) { - e.printStackTrace(); - System.exit(1); - continue; - } - if (!taken.hasParent()) { - logger.finer("Avoid fetching " + taken.getURI()); - taken = null; - avoidFetching++; - continue; - } - - counted += taken.level * taken.level * taken.level; - if (counted < toRotate) { - rotated++; - objectQueue.offer(taken); - continue; - } - logger.finest("Rotated " + rotated + " (count to " + toRotate + ")."); - new GetAdapter(taken); - break; - } - subscriberListener.restart(); - empty = objectQueue.isEmpty(); - } while (!empty); - synchronized (stillRunning) { - moreJobs = !stillRunning.isEmpty(); - } - } while (moreJobs); - if (uploadStarter != 
null) { - uploadStarter.shutdown(); - try { - uploadStarter.awaitTermination(1, TimeUnit.HOURS); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - connection.removeFcpListener(subscriberListener); - } finally { - removeCleanupThread(); - session.close(); - connection = null; - } - showProgress(); - } - - - private void showProgress() { - String recreatedMessage = ""; - if (recreated > 0) { - recreatedMessage = " Recreated: " + recreated; - } - if (failedRecreated > 0) { - recreatedMessage += " Recreation failed: " + failedRecreated; - } - if (avoidRecreate > 0) { - recreatedMessage += " Recreation avoided: " + avoidRecreate; - } - String urisSeenMessage = ""; - if (uriUrisSeen > 0 || stringUrisSeen > 0) { - urisSeenMessage = " StringUrisSeen: " + stringUrisSeen + "/" + (uriUrisSeen + stringUrisSeen); - urisSeenMessage += new Formatter().format(" (%.1f%%)", 100.0 * stringUrisSeen / (uriUrisSeen + stringUrisSeen)); - } - String wrongChkCounterForUploadMessage = ""; - if (wrongChkCounterForUpload > 0) { - wrongChkCounterForUploadMessage = " WrongChkUploaded: " + wrongChkCounterForUpload; - } - logger.fine("Fetches: Successful: " + successful + - " blocks: " + successfulBlocks + - " bytes: " + successfulBytes + - " Failed: " + failed + - urisSeenMessage + - recreatedMessage + - wrongChkCounterForUploadMessage + - " Avoided: " + avoidFetching + "."); - - StringBuilder sb = new StringBuilder(); - List copiedRoots; - synchronized (roots) { - copiedRoots = new ArrayList(roots); - } - Collections.reverse(copiedRoots); - boolean first = true; - for (FetchedPage root : copiedRoots) { - if (sb.length() > 0) { - sb.append(", "); - } - long edition = root.getURI().getEdition(); - sb.append(edition); - int succeeded = root.getTreeSizeSucceeded(); - int failed = root.getTreeSizeFailed(); - if (failed > 0) { - sb.append(new Formatter().format(" FAILED: %.1f%%.", 100.0 * failed / (failed + succeeded))); - } - double estimate = getEstimatedPagesLeft(root); - if 
(estimate < Double.POSITIVE_INFINITY) { - final double fractionDone = 1.0 * succeeded / (estimate + succeeded); - sb.append(new Formatter().format(" Fetched: %.1f%%.", - 100.0 * fractionDone)); - if (first) { - logger.log(Level.FINER, "ETA: {0,date}, Started: {1,date}. Done {2,number,percent}.", - new Object[] { - new Date(new Double(1.0 / fractionDone * (new Date().getTime() - started.getTime())).longValue() + - started.getTime()), - started, - fractionDone, - }); - first = false; - } - } - sb.append(" ("); - sb.append(succeeded); - - if (failed > 0) { - sb.append(" and "); - sb.append(failed); - sb.append(" failed"); - } - - sb.append(")"); - } - - System.out.println("Editions: " + sb.toString()); - } - - /** - * 1. chdir to the directory with all the files. - * 2. Give parameters --move CHK/filename - * The CHK/filename is of the top file (in library.index.lastpushed.chk). - */ - public void doMove() { - int count = 0; - File toDirectory = new File("../" + UploaderPaths.LIBRARY_CACHE + ".new2"); - if (!toDirectory.mkdir()) { - System.err.println("Could not create the directory " + toDirectory); - System.exit(1); - } - final FetchedPage fetchedPage = new FetchedPage(uri); - roots.add(fetchedPage); - objectQueue.add(fetchedPage); - while (objectQueue.size() > 0) { - FetchedPage page; - try { - page = objectQueue.take(); - } catch (InterruptedException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - System.exit(1); - return; - } - final FetchedPage finalPage = page; - FileInputStream inputStream; - try { - Files.createLink(Paths.get(toDirectory.getPath(), page.uri.toString()), Paths.get(page.uri.toString())); - inputStream = new FileInputStream(page.uri.toString()); - count++; - System.out.println("Read file " + count + " in " + page.uri + " level " + page.level + " left: " + objectQueue.size()); - } catch (IOException e) { - System.out.println("Cannot find file " + page.uri); - e.printStackTrace(); - System.exit(1); - return; - } - try { - 
readAndProcessYamlData(inputStream, - new UriProcessor() { - @Override - public FetchedPage getPage() { - return finalPage; - } - - Set seen = new HashSet(); - @Override - public boolean processUri(FreenetURI uri) { - if (seen.contains(uri)) { - return false; - } - seen.add(uri); - objectQueue.offer(finalPage.newChild(uri)); - return true; - } - - }, page.level); - } catch (IOException e) { - System.out.println("Cannot read file " + page.uri); - e.printStackTrace(); - System.exit(1); - return; - } - - } - } - public static void main(String[] argv) { if (argv.length > 1 && argv[0].equals("--move")) { try { - new DownloadAll(new FreenetURI(argv[1])).doMove(); + new DownloadAllPerpetually(new FreenetURI(argv[1])).doMove(); } catch (MalformedURLException e) { e.printStackTrace(); System.exit(2); } } else { try { - new DownloadAll(new FreenetURI(argv[0])).doDownload(); + new DownloadAllPerpetually(new FreenetURI(argv[0])).doDownload(); } catch (MalformedURLException e) { e.printStackTrace(); System.exit(2); } } } - - private int ongoingUploadsSize() { - if (ongoingUploads == null) { - return 0; - } - - synchronized (ongoingUploads) { - return ongoingUploads.size(); - } - } - - public void waitForSlot() { - startCleanupThread(); - synchronized (stillRunning) { - try { - for (int i = 0; i < uploadsWaiting() + ongoingUploadsSize() + stillRunning.size(); i++) { - stillRunning.wait(TimeUnit.SECONDS.toMillis(1 + uploadsWaiting() + uploadsWaiting())); - } - while (stillRunning.size() >= PARALLEL_JOBS) { - stillRunning.wait(1 + TimeUnit.MINUTES.toMillis(2)); - } - } catch (InterruptedException e) { - e.printStackTrace(); - System.exit(1); - } - } - } - - private synchronized void startCleanupThread() { - if (cleanupThread == null) { - cleanupThread = new Thread( - new Runnable() { - public void run () { - boolean moreJobs = false; - do { - if (moreJobs) { - synchronized (stillRunning) { - try { - stillRunning.wait(1234567); - } catch (InterruptedException e) { - 
e.printStackTrace(); - System.exit(1); - } - } - Set copy; - synchronized (stillRunning) { - copy = new HashSet(stillRunning.values()); - } - for (GetAdapter ga : copy) { - if (ga.isDone()) { - ga.forgetAboutThis(); - } - } - } - synchronized (stillRunning) { - moreJobs = !stillRunning.isEmpty(); - } - } while (moreJobs); - removeCleanupThread(); - } - } - ); - cleanupThread.start(); - } - } - - private synchronized void removeCleanupThread() { - cleanupThread = null; - - Set copy; - synchronized (stillRunning) { - copy = new HashSet(stillRunning.values()); - } - for (GetAdapter ga : copy) { - ga.markDone(); - ga.forgetAboutThis(); - } - } } diff --git a/uploader/src/freenet/library/uploader/DownloadAllPerpetually.java b/uploader/src/freenet/library/uploader/DownloadAllPerpetually.java new file mode 100644 index 00000000..5478ac4a --- /dev/null +++ b/uploader/src/freenet/library/uploader/DownloadAllPerpetually.java @@ -0,0 +1,1249 @@ +/* + */ + +/* + * Log levels used: + * None/Warning: Serious events and small problems. + * FINE: Stats for fetches and overview of contents of fetched keys. Minor events. + * FINER: Queue additions, length, ETA, rotations. + * FINEST: Really minor events. 
+ */ + +package freenet.library.uploader; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.MalformedURLException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.Formatter; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Random; +import java.util.Set; +import java.util.WeakHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import java.util.logging.Logger; + +import net.pterodactylus.fcp.AllData; +import net.pterodactylus.fcp.ClientGet; +import net.pterodactylus.fcp.ClientPut; +import net.pterodactylus.fcp.FcpAdapter; +import net.pterodactylus.fcp.FcpConnection; +import net.pterodactylus.fcp.GetFailed; +import net.pterodactylus.fcp.Priority; +import net.pterodactylus.fcp.PutFailed; +import net.pterodactylus.fcp.PutSuccessful; +import net.pterodactylus.fcp.SubscribeUSK; +import net.pterodactylus.fcp.SubscribedUSKUpdate; +import net.pterodactylus.fcp.URIGenerated; +import net.pterodactylus.fcp.Verbosity; +import freenet.library.io.FreenetURI; +import freenet.library.io.YamlReaderWriter; +import freenet.library.io.serial.Packer; +import freenet.library.io.serial.Packer.BinInfo; + +/** + * Class to download the entire index. + */ +class DownloadAllPerpetually extends AdHocDataReader { + private static final int PARALLEL_JOBS = 10; + private static final int PARALLEL_UPLOADS = 3; + + /** Logger. 
*/ + private static final Logger logger = Logger.getLogger(DownloadAllPerpetually.class.getName()); + + public final Map stillRunning = new HashMap(); + private FreenetURI uri; + private FreenetURI newUri; + private int edition; + private FcpConnection connection; + private static int getterCounter = 0; + private static int uploadCounter = 0; + private LinkedBlockingQueue objectQueue = + new LinkedBlockingQueue(); + private Thread cleanupThread; + private List roots = new ArrayList(); + + private ExecutorService uploadStarter = null; + private Map ongoingUploads = null; + + private int successful = 0; + private int successfulBlocks = 0; + private long successfulBytes = 0; + private int failed = 0; + private long uriUrisSeen = 0; + private long stringUrisSeen = 0; + private int recreated = 0; + private int failedRecreated = 0; + private int avoidFetching = 0; + private int uploadsStarted = 0; + private int avoidRecreate = 0; + private int wrongChkCounterForUpload = 0; + private int maxObjectQueueSize = 0; + + private Random rand = new Random(); + private Date started = new Date(); + + public DownloadAllPerpetually(FreenetURI u) { + uri = u; + } + + public static class WeakHashSet + implements Set { + /** + * We just use the keys and let all values be TOKEN. 
+ */ + private Map map = new WeakHashMap(); + private static Object TOKEN = new Object(); + + @Override + public boolean add(T arg0) { + if (map.containsKey(arg0)) { + return false; + } else { + map.put(arg0, TOKEN); + return true; + } + } + + @Override + public boolean addAll(Collection arg0) { + boolean retval = false; + for (T ele : arg0) { + if (add(ele)) { + retval = true; + } + } + return retval; + } + + @Override + public void clear() { + map.clear(); + } + + @Override + public boolean contains(Object arg0) { + return map.containsKey(arg0); + } + + @Override + public boolean containsAll(Collection arg0) { + for (Object ele : arg0) { + if (!contains(ele)) { + return false; + } + } + return true; + } + + @Override + public boolean isEmpty() { + return map.isEmpty(); + } + + @Override + public Iterator iterator() { + return map.keySet().iterator(); + } + + @Override + public boolean remove(Object arg0) { + return map.remove(arg0) != null; + } + + @Override + public boolean removeAll(Collection arg0) { + boolean retval = true; + for (Object ele : arg0) { + if (!remove(ele)) { + retval = false; + } + } + return retval; + } + + @Override + public boolean retainAll(Collection arg0) { + boolean retval = false; + for (T ele : map.keySet()) { + if (!arg0.contains(ele)) { + if (map.remove(ele) != null) { + retval = true; + } + } + } + return retval; + } + + @Override + public int size() { + return map.size(); + } + + @Override + public Object[] toArray() { + return map.keySet().toArray(); + } + + @Override + public T[] toArray(T[] arg0) { + return map.keySet().toArray(arg0); + } + + } + + /** + * A class to keep track of what pages are fetched and how they are related + * to other fetched pages. The purpose of this is to avoid fetching stuff + * related only to "old" editions. + */ + private static class FetchedPage { + /** + * This is really a Set but there is no WeakSet so we use the keys + * and let all values be TOKEN. 
+ */ + private Set parents = Collections.synchronizedSet(new WeakHashSet()); + private Set children = Collections.synchronizedSet(new HashSet()); + + private FreenetURI uri; + int level; + private boolean succeeded; + private boolean failed; + + FetchedPage(FreenetURI u) { + this(u, 0); + } + + FetchedPage(FreenetURI u, int l) { + uri = u; + level = l; + } + + void addParent(FetchedPage fp) { + parents.add(fp); + } + + void addChild(FetchedPage fp) { + children.add(fp); + } + + FetchedPage newChild(FreenetURI u) { + FetchedPage child = new FetchedPage(u, level + 1); + child.addParent(this); + addChild(child); + return child; + } + + FreenetURI getURI() { + return uri; + } + + boolean hasParent() { + return !parents.isEmpty(); + } + + private FetchedPage[] getParents() { + // Even though parents and children are synchronized we + // encountered some ConcurrentModificationException when + // fetching them through iterators so we avoid that. + return parents.toArray(new FetchedPage[0]); + } + + private FetchedPage[] getChildren() { + return children.toArray(new FetchedPage[0]); + } + + /** + * fetchedPage is an ancestor, any number of levels, to this + * page. + * + * @param fetchedPage the ancestor to search for. + * @return + */ + public boolean hasParent(FetchedPage fetchedPage) { + if (parents.contains(fetchedPage)) { + return true; + } + for (FetchedPage parent : getParents()) { + if (parent != null && parent.hasParent(fetchedPage)) { + return true; + } + } + return false; + } + + int getTreeSize() { + int size = 1; + for (FetchedPage child : getChildren()) { + size += child.getTreeSize(); + } + return size; + } + + void addPerLevel(Map result) { + if (!result.containsKey(level)) { + result.put(level, 0); + } + if (!succeeded && !failed) { + result.put(level, result.get(level) + 1); + } + for (FetchedPage child : children) { + child.addPerLevel(result); + } + } + + int getTreeSizeSucceeded() { + int size = succeeded ? 
1 : 0; + for (FetchedPage child : getChildren()) { + size += child.getTreeSizeSucceeded(); + } + return size; + } + + int getTreeSizeFailed() { + int size = failed ? 1 : 0; + for (FetchedPage child : getChildren()) { + size += child.getTreeSizeFailed(); + } + return size; + } + + void didFail() { + failed = true; + } + + void didSucceed() { + failed = false; + succeeded = true; + } + + public FetchedPage findUri(FreenetURI u) { + if (u.equals(uri)) { + return this; + } + for (FetchedPage child : getChildren()) { + FetchedPage found = child.findUri(u); + if (found != null) { + return found; + } + } + return null; + } + } + + private class USKUpdateAdapter extends FcpAdapter { + + private boolean updated = false; + private Object subscriber; + + public USKUpdateAdapter(Object s) { + subscriber = s; + } + + @Override + public void receivedSubscribedUSKUpdate(FcpConnection fcpConnection, SubscribedUSKUpdate subscribedUSKUpdate) { + assert fcpConnection == connection; + if (subscribedUSKUpdate.isNewKnownGood() && + subscribedUSKUpdate.getEdition() > edition) { + updated = true; + try { + newUri = new FreenetURI(subscribedUSKUpdate.getURI()); + } catch (MalformedURLException e) { + throw new RuntimeException(e); + } + edition = subscribedUSKUpdate.getEdition(); + synchronized (subscriber) { + subscriber.notify(); + } + } + } + + public void restart() { + if (updated) { + updated = false; + logger.info("Found: " + newUri + " Edition: " + edition); + FetchedPage rootPage = new FetchedPage(newUri); + synchronized (roots) { + roots.add(rootPage); + } + new GetAdapter(rootPage.newChild(newUri)); + } + } + } + + + class StatisticsAccumulator { + private int count = 0; + private int sum = 0; + + void addSample(int found) { + count++; + sum += found; + } + + double getMean() { + return 1.0 * sum / count; + } + + public String toString() { + return "" + getMean() + " (" + count + ")"; + } + } + + private Map statistics = new HashMap(); + private void addFoundChildren(int level, 
int foundChildren) { + if (!statistics.containsKey(level)) { + statistics.put(level, new StatisticsAccumulator()); + } + statistics.get(level).addSample(foundChildren); + } + + private double getEstimatedPagesLeft(FetchedPage page) { + double estimate = 0.0; + double extra = 0.0; + Map pagesPerLevel = new HashMap(); + page.addPerLevel(pagesPerLevel); + for (int level = 1; pagesPerLevel.containsKey(level); level++) { + if (!statistics.containsKey(level)) { + return Double.POSITIVE_INFINITY; + } + extra += pagesPerLevel.get(level); + estimate += extra; + extra = extra * statistics.get(level).getMean(); + } + return estimate; + } + + + private static class OngoingUpload { + private final Date started = new Date(); + private final FreenetURI freenetURI; + private final Runnable callback; + + public OngoingUpload(FreenetURI fname, Runnable cback) { + freenetURI = fname; + callback = cback; + } + + Date getStarted() { + return started; + } + + FreenetURI getKey() { + return freenetURI; + } + + void complete() { + final long millis = new Date().getTime() - started.getTime(); + final long seconds = millis / 1000; + final long minutes = seconds / 60; + final long hours = minutes / 60; + logger.log(Level.FINE, "Upload completed after {0,number}:{1,number,00}:{2,number,00}.", + new Object[] { + hours, + minutes % 60, + seconds % 60, + }); + callback.run(); + } + } + + /** + * Show the amount of outstanding work. 
+ */ + void printLeft() { + if (logger.isLoggable(Level.FINEST)) { + int total = 0; + int required = 0; + int completed = 0; + synchronized (stillRunning) { + for (GetAdapter value : stillRunning.values()) { + total += value.progressTotal; + required += value.progressRequired; + completed += value.progressCompleted; + } + String ongoingUploadsMessage = ""; + if (logger.isLoggable(Level.FINEST) && ongoingUploadsSize() > 0) { + Date oldest = null; + synchronized (ongoingUploads) { + for (Map.Entry entry : ongoingUploads.entrySet()) { + if (oldest == null || oldest.compareTo(entry.getValue().getStarted()) > 0) { + oldest = entry.getValue().getStarted(); + } + } + } + ongoingUploadsMessage = " and " + ongoingUploadsSize() + " uploads"; + if (oldest != null && new Date().getTime() - oldest.getTime() > TimeUnit.HOURS.toMillis(5)) { + ongoingUploadsMessage += new MessageFormat(", oldest from {0,date,long}").format(new Object[] { oldest }); + } + ongoingUploadsMessage += "."; + } + logger.finest("Outstanding " + stillRunning.size() + " ClientGet jobs " + + "(" + completed + "/" + required + "/" + total + ")" + + ongoingUploadsMessage); + } + } + } + + private class GetAdapter extends FcpAdapter { + private ClientGet getter; + private String token; + private FetchedPage page; + private int progressTotal; + private int progressRequired; + private int progressCompleted; + private boolean done; + int waitingLaps; + public static final int WAITING_FACTOR = 50; + + public GetAdapter(FetchedPage u) { + page = u; + getterCounter ++; + token = "Getter" + getterCounter; + waitingLaps = 0; + getter = new ClientGet(page.getURI().toString(), token); + getter.setPriority(Priority.prefetch); + getter.setVerbosity(Verbosity.ALL); + + waitForSlot(); + connection.addFcpListener(this); + try { + connection.sendMessage(getter); + } catch (IOException e) { + e.printStackTrace(); + System.exit(1); + } + synchronized (stillRunning) { + stillRunning.put(page, this); + stillRunning.notifyAll(); + 
} + } + + /** + * Called when nothing has happened for a while with this request. + * @param key The page. + */ + public void hasBeenWaiting(FetchedPage key) { + waitingLaps++; + if (waitingLaps > WAITING_FACTOR * PARALLEL_JOBS) { + connection.removeFcpListener(this); + getter = null; + synchronized (stillRunning) { + stillRunning.remove(key); + } + if (key.hasParent()) { + logger.warning("Restarting fetch for " + key.getURI()); + new GetAdapter(key); + } else { + logger.finer("Avoid refetching " + key.getURI()); + } + } + } + + + private boolean processAnUri(FreenetURI uri) { + synchronized (roots) { + for (FetchedPage root : roots) { + FetchedPage foundChild = root.findUri(uri); + if (foundChild != null) { + page.addChild(foundChild); + foundChild.addParent(page); + return false; + } + } + } + objectQueue.offer(page.newChild(uri)); + return true; + } + + @Override + public void receivedAllData(FcpConnection c, AllData ad) { + assert c == connection; + assert ad != null; + if (!token.equals(ad.getIdentifier())) { + return; + } + final int objectQueueSize = objectQueue.size(); + if (objectQueueSize > maxObjectQueueSize) { + maxObjectQueueSize = objectQueueSize; + } + logger.entering(GetAdapter.class.toString(), + "receivedAllData", + "receivedAllData for " + token + + " adding to the " + objectQueueSize + " elements in the queue " + + "(max " + maxObjectQueueSize + ")."); + page.didSucceed(); + UriProcessor uriProcessor = new UriProcessor() { + @Override + public FreenetURI getURI() { + return page.getURI(); + } + + @Override + public int getLevel() { + return page.level; + } + + @Override + public boolean processUri(FreenetURI uri) { + return processAnUri(uri); + } + + @Override + public void uriSeen() { + uriUrisSeen++; + } + + @Override + public void stringSeen() { + stringUrisSeen++; + } + + @Override + public void childrenSeen(int level, int foundChildren) { + addFoundChildren(level, foundChildren); + } + + }; + final InputStream inputStream = 
ad.getPayloadInputStream(); + try { + readAndProcessYamlData(inputStream, uriProcessor, page.level); + } catch (IOException e) { + logger.log(Level.SEVERE, "Cannot unpack.", e); + e.printStackTrace(); + System.exit(1); + } catch (ClassCastException cce) { + logger.log(Level.SEVERE, "Cannot unpack.", cce); + cce.printStackTrace(); + System.exit(1); + } finally { + markDone(); + successful ++; + successfulBlocks += progressCompleted; + successfulBytes += ad.getDataLength(); + showProgress(); + } + } + + @Override + public void receivedGetFailed(FcpConnection c, GetFailed gf) { + assert c == connection; + assert gf != null; + if (!token.equals(gf.getIdentifier())) { + return; + } + synchronized (getter) { + getter.notify(); + } + logger.warning("receivedGetFailed for " + token + " (" + page.getURI() + ")."); + page.didFail(); + markDone(); + failed ++; + showProgress(); + upload(page, new Runnable() { + public void run() { + objectQueue.offer(page); + recreated ++; + } + }); + } + + /** + * We have detected that we cannot download a certain CHK. + * + * If this CHK is actually cached, lets upload it from + * the cache in an attempt to repair the index. + * + * @param page the URI to upload. + * @param callback when the file is successfully uploaded. 
+ */ + public boolean upload(final FetchedPage page, final Runnable callback) { + final File dir = new File(".", UploaderPaths.LIBRARY_CACHE); + if (!dir.canRead()) { + return false; + } + final File file = new File(dir, page.getURI().toString()); + if (!file.canRead()) { + logger.warning("Cannot find " + file + " in the cache."); + return false; + } + if (uploadStarter == null) { + uploadStarter = Executors.newSingleThreadExecutor(); + uploadStarter.execute(new Runnable() { + public void run() { + connection.addFcpListener(new FcpAdapter() { + @Override + public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { + assert c == connection; + assert uriGenerated != null; + String identifier = uriGenerated.getIdentifier(); + FreenetURI chk; + synchronized (ongoingUploads) { + chk = ongoingUploads.get(identifier).getKey(); + } + FreenetURI generatedURI; + try { + generatedURI = new FreenetURI(uriGenerated.getURI()); + } catch (MalformedURLException e) { + logger.severe("Were supposed to resurrect " + chk + + " but the URI calculated to " + uriGenerated.getURI() + + " that is not possible to convert to an URI. Will upload anyway."); + wrongChkCounterForUpload++; + return; + } + if (!generatedURI.equals(chk)) { + logger.severe("Were supposed to resurrect " + chk + + " but the URI calculated to " + uriGenerated.getURI() + ". 
" + + "Will upload anyway."); + wrongChkCounterForUpload++; + } else { + logger.finest("Resurrecting " + chk); + } + } + + @Override + public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) { + assert c == connection; + assert putSuccessful != null; + String identifier = putSuccessful.getIdentifier(); + OngoingUpload ongoingUpload; + synchronized (ongoingUploads) { + ongoingUpload = ongoingUploads.get(identifier); + } + final OngoingUpload foundUpload = ongoingUpload; + FreenetURI chk = foundUpload.getKey(); + FreenetURI generatedURI = null; + try { + generatedURI = new FreenetURI(putSuccessful.getURI()); + } catch (MalformedURLException e) { + logger.severe("Uploaded " + putSuccessful.getURI() + + " that is not possible to convert to an URI."); + } + if (generatedURI != null) { + if (!generatedURI.equals(chk)) { + logger.severe("Uploaded " + putSuccessful.getURI() + + " while supposed to upload " + chk + + ". "); + } else { + foundUpload.complete(); + } + } + synchronized (ongoingUploads) { + ongoingUploads.remove(identifier); + ongoingUploads.notifyAll(); + } + synchronized (stillRunning) { + stillRunning.notifyAll(); + } + }; + + @Override + public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { + assert c == connection; + assert putFailed != null; + String identifier = putFailed.getIdentifier(); + OngoingUpload ongoingUpload; + synchronized (ongoingUploads) { + ongoingUpload = ongoingUploads.get(identifier); + } + final OngoingUpload foundUpload = ongoingUpload; + FreenetURI chk = foundUpload.getKey(); + logger.severe("Uploaded " + chk + " failed."); + failedRecreated++; + synchronized (ongoingUploads) { + ongoingUploads.remove(identifier); + ongoingUploads.notifyAll(); + } + synchronized (stillRunning) { + stillRunning.notifyAll(); + } + } + }); + } + }); + ongoingUploads = new HashMap(); + } + uploadsStarted++; + uploadStarter.execute(new Runnable() { + public void run() { + if (!page.hasParent()) { + avoidRecreate++; + 
return; + } + uploadCounter++; + final String identifier = "Upload" + uploadCounter; + synchronized (ongoingUploads) { + ongoingUploads.put(identifier, new OngoingUpload(page.getURI(), callback)); + ongoingUploads.notifyAll(); + } + final ClientPut putter = new ClientPut("CHK@", identifier); + putter.setEarlyEncode(true); + putter.setPriority(net.pterodactylus.fcp.Priority.bulkSplitfile); + putter.setVerbosity(Verbosity.NONE); + final long dataLength = file.length(); + putter.setDataLength(dataLength); + FileInputStream in; + try { + in = new FileInputStream(file); + putter.setPayloadInputStream(in); + connection.sendMessage(putter); + in.close(); + in = null; + } catch (IOException | NullPointerException e) { + e.printStackTrace(); + logger.warning("Upload failed for " + file); + } + while (true) { + synchronized (ongoingUploads) { + if (ongoingUploads.size() < PARALLEL_UPLOADS) { + break; + } + try { + ongoingUploads.wait(TimeUnit.SECONDS.toMillis(3)); + } catch (InterruptedException e) { + throw new RuntimeException("Waiting for upload slot terminated."); + } + } + } + } + }); + return true; + } + + @Override + public void receivedSimpleProgress(FcpConnection c, + net.pterodactylus.fcp.SimpleProgress sp) { + assert c == connection; + assert sp != null; + if (!token.equals(sp.getIdentifier())) { + return; + } + progressTotal = sp.getTotal(); + progressRequired = sp.getRequired(); + progressCompleted = sp.getSucceeded(); + printLeft(); + } + + + private void markDone() { + done = true; + synchronized (this) { + this.notifyAll(); + } + // Signal to the cleanup thread: + synchronized (stillRunning) { + stillRunning.notifyAll(); + } + } + + private void forgetAboutThis() { + assert done; + connection.removeFcpListener(this); + synchronized (stillRunning) { + stillRunning.remove(page); + // Signal to the + stillRunning.notifyAll(); + printLeft(); + } + } + + boolean isDone() { + return done; + } + }; + + private int uploadsWaiting() { + return uploadsStarted - 
uploadCounter - avoidRecreate; + } + + private void ageRunning() { + final HashSet> stillRunningCopy; + synchronized (stillRunning) { + stillRunningCopy = new HashSet>(stillRunning.entrySet()); + } + for (Entry entry : stillRunningCopy) { + entry.getValue().hasBeenWaiting(entry.getKey()); + } + } + + public void doDownload() { + FcpSession session; + try { + session = new FcpSession("DownloaderFor" + uri); + } catch (IllegalStateException | IOException e1) { + e1.printStackTrace(); + return; + } + try { + connection = session.getConnection(); + if (connection == null) { + throw new IllegalArgumentException("No connection."); + } + final SubscribeUSK subscriber = new SubscribeUSK(uri + "-1", "USK"); + subscriber.setActive(true); + + final USKUpdateAdapter subscriberListener = new USKUpdateAdapter(subscriber); + connection.addFcpListener(subscriberListener); + + synchronized (subscriber) { + try { + connection.sendMessage(subscriber); + subscriber.wait(); + } catch (InterruptedException e) { + throw new RuntimeException("Waiting for connection interrupted."); + } catch (IOException e) { + throw new RuntimeException("Hello cannot write."); + } + } + subscriberListener.restart(); + + boolean moreJobs = false; + do { + if (moreJobs) { + synchronized (stillRunning) { + try { + logger.fine("Queue empty. 
" + + "Still running " + + stillRunning.size() + "."); + stillRunning.wait(20000); + } catch (InterruptedException e) { + e.printStackTrace(); + System.exit(1); + } + } + } + + boolean empty = true; + do { + ageRunning(); + synchronized (roots) { + final int roots_size = roots.size(); + if (roots_size > 1) { + int roots_distance = roots_size - 1; + if (roots.get(1).getTreeSizeSucceeded() >= roots.get(0).getTreeSizeSucceeded() - roots_distance * roots_distance * roots_distance) { + roots.remove(0); + } + } + } + + FetchedPage lastRoot; + synchronized (roots) { + lastRoot = roots.get(roots.size() - 1); + } + + // Randomize the order by rotating the queue + int maxLaps = objectQueue.size(); + if (maxLaps == 0) { + maxLaps = 1; + } + int toRotate = rand.nextInt(maxLaps); + int rotated = 0; + int counted = 0; + + while (!objectQueue.isEmpty()) { + FetchedPage taken; + try { + taken = objectQueue.take(); + } catch (InterruptedException e) { + e.printStackTrace(); + System.exit(1); + continue; + } + if (!taken.hasParent()) { + logger.finer("Avoid fetching " + taken.getURI()); + taken = null; + avoidFetching++; + continue; + } + + counted += taken.level * taken.level * taken.level; + if (counted < toRotate) { + rotated++; + objectQueue.offer(taken); + continue; + } + logger.finest("Rotated " + rotated + " (count to " + toRotate + ")."); + new GetAdapter(taken); + break; + } + subscriberListener.restart(); + empty = objectQueue.isEmpty(); + } while (!empty); + synchronized (stillRunning) { + moreJobs = !stillRunning.isEmpty(); + } + } while (moreJobs); + if (uploadStarter != null) { + uploadStarter.shutdown(); + try { + uploadStarter.awaitTermination(1, TimeUnit.HOURS); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + connection.removeFcpListener(subscriberListener); + } finally { + removeCleanupThread(); + session.close(); + connection = null; + } + showProgress(); + } + + + private void showProgress() { + String recreatedMessage = ""; + if (recreated 
> 0) { + recreatedMessage = " Recreated: " + recreated; + } + if (failedRecreated > 0) { + recreatedMessage += " Recreation failed: " + failedRecreated; + } + if (avoidRecreate > 0) { + recreatedMessage += " Recreation avoided: " + avoidRecreate; + } + String urisSeenMessage = ""; + if (uriUrisSeen > 0 || stringUrisSeen > 0) { + urisSeenMessage = " StringUrisSeen: " + stringUrisSeen + "/" + (uriUrisSeen + stringUrisSeen); + urisSeenMessage += new Formatter().format(" (%.1f%%)", 100.0 * stringUrisSeen / (uriUrisSeen + stringUrisSeen)); + } + String wrongChkCounterForUploadMessage = ""; + if (wrongChkCounterForUpload > 0) { + wrongChkCounterForUploadMessage = " WrongChkUploaded: " + wrongChkCounterForUpload; + } + logger.fine("Fetches: Successful: " + successful + + " blocks: " + successfulBlocks + + " bytes: " + successfulBytes + + " Failed: " + failed + + urisSeenMessage + + recreatedMessage + + wrongChkCounterForUploadMessage + + " Avoided: " + avoidFetching + "."); + + StringBuilder sb = new StringBuilder(); + List copiedRoots; + synchronized (roots) { + copiedRoots = new ArrayList(roots); + } + Collections.reverse(copiedRoots); + boolean first = true; + for (FetchedPage root : copiedRoots) { + if (sb.length() > 0) { + sb.append(", "); + } + long edition = root.getURI().getEdition(); + sb.append(edition); + int succeeded = root.getTreeSizeSucceeded(); + int failed = root.getTreeSizeFailed(); + if (failed > 0) { + sb.append(new Formatter().format(" FAILED: %.1f%%.", 100.0 * failed / (failed + succeeded))); + } + double estimate = getEstimatedPagesLeft(root); + if (estimate < Double.POSITIVE_INFINITY) { + final double fractionDone = 1.0 * succeeded / (estimate + succeeded); + sb.append(new Formatter().format(" Fetched: %.1f%%.", + 100.0 * fractionDone)); + if (first) { + logger.log(Level.FINER, "ETA: {0,date}, Started: {1,date}. 
Done {2,number,percent}.", + new Object[] { + new Date(new Double(1.0 / fractionDone * (new Date().getTime() - started.getTime())).longValue() + + started.getTime()), + started, + fractionDone, + }); + first = false; + } + } + sb.append(" ("); + sb.append(succeeded); + + if (failed > 0) { + sb.append(" and "); + sb.append(failed); + sb.append(" failed"); + } + + sb.append(")"); + } + + System.out.println("Editions: " + sb.toString()); + } + + /** + * 1. chdir to the directory with all the files. + * 2. Give parameters --move CHK/filename + * The CHK/filename is of the top file (in library.index.lastpushed.chk). + */ + public void doMove() { + int count = 0; + File toDirectory = new File("../" + UploaderPaths.LIBRARY_CACHE + ".new2"); + if (!toDirectory.mkdir()) { + System.err.println("Could not create the directory " + toDirectory); + System.exit(1); + } + final FetchedPage fetchedPage = new FetchedPage(uri); + roots.add(fetchedPage); + objectQueue.add(fetchedPage); + while (objectQueue.size() > 0) { + FetchedPage page; + try { + page = objectQueue.take(); + } catch (InterruptedException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + System.exit(1); + return; + } + final FetchedPage finalPage = page; + FileInputStream inputStream; + try { + Files.createLink(Paths.get(toDirectory.getPath(), page.uri.toString()), Paths.get(page.uri.toString())); + inputStream = new FileInputStream(page.uri.toString()); + count++; + System.out.println("Read file " + count + " in " + page.uri + " level " + page.level + " left: " + objectQueue.size()); + } catch (IOException e) { + System.out.println("Cannot find file " + page.uri); + e.printStackTrace(); + System.exit(1); + return; + } + try { + readAndProcessYamlData(inputStream, + new UriProcessor() { + @Override + public FreenetURI getURI() { + return finalPage.getURI(); + } + + @Override + public int getLevel() { + return 1; + } + + Set seen = new HashSet(); + @Override + public boolean processUri(FreenetURI 
uri) { + if (seen.contains(uri)) { + return false; + } + seen.add(uri); + objectQueue.offer(finalPage.newChild(uri)); + return true; + } + + @Override + public void uriSeen() {} + + @Override + public void stringSeen() {} + + @Override + public void childrenSeen(int level, int foundChildren) {} + + }, page.level); + } catch (IOException e) { + System.out.println("Cannot read file " + page.uri); + e.printStackTrace(); + System.exit(1); + return; + } + + } + } + + private int ongoingUploadsSize() { + if (ongoingUploads == null) { + return 0; + } + + synchronized (ongoingUploads) { + return ongoingUploads.size(); + } + } + + public void waitForSlot() { + startCleanupThread(); + synchronized (stillRunning) { + try { + for (int i = 0; i < uploadsWaiting() + ongoingUploadsSize() + stillRunning.size(); i++) { + stillRunning.wait(TimeUnit.SECONDS.toMillis(1 + uploadsWaiting() + uploadsWaiting())); + } + while (stillRunning.size() >= PARALLEL_JOBS) { + stillRunning.wait(1 + TimeUnit.MINUTES.toMillis(2)); + } + } catch (InterruptedException e) { + e.printStackTrace(); + System.exit(1); + } + } + } + + private synchronized void startCleanupThread() { + if (cleanupThread == null) { + cleanupThread = new Thread( + new Runnable() { + public void run () { + boolean moreJobs = false; + do { + if (moreJobs) { + synchronized (stillRunning) { + try { + stillRunning.wait(1234567); + } catch (InterruptedException e) { + e.printStackTrace(); + System.exit(1); + } + } + Set copy; + synchronized (stillRunning) { + copy = new HashSet(stillRunning.values()); + } + for (GetAdapter ga : copy) { + if (ga.isDone()) { + ga.forgetAboutThis(); + } + } + } + synchronized (stillRunning) { + moreJobs = !stillRunning.isEmpty(); + } + } while (moreJobs); + removeCleanupThread(); + } + } + ); + cleanupThread.start(); + } + } + + private synchronized void removeCleanupThread() { + cleanupThread = null; + + Set copy; + synchronized (stillRunning) { + copy = new HashSet(stillRunning.values()); + } + for 
(GetAdapter ga : copy) { + ga.markDone(); + ga.forgetAboutThis(); + } + } +} From 7e871b1b25bbf3b9a1187cbdec94c424e7b6d927 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Mon, 18 Dec 2017 08:29:30 +0100 Subject: [PATCH 144/180] First version of one-shot download. --- .../library/uploader/DownloadAllOnce.java | 748 ++++++++++++++++++ 1 file changed, 748 insertions(+) create mode 100644 uploader/src/freenet/library/uploader/DownloadAllOnce.java diff --git a/uploader/src/freenet/library/uploader/DownloadAllOnce.java b/uploader/src/freenet/library/uploader/DownloadAllOnce.java new file mode 100644 index 00000000..f3bc807e --- /dev/null +++ b/uploader/src/freenet/library/uploader/DownloadAllOnce.java @@ -0,0 +1,748 @@ +/* + */ + +/* + * Log levels used: + * None/Warning: Serious events and small problems. + * FINE: Stats for fetches and overview of contents of fetched keys. Minor events. + * FINER: Queue additions, length, ETA, rotations. + * FINEST: Really minor events. 
+ */ + +package freenet.library.uploader; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.MalformedURLException; +import java.nio.file.Files; +import java.nio.file.StandardCopyOption; +import java.nio.file.Paths; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.Formatter; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Random; +import java.util.Set; +import java.util.WeakHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import java.util.logging.Logger; + +import net.pterodactylus.fcp.AllData; +import net.pterodactylus.fcp.ClientGet; +import net.pterodactylus.fcp.ClientPut; +import net.pterodactylus.fcp.FcpAdapter; +import net.pterodactylus.fcp.FcpConnection; +import net.pterodactylus.fcp.GetFailed; +import net.pterodactylus.fcp.Priority; +import net.pterodactylus.fcp.PutFailed; +import net.pterodactylus.fcp.PutSuccessful; +import net.pterodactylus.fcp.SubscribeUSK; +import net.pterodactylus.fcp.SubscribedUSKUpdate; +import net.pterodactylus.fcp.URIGenerated; +import net.pterodactylus.fcp.Verbosity; +import freenet.library.io.FreenetURI; +import freenet.library.io.YamlReaderWriter; +import freenet.library.io.serial.Packer; +import freenet.library.io.serial.Packer.BinInfo; + +/** + * Class to download the entire index. + * + * When a newer USK is seen, stop the processing and exit. 
+ */ +class DownloadAllOnce { + private static final int PARALLEL_JOBS = 10; + private static final int PARALLEL_UPLOADS = 3; + + /** Logger. */ + private static final Logger logger = Logger.getLogger(DownloadAllOnce.class.getName()); + + private ScheduledExecutorService executors; + private FcpConnection connection; + private File directory; + private Set allFiles = new HashSet(); + private int getterCounter = 0; + private int uploadCounter = 0; + + private AdHocDataReader reader = new AdHocDataReader(); + + class RotatingQueue extends LinkedBlockingQueue { + public RotatingQueue(Random r) { + random = r; + } + + @Override + public E poll() { + int toRotate = 0; + int s = size(); + if (s > 0) { + toRotate = random.nextInt(s); + } + while (true) { + E taken; + + taken = super.poll(); + if (taken == null) { + return null; + } + if (--toRotate > 0) { + offer(taken); // Ignoring impossible false status. + continue; + } + return taken; + } + } + + @Override + public E poll(long l, TimeUnit u) { + throw new IllegalStateException("Not implemented"); + } + + @Override + public E take() throws InterruptedException { + E taken = poll(); + if (taken == null) { + return super.take(); + } + return taken; + } + } + + + /** + * A class to keep track of the Pages we work with. 
+ */ + private class Page { + // private Page parent; + + private FreenetURI uri; + private int level = 0; + + Page(FreenetURI u, Page p) { + // parent = p, + uri = u; + if (p != null) { + level = p.level + 1; + } + } + + FreenetURI getURI() { + return uri; + } + + int getLevel() { + return level; + } + + File getFile() { + return new File(directory, getURI().toString().replace("/", "__")); + } + } + + private static class NotImplementedYet + extends UnsupportedOperationException { + } + + private Random random = new Random(); + private RotatingQueue toFetch = new RotatingQueue(random); + private RotatingQueue toUploadUnfetchable = new RotatingQueue(random); + private RotatingQueue toParse = new RotatingQueue(random); + private RotatingQueue toRefetchUnfetchable = new RotatingQueue(random); + private RotatingQueue toRefetch = new RotatingQueue(random); + + private int counterFetch = 0; + private int counterUploadUnfetchable = 0; + private int counterParse = 0; + private int counterRefetchUnfetchable = 0; + private int counterRefetch = 0; + + + public synchronized final void printStatistics() { + logger.info("Statistics"); + printStatisticsLine("toFetch", counterFetch, toFetch); + printStatisticsLine("toUploadUnfetchable", counterUploadUnfetchable, toUploadUnfetchable); + printStatisticsLine("toParse", counterParse, toParse); + printStatisticsLine("toRefetchUnfetchable", counterRefetchUnfetchable, toRefetchUnfetchable); + printStatisticsLine("toRefetch", counterRefetch, toRefetch); + if (allFiles.size() > 0) { + System.out.println("To remove: " + allFiles.size()); + } + } + + private static String STATISTICS_FORMAT = "%-21s%7d%6d%5d%5d%6d%6d%5d%5d"; + public final void printStatisticsLine(String r, int counter, RotatingQueue rqp) { + if (rqp.size() > 0 || counter > 0) { + int arr[] = new int[12]; + for (Page p : rqp) { + arr[p.level]++; + } + System.out.println(new Formatter().format(STATISTICS_FORMAT, r, + counter, + rqp.size(), + arr[0], + arr[1], + arr[2], + 
arr[3], + arr[4], + arr[5])); + } + } + + private boolean fetch(final Page page) { + int counter; + synchronized (this) { + counter = ++getterCounter; + } + final String token = "Getter" + counter; + final ClientGet getter = new ClientGet(page.getURI().toString(), token); + getter.setPriority(Priority.prefetch); + getter.setVerbosity(Verbosity.NONE); + final boolean[] results = new boolean[1]; + results[0] = false; + FcpAdapter listener = new FcpAdapter() { + @Override + public void receivedAllData(FcpConnection c, AllData ad) { + assert c == connection; + assert ad != null; + if (!token.equals(ad.getIdentifier())) { + return; + } + logger.entering(DownloadAllOnce.class.toString(), + "receivedAllData", + "receivedAllData for " + token); + try { + Files.copy(ad.getPayloadInputStream(), + page.getFile().toPath(), + StandardCopyOption.REPLACE_EXISTING); + } catch (IOException ioe) { + page.getFile().delete(); + synchronized (getter) { + getter.notify(); + } + return; + } + results[0] = true; + synchronized (getter) { + getter.notify(); + } + } + + @Override + public void receivedGetFailed(FcpConnection c, GetFailed gf) { + assert c == connection; + assert gf != null; + if (!token.equals(gf.getIdentifier())) { + return; + } + synchronized (getter) { + getter.notify(); + } + logger.warning("receivedGetFailed for " + token + " (" + page.getURI() + ")."); + } + + @Override + public void receivedSimpleProgress(FcpConnection c, + net.pterodactylus.fcp.SimpleProgress sp) { + assert c == connection; + assert sp != null; + if (!token.equals(sp.getIdentifier())) { + return; + } + logger.finest("Progress for " + token + " (" + sp.getSucceeded() + "/" + sp.getRequired() + "/" + sp.getTotal() + ")."); + } + }; + connection.addFcpListener(listener); + try { + connection.sendMessage(getter); + } catch (IOException e) { + e.printStackTrace(); + return false; + } + synchronized (getter) { + try { + getter.wait(); + } catch (InterruptedException e) { + e.printStackTrace(); + return 
false; + } + } + connection.removeFcpListener(listener); + + return results[0]; + } + + private void parse(final Page page) { + try { + reader.readAndProcessYamlData(new FileInputStream(page.getFile()), + new AdHocDataReader.UriProcessor() { + @Override + public FreenetURI getURI() { + return page.getURI(); + } + + @Override + public int getLevel() { + return page.getLevel(); + } + + Set seen = new HashSet(); + @Override + public boolean processUri(FreenetURI uri) { + if (seen.contains(uri)) { + return false; + } + seen.add(uri); + handleNew(new Page(uri, page)); + return true; + } + + @Override + public void uriSeen() {} + + @Override + public void stringSeen() {} + + @Override + public void childrenSeen(int level, int foundChildren) {} + + + }, + page.getLevel()); + } catch (IOException ioe) { + page.getFile().delete(); + } + } + + private boolean upload(final Page page) { + final boolean[] successfuls = new boolean[1]; + successfuls[0] = false; + int counter; + synchronized (this) { + counter = ++uploadCounter; + } + final String identifier = "Upload" + counter; + final ClientPut putter = new ClientPut("CHK@", identifier); + putter.setEarlyEncode(true); + putter.setPriority(net.pterodactylus.fcp.Priority.bulkSplitfile); + putter.setVerbosity(Verbosity.NONE); + final long dataLength = page.getFile().length(); + putter.setDataLength(dataLength); + + final FcpAdapter listener = new FcpAdapter() { + @Override + public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { + assert c == connection; + assert uriGenerated != null; + String identifier = uriGenerated.getIdentifier(); + FreenetURI chk = page.getURI(); + FreenetURI generatedURI; + try { + generatedURI = new FreenetURI(uriGenerated.getURI()); + } catch (MalformedURLException e) { + logger.severe("Were supposed to resurrect " + chk + + " but the URI calculated to " + uriGenerated.getURI() + + " that is not possible to convert to an URI. 
Will upload anyway."); + return; + } + if (!generatedURI.equals(chk)) { + logger.severe("Were supposed to resurrect " + chk + + " but the URI calculated to " + uriGenerated.getURI() + ". " + + "Will upload anyway."); + } else { + logger.finest("Resurrecting " + chk); + } + } + + @Override + public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) { + assert c == connection; + assert putSuccessful != null; + String identifier = putSuccessful.getIdentifier(); + FreenetURI chk = page.getURI(); + FreenetURI generatedURI = null; + try { + try { + generatedURI = new FreenetURI(putSuccessful.getURI()); + } catch (MalformedURLException e) { + logger.severe("Uploaded " + putSuccessful.getURI() + + " that is not possible to convert to an URI."); + return; + } + if (!generatedURI.equals(chk)) { + logger.severe("Uploaded " + putSuccessful.getURI() + + " while supposed to upload " + chk + + ". "); + return; + } + logger.finest("Resurrected " + chk); + successfuls[0] = true; + } finally { + synchronized (putter) { + putter.notify(); + } + } + } + + @Override + public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { + assert c == connection; + assert putFailed != null; + String identifier = putFailed.getIdentifier(); + FreenetURI chk = page.getURI(); + logger.severe("Uploaded " + chk + " failed."); + synchronized (putter) { + putter.notify(); + } + } + }; + connection.addFcpListener(listener); + FileInputStream in; + try { + in = new FileInputStream(page.getFile()); + putter.setPayloadInputStream(in); + connection.sendMessage(putter); + synchronized (putter) { + putter.wait(); + } + in.close(); + in = null; + } catch (IOException | NullPointerException e) { + e.printStackTrace(); + logger.warning("Upload failed for " + page.getFile()); + } catch (InterruptedException e) { + e.printStackTrace(); + logger.warning("Upload interrupted for " + page.getFile()); + } finally { + connection.removeFcpListener(listener); + } + return successfuls[0]; + } 
+ + private void doRefetchUnfetchable(Page page) { + if (fetch(page)) { + add(toParse, page); + } else { + add(toRefetchUnfetchable, page); + } + counterRefetchUnfetchable++; + } + + private void doRefetch(Page page) { + if (fetch(page)) { + add(toRefetch, page); + } else { + handleUnfetchable(page); + } + counterRefetch++; + } + + private void handleNew(Page page) { + if (page.getFile().exists()) { + page.getFile().setLastModified(System.currentTimeMillis()); + allFiles.remove(page.getFile()); + add(toParse, page); + } else { + add(toFetch, page); + } + } + + private void doFetch(Page page) { + if (fetch(page)) { + add(toParse, page); + } else { + handleUnfetchable(page); + } + counterFetch++; + } + + private void doParse(Page page) { + parse(page); + add(toRefetch, page); + counterParse++; + } + + private void handleUnfetchable(Page page) { + if (page.getFile().exists()) { + add(toUploadUnfetchable, page); + } else { + add(toRefetchUnfetchable, page); + } + } + + private void doUploadUnfetchable(Page page) { + if (upload(page)) { + add(toRefetch, page); + } else { + add(toRefetchUnfetchable, page); + } + counterUploadUnfetchable++; + } + + + private void add(RotatingQueue whereto, Page p) { + whereto.offer(p); + } + + private class CleanupOldFiles implements Runnable { + ScheduledFuture handle = null; + + public ScheduledFuture setHandle(ScheduledFuture h) { + handle = h; + return h; + } + + public void run() { + if (toParse.size() > 0) { + // Don't delete anything if the parsing is not completed. + return; + } + if (allFiles.size() == 0) { + if (handle != null) { + handle.cancel(true); + handle = null; + } + return; + } + // Find the oldest one. 
+ long oldestAge = Long.MAX_VALUE; + File oldestFile = null; + for (File f : allFiles) { + if (f.lastModified() < oldestAge) { + oldestAge = f.lastModified(); + oldestFile = f; + } + } + allFiles.remove(oldestFile); + System.out.println("Removing file " + oldestFile); + oldestFile.delete(); + } + } + + private abstract class ProcessSomething implements Runnable { + protected abstract void process(); + + public void run() { + try { + process(); + } catch (Exception e) { + System.out.println("Class " + this + " threw exception: " + e); + e.printStackTrace(); + } + } + } + + private class ProcessParse extends ProcessSomething { + protected void process() { + Page page = toParse.poll(); + if (page != null) { + doParse(page); + } + } + } + + private class ProcessUploadUnfetchable extends ProcessSomething { + protected void process() { + Page page = toUploadUnfetchable.poll(); + if (page != null) { + doUploadUnfetchable(page); + return; + } + } + } + + /** + * This is the bulk of all fetches. + * + * Mostly Fetch, if any, but sometimes one of the refetches. 
+ */ + private class ProcessFetches extends ProcessSomething { + protected void process() { + int refetchable = toRefetch.size() + toRefetchUnfetchable.size(); + if (random.nextInt(1 + refetchable) < 1000 + toFetch.size() * toFetch.size() / 100) { + Page page = toFetch.poll(); + if (page != null) { + logger.finest("Fetch Fetch"); + doFetch(page); + return; + } + } + + if (random.nextInt(1 + refetchable) < toRefetchUnfetchable.size()) { + Page page = toRefetchUnfetchable.poll(); + if (page != null) { + logger.finest("Fetch RefetchUnfetchable"); + doRefetchUnfetchable(page); + return; + } + } + + Page page = toRefetch.poll(); + if (page != null) { + logger.finest("Fetch Refetch"); + doRefetch(page); + return; + } + } + } + + private class ProcessRefetchUnfetchable extends ProcessSomething { + protected void process() { + Page page = toRefetchUnfetchable.poll(); + if (page != null) { + doRefetchUnfetchable(page); + return; + } + } + } + + private class ProcessRefetch extends ProcessSomething { + protected void process() { + Page page = toRefetch.poll(); + if (page != null) { + doRefetch(page); + return; + } + } + } + + private void run(FreenetURI u) { + executors = Executors.newScheduledThreadPool(10); + Set> futures = new HashSet>(); + directory = new File("library-download-all-once-db"); + if (directory.exists()) { + allFiles.addAll(Arrays.asList(directory.listFiles())); + CleanupOldFiles cleanUp = new CleanupOldFiles(); + futures.add(cleanUp.setHandle(executors.scheduleWithFixedDelay(cleanUp, 500, 1, TimeUnit.MINUTES))); + } else { + directory.mkdir(); + } + futures.add(executors.scheduleWithFixedDelay(new Runnable() { + public void run() { + printStatistics(); + } + }, 10, 30, TimeUnit.SECONDS)); + for (int i = 0; i < 9; i++) { + futures.add(executors.scheduleWithFixedDelay(new ProcessFetches(), 20 + i, 4, TimeUnit.SECONDS)); + } + futures.add(executors.scheduleWithFixedDelay(new ProcessRefetchUnfetchable(), 240, 1, TimeUnit.MINUTES)); + 
futures.add(executors.scheduleWithFixedDelay(new ProcessRefetch(), 500, 33, TimeUnit.SECONDS)); + for (int i = 0; i < 3; i++) { + futures.add(executors.scheduleWithFixedDelay(new ProcessUploadUnfetchable(), 40 + i, 2, TimeUnit.SECONDS)); + } + futures.add(executors.scheduleWithFixedDelay(new ProcessParse(), 2, 2, TimeUnit.SECONDS)); + FcpSession session; + try { + session = new FcpSession("DownloadAllOnceFor" + u); + } catch (IllegalStateException | IOException e1) { + e1.printStackTrace(); + return; + } + try { + run2(session, u); + } finally { + waitTermination(TimeUnit.SECONDS.toMillis(1)); + logger.info("Shutdown with " + futures.size() + " processors."); + executors.shutdown(); + waitTermination(TimeUnit.SECONDS.toMillis(2000)); + for (Iterator> futureIterator = futures.iterator(); + futureIterator.hasNext(); ) { + ScheduledFuture future = futureIterator.next(); + if (future.isDone()) { + futureIterator.remove(); + } + } + logger.info("Shutdown now (after long wait) with " + futures.size() + " processors left."); + executors.shutdownNow(); + executors = null; + session.close(); + session = null; + logger.info("Shutdown now completed."); + } + } + + private void run2(FcpSession session, FreenetURI uri) { + connection = session.getConnection(); + if (connection == null) { + throw new IllegalArgumentException("No connection."); + } + final SubscribeUSK subscriber = new SubscribeUSK(uri + "-1", "USK"); + subscriber.setActive(true); + final int[] editions = new int[1]; + final FreenetURI[] newUris = new FreenetURI[1]; + editions[0] = 0; + FcpAdapter listener = new FcpAdapter() { + @Override + public void receivedSubscribedUSKUpdate(FcpConnection fcpConnection, SubscribedUSKUpdate subscribedUSKUpdate) { + assert fcpConnection == connection; + FreenetURI newUri; + try { + newUri = new FreenetURI(subscribedUSKUpdate.getURI()); + } catch (MalformedURLException e) { + throw new RuntimeException(e); + } + if (subscribedUSKUpdate.isNewKnownGood() + && 
!newUri.equals(newUris[0])) { + newUris[0] = newUri; + editions[0] = subscribedUSKUpdate.getEdition(); + synchronized (subscriber) { + subscriber.notify(); + } + } + } + }; + connection.addFcpListener(listener); + + synchronized (subscriber) { + try { + connection.sendMessage(subscriber); + subscriber.wait(); // Wait until found + handleNew(new Page(newUris[0], null)); + subscriber.wait(); // Work until next one found + System.out.println("Next edition seen."); + } catch (InterruptedException e) { + throw new RuntimeException("Subscription interrupted."); + } catch (IOException e) { + throw new RuntimeException("Subscription can't write."); + } + } + } + + private void waitTermination(long ms) { + try { + executors.awaitTermination(ms, TimeUnit.MILLISECONDS); + } catch (InterruptedException e) { + throw new RuntimeException("Waiting for jobs."); + } + } + + public static void main(String[] argv) throws InterruptedException { + FreenetURI u; + try { + u = new FreenetURI(argv[0]); + } catch (MalformedURLException e) { + e.printStackTrace(); + System.exit(2); + return; + } + + new DownloadAllOnce().run(u); + } +} From 8d62d9b0b0634520a064d3d114f24f2ec02bd78a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Mon, 18 Dec 2017 08:20:56 +0100 Subject: [PATCH 145/180] Work started with removal of terms. 
--- .../library/uploader/DirectoryUploader.java | 106 +++++++++++++----- 1 file changed, 75 insertions(+), 31 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index f55a9d6d..388106db 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -130,7 +130,8 @@ public void run() { static final String EDITION_FILENAME = "library.index.last-edition"; static final String LAST_DISK_FILENAME = "library.index.lastpushed.disk"; - + static final String REMOVED_TERMS_FILENAME = "terms.to.remove.disk"; + static final String BASE_FILENAME_PUSH_DATA = "library.index.data."; ProtoIndexSerialiser srlDisk = null; @@ -197,7 +198,8 @@ protected void mergeToFreenet(File diskDir) { (LiveArchiver,SimpleProgress>)(s.getChildSerialiser()); ProtoIndexComponentSerialiser leaf = ProtoIndexComponentSerialiser.get(ProtoIndexComponentSerialiser.FMT_FILE_LOCAL, archiver); String f = DirectoryUploader.readStringFrom(new File(diskDir, LAST_DISK_FILENAME)); - if(f == null) { + String rf = DirectoryUploader.readStringFrom(new File(diskDir, REMOVED_TERMS_FILENAME)); + if(f == null && rf == null) { if(diskDir.list().length == 0) { System.err.println("Directory " + diskDir + " is empty - removing. 
Nothing to merge."); diskDir.delete(); @@ -209,20 +211,38 @@ protected void mergeToFreenet(File diskDir) { } ProtoIndex idxDisk = null; - try { - PullTask pull = new PullTask(f); - System.out.println("Pulling previous index "+f+" from disk so can update it."); - s.pull(pull); - System.out.println("Pulled previous index "+f+" from disk - updating..."); - idxDisk = pull.data; - if(idxDisk.getSerialiser().getLeafSerialiser() != archiver) - throw new IllegalStateException("Different serialiser: "+idxDisk.getSerialiser()+" should be "+archiver); - } catch (TaskAbortException e) { - System.err.println("Failed to download previous index for spider update: "+e); - e.printStackTrace(); - return; + if (f != null) { + try { + PullTask pull = new PullTask(f); + System.out.println("Pulling previous index "+f+" from disk so can update it."); + s.pull(pull); + idxDisk = pull.data; + if(idxDisk.getSerialiser().getLeafSerialiser() != archiver) + throw new IllegalStateException("Different serialiser: "+idxDisk.getSerialiser()+" should be "+archiver); + } catch (TaskAbortException e) { + System.err.println("Failed to download previous index for spider update: "+e); + e.printStackTrace(); + return; + } } - mergeToFreenet(idxDisk, diskDir); + + ProtoIndex removeIdxDisk = null; + if (rf != null) { + try { + PullTask pull = new PullTask(rf); + System.out.println("Pulling index " + f + " with terms for removal from disk."); + s.pull(pull); + removeIdxDisk = pull.data; + if (removeIdxDisk.getSerialiser().getLeafSerialiser() != archiver) + throw new IllegalStateException("Different serialiser: " + removeIdxDisk.getSerialiser() + " should be " + archiver); + } catch (TaskAbortException e) { + System.err.println("Failed to download previous index for spider update: " + e); + e.printStackTrace(); + return; + } + } + + mergeToFreenet(idxDisk, removeIdxDisk, diskDir); } /** Delete everything in a directory. 
Only use this when we are @@ -265,11 +285,18 @@ > new Random(fileChanged).nextDouble()) { private final Object inflateSync = new Object(); - /** Merge from an on-disk index to an on-Freenet index. - * @param diskToMerge The on-disk index. - * @param diskDir The folder the on-disk index is stored in. + /** + * Merge from an on-disk index to an on-Freenet index. + * + * @param diskToMerge + * The on-disk index with new terms. None if null. + * @param removeIdxDisk + * The on-disk index with terms to remove. None if null. + * @param diskDir + * The folder the on-disk index is stored in. */ - protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { + protected void mergeToFreenet(ProtoIndex diskToMerge, ProtoIndex removeIdxDisk, File diskDir) { + assert diskToMerge != null || removeIdxDisk != null; if (lastUploadURI == null) { try { lastUploadURI = new FreenetURI(readStringFrom(new File(LAST_URL_FILENAME))); @@ -281,9 +308,11 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { makeFreenetSerialisers(); - updateOverallMetadata(diskToMerge); + if (diskToMerge != null) { + updateOverallMetadata(diskToMerge); + } - final SkeletonBTreeMap> newtrees = diskToMerge.ttab; + final SkeletonBTreeMap> newtrees = diskToMerge != null ? 
diskToMerge.ttab : new SkeletonBTreeMap>(12); // Do the upload @@ -293,20 +322,19 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { try { long mergeStartTime = System.currentTimeMillis(); assert(idxFreenet.ttab.isBare()); - Iterator it = - diskToMerge.ttab.keySetAutoDeflate().iterator(); - TreeSet terms = new TreeSet(); - while(it.hasNext()) terms.add(it.next()); + TreeSet terms = getAllTerms(diskToMerge); + TreeSet termsToRemove = getAllTerms(removeIdxDisk); System.out.println("Merging " - + terms.size() - + " terms from disk to Freenet..."); - assert(terms.size() == diskToMerge.ttab.size()); + + terms.size() + + " terms from disk to Freenet and removing " + + termsToRemove.size() + + " terms..."); assert(idxFreenet.ttab.isBare()); - assert(diskToMerge.ttab.isBare()); long entriesAdded = terms.size(); + long entriesRemoved = termsToRemove.size(); // Run the actual merge. System.out.println("Start update"); - idxFreenet.ttab.update(terms, null, clo, new TaskAbortExceptionConvertor()); + idxFreenet.ttab.update(terms, termsToRemove, clo, new TaskAbortExceptionConvertor()); assert(idxFreenet.ttab.isBare()); // Deflate the main tree. 
System.out.println("Start deflate"); @@ -327,7 +355,7 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { System.out.println("Done waiting"); long mergeEndTime = System.currentTimeMillis(); - System.out.println(entriesAdded + " entries merged in " + (mergeEndTime-mergeStartTime) + " ms, root at " + task4.meta); + System.out.println(entriesAdded + " entries added and " + entriesRemoved + " entries removed in " + (mergeEndTime - mergeStartTime) + " ms, root at " + task4.meta); FreenetURI uri; if (task4.meta instanceof FreenetURI) { uri = (FreenetURI) task4.meta; @@ -341,6 +369,7 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { newtrees.deflate(); diskToMerge = null; terms = null; + termsToRemove = null; System.out.println("Finished with disk index "+diskDir); removeAll(diskDir); } @@ -357,6 +386,21 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, File diskDir) { } } + private TreeSet getAllTerms(ProtoIndex index) { + TreeSet terms = new TreeSet(); + if (index != null) { + assert (index.ttab.isBare()); + Iterator it = + index.ttab.keySetAutoDeflate().iterator(); + while (it.hasNext()) { + terms.add(it.next()); + } + assert (terms.size() == index.ttab.size()); + assert (index.ttab.isBare()); + } + return terms; + } + static String readFileLine(final String filename) { File f = new File(filename); FileInputStream fis; From 0b90c460b2d8bd6368216d924e30f1c9bc26b067 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 3 Mar 2018 13:44:43 +0100 Subject: [PATCH 146/180] Reduce the days between USKs. This is a necessity since if was noticed that pages were forgotten about between USKs. 
--- uploader/src/freenet/library/uploader/DirectoryUploader.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index 388106db..ca835bca 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -109,7 +109,7 @@ public void run() { * to avoid too many USKs created (saving time for the creation * and for the clients). */ - private static final int MAX_DAYS_WITHOUT_NEW_USK = 8; + private static final int MAX_DAYS_WITHOUT_NEW_USK = 4; static final String DISK_DIR_PREFIX = "library-temp-index-"; /** Directory the current idxDisk is saved in. */ From 712c2287dbc8e07c430d27134506fb8430c83099 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Wed, 17 Jan 2018 21:52:40 +0100 Subject: [PATCH 147/180] Avoid fetching items not connected to the latest (if possible). 
--- .../freenet/library/uploader/DownloadAllPerpetually.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAllPerpetually.java b/uploader/src/freenet/library/uploader/DownloadAllPerpetually.java index 5478ac4a..29d7f1c1 100644 --- a/uploader/src/freenet/library/uploader/DownloadAllPerpetually.java +++ b/uploader/src/freenet/library/uploader/DownloadAllPerpetually.java @@ -980,7 +980,13 @@ public void doDownload() { objectQueue.offer(taken); continue; } - logger.finest("Rotated " + rotated + " (count to " + toRotate + ")."); + + if (!taken.hasParent(lastRoot) && rand.nextInt(100) > 0) { + objectQueue.offer(taken); + continue; + } + + logger.finest("Rotated " + rotated + " (count to " + toRotate + ")."); new GetAdapter(taken); break; } From 7c91f455eb4e3985d81a4c6ea2d097aa713bd516 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 3 Mar 2018 13:45:36 +0100 Subject: [PATCH 148/180] Don't consider a failed upload completed. --- uploader/src/freenet/library/uploader/FcpArchiver.java | 1 - 1 file changed, 1 deletion(-) diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index f907e4fd..d45fbb12 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -222,7 +222,6 @@ public void receivedSimpleProgress(FcpConnection c, if (sp.getFailed() > 0 || sp.getFatallyFailed() > 0) { System.out.println(token + "failed - aborted."); - markDone(); } progressCompleted = sp.getSucceeded(); progressTotal = sp.getTotal(); From 6a669e6da1421484bddcfa6a94ca285049b9454e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 10 Mar 2018 16:27:03 +0100 Subject: [PATCH 149/180] Renamed the old DownloadAll to a better name. 
--- uploader/src/freenet/library/uploader/DownloadAll.java | 4 ++-- .../{DownloadAllPerpetually.java => FetchAllOnce.java} | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) rename uploader/src/freenet/library/uploader/{DownloadAllPerpetually.java => FetchAllOnce.java} (99%) diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 8a14e23c..0108b431 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -22,14 +22,14 @@ public class DownloadAll { public static void main(String[] argv) { if (argv.length > 1 && argv[0].equals("--move")) { try { - new DownloadAllPerpetually(new FreenetURI(argv[1])).doMove(); + new FetchAllOnce(new FreenetURI(argv[1])).doMove(); } catch (MalformedURLException e) { e.printStackTrace(); System.exit(2); } } else { try { - new DownloadAllPerpetually(new FreenetURI(argv[0])).doDownload(); + new FetchAllOnce(new FreenetURI(argv[0])).doDownload(); } catch (MalformedURLException e) { e.printStackTrace(); System.exit(2); diff --git a/uploader/src/freenet/library/uploader/DownloadAllPerpetually.java b/uploader/src/freenet/library/uploader/FetchAllOnce.java similarity index 99% rename from uploader/src/freenet/library/uploader/DownloadAllPerpetually.java rename to uploader/src/freenet/library/uploader/FetchAllOnce.java index 29d7f1c1..9715ffdc 100644 --- a/uploader/src/freenet/library/uploader/DownloadAllPerpetually.java +++ b/uploader/src/freenet/library/uploader/FetchAllOnce.java @@ -62,12 +62,12 @@ /** * Class to download the entire index. */ -class DownloadAllPerpetually extends AdHocDataReader { +class FetchAllOnce extends AdHocDataReader { private static final int PARALLEL_JOBS = 10; private static final int PARALLEL_UPLOADS = 3; /** Logger. 
*/ - private static final Logger logger = Logger.getLogger(DownloadAllPerpetually.class.getName()); + private static final Logger logger = Logger.getLogger(FetchAllOnce.class.getName()); public final Map stillRunning = new HashMap(); private FreenetURI uri; @@ -101,7 +101,7 @@ class DownloadAllPerpetually extends AdHocDataReader { private Random rand = new Random(); private Date started = new Date(); - public DownloadAllPerpetually(FreenetURI u) { + public FetchAllOnce(FreenetURI u) { uri = u; } From e89b388bf45d3958264b9de23f766268fa78aa4f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 30 Dec 2017 09:50:59 +0100 Subject: [PATCH 150/180] Many fixes to the DownloadAllOnce. Make fetching the priority over refetches. Fixed upload bug. Separated the FCP-using processing from the rest. Increased the intensity for non-FCP operations. Also increased the max amount of uploads in parallel. Don't do deletes when there are still fetches to do. Also delete a little quicker. 12 files per minute. Saving uploads. Fixed logging. Also fixed formatting. Prioritize refetches of unfetchable over already fetched if not recently failed. Corrected deferring refetches. Fixed the upload. Also added loops to debug the take down of the executors. Simplified the takedown. This relies on the Executor built-in mechanism. Parse at full speed. Fetch also to upload and save regularly. Log after operations to include result. Correction in the logging. Formulations in log messages. Allow a pointer to the directory with all pages. 
--- .../library/uploader/DownloadAllOnce.java | 1496 ++++++++++------- 1 file changed, 861 insertions(+), 635 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAllOnce.java b/uploader/src/freenet/library/uploader/DownloadAllOnce.java index f3bc807e..2d845075 100644 --- a/uploader/src/freenet/library/uploader/DownloadAllOnce.java +++ b/uploader/src/freenet/library/uploader/DownloadAllOnce.java @@ -13,30 +13,21 @@ import java.io.File; import java.io.FileInputStream; +import java.io.FileOutputStream; import java.io.IOException; -import java.io.InputStream; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; import java.net.MalformedURLException; import java.nio.file.Files; +import java.nio.file.FileAlreadyExistsException; import java.nio.file.StandardCopyOption; -import java.nio.file.Paths; -import java.text.MessageFormat; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; import java.util.Date; import java.util.Formatter; -import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; import java.util.Random; import java.util.Set; -import java.util.WeakHashMap; -import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ScheduledExecutorService; @@ -45,6 +36,7 @@ import java.util.logging.Level; import java.util.logging.Logger; +import freenet.library.io.FreenetURI; import net.pterodactylus.fcp.AllData; import net.pterodactylus.fcp.ClientGet; import net.pterodactylus.fcp.ClientPut; @@ -58,10 +50,6 @@ import net.pterodactylus.fcp.SubscribedUSKUpdate; import net.pterodactylus.fcp.URIGenerated; import net.pterodactylus.fcp.Verbosity; -import freenet.library.io.FreenetURI; -import freenet.library.io.YamlReaderWriter; -import 
freenet.library.io.serial.Packer; -import freenet.library.io.serial.Packer.BinInfo; /** * Class to download the entire index. @@ -69,680 +57,918 @@ * When a newer USK is seen, stop the processing and exit. */ class DownloadAllOnce { - private static final int PARALLEL_JOBS = 10; - private static final int PARALLEL_UPLOADS = 3; - - /** Logger. */ - private static final Logger logger = Logger.getLogger(DownloadAllOnce.class.getName()); + /** Logger. */ + private static final Logger logger = Logger.getLogger(DownloadAllOnce.class.getName()); + + private ScheduledExecutorService FCPexecutors; + private ScheduledExecutorService otherExecutors; + private FcpConnection connection; + private boolean closingDown = false; + private File directory; + private File morePagesDirectory; + private Set allFiles = new HashSet(); + private Uploads oldUploads = new Uploads(); + private int getterCounter = 0; + private int uploadCounter = 0; + + private AdHocDataReader reader = new AdHocDataReader(); + + private static final long OPERATION_GIVE_UP_TIME = TimeUnit.HOURS.toMillis(2); + + class RotatingQueue extends LinkedBlockingQueue { + public RotatingQueue(Random r) { + random = r; + } - private ScheduledExecutorService executors; - private FcpConnection connection; - private File directory; - private Set allFiles = new HashSet(); - private int getterCounter = 0; - private int uploadCounter = 0; + @Override + public E poll() { + int toRotate = 0; + int s = size(); + if (s > 0) { + toRotate = random.nextInt(s); + } + while (true) { + E taken; + + taken = super.poll(); + if (taken == null) { + return null; + } + if (--toRotate > 0) { + offer(taken); // Ignoring impossible false status. 
+ continue; + } + return taken; + } + } - private AdHocDataReader reader = new AdHocDataReader(); + @Override + public E poll(long l, TimeUnit u) { + throw new IllegalStateException("Not implemented"); + } - class RotatingQueue extends LinkedBlockingQueue { - public RotatingQueue(Random r) { - random = r; + @Override + public E take() throws InterruptedException { + E taken = poll(); + if (taken == null) { + return super.take(); + } + return taken; + } } - @Override - public E poll() { - int toRotate = 0; - int s = size(); - if (s > 0) { - toRotate = random.nextInt(s); - } - while (true) { - E taken; - - taken = super.poll(); - if (taken == null) { - return null; - } - if (--toRotate > 0) { - offer(taken); // Ignoring impossible false status. - continue; - } - return taken; - } - } - @Override - public E poll(long l, TimeUnit u) { - throw new IllegalStateException("Not implemented"); - } - - @Override - public E take() throws InterruptedException { - E taken = poll(); - if (taken == null) { - return super.take(); - } - return taken; - } - } - - - /** - * A class to keep track of the Pages we work with. - */ - private class Page { - // private Page parent; - - private FreenetURI uri; - private int level = 0; - - Page(FreenetURI u, Page p) { - // parent = p, - uri = u; - if (p != null) { - level = p.level + 1; - } - } - - FreenetURI getURI() { - return uri; - } - - int getLevel() { - return level; - } + /** + * A class to keep track of the Pages we work with. 
+ */ + private class Page { + // private Page parent; - File getFile() { - return new File(directory, getURI().toString().replace("/", "__")); + private FreenetURI uri; + private int level = 0; + Date nextFetchAttempt = new Date(); + StringBuffer logAttempts; + private long timeToNextFetchAttempt; + + Page(FreenetURI u, Page p) { + // parent = p, + uri = u; + if (p != null) { + level = p.level + 1; + } + fetchTimerReset(); + } + + FreenetURI getURI() { + return uri; + } + + int getLevel() { + return level; + } + + File getFile() { + return new File(directory, getURI().toString().replace("/", "__")); + } + + private void calculateNextFetchAttempt() { + nextFetchAttempt = new Date(new Date().getTime() + timeToNextFetchAttempt); + } + + void fetchFailed() { + timeToNextFetchAttempt += 1 + 2 * random.nextInt(Long.valueOf(timeToNextFetchAttempt).intValue()) - timeToNextFetchAttempt / 2; + calculateNextFetchAttempt(); + logAttempts.append("Failed at ").append(new Date()).append(" and deferred to ").append(nextFetchAttempt).append("\n"); + } + + boolean fetchAvailable() { + return new Date().after(nextFetchAttempt); + } + + void fetchTimerReset() { + timeToNextFetchAttempt = TimeUnit.HOURS.toMillis(4); + logAttempts = new StringBuffer(); + logAttempts.append("Deferred to ").append(new Date()).append("\n"); + calculateNextFetchAttempt(); + } } - } - - private static class NotImplementedYet - extends UnsupportedOperationException { - } - - private Random random = new Random(); - private RotatingQueue toFetch = new RotatingQueue(random); - private RotatingQueue toUploadUnfetchable = new RotatingQueue(random); - private RotatingQueue toParse = new RotatingQueue(random); - private RotatingQueue toRefetchUnfetchable = new RotatingQueue(random); - private RotatingQueue toRefetch = new RotatingQueue(random); - - private int counterFetch = 0; - private int counterUploadUnfetchable = 0; - private int counterParse = 0; - private int counterRefetchUnfetchable = 0; - private int 
counterRefetch = 0; - - - public synchronized final void printStatistics() { - logger.info("Statistics"); - printStatisticsLine("toFetch", counterFetch, toFetch); - printStatisticsLine("toUploadUnfetchable", counterUploadUnfetchable, toUploadUnfetchable); - printStatisticsLine("toParse", counterParse, toParse); - printStatisticsLine("toRefetchUnfetchable", counterRefetchUnfetchable, toRefetchUnfetchable); - printStatisticsLine("toRefetch", counterRefetch, toRefetch); - if (allFiles.size() > 0) { - System.out.println("To remove: " + allFiles.size()); + + class AvoidRecentFetchesQueue extends RotatingQueue { + AvoidRecentFetchesQueue(Random r) { + super(r); + } + + public Page pollNotDeferred() { + int maxLaps = size(); + if (maxLaps > 20) { + maxLaps = 10; + } + do { + Page page = poll(); + if (page == null) { + return page; + } + if (page.fetchAvailable()) { + return page; + } + logger.finest("Skipped page deferred until " + page.nextFetchAttempt); + offer(page); // Ignored impossible false status + } while (maxLaps-- > 0); + logger.finest("Did not find any not deferred page"); + return null; + } } - } - - private static String STATISTICS_FORMAT = "%-21s%7d%6d%5d%5d%6d%6d%5d%5d"; - public final void printStatisticsLine(String r, int counter, RotatingQueue rqp) { - if (rqp.size() > 0 || counter > 0) { - int arr[] = new int[12]; - for (Page p : rqp) { - arr[p.level]++; - } - System.out.println(new Formatter().format(STATISTICS_FORMAT, r, - counter, - rqp.size(), - arr[0], - arr[1], - arr[2], - arr[3], - arr[4], - arr[5])); + + + + private Random random = new Random(); + private RotatingQueue toParse = new RotatingQueue(random); + private RotatingQueue toFetch = new RotatingQueue(random); + private AvoidRecentFetchesQueue toRefetchUnfetchable = new AvoidRecentFetchesQueue(random); + private RotatingQueue toRefetch = new RotatingQueue(random); + private AvoidRecentFetchesQueue toUploadUnfetchable = new AvoidRecentFetchesQueue(random); + + private int counterParse = 0; 
+ private int counterFetch = 0; + private int counterRefetchUnfetchable = 0; + private int counterRefetch = 0; + private int counterUploadUnfetchable = 0; + private int counterRefetchUpload = 0; + + + private static String STATISTICS_FORMAT_PREFIX = "%-21s%7d"; + + public synchronized final void logStatistics() { + StringBuffer sb = new StringBuffer(); + sb.append(statisticsLine("toParse", counterParse, toParse)); + sb.append(statisticsLine("toFetch", counterFetch, toFetch)); + sb.append(statisticsLine("toRefetchUnfetchable", counterRefetchUnfetchable, toRefetchUnfetchable)); + if (counterRefetchUpload > 0) { + sb.append(new Formatter().format(STATISTICS_FORMAT_PREFIX, + "RefetchUpload", counterRefetchUpload)).append("\n"); + } + sb.append(statisticsLine("toRefetch", counterRefetch, toRefetch)); + sb.append(statisticsLine("toUploadUnfetchable", counterUploadUnfetchable, toUploadUnfetchable)); + if (allFiles.size() > 0) { + sb.append("Files left to remove: " + allFiles.size() + "\n"); + } + if (oldUploads.size() > 0) { + sb.append("Uploads from previous run: " + oldUploads.size() + "\n"); + } + logger.info("Statistics:\n" + sb.toString() + "End Statistics."); } - } - private boolean fetch(final Page page) { - int counter; - synchronized (this) { - counter = ++getterCounter; + private static String STATISTICS_FORMAT = STATISTICS_FORMAT_PREFIX + "%6d%5d%5d%6d%6d%5d%5d\n"; + + public final String statisticsLine(String r, int counter, RotatingQueue rqp) { + if (rqp.size() > 0 || counter > 0) { + int arr[] = new int[12]; + for (Page p : rqp) { + arr[p.level]++; + } + return new Formatter() + .format(STATISTICS_FORMAT, r, counter, rqp.size(), arr[0], arr[1], arr[2], arr[3], arr[4], arr[5]) + .toString(); + } + return ""; } - final String token = "Getter" + counter; - final ClientGet getter = new ClientGet(page.getURI().toString(), token); - getter.setPriority(Priority.prefetch); - getter.setVerbosity(Verbosity.NONE); - final boolean[] results = new boolean[1]; - 
results[0] = false; - FcpAdapter listener = new FcpAdapter() { - @Override - public void receivedAllData(FcpConnection c, AllData ad) { - assert c == connection; - assert ad != null; - if (!token.equals(ad.getIdentifier())) { - return; - } - logger.entering(DownloadAllOnce.class.toString(), - "receivedAllData", - "receivedAllData for " + token); - try { - Files.copy(ad.getPayloadInputStream(), - page.getFile().toPath(), - StandardCopyOption.REPLACE_EXISTING); - } catch (IOException ioe) { - page.getFile().delete(); - synchronized (getter) { - getter.notify(); + + private boolean fetch(final Page page) { + int counter; + synchronized (this) { + counter = ++getterCounter; + } + final String token = "Getter" + counter; + final ClientGet getter = new ClientGet(page.getURI().toString(), token); + getter.setPriority(Priority.prefetch); + getter.setVerbosity(Verbosity.NONE); + final boolean[] results = new boolean[1]; + results[0] = false; + FcpAdapter listener = new FcpAdapter() { + @Override + public void receivedAllData(FcpConnection c, AllData ad) { + assert c == connection; + assert ad != null; + if (!token.equals(ad.getIdentifier())) { + return; + } + logger.entering(DownloadAllOnce.class.toString(), "receivedAllData", "receivedAllData for " + token); + try { + Files.copy(ad.getPayloadInputStream(), page.getFile().toPath(), + StandardCopyOption.REPLACE_EXISTING); + } catch (IOException ioe) { + page.getFile().delete(); + synchronized (getter) { + getter.notify(); + } + return; + } + results[0] = true; + synchronized (getter) { + getter.notify(); + } } - return; - } - results[0] = true; - synchronized (getter) { - getter.notify(); - } + + @Override + public void receivedGetFailed(FcpConnection c, GetFailed gf) { + assert c == connection; + assert gf != null; + if (!token.equals(gf.getIdentifier())) { + return; + } + synchronized (getter) { + getter.notify(); + } + logger.fine("receivedGetFailed for " + token + " (" + page.getURI() + ")."); + } + + @Override + public 
void receivedSimpleProgress(FcpConnection c, net.pterodactylus.fcp.SimpleProgress sp) { + assert c == connection; + assert sp != null; + if (!token.equals(sp.getIdentifier())) { + return; + } + logger.finest("Progress for " + token + " (" + sp.getSucceeded() + "/" + sp.getRequired() + "/" + + sp.getTotal() + ")."); + } + }; + connection.addFcpListener(listener); + try { + connection.sendMessage(getter); + } catch (IOException e) { + logger.log(Level.SEVERE, "Exception", e); + return false; } + synchronized (getter) { + try { + getter.wait(OPERATION_GIVE_UP_TIME); + } catch (InterruptedException e) { + if (!closingDown) { + logger.log(Level.SEVERE, "Exception", e); + } + return false; + } + } + connection.removeFcpListener(listener); - @Override - public void receivedGetFailed(FcpConnection c, GetFailed gf) { - assert c == connection; - assert gf != null; - if (!token.equals(gf.getIdentifier())) { - return; - } - synchronized (getter) { - getter.notify(); - } - logger.warning("receivedGetFailed for " + token + " (" + page.getURI() + ")."); + boolean result = results[0]; + if (result) { + page.fetchTimerReset(); + } else { + page.fetchFailed(); } - @Override - public void receivedSimpleProgress(FcpConnection c, - net.pterodactylus.fcp.SimpleProgress sp) { - assert c == connection; - assert sp != null; - if (!token.equals(sp.getIdentifier())) { - return; - } - logger.finest("Progress for " + token + " (" + sp.getSucceeded() + "/" + sp.getRequired() + "/" + sp.getTotal() + ")."); - } - }; - connection.addFcpListener(listener); - try { - connection.sendMessage(getter); - } catch (IOException e) { - e.printStackTrace(); - return false; - } - synchronized (getter) { - try { - getter.wait(); - } catch (InterruptedException e) { - e.printStackTrace(); - return false; - } + return result; } - connection.removeFcpListener(listener); - - return results[0]; - } - - private void parse(final Page page) { - try { - reader.readAndProcessYamlData(new FileInputStream(page.getFile()), 
- new AdHocDataReader.UriProcessor() { - @Override - public FreenetURI getURI() { - return page.getURI(); - } - - @Override - public int getLevel() { - return page.getLevel(); - } - - Set seen = new HashSet(); - @Override - public boolean processUri(FreenetURI uri) { - if (seen.contains(uri)) { - return false; - } - seen.add(uri); - handleNew(new Page(uri, page)); - return true; - } - - @Override - public void uriSeen() {} - - @Override - public void stringSeen() {} - - @Override - public void childrenSeen(int level, int foundChildren) {} - - - }, - page.getLevel()); - } catch (IOException ioe) { - page.getFile().delete(); - } - } - - private boolean upload(final Page page) { - final boolean[] successfuls = new boolean[1]; - successfuls[0] = false; - int counter; - synchronized (this) { - counter = ++uploadCounter; + + private void parse(final Page page) { + try { + reader.readAndProcessYamlData(new FileInputStream(page.getFile()), new AdHocDataReader.UriProcessor() { + @Override + public FreenetURI getURI() { + return page.getURI(); + } + + @Override + public int getLevel() { + return page.getLevel(); + } + + Set seen = new HashSet(); + + @Override + public boolean processUri(FreenetURI uri) { + if (seen.contains(uri)) { + return false; + } + seen.add(uri); + handleNew(new Page(uri, page)); + return true; + } + + @Override + public void uriSeen() { + } + + @Override + public void stringSeen() { + } + + @Override + public void childrenSeen(int level, int foundChildren) { + } + + + }, page.getLevel()); + } catch (IOException ioe) { + page.getFile().delete(); + } } - final String identifier = "Upload" + counter; - final ClientPut putter = new ClientPut("CHK@", identifier); - putter.setEarlyEncode(true); - putter.setPriority(net.pterodactylus.fcp.Priority.bulkSplitfile); - putter.setVerbosity(Verbosity.NONE); - final long dataLength = page.getFile().length(); - putter.setDataLength(dataLength); - - final FcpAdapter listener = new FcpAdapter() { - @Override - public 
void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { - assert c == connection; - assert uriGenerated != null; - String identifier = uriGenerated.getIdentifier(); - FreenetURI chk = page.getURI(); - FreenetURI generatedURI; - try { - generatedURI = new FreenetURI(uriGenerated.getURI()); - } catch (MalformedURLException e) { - logger.severe("Were supposed to resurrect " + chk + - " but the URI calculated to " + uriGenerated.getURI() + - " that is not possible to convert to an URI. Will upload anyway."); - return; - } - if (!generatedURI.equals(chk)) { - logger.severe("Were supposed to resurrect " + chk + - " but the URI calculated to " + uriGenerated.getURI() + ". " + - "Will upload anyway."); - } else { - logger.finest("Resurrecting " + chk); - } + + private boolean upload(final Page page) { + final boolean[] successfuls = new boolean[1]; + successfuls[0] = false; + int counter; + synchronized (this) { + counter = ++uploadCounter; } + final String identifier = "Upload" + counter; + final ClientPut putter = new ClientPut("CHK@", identifier); + putter.setEarlyEncode(true); + putter.setPriority(net.pterodactylus.fcp.Priority.bulkSplitfile); + putter.setVerbosity(Verbosity.NONE); + final long dataLength = page.getFile().length(); + putter.setDataLength(dataLength); + + final FcpAdapter listener = new FcpAdapter() { + @Override + public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { + assert c == connection; + assert uriGenerated != null; + if (!identifier.equals(uriGenerated.getIdentifier())) { + return; + } + FreenetURI chk = page.getURI(); + FreenetURI generatedURI; + try { + generatedURI = new FreenetURI(uriGenerated.getURI()); + } catch (MalformedURLException e) { + logger.severe( + "Were supposed to resurrect " + chk + " but the URI calculated to " + uriGenerated.getURI() + + " that is not possible to convert to an URI. 
Will upload anyway."); + return; + } + if (!generatedURI.equals(chk)) { + logger.severe("Were supposed to resurrect " + chk + " but the URI calculated to " + + uriGenerated.getURI() + ". " + "Will upload anyway."); + } else { + logger.finest("Resurrecting " + chk); + } + } - @Override - public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) { - assert c == connection; - assert putSuccessful != null; - String identifier = putSuccessful.getIdentifier(); - FreenetURI chk = page.getURI(); - FreenetURI generatedURI = null; - try { - try { - generatedURI = new FreenetURI(putSuccessful.getURI()); - } catch (MalformedURLException e) { - logger.severe("Uploaded " + putSuccessful.getURI() + - " that is not possible to convert to an URI."); - return; - } - if (!generatedURI.equals(chk)) { - logger.severe("Uploaded " + putSuccessful.getURI() + - " while supposed to upload " + chk + - ". "); - return; - } - logger.finest("Resurrected " + chk); - successfuls[0] = true; - } finally { + @Override + public void receivedPutSuccessful(FcpConnection c, PutSuccessful putSuccessful) { + assert c == connection; + assert putSuccessful != null; + if (!identifier.equals(putSuccessful.getIdentifier())) { + return; + } + FreenetURI chk = page.getURI(); + FreenetURI generatedURI = null; + try { + try { + generatedURI = new FreenetURI(putSuccessful.getURI()); + } catch (MalformedURLException e) { + logger.severe("Uploaded " + putSuccessful.getURI() + " that is not possible to convert to an URI."); + return; + } + if (!generatedURI.equals(chk)) { + logger.severe("Uploaded " + putSuccessful.getURI() + " while supposed to upload " + chk + ". 
"); + return; + } + logger.finer("Uploaded " + chk); + successfuls[0] = true; + } finally { + synchronized (putter) { + putter.notify(); + } + } + } + + @Override + public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { + assert c == connection; + assert putFailed != null; + if (!identifier.equals(putFailed.getIdentifier())) { + return; + } + FreenetURI chk = page.getURI(); + logger.severe("Uploaded " + chk + " failed."); + synchronized (putter) { + putter.notify(); + } + } + }; + connection.addFcpListener(listener); + FileInputStream in; + try { + in = new FileInputStream(page.getFile()); + putter.setPayloadInputStream(in); + connection.sendMessage(putter); synchronized (putter) { - putter.notify(); + putter.wait(OPERATION_GIVE_UP_TIME); } - } + in.close(); + in = null; + } catch (IOException | NullPointerException e) { + logger.log(Level.WARNING, "Upload failed for " + page.getFile(), e); + } catch (InterruptedException e) { + if (!closingDown) { + logger.log(Level.WARNING, "Upload interrupted for " + page.getFile(), e); + } + return false; + } finally { + connection.removeFcpListener(listener); } + return successfuls[0]; + } - @Override - public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { - assert c == connection; - assert putFailed != null; - String identifier = putFailed.getIdentifier(); - FreenetURI chk = page.getURI(); - logger.severe("Uploaded " + chk + " failed."); - synchronized (putter) { - putter.notify(); - } + private boolean doRefetchUnfetchable(Page page) { + counterRefetchUnfetchable++; + boolean result = fetch(page); + if (result) { + add(toParse, page); + } else { + add(toRefetchUnfetchable, page); } - }; - connection.addFcpListener(listener); - FileInputStream in; - try { - in = new FileInputStream(page.getFile()); - putter.setPayloadInputStream(in); - connection.sendMessage(putter); - synchronized (putter) { - putter.wait(); - } - in.close(); - in = null; - } catch (IOException | NullPointerException e) { - 
e.printStackTrace(); - logger.warning("Upload failed for " + page.getFile()); - } catch (InterruptedException e) { - e.printStackTrace(); - logger.warning("Upload interrupted for " + page.getFile()); - } finally { - connection.removeFcpListener(listener); - } - return successfuls[0]; - } - - private void doRefetchUnfetchable(Page page) { - if (fetch(page)) { - add(toParse, page); - } else { - add(toRefetchUnfetchable, page); + return result; } - counterRefetchUnfetchable++; - } - - private void doRefetch(Page page) { - if (fetch(page)) { - add(toRefetch, page); - } else { - handleUnfetchable(page); + + private boolean doRefetchToUpload(Page page) { + counterRefetchUpload++; + boolean result = fetch(page); + if (result) { + add(toRefetch, page); + } else { + add(toUploadUnfetchable, page); + } + return result; } - counterRefetch++; - } - - private void handleNew(Page page) { - if (page.getFile().exists()) { - page.getFile().setLastModified(System.currentTimeMillis()); - allFiles.remove(page.getFile()); - add(toParse, page); - } else { - add(toFetch, page); + + private boolean doRefetch(Page page) { + counterRefetch++; + boolean result = fetch(page); + if (result) { + add(toRefetch, page); + } else { + handleUnfetchable(page); + } + return result; } - } - private void doFetch(Page page) { - if (fetch(page)) { - add(toParse, page); - } else { - handleUnfetchable(page); + private void handleNew(Page page) { + if (page.getFile().exists()) { + page.getFile().setLastModified(System.currentTimeMillis()); + allFiles.remove(page.getFile()); + add(toParse, page); + } else { + add(toFetch, page); + } } - counterFetch++; - } - - private void doParse(Page page) { - parse(page); - add(toRefetch, page); - counterParse++; - } - - private void handleUnfetchable(Page page) { - if (page.getFile().exists()) { - add(toUploadUnfetchable, page); - } else { - add(toRefetchUnfetchable, page); + + private boolean doFetch(Page page) { + counterFetch++; + boolean result = fetch(page); + if 
(result) { + add(toParse, page); + } else { + handleUnfetchable(page); + } + return result; } - } - private void doUploadUnfetchable(Page page) { - if (upload(page)) { - add(toRefetch, page); - } else { - add(toRefetchUnfetchable, page); + private void doParse(Page page) { + counterParse++; + parse(page); + if (oldUploads.check(page.getURI())) { + add(toUploadUnfetchable, page); + } else { + add(toRefetch, page); + } } - counterUploadUnfetchable++; - } + private void handleUnfetchable(Page page) { + if (page.getFile().exists()) { + add(toUploadUnfetchable, page); + } else { + add(toRefetchUnfetchable, page); + } + } - private void add(RotatingQueue whereto, Page p) { - whereto.offer(p); - } + private boolean doUploadUnfetchable(Page page) { + counterUploadUnfetchable++; + boolean result = upload(page); + add(toRefetch, page); + return result; + } - private class CleanupOldFiles implements Runnable { - ScheduledFuture handle = null; - public ScheduledFuture setHandle(ScheduledFuture h) { - handle = h; - return h; + private void add(RotatingQueue whereto, Page p) { + whereto.offer(p); } - public void run() { - if (toParse.size() > 0) { - // Don't delete anything if the parsing is not completed. - return; - } - if (allFiles.size() == 0) { - if (handle != null) { - handle.cancel(true); - handle = null; - } - return; - } - // Find the oldest one. 
- long oldestAge = Long.MAX_VALUE; - File oldestFile = null; - for (File f : allFiles) { - if (f.lastModified() < oldestAge) { - oldestAge = f.lastModified(); - oldestFile = f; - } - } - allFiles.remove(oldestFile); - System.out.println("Removing file " + oldestFile); - oldestFile.delete(); - } - } - - private abstract class ProcessSomething implements Runnable { - protected abstract void process(); - - public void run() { - try { - process(); - } catch (Exception e) { - System.out.println("Class " + this + " threw exception: " + e); - e.printStackTrace(); - } + private class CleanupOldFiles implements Runnable { + ScheduledFuture handle = null; + + public ScheduledFuture setHandle(ScheduledFuture h) { + handle = h; + return h; + } + + public void run() { + if (toParse.size() > 0) { + // Don't delete anything if the parsing is not completed. + return; + } + if (toFetch.size() > 0) { + // Don't delete anything if the fetching is not completed. + return; + } + if (allFiles.size() == 0) { + if (handle != null) { + handle.cancel(true); + handle = null; + } + return; + } + // Find the oldest one. 
+ long oldestAge = Long.MAX_VALUE; + File oldestFile = null; + for (File f : allFiles) { + if (!f.exists()) { + allFiles.remove(f); + try { + oldUploads.check(new FreenetURI(f.getName())); + } catch (MalformedURLException e) { + logger.log(Level.WARNING, "File " + f + " was deleted", e); + } + return; + } + if (f.lastModified() < oldestAge) { + oldestAge = f.lastModified(); + oldestFile = f; + } + } + allFiles.remove(oldestFile); + try { + oldUploads.check(new FreenetURI(oldestFile.getName())); + } catch (MalformedURLException e) { + logger.log(Level.WARNING, "Deleting file " + oldestFile, e); + } + logger.fine("Removing file " + oldestFile); + oldestFile.delete(); + } } - } - - private class ProcessParse extends ProcessSomething { - protected void process() { - Page page = toParse.poll(); - if (page != null) { - doParse(page); - } + + /** + * Class to keep track of uploads from the previous run. + */ + private class Uploads { + private Set fromPreviousRun = new HashSet(); + private final static String OLD_UPLOADS_FILENAME = "old_uploads.saved"; + + void load() { + File file = new File(directory, OLD_UPLOADS_FILENAME); + if (file.exists()) { + logger.finest("Reading file " + file); + try { + FileInputStream f = new FileInputStream(file); + ObjectInputStream ois = new ObjectInputStream(f); + fromPreviousRun = (Set) ois.readObject(); + ois.close(); + } catch (IOException e) { + logger.warning("Could not read the file " + file); + } catch (ClassCastException | ClassNotFoundException e) { + logger.warning("File " + file + " contains strange object"); + } finally { + file.delete(); + } + } else { + logger.finest("No file " + file); + } + } + + private void rotate() { + String OLD_FILENAME = OLD_UPLOADS_FILENAME + ".old"; + File oldfile = new File(directory, OLD_FILENAME); + if (oldfile.exists()) { + oldfile.delete(); + } + File file = new File(directory, OLD_UPLOADS_FILENAME); + if (file.exists()) { + file.renameTo(oldfile); + } + } + + synchronized void save() { + 
rotate(); + File file = new File(directory, OLD_UPLOADS_FILENAME); + Set set = new HashSet(); + for (Page p : toUploadUnfetchable) { + set.add(p.getURI()); + } + if (set.size() > 0) { + logger.finest("Writing file " + file); + try { + FileOutputStream f = new FileOutputStream(file); + ObjectOutputStream oos = new ObjectOutputStream(f); + oos.writeObject(set); + oos.close(); + f.close(); + } catch (IOException e) { + logger.log(Level.WARNING, "Problem writing file " + file, e); + file.delete(); + } + } else { + logger.finest("Nothing to write to file " + file); + } + } + + synchronized int size() { + return fromPreviousRun.size(); + } + + synchronized boolean check(FreenetURI uri) { + boolean retval = fromPreviousRun.contains(uri); + fromPreviousRun.remove(uri); + return retval; + } } - } - - private class ProcessUploadUnfetchable extends ProcessSomething { - protected void process() { - Page page = toUploadUnfetchable.poll(); - if (page != null) { - doUploadUnfetchable(page); - return; - } + + private abstract class ProcessSomething implements Runnable { + protected abstract void process(); + + public void run() { + try { + process(); + } catch (Exception e) { + logger.log(Level.SEVERE, "Class " + this + " threw exception: " + e, e); + } + } } - } - - /** - * This is the bulk of all fetches. - * - * Mostly Fetch, if any, but sometimes one of the refetches. 
- */ - private class ProcessFetches extends ProcessSomething { - protected void process() { - int refetchable = toRefetch.size() + toRefetchUnfetchable.size(); - if (random.nextInt(1 + refetchable) < 1000 + toFetch.size() * toFetch.size() / 100) { - Page page = toFetch.poll(); - if (page != null) { - logger.finest("Fetch Fetch"); - doFetch(page); - return; - } - } - - if (random.nextInt(1 + refetchable) < toRefetchUnfetchable.size()) { - Page page = toRefetchUnfetchable.poll(); - if (page != null) { - logger.finest("Fetch RefetchUnfetchable"); - doRefetchUnfetchable(page); - return; - } - } - - Page page = toRefetch.poll(); - if (page != null) { - logger.finest("Fetch Refetch"); - doRefetch(page); - return; - } + + private class ProcessParse extends ProcessSomething { + protected void process() { + Page page = toParse.poll(); + if (page != null) { + doParse(page); + otherExecutors.schedule(this, 0, TimeUnit.SECONDS); + } else { + otherExecutors.schedule(this, 10, TimeUnit.SECONDS); + } + } } - } - - private class ProcessRefetchUnfetchable extends ProcessSomething { - protected void process() { - Page page = toRefetchUnfetchable.poll(); - if (page != null) { - doRefetchUnfetchable(page); - return; - } + + private class ProcessUploadUnfetchable extends ProcessSomething { + protected void process() { + if (morePagesDirectory != null) { + Page page = toRefetchUnfetchable.poll(); + if (page != null) { + File fromFile = new File(morePagesDirectory, page.getFile().getName()); + try { + Files.copy(fromFile.toPath(), page.getFile().toPath()); + boolean result = doUploadUnfetchable(page); + logger.finer("Uploaded Unfetchable" + (result ? 
"" : "failed") + "."); + return; + } catch (UnsupportedOperationException uoe) { + logger.log(Level.SEVERE, "Could not copy file " + fromFile + " to " + page.getFile() + ".", uoe); + toRefetchUnfetchable.offer(page); + } catch (FileAlreadyExistsException faee) { + logger.log(Level.SEVERE, "Could not copy file " + fromFile + " to " + page.getFile() + ".", faee); + toUploadUnfetchable.offer(page); + } catch (IOException ioe) { + logger.log(Level.SEVERE, "Could not copy file " + fromFile + " to " + page.getFile() + ".", ioe); + if (page.getFile().exists()) { + page.getFile().delete(); + logger.info("Deleted partial copy " + page.getFile()); + } + toRefetchUnfetchable.offer(page); + } catch (SecurityException se) { + logger.log(Level.SEVERE, "Could not copy file " + fromFile + " to " + page.getFile() + ".", se); + toRefetchUnfetchable.offer(page); + } + } + } + + Page page = toUploadUnfetchable.poll(); + if (page != null) { + boolean result = doUploadUnfetchable(page); + logger.finer("Uploaded Unfetchable" + (result ? "" : "failed") + "."); + return; + } + } } - } - - private class ProcessRefetch extends ProcessSomething { - protected void process() { - Page page = toRefetch.poll(); - if (page != null) { - doRefetch(page); - return; - } + + /** + * This is the bulk of all fetches. + * + * Mostly Fetch, if any, but sometimes one of the refetches. + */ + private class ProcessFetches extends ProcessSomething { + protected void process() { + Page page = toFetch.poll(); + if (page != null) { + boolean result = doFetch(page); + logger.finest("Fetched Fetch" + (result ? "" : " failed") + "."); + return; + } + + page = toRefetchUnfetchable.pollNotDeferred(); + if (page != null) { + String log = page.logAttempts.toString(); + boolean result = doRefetchUnfetchable(page); + logger.finer(log + "Fetched RefetchUnfetchable" + (result ? 
"" : " failed") + "."); + return; + } + + page = toUploadUnfetchable.pollNotDeferred(); + if (page != null) { + String log = page.logAttempts.toString(); + boolean result = doRefetchToUpload(page); + logger.finer(log + "Fetched ToUpload" + (result ? "" : " failed") + "."); + return; + } + + page = toRefetch.poll(); + if (page != null) { + boolean result = doRefetch(page); + logger.finer("Fetched Refetch" + (result ? "" : " failed") + "."); + return; + } + } } - } - - private void run(FreenetURI u) { - executors = Executors.newScheduledThreadPool(10); - Set> futures = new HashSet>(); - directory = new File("library-download-all-once-db"); - if (directory.exists()) { - allFiles.addAll(Arrays.asList(directory.listFiles())); - CleanupOldFiles cleanUp = new CleanupOldFiles(); - futures.add(cleanUp.setHandle(executors.scheduleWithFixedDelay(cleanUp, 500, 1, TimeUnit.MINUTES))); - } else { - directory.mkdir(); + + private class ProcessRefetchUnfetchable extends ProcessSomething { + protected void process() { + Page page = toRefetchUnfetchable.poll(); + if (page != null) { + doRefetchUnfetchable(page); + return; + } + } } - futures.add(executors.scheduleWithFixedDelay(new Runnable() { - public void run() { - printStatistics(); + + private class ProcessRefetch extends ProcessSomething { + protected void process() { + Page page = toRefetch.poll(); + if (page != null) { + doRefetch(page); + return; + } } - }, 10, 30, TimeUnit.SECONDS)); - for (int i = 0; i < 9; i++) { - futures.add(executors.scheduleWithFixedDelay(new ProcessFetches(), 20 + i, 4, TimeUnit.SECONDS)); } - futures.add(executors.scheduleWithFixedDelay(new ProcessRefetchUnfetchable(), 240, 1, TimeUnit.MINUTES)); - futures.add(executors.scheduleWithFixedDelay(new ProcessRefetch(), 500, 33, TimeUnit.SECONDS)); - for (int i = 0; i < 3; i++) { - futures.add(executors.scheduleWithFixedDelay(new ProcessUploadUnfetchable(), 40 + i, 2, TimeUnit.SECONDS)); + + private void run(FreenetURI u, File morePagesDir) { + 
morePagesDirectory = morePagesDir; + FCPexecutors = Executors.newScheduledThreadPool(10); + otherExecutors = Executors.newScheduledThreadPool(1); + directory = new File("library-download-all-once-db"); + if (directory.exists()) { + oldUploads.load(); + allFiles.addAll(Arrays.asList(directory.listFiles())); + CleanupOldFiles cleanUp = new CleanupOldFiles(); + cleanUp.setHandle(otherExecutors.scheduleWithFixedDelay(cleanUp, 30000, 5, TimeUnit.SECONDS)); + } else { + directory.mkdir(); + } + + otherExecutors.scheduleAtFixedRate(new Runnable() { + public void run() { + logStatistics(); + } + }, 10, 30, TimeUnit.SECONDS); + for (int i = 0; i < 10; i++) { + FCPexecutors.scheduleWithFixedDelay(new ProcessFetches(), 20 + i, 4, TimeUnit.SECONDS); + } + for (int i = 0; i < 4; i++) { + FCPexecutors.scheduleWithFixedDelay(new ProcessUploadUnfetchable(), 40 + i, 1, TimeUnit.SECONDS); + } + otherExecutors.schedule(new ProcessParse(), 2000, TimeUnit.MILLISECONDS); + otherExecutors.scheduleWithFixedDelay(new Runnable() { + public void run() { + oldUploads.save(); + } + }, 100, 20, TimeUnit.MINUTES); + FcpSession session; + try { + session = new FcpSession("DownloadAllOnceFor" + u); + } catch (IllegalStateException | IOException e1) { + logger.log(Level.SEVERE, "Exception", e1); + return; + } + try { + run2(session, u); + } finally { + waitTermination(TimeUnit.SECONDS.toMillis(1)); + closingDown = true; + logger.info("Shutdown."); + FCPexecutors.shutdown(); + otherExecutors.shutdown(); + oldUploads.save(); + waitTermination(TimeUnit.MINUTES.toMillis(1) + OPERATION_GIVE_UP_TIME); + logger.info("Shutdown now (after long wait)."); + FCPexecutors.shutdownNow(); + FCPexecutors = null; + otherExecutors.shutdownNow(); + otherExecutors = null; + session.close(); + session = null; + logger.info("Shutdown now completed."); + } } - futures.add(executors.scheduleWithFixedDelay(new ProcessParse(), 2, 2, TimeUnit.SECONDS)); - FcpSession session; - try { - session = new 
FcpSession("DownloadAllOnceFor" + u); - } catch (IllegalStateException | IOException e1) { - e1.printStackTrace(); - return; - } - try { - run2(session, u); - } finally { - waitTermination(TimeUnit.SECONDS.toMillis(1)); - logger.info("Shutdown with " + futures.size() + " processors."); - executors.shutdown(); - waitTermination(TimeUnit.SECONDS.toMillis(2000)); - for (Iterator> futureIterator = futures.iterator(); - futureIterator.hasNext(); ) { - ScheduledFuture future = futureIterator.next(); - if (future.isDone()) { - futureIterator.remove(); - } - } - logger.info("Shutdown now (after long wait) with " + futures.size() + " processors left."); - executors.shutdownNow(); - executors = null; - session.close(); - session = null; - logger.info("Shutdown now completed."); + + private void run2(FcpSession session, FreenetURI uri) { + connection = session.getConnection(); + if (connection == null) { + throw new IllegalArgumentException("No connection."); + } + final SubscribeUSK subscriber = new SubscribeUSK(uri + "-1", "USK"); + subscriber.setActive(true); + final int[] editions = new int[1]; + final FreenetURI[] newUris = new FreenetURI[1]; + editions[0] = 0; + FcpAdapter listener = new FcpAdapter() { + @Override + public void receivedSubscribedUSKUpdate(FcpConnection fcpConnection, + SubscribedUSKUpdate subscribedUSKUpdate) { + assert fcpConnection == connection; + FreenetURI newUri; + try { + newUri = new FreenetURI(subscribedUSKUpdate.getURI()); + } catch (MalformedURLException e) { + throw new RuntimeException(e); + } + if (subscribedUSKUpdate.isNewKnownGood() && !newUri.equals(newUris[0])) { + newUris[0] = newUri; + editions[0] = subscribedUSKUpdate.getEdition(); + synchronized (subscriber) { + subscriber.notify(); + } + } + } + }; + connection.addFcpListener(listener); + + synchronized (subscriber) { + try { + connection.sendMessage(subscriber); + subscriber.wait(); // Wait until found + handleNew(new Page(newUris[0], null)); + subscriber.wait(); // Work until 
next one found + logger.info("Next edition seen."); + } catch (InterruptedException e) { + throw new RuntimeException("Subscription interrupted."); + } catch (IOException e) { + throw new RuntimeException("Subscription can't write."); + } + } } - } - private void run2(FcpSession session, FreenetURI uri) { - connection = session.getConnection(); - if (connection == null) { - throw new IllegalArgumentException("No connection."); + private void waitTermination(long ms) { + try { + FCPexecutors.awaitTermination(ms, TimeUnit.MILLISECONDS); + otherExecutors.awaitTermination(1 + ms / 10, TimeUnit.MILLISECONDS); + } catch (InterruptedException e) { + throw new RuntimeException("Waiting for jobs."); + } } - final SubscribeUSK subscriber = new SubscribeUSK(uri + "-1", "USK"); - subscriber.setActive(true); - final int[] editions = new int[1]; - final FreenetURI[] newUris = new FreenetURI[1]; - editions[0] = 0; - FcpAdapter listener = new FcpAdapter() { - @Override - public void receivedSubscribedUSKUpdate(FcpConnection fcpConnection, SubscribedUSKUpdate subscribedUSKUpdate) { - assert fcpConnection == connection; - FreenetURI newUri; - try { - newUri = new FreenetURI(subscribedUSKUpdate.getURI()); - } catch (MalformedURLException e) { - throw new RuntimeException(e); + + public static void main(String[] argv) throws InterruptedException { + FreenetURI u; + try { + u = new FreenetURI(argv[0]); + } catch (MalformedURLException e) { + logger.log(Level.SEVERE, "Exception", e); + System.exit(2); + return; + } + + File morePagesDir = null; + if (argv.length > 1) { + morePagesDir = new File(argv[1]); + if (!morePagesDir.exists()) { + logger.severe("Directory " + morePagesDir + " does not exist."); + System.exit(2); + return; } - if (subscribedUSKUpdate.isNewKnownGood() - && !newUri.equals(newUris[0])) { - newUris[0] = newUri; - editions[0] = subscribedUSKUpdate.getEdition(); - synchronized (subscriber) { - subscriber.notify(); - } + if (!morePagesDir.isDirectory()) { + 
logger.severe("File " + morePagesDir + " is not a directory."); + System.exit(2); + return; } } - }; - connection.addFcpListener(listener); - - synchronized (subscriber) { - try { - connection.sendMessage(subscriber); - subscriber.wait(); // Wait until found - handleNew(new Page(newUris[0], null)); - subscriber.wait(); // Work until next one found - System.out.println("Next edition seen."); - } catch (InterruptedException e) { - throw new RuntimeException("Subscription interrupted."); - } catch (IOException e) { - throw new RuntimeException("Subscription can't write."); - } - } - } - private void waitTermination(long ms) { - try { - executors.awaitTermination(ms, TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new RuntimeException("Waiting for jobs."); - } - } - - public static void main(String[] argv) throws InterruptedException { - FreenetURI u; - try { - u = new FreenetURI(argv[0]); - } catch (MalformedURLException e) { - e.printStackTrace(); - System.exit(2); - return; + new DownloadAllOnce().run(u, morePagesDir); } - - new DownloadAllOnce().run(u); - } } From 849d1990b2892c6e8cdf7002fdb8cd3b948a28da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Tue, 13 Mar 2018 22:09:42 +0100 Subject: [PATCH 151/180] Added tests. --- .../library/util/SkeletonBTreeMapTest.java | 280 ++++++++++++++++++ .../library/util/SkeletonTreeMapTest.java | 76 +++++ 2 files changed, 356 insertions(+) create mode 100644 shared/test/freenet/library/util/SkeletonBTreeMapTest.java diff --git a/shared/test/freenet/library/util/SkeletonBTreeMapTest.java b/shared/test/freenet/library/util/SkeletonBTreeMapTest.java new file mode 100644 index 00000000..27071223 --- /dev/null +++ b/shared/test/freenet/library/util/SkeletonBTreeMapTest.java @@ -0,0 +1,280 @@ +/* This code is part of Freenet. It is distributed under the GNU General + * Public License, version 2 (or at your option any later version). See + * http://www.gnu.org/ for further details of the GPL. 
*/ +package freenet.library.util; + +import junit.framework.TestCase; + +import java.util.UUID; +import java.util.concurrent.BlockingQueue; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.SortedMap; +import java.util.TreeMap; +import java.util.TreeSet; + +import freenet.library.io.DataFormatException; +import freenet.library.io.serial.IterableSerialiser; +import freenet.library.io.serial.MapSerialiser; +import freenet.library.io.serial.ScheduledSerialiser; +import freenet.library.io.serial.Translator; +import freenet.library.util.SkeletonBTreeMap; +import freenet.library.util.concurrent.Executors; +import freenet.library.util.concurrent.ObjectProcessor; +import freenet.library.util.exec.TaskAbortException; +import freenet.library.util.func.Tuples.X2; + +import static freenet.library.util.func.Tuples.X2; + +public class SkeletonBTreeMapTest extends TestCase { + + SkeletonBTreeMap skelmap; + String oneKey; + + private static long lastNumber = 0; + + private static synchronized Long getNextNumber() { + return new Long(++lastNumber); + } + + static class SkelMapNodeSerialiser + implements IterableSerialiser.SkeletonNode>, + ScheduledSerialiser.SkeletonNode> { + + final private Map store = Collections.synchronizedMap(new HashMap()); + SkelMapMapSerialiser mapSerialiser; + + Translator, Map> ttrans = new + SkeletonBTreeMap.TreeTranslator(null, null); + + SkeletonBTreeMap.NodeTranslator> ntrans; + + Translator, Map> tmtrans = new SkeletonTreeMap.TreeMapTranslator() { + + @Override + public Map app(SkeletonTreeMap translatee) { + return app(translatee, new TreeMap(), null); + } + + @Override + public SkeletonTreeMap rev(Map intermediate) throws DataFormatException { + return rev(intermediate, new SkeletonTreeMap(), null); + } + }; + + SkelMapNodeSerialiser(SkeletonBTreeMap skelmap, SkelMapMapSerialiser ms) { + mapSerialiser = ms; + ntrans = skelmap.makeNodeTranslator(null, tmtrans); + } + + @Override + public void 
pull(PullTask.SkeletonNode> task) throws TaskAbortException { + assert task != null; + assert task.meta != null; + assert task.meta instanceof SkeletonBTreeMap.GhostNode; + SkeletonBTreeMap.GhostNode gn = (SkeletonBTreeMap.GhostNode) task.meta; + assert gn.meta instanceof Long; + assert store.containsKey(gn.meta); + Map map = (Map) store.get(gn.meta); + SkeletonBTreeMap.SkeletonNode node; + try { + node = ntrans.rev(map); + } catch (DataFormatException e) { + throw new TaskAbortException("Unpacking SkeletonNode", e); + } + task.data = node; + } + + @Override + public void push(PushTask.SkeletonNode> task) throws TaskAbortException { + assert task.data.isBare(); + Map map = ntrans.app(task.data); + + Long pos = getNextNumber(); + store.put(pos, map); + task.meta = task.data.makeGhost(pos); + } + + @Override + public void pull(Iterable.SkeletonNode>> tasks) throws TaskAbortException { + throw new TaskAbortException("NIY", new Throwable()); + } + + @Override + public void push(Iterable.SkeletonNode>> tasks) throws TaskAbortException { + throw new TaskAbortException("NIY", new Throwable()); + } + + @Override + public ObjectProcessor.SkeletonNode>, E, TaskAbortException> pullSchedule( + BlockingQueue.SkeletonNode>> input, + BlockingQueue.SkeletonNode>, TaskAbortException>> output, + Map.SkeletonNode>, E> deposit) { + + return new ObjectProcessor.SkeletonNode>, E, TaskAbortException>(input, + output, deposit, null, Executors.DEFAULT_EXECUTOR, new TaskAbortExceptionConvertor()) { + @Override + protected Runnable createJobFor(final PullTask.SkeletonNode> task) { + return new Runnable() { + @Override + public void run() { + TaskAbortException ex = null; + try { + pull(task); + } catch (TaskAbortException e) { + ex = e; + } catch (RuntimeException e) { + ex = new TaskAbortException("pull failed", e); + } + postProcess.invoke(X2(task, ex)); + } + }; + } + }.autostart(); + + } + + @Override + public ObjectProcessor.SkeletonNode>, E, TaskAbortException> pushSchedule( + 
BlockingQueue.SkeletonNode>> input, + BlockingQueue.SkeletonNode>, TaskAbortException>> output, + Map.SkeletonNode>, E> deposit) { + ObjectProcessor.SkeletonNode>, E, TaskAbortException> objectProcessor = new ObjectProcessor.SkeletonNode>, E, TaskAbortException>( + input, output, deposit, null, Executors.DEFAULT_EXECUTOR, new TaskAbortExceptionConvertor()) { + @Override + protected Runnable createJobFor(final PushTask.SkeletonNode> task) { + return new Runnable() { + @Override + public void run() { + // Simulate push. + TaskAbortException ex = null; + try { + push(task); + } catch (TaskAbortException e) { + ex = e; + } catch (RuntimeException e) { + ex = new TaskAbortException("push failed", e); + } + postProcess.invoke(X2(task, ex)); + } + }; + } + }; + return objectProcessor.autostart(); + } + + } + + static class SkelMapMapSerialiser implements MapSerialiser { + final private Map store = Collections.synchronizedMap(new HashMap()); + + @Override + public void pull(Map> tasks, Object mapmeta) throws TaskAbortException { + for (Map.Entry> en : tasks.entrySet()) { + en.getValue().data = ((Map) store.get(en.getValue().meta)).get(en.getKey()); + } + } + + @Override + public void push(Map> tasks, Object mapmeta) throws TaskAbortException { + Map map = new HashMap(); + for (Map.Entry> en : tasks.entrySet()) { + map.put(en.getKey(), en.getValue().data); + } + Long pos = getNextNumber(); + store.put(pos, map); + for (Map.Entry> en : tasks.entrySet()) { + en.getValue().meta = pos; + } + } + + } + + private static String rndStr() { + return UUID.randomUUID().toString(); + } + + private static String rndKey() { + return rndStr().substring(0,8); + } + + protected void setUp() throws TaskAbortException { + skelmap = new SkeletonBTreeMap(2); + SkelMapMapSerialiser mapSerialiser = new SkelMapMapSerialiser(); + skelmap.setSerialiser(new SkelMapNodeSerialiser(skelmap, mapSerialiser), mapSerialiser); + assertTrue(skelmap.isBare()); + } + + private void add(int count, int laps) 
throws TaskAbortException { + int calculatedSize = skelmap.size(); + for (int l = 0; l < laps; ++l) { + SortedMap map = new TreeMap(); + for (int i = 0; i < count; ++i) { + String key = rndKey(); + map.put(key, i); + oneKey = key; + } + skelmap.update(map, new TreeSet()); + calculatedSize += count; + assertTrue(skelmap.isBare()); + assertEquals(calculatedSize, skelmap.size()); + } + } + + public void testSetup() { + assertTrue(true); + } + + public void test1() throws TaskAbortException { + add(1, 1); + } + + public void test3() throws TaskAbortException { + add(3, 1); + } + + public void test4() throws TaskAbortException { + add(4, 1); + } + + public void test10() throws TaskAbortException { + add(10, 1); + } + + public void test100() throws TaskAbortException { + add(100, 1); + } + + public void BIGtest1000() throws TaskAbortException { + add(1000, 1); + } + + public void BIGtest10000() throws TaskAbortException { + add(10000, 1); + } + + public void test1x3() throws TaskAbortException { + add(1, 3); + } + + public void test1x4() throws TaskAbortException { + add(1, 4); + } + + public void test1x5() throws TaskAbortException { + add(1, 5); + } + + public void test6x5() throws TaskAbortException { + add(6, 5); + } + + public void test10x5() throws TaskAbortException { + add(10, 5); + } + + public void BIGtest10x50() throws TaskAbortException { + add(10, 50); + } +} diff --git a/shared/test/freenet/library/util/SkeletonTreeMapTest.java b/shared/test/freenet/library/util/SkeletonTreeMapTest.java index af175fd8..5714dad1 100644 --- a/shared/test/freenet/library/util/SkeletonTreeMapTest.java +++ b/shared/test/freenet/library/util/SkeletonTreeMapTest.java @@ -7,8 +7,10 @@ import java.util.SortedMap; import java.util.UUID; +import freenet.library.io.serial.MapSerialiser; import freenet.library.util.DataNotLoadedException; import freenet.library.util.SkeletonTreeMap; +import freenet.library.util.exec.TaskAbortException; /** @@ -134,4 +136,78 @@ public void 
testIncompleteValues() { } } + class SkelMapMapSerializer implements MapSerialiser { + + @Override + public void pull(Map> tasks, Object mapmeta) throws TaskAbortException { + for (Map.Entry> en : tasks.entrySet()) { + // Simulate existing contents + en.getValue().data = 12; + } + } + + @Override + public void push(Map> tasks, Object mapmeta) throws TaskAbortException { + // Simulate storage. + } + + } + + public void testInflateAndDeflate() throws TaskAbortException { + skelmap.setSerialiser(new SkelMapMapSerializer()); + assertFalse(skelmap.isLive()); + assertTrue(skelmap.isBare()); + skelmap.inflate(); + assertTrue(skelmap.isLive()); + assertFalse(skelmap.isBare()); + for (Map.Entry en : skelmap.entrySet()) { + assertTrue(skelmap.entrySet().contains(en)); + assertNotNull(skelmap.get(en.getKey())); + assertEquals(skelmap.get(en.getKey()), new Integer(12)); + } + + assertTrue(skelmap.isLive()); + assertFalse(skelmap.isBare()); + skelmap.deflate(); + assertFalse(skelmap.isLive()); + assertTrue(skelmap.isBare()); + for (Map.Entry en : skelmap.entrySet()) { + try { + skelmap.entrySet().contains(en); + } catch (DataNotLoadedException e) { + continue; + } + fail("Data was loaded for " + en); + } + } + + public void testSingleInflateAndDeflate() throws TaskAbortException { + skelmap.setSerialiser(new SkelMapMapSerializer()); + assertFalse(skelmap.isLive()); + assertTrue(skelmap.isBare()); + skelmap.inflate(skelmap.firstKey()); + assertFalse(skelmap.isLive()); + assertFalse(skelmap.isBare()); + for (Map.Entry en : skelmap.entrySet()) { + assertTrue(skelmap.entrySet().contains(en)); + assertNotNull(skelmap.get(en.getKey())); + assertEquals(skelmap.get(en.getKey()), new Integer(12)); + break; + } + + assertFalse(skelmap.isLive()); + assertFalse(skelmap.isBare()); + skelmap.deflate(skelmap.firstKey()); + assertFalse(skelmap.isLive()); + assertTrue(skelmap.isBare()); + for (Map.Entry en : skelmap.entrySet()) { + try { + skelmap.entrySet().contains(en); + } catch 
(DataNotLoadedException e) { + continue; + } + fail("Data was loaded for " + en); + } + } + } From e3232ec908660627e59c12a26bd0f88db66d6037 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Tue, 13 Mar 2018 22:10:21 +0100 Subject: [PATCH 152/180] Added generics of a method. --- .../freenet/library/util/concurrent/ObjectProcessor.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/shared/src/freenet/library/util/concurrent/ObjectProcessor.java b/shared/src/freenet/library/util/concurrent/ObjectProcessor.java index 21d7261a..43b4d292 100644 --- a/shared/src/freenet/library/util/concurrent/ObjectProcessor.java +++ b/shared/src/freenet/library/util/concurrent/ObjectProcessor.java @@ -301,7 +301,9 @@ private static synchronized void ensureAutoHandler() { try { boolean o = proc.open; while (proc.dispatchPoll()); - if (!o) { it.remove(); } + if (!o) { + it.remove(); + } } catch (RejectedExecutionException e) { // FIXME NORM // neither Executors.DEFAULT_EXECUTOR nor Freenet's in-built executors @@ -352,7 +354,7 @@ public boolean auto() { /** ** Call {@link #auto()} and return {@code this}. */ - public ObjectProcessor autostart() { + public ObjectProcessor autostart() { auto(); return this; } From 2eac8a06a37aa5552cbf0b1a69037a5f3b635c7f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sat, 17 Mar 2018 19:55:27 +0100 Subject: [PATCH 153/180] Improved the statistics logging. Show both successful and failed operations. 
--- .../library/uploader/DownloadAllOnce.java | 75 +++++++++++++------ 1 file changed, 51 insertions(+), 24 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAllOnce.java b/uploader/src/freenet/library/uploader/DownloadAllOnce.java index 2d845075..2e5f978d 100644 --- a/uploader/src/freenet/library/uploader/DownloadAllOnce.java +++ b/uploader/src/freenet/library/uploader/DownloadAllOnce.java @@ -208,27 +208,43 @@ public Page pollNotDeferred() { private RotatingQueue toRefetch = new RotatingQueue(random); private AvoidRecentFetchesQueue toUploadUnfetchable = new AvoidRecentFetchesQueue(random); - private int counterParse = 0; - private int counterFetch = 0; - private int counterRefetchUnfetchable = 0; - private int counterRefetch = 0; - private int counterUploadUnfetchable = 0; - private int counterRefetchUpload = 0; - - - private static String STATISTICS_FORMAT_PREFIX = "%-21s%7d"; + private int counterParseSuccess = 0; + private int counterParseFailed = 0; + private int counterFetchSuccess = 0; + private int counterFetchFailed = 0; + private int counterRefetchUnfetchableSuccess = 0; + private int counterRefetchUnfetchableFailed = 0; + private int counterRefetchSuccess = 0; + private int counterRefetchFailed = 0; + private int counterUploadUnfetchableSuccess = 0; + private int counterUploadUnfetchableFailed = 0; + private int counterRefetchUploadSuccess = 0; + private int counterRefetchUploadFailed = 0; + + + private static String STATISTICS_FORMAT_PREFIX = "%-21s%7d%7d%7d"; public synchronized final void logStatistics() { StringBuffer sb = new StringBuffer(); - sb.append(statisticsLine("toParse", counterParse, toParse)); - sb.append(statisticsLine("toFetch", counterFetch, toFetch)); - sb.append(statisticsLine("toRefetchUnfetchable", counterRefetchUnfetchable, toRefetchUnfetchable)); + sb.append(statisticsLine("toParse", counterParseSuccess, counterParseFailed, toParse)); + sb.append(statisticsLine("toFetch", counterFetchSuccess, 
counterFetchFailed, toFetch)); + sb.append(statisticsLine("toRefetchUnfetchable", + counterRefetchUnfetchableSuccess, counterRefetchUnfetchableFailed, + toRefetchUnfetchable)); + int counterRefetchUpload = counterRefetchUploadSuccess + counterRefetchUploadFailed; if (counterRefetchUpload > 0) { sb.append(new Formatter().format(STATISTICS_FORMAT_PREFIX, - "RefetchUpload", counterRefetchUpload)).append("\n"); - } - sb.append(statisticsLine("toRefetch", counterRefetch, toRefetch)); - sb.append(statisticsLine("toUploadUnfetchable", counterUploadUnfetchable, toUploadUnfetchable)); + "RefetchUpload", + counterRefetchUpload, + counterRefetchUploadSuccess, + counterRefetchUploadFailed)).append("\n"); + } + sb.append(statisticsLine("toRefetch", + counterRefetchSuccess, counterRefetchFailed, + toRefetch)); + sb.append(statisticsLine("toUploadUnfetchable", + counterUploadUnfetchableSuccess, counterUploadUnfetchableFailed, + toUploadUnfetchable)); if (allFiles.size() > 0) { sb.append("Files left to remove: " + allFiles.size() + "\n"); } @@ -240,14 +256,16 @@ public synchronized final void logStatistics() { private static String STATISTICS_FORMAT = STATISTICS_FORMAT_PREFIX + "%6d%5d%5d%6d%6d%5d%5d\n"; - public final String statisticsLine(String r, int counter, RotatingQueue rqp) { + public final String statisticsLine(String r, int success, int failed, RotatingQueue rqp) { + int counter = success + failed; if (rqp.size() > 0 || counter > 0) { int arr[] = new int[12]; for (Page p : rqp) { arr[p.level]++; } return new Formatter() - .format(STATISTICS_FORMAT, r, counter, rqp.size(), arr[0], arr[1], arr[2], arr[3], arr[4], arr[5]) + .format(STATISTICS_FORMAT, r, counter, success, failed, + rqp.size(), arr[0], arr[1], arr[2], arr[3], arr[4], arr[5]) .toString(); } return ""; @@ -495,34 +513,37 @@ public void receivedPutFailed(FcpConnection c, PutFailed putFailed) { } private boolean doRefetchUnfetchable(Page page) { - counterRefetchUnfetchable++; boolean result = fetch(page); if 
(result) { add(toParse, page); + counterRefetchUnfetchableSuccess++; } else { add(toRefetchUnfetchable, page); + counterRefetchUnfetchableFailed++; } return result; } private boolean doRefetchToUpload(Page page) { - counterRefetchUpload++; boolean result = fetch(page); if (result) { add(toRefetch, page); + counterRefetchUploadSuccess++; } else { add(toUploadUnfetchable, page); + counterRefetchUploadFailed++; } return result; } private boolean doRefetch(Page page) { - counterRefetch++; boolean result = fetch(page); if (result) { add(toRefetch, page); + counterRefetchSuccess++; } else { handleUnfetchable(page); + counterRefetchFailed++; } return result; } @@ -538,23 +559,25 @@ private void handleNew(Page page) { } private boolean doFetch(Page page) { - counterFetch++; boolean result = fetch(page); if (result) { add(toParse, page); + counterFetchSuccess++; } else { handleUnfetchable(page); + counterFetchFailed++; } return result; } private void doParse(Page page) { - counterParse++; parse(page); if (oldUploads.check(page.getURI())) { add(toUploadUnfetchable, page); + counterParseFailed++; } else { add(toRefetch, page); + counterParseSuccess++; } } @@ -567,9 +590,13 @@ private void handleUnfetchable(Page page) { } private boolean doUploadUnfetchable(Page page) { - counterUploadUnfetchable++; boolean result = upload(page); add(toRefetch, page); + if (result) { + counterUploadUnfetchableSuccess++; + } else { + counterUploadUnfetchableFailed++; + } return result; } From bbafff7b36f3848350a93ee1edd9044bfeaf4345 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 1 Jul 2018 14:07:13 +0200 Subject: [PATCH 154/180] Adjusted the logic for splitting files to process for upload. 
--- uploader/src/freenet/library/uploader/Merger.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index a2758b0e..ea15c658 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -248,6 +248,9 @@ private static void createMergeDirectory(File directory) throws TaskAbortExcepti Set toBeRemoved = new HashSet(); + /** + * All files to read through in the correct order. + */ class ProcessedFilenames implements Iterator { String restBase; boolean createSelectedFiles = false; @@ -266,8 +269,7 @@ class ProcessedFilenames implements Iterator { ProcessedFilenames() { if (selectedFilesToMerge.length > 0) { doSelected = true; - if (processedFilesToMerge.length > 1 - && processedFilesToMerge.length * selectedFilesToMerge.length > filteredFilesToMerge.length) { + if (processedFilesToMerge.length > filteredFilesToMerge.length) { createSelectedFiles = true; doAllSelected = true; doFiltered = true; @@ -378,8 +380,7 @@ public Iterator iterator() { } if (found) { continue; - } else if (writers.size() < 10 || - writers.size() < 10 * (filteredFilesToMerge.length + processedFilesToMerge.length)) { + } else if (writers.size() < 3 || writers.size() < 10 * (filteredFilesToMerge.length - 1)) { lastSelected ++; String selectedFilename = SELECTED + lastSelected; IndexPeeker p = new IndexPeeker(directory); From 15ab5d8e12a93ed8f594bf8da9f8d67adf62ffbe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Thu, 20 Sep 2018 21:13:27 +0200 Subject: [PATCH 155/180] Added logging when deferring something not connected to the last root. 
--- uploader/src/freenet/library/uploader/FetchAllOnce.java | 1 + 1 file changed, 1 insertion(+) diff --git a/uploader/src/freenet/library/uploader/FetchAllOnce.java b/uploader/src/freenet/library/uploader/FetchAllOnce.java index 9715ffdc..389123d2 100644 --- a/uploader/src/freenet/library/uploader/FetchAllOnce.java +++ b/uploader/src/freenet/library/uploader/FetchAllOnce.java @@ -982,6 +982,7 @@ public void doDownload() { } if (!taken.hasParent(lastRoot) && rand.nextInt(100) > 0) { + logger.finer("Defer fetching non-last " + taken.getURI()); objectQueue.offer(taken); continue; } From f2981aec823b551519c024a40142b857325db29c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Thu, 20 Sep 2018 21:14:03 +0200 Subject: [PATCH 156/180] Parse a little slower. --- uploader/src/freenet/library/uploader/DownloadAllOnce.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAllOnce.java b/uploader/src/freenet/library/uploader/DownloadAllOnce.java index 2e5f978d..34e08083 100644 --- a/uploader/src/freenet/library/uploader/DownloadAllOnce.java +++ b/uploader/src/freenet/library/uploader/DownloadAllOnce.java @@ -750,7 +750,7 @@ protected void process() { Page page = toParse.poll(); if (page != null) { doParse(page); - otherExecutors.schedule(this, 0, TimeUnit.SECONDS); + otherExecutors.schedule(this, 200, TimeUnit.MILLISECONDS); } else { otherExecutors.schedule(this, 10, TimeUnit.SECONDS); } From 49bf7933d0216f19dc635cb8bf482e56199fc9e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Mon, 1 Oct 2018 21:39:20 +0200 Subject: [PATCH 157/180] Catch another case when shutting down the executor. 
--- uploader/src/freenet/library/uploader/DownloadAllOnce.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/uploader/src/freenet/library/uploader/DownloadAllOnce.java b/uploader/src/freenet/library/uploader/DownloadAllOnce.java index 34e08083..a8b1dbc2 100644 --- a/uploader/src/freenet/library/uploader/DownloadAllOnce.java +++ b/uploader/src/freenet/library/uploader/DownloadAllOnce.java @@ -30,6 +30,7 @@ import java.util.Set; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; @@ -739,6 +740,11 @@ private abstract class ProcessSomething implements Runnable { public void run() { try { process(); + } catch (RejectedExecutionException e) { + // Do nothing. + if (!closingDown) { + logger.log(Level.SEVERE, "Confusion in the executor or queue full.", e); + } } catch (Exception e) { logger.log(Level.SEVERE, "Class " + this + " threw exception: " + e, e); } From 9a0f62cf9d432338482e255a9ad9f52ece89a6eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 14 Oct 2018 18:13:31 +0200 Subject: [PATCH 158/180] Remove more files every lap. Find a set of files to remove instead of just one. Also refactored the clean out code to fit better in the CleanupOldFiles class. 
--- .../library/uploader/DownloadAllOnce.java | 84 ++++++++++++------- 1 file changed, 55 insertions(+), 29 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAllOnce.java b/uploader/src/freenet/library/uploader/DownloadAllOnce.java index a8b1dbc2..c52b4e9f 100644 --- a/uploader/src/freenet/library/uploader/DownloadAllOnce.java +++ b/uploader/src/freenet/library/uploader/DownloadAllOnce.java @@ -22,12 +22,15 @@ import java.nio.file.FileAlreadyExistsException; import java.nio.file.StandardCopyOption; import java.util.Arrays; +import java.util.Comparator; import java.util.Date; import java.util.Formatter; import java.util.HashSet; import java.util.Iterator; import java.util.Random; import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.RejectedExecutionException; @@ -67,7 +70,7 @@ class DownloadAllOnce { private boolean closingDown = false; private File directory; private File morePagesDirectory; - private Set allFiles = new HashSet(); + private CleanupOldFiles cleanUp = null; private Uploads oldUploads = new Uploads(); private int getterCounter = 0; private int uploadCounter = 0; @@ -246,8 +249,8 @@ public synchronized final void logStatistics() { sb.append(statisticsLine("toUploadUnfetchable", counterUploadUnfetchableSuccess, counterUploadUnfetchableFailed, toUploadUnfetchable)); - if (allFiles.size() > 0) { - sb.append("Files left to remove: " + allFiles.size() + "\n"); + if (cleanUp != null) { + cleanUp.addLog(sb); } if (oldUploads.size() > 0) { sb.append("Uploads from previous run: " + oldUploads.size() + "\n"); @@ -552,7 +555,9 @@ private boolean doRefetch(Page page) { private void handleNew(Page page) { if (page.getFile().exists()) { page.getFile().setLastModified(System.currentTimeMillis()); - allFiles.remove(page.getFile()); + if (cleanUp != null) { + cleanUp.remove(page.getFile()); + } add(toParse, 
page); } else { add(toFetch, page); @@ -607,20 +612,38 @@ private void add(RotatingQueue whereto, Page p) { } private class CleanupOldFiles implements Runnable { - ScheduledFuture handle = null; + private final Set allFiles = new HashSet(); + private ScheduledFuture handle = null; + private int count = 1; + + public CleanupOldFiles() { + allFiles.addAll(Arrays.asList(directory.listFiles())); + } public ScheduledFuture setHandle(ScheduledFuture h) { handle = h; return h; } + public void addLog(StringBuffer sb) { + if (allFiles.size() > 0) { + sb.append("Files left to remove: " + allFiles.size() + "\n"); + } + } + + public void remove(File f) { + allFiles.remove(f); + } + public void run() { if (toParse.size() > 0) { // Don't delete anything if the parsing is not completed. + count = 1; return; } if (toFetch.size() > 0) { // Don't delete anything if the fetching is not completed. + count = 1; return; } if (allFiles.size() == 0) { @@ -630,32 +653,36 @@ public void run() { } return; } - // Find the oldest one. - long oldestAge = Long.MAX_VALUE; - File oldestFile = null; - for (File f : allFiles) { - if (!f.exists()) { - allFiles.remove(f); - try { - oldUploads.check(new FreenetURI(f.getName())); - } catch (MalformedURLException e) { - logger.log(Level.WARNING, "File " + f + " was deleted", e); + // Sort in oldest order. 
+ SortedSet toRemove = new TreeSet(new Comparator() { + @Override + public int compare(File o1, File o2) { + int l = Long.compare(o1.lastModified(), o2.lastModified()); + if (l != 0) { + return l; } - return; + return o1.getName().compareTo(o2.getName()); } - if (f.lastModified() < oldestAge) { - oldestAge = f.lastModified(); - oldestFile = f; + }); + for (File f : allFiles) { + toRemove.add(f); + if (toRemove.size() > count) { + toRemove.remove(toRemove.last()); } } - allFiles.remove(oldestFile); - try { - oldUploads.check(new FreenetURI(oldestFile.getName())); - } catch (MalformedURLException e) { - logger.log(Level.WARNING, "Deleting file " + oldestFile, e); + for (File f : toRemove) { + allFiles.remove(f); + try { + oldUploads.check(new FreenetURI(f.getName())); + } catch (MalformedURLException e) { + logger.log(Level.WARNING, "File " + f + " strange filename.", e); + } + if (f.exists()) { + logger.fine("Removing file " + f); + f.delete(); + } } - logger.fine("Removing file " + oldestFile); - oldestFile.delete(); + count += 1 + count / 7; } } @@ -869,9 +896,8 @@ private void run(FreenetURI u, File morePagesDir) { directory = new File("library-download-all-once-db"); if (directory.exists()) { oldUploads.load(); - allFiles.addAll(Arrays.asList(directory.listFiles())); - CleanupOldFiles cleanUp = new CleanupOldFiles(); - cleanUp.setHandle(otherExecutors.scheduleWithFixedDelay(cleanUp, 30000, 5, TimeUnit.SECONDS)); + cleanUp = new CleanupOldFiles(); + cleanUp.setHandle(otherExecutors.scheduleWithFixedDelay(cleanUp, 500, 1, TimeUnit.MINUTES)); } else { directory.mkdir(); } From a2745903fea4398948f256a00c5efdea27da3e4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 2 Dec 2018 18:21:02 +0100 Subject: [PATCH 159/180] Fixed some warnings. 
--- .../library/uploader/DownloadAllOnce.java | 39 +++++++------------ 1 file changed, 15 insertions(+), 24 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAllOnce.java b/uploader/src/freenet/library/uploader/DownloadAllOnce.java index c52b4e9f..151c3770 100644 --- a/uploader/src/freenet/library/uploader/DownloadAllOnce.java +++ b/uploader/src/freenet/library/uploader/DownloadAllOnce.java @@ -26,7 +26,6 @@ import java.util.Date; import java.util.Formatter; import java.util.HashSet; -import java.util.Iterator; import java.util.Random; import java.util.Set; import java.util.SortedSet; @@ -80,6 +79,11 @@ class DownloadAllOnce { private static final long OPERATION_GIVE_UP_TIME = TimeUnit.HOURS.toMillis(2); class RotatingQueue extends LinkedBlockingQueue { + /** + * Serializable. + */ + private static final long serialVersionUID = -9157586651059771247L; + public RotatingQueue(Random r) { random = r; } @@ -178,6 +182,11 @@ void fetchTimerReset() { } class AvoidRecentFetchesQueue extends RotatingQueue { + /** + * Serializable. 
+ */ + private static final long serialVersionUID = 7608442014226987011L; + AvoidRecentFetchesQueue(Random r) { super(r); } @@ -267,10 +276,12 @@ public final String statisticsLine(String r, int success, int failed, RotatingQu for (Page p : rqp) { arr[p.level]++; } - return new Formatter() - .format(STATISTICS_FORMAT, r, counter, success, failed, - rqp.size(), arr[0], arr[1], arr[2], arr[3], arr[4], arr[5]) + Formatter formatter = new Formatter(); + String line = formatter.format(STATISTICS_FORMAT, r, counter, success, failed, + rqp.size(), arr[0], arr[1], arr[2], arr[3], arr[4], arr[5]) .toString(); + formatter.close(); + return line; } return ""; } @@ -869,26 +880,6 @@ protected void process() { } } - private class ProcessRefetchUnfetchable extends ProcessSomething { - protected void process() { - Page page = toRefetchUnfetchable.poll(); - if (page != null) { - doRefetchUnfetchable(page); - return; - } - } - } - - private class ProcessRefetch extends ProcessSomething { - protected void process() { - Page page = toRefetch.poll(); - if (page != null) { - doRefetch(page); - return; - } - } - } - private void run(FreenetURI u, File morePagesDir) { morePagesDirectory = morePagesDir; FCPexecutors = Executors.newScheduledThreadPool(10); From 5cca0a1143ce95db4cd47e742540d3e220de038a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Wed, 5 Dec 2018 22:22:57 +0100 Subject: [PATCH 160/180] For the DownloadAllOnce, remember the not available unfetchables. The purpose of this is that when running a second time, these can be put in the refetchUnfetchable pile immediately and not blocking the fetching of new pages. Rename the old uploads to unfetchables and refactored. 
--- .../library/uploader/DownloadAllOnce.java | 52 +++++++++++-------- 1 file changed, 31 insertions(+), 21 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAllOnce.java b/uploader/src/freenet/library/uploader/DownloadAllOnce.java index 151c3770..879b50f2 100644 --- a/uploader/src/freenet/library/uploader/DownloadAllOnce.java +++ b/uploader/src/freenet/library/uploader/DownloadAllOnce.java @@ -70,7 +70,7 @@ class DownloadAllOnce { private File directory; private File morePagesDirectory; private CleanupOldFiles cleanUp = null; - private Uploads oldUploads = new Uploads(); + private Unfetchables unfetchables = new Unfetchables(); private int getterCounter = 0; private int uploadCounter = 0; @@ -130,7 +130,7 @@ public E take() throws InterruptedException { * A class to keep track of the Pages we work with. */ private class Page { - // private Page parent; + private final long START_DEFER_TIME = TimeUnit.HOURS.toMillis(4); private FreenetURI uri; private int level = 0; @@ -139,12 +139,13 @@ private class Page { private long timeToNextFetchAttempt; Page(FreenetURI u, Page p) { - // parent = p, uri = u; if (p != null) { level = p.level + 1; } - fetchTimerReset(); + nextFetchAttempt = new Date(); + logAttempts = new StringBuffer(); + timeToNextFetchAttempt = START_DEFER_TIME; } FreenetURI getURI() { @@ -174,7 +175,7 @@ boolean fetchAvailable() { } void fetchTimerReset() { - timeToNextFetchAttempt = TimeUnit.HOURS.toMillis(4); + timeToNextFetchAttempt = START_DEFER_TIME; logAttempts = new StringBuffer(); logAttempts.append("Deferred to ").append(new Date()).append("\n"); calculateNextFetchAttempt(); @@ -261,8 +262,8 @@ public synchronized final void logStatistics() { if (cleanUp != null) { cleanUp.addLog(sb); } - if (oldUploads.size() > 0) { - sb.append("Uploads from previous run: " + oldUploads.size() + "\n"); + if (unfetchables.size() > 0) { + sb.append("Unfetchables from previous run: " + unfetchables.size() + "\n"); } logger.info("Statistics:\n" 
+ sb.toString() + "End Statistics."); } @@ -570,6 +571,8 @@ private void handleNew(Page page) { cleanUp.remove(page.getFile()); } add(toParse, page); + } else if (unfetchables.check(page.getURI())) { + add(toRefetchUnfetchable, page); } else { add(toFetch, page); } @@ -589,7 +592,7 @@ private boolean doFetch(Page page) { private void doParse(Page page) { parse(page); - if (oldUploads.check(page.getURI())) { + if (unfetchables.check(page.getURI())) { add(toUploadUnfetchable, page); counterParseFailed++; } else { @@ -684,7 +687,7 @@ public int compare(File o1, File o2) { for (File f : toRemove) { allFiles.remove(f); try { - oldUploads.check(new FreenetURI(f.getName())); + unfetchables.check(new FreenetURI(f.getName())); } catch (MalformedURLException e) { logger.log(Level.WARNING, "File " + f + " strange filename.", e); } @@ -700,12 +703,14 @@ public int compare(File o1, File o2) { /** * Class to keep track of uploads from the previous run. */ - private class Uploads { + private static class Unfetchables { private Set fromPreviousRun = new HashSet(); - private final static String OLD_UPLOADS_FILENAME = "old_uploads.saved"; + private final static String UNFETCHABLES_FILENAME = "unfetchables.saved"; + private File directory; - void load() { - File file = new File(directory, OLD_UPLOADS_FILENAME); + void load(File dir) { + directory = dir; + File file = new File(directory, UNFETCHABLES_FILENAME); if (file.exists()) { logger.finest("Reading file " + file); try { @@ -726,24 +731,29 @@ void load() { } private void rotate() { - String OLD_FILENAME = OLD_UPLOADS_FILENAME + ".old"; + String OLD_FILENAME = UNFETCHABLES_FILENAME + ".old"; File oldfile = new File(directory, OLD_FILENAME); if (oldfile.exists()) { oldfile.delete(); } - File file = new File(directory, OLD_UPLOADS_FILENAME); + File file = new File(directory, UNFETCHABLES_FILENAME); if (file.exists()) { file.renameTo(oldfile); } } - synchronized void save() { + synchronized void save(RotatingQueue toSave1, 
RotatingQueue toSave2) { rotate(); - File file = new File(directory, OLD_UPLOADS_FILENAME); + File file = new File(directory, UNFETCHABLES_FILENAME); Set set = new HashSet(); - for (Page p : toUploadUnfetchable) { + for (Page p : toSave1) { set.add(p.getURI()); } + if (toSave2 != null) { + for (Page p : toSave2) { + set.add(p.getURI()); + } + } if (set.size() > 0) { logger.finest("Writing file " + file); try { @@ -886,7 +896,7 @@ private void run(FreenetURI u, File morePagesDir) { otherExecutors = Executors.newScheduledThreadPool(1); directory = new File("library-download-all-once-db"); if (directory.exists()) { - oldUploads.load(); + unfetchables.load(directory); cleanUp = new CleanupOldFiles(); cleanUp.setHandle(otherExecutors.scheduleWithFixedDelay(cleanUp, 500, 1, TimeUnit.MINUTES)); } else { @@ -907,7 +917,7 @@ public void run() { otherExecutors.schedule(new ProcessParse(), 2000, TimeUnit.MILLISECONDS); otherExecutors.scheduleWithFixedDelay(new Runnable() { public void run() { - oldUploads.save(); + unfetchables.save(toUploadUnfetchable, toRefetchUnfetchable); } }, 100, 20, TimeUnit.MINUTES); FcpSession session; @@ -925,7 +935,7 @@ public void run() { logger.info("Shutdown."); FCPexecutors.shutdown(); otherExecutors.shutdown(); - oldUploads.save(); + unfetchables.save(toUploadUnfetchable, toRefetchUnfetchable); waitTermination(TimeUnit.MINUTES.toMillis(1) + OPERATION_GIVE_UP_TIME); logger.info("Shutdown now (after long wait)."); FCPexecutors.shutdownNow(); From 20fd1ca96c81d9f2508843cb6d91c8352144a067 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Thu, 27 Dec 2018 21:58:56 +0100 Subject: [PATCH 161/180] Improved the stopping of the process. 
--- .../library/uploader/DownloadAllOnce.java | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadAllOnce.java b/uploader/src/freenet/library/uploader/DownloadAllOnce.java index 879b50f2..4875eb6b 100644 --- a/uploader/src/freenet/library/uploader/DownloadAllOnce.java +++ b/uploader/src/freenet/library/uploader/DownloadAllOnce.java @@ -936,15 +936,20 @@ public void run() { FCPexecutors.shutdown(); otherExecutors.shutdown(); unfetchables.save(toUploadUnfetchable, toRefetchUnfetchable); - waitTermination(TimeUnit.MINUTES.toMillis(1) + OPERATION_GIVE_UP_TIME); - logger.info("Shutdown now (after long wait)."); - FCPexecutors.shutdownNow(); + if (!waitTermination(TimeUnit.MINUTES.toMillis(1) + OPERATION_GIVE_UP_TIME)) { + logger.info("Shutdown now (after long wait)."); + FCPexecutors.shutdownNow(); + otherExecutors.shutdownNow(); + session.close(); + if (!waitTermination(TimeUnit.MINUTES.toMillis(1))) { + logger.info("Shutdown now did not succeed to stop all jobs"); + } + } FCPexecutors = null; - otherExecutors.shutdownNow(); otherExecutors = null; session.close(); session = null; - logger.info("Shutdown now completed."); + logger.info("Shutdown completed."); } } @@ -995,13 +1000,16 @@ public void receivedSubscribedUSKUpdate(FcpConnection fcpConnection, } } - private void waitTermination(long ms) { + private boolean waitTermination(long ms) { + boolean t1 = false; + boolean t2 = false; try { - FCPexecutors.awaitTermination(ms, TimeUnit.MILLISECONDS); - otherExecutors.awaitTermination(1 + ms / 10, TimeUnit.MILLISECONDS); + t1 = FCPexecutors.awaitTermination(ms, TimeUnit.MILLISECONDS); + t2 = otherExecutors.awaitTermination(1 + ms / 10, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { throw new RuntimeException("Waiting for jobs."); } + return t1 && t2; } public static void main(String[] argv) throws InterruptedException { From 6feda2bb2dcc240ab9af8e716f2faf46610769af Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 30 Dec 2018 11:48:10 +0100 Subject: [PATCH 162/180] Improved the name of the downloading. --- ...wnloadAllOnce.java => DownloadOneEdition.java} | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) rename uploader/src/freenet/library/uploader/{DownloadAllOnce.java => DownloadOneEdition.java} (98%) diff --git a/uploader/src/freenet/library/uploader/DownloadAllOnce.java b/uploader/src/freenet/library/uploader/DownloadOneEdition.java similarity index 98% rename from uploader/src/freenet/library/uploader/DownloadAllOnce.java rename to uploader/src/freenet/library/uploader/DownloadOneEdition.java index 4875eb6b..2a0ee892 100644 --- a/uploader/src/freenet/library/uploader/DownloadAllOnce.java +++ b/uploader/src/freenet/library/uploader/DownloadOneEdition.java @@ -55,13 +55,16 @@ import net.pterodactylus.fcp.Verbosity; /** - * Class to download the entire index. + * Class to download the entire index and save it. * - * When a newer USK is seen, stop the processing and exit. + * When a newer USK is seen, stop the processing immediately and exit. + * + * If a non-downloadable part is encountered upload it from the saved parts or + * attempt to download later. */ -class DownloadAllOnce { +class DownloadOneEdition { /** Logger. 
*/ - private static final Logger logger = Logger.getLogger(DownloadAllOnce.class.getName()); + private static final Logger logger = Logger.getLogger(DownloadOneEdition.class.getName()); private ScheduledExecutorService FCPexecutors; private ScheduledExecutorService otherExecutors; @@ -306,7 +309,7 @@ public void receivedAllData(FcpConnection c, AllData ad) { if (!token.equals(ad.getIdentifier())) { return; } - logger.entering(DownloadAllOnce.class.toString(), "receivedAllData", "receivedAllData for " + token); + logger.entering(DownloadOneEdition.class.toString(), "receivedAllData", "receivedAllData for " + token); try { Files.copy(ad.getPayloadInputStream(), page.getFile().toPath(), StandardCopyOption.REPLACE_EXISTING); @@ -1037,6 +1040,6 @@ public static void main(String[] argv) throws InterruptedException { } } - new DownloadAllOnce().run(u, morePagesDir); + new DownloadOneEdition().run(u, morePagesDir); } } From bb7ac9a75a5255ce3592cabf772c69f1b9622642 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Sun, 13 Jan 2019 13:18:35 +0100 Subject: [PATCH 163/180] Convert to only use tabs in indents. 
--- .../library/uploader/DownloadOneEdition.java | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadOneEdition.java b/uploader/src/freenet/library/uploader/DownloadOneEdition.java index 2a0ee892..aa41549e 100644 --- a/uploader/src/freenet/library/uploader/DownloadOneEdition.java +++ b/uploader/src/freenet/library/uploader/DownloadOneEdition.java @@ -834,7 +834,7 @@ protected void process() { } catch (IOException ioe) { logger.log(Level.SEVERE, "Could not copy file " + fromFile + " to " + page.getFile() + ".", ioe); if (page.getFile().exists()) { - page.getFile().delete(); + page.getFile().delete(); logger.info("Deleted partial copy " + page.getFile()); } toRefetchUnfetchable.offer(page); @@ -1027,17 +1027,17 @@ public static void main(String[] argv) throws InterruptedException { File morePagesDir = null; if (argv.length > 1) { - morePagesDir = new File(argv[1]); - if (!morePagesDir.exists()) { - logger.severe("Directory " + morePagesDir + " does not exist."); - System.exit(2); - return; - } - if (!morePagesDir.isDirectory()) { - logger.severe("File " + morePagesDir + " is not a directory."); - System.exit(2); - return; - } + morePagesDir = new File(argv[1]); + if (!morePagesDir.exists()) { + logger.severe("Directory " + morePagesDir + " does not exist."); + System.exit(2); + return; + } + if (!morePagesDir.isDirectory()) { + logger.severe("File " + morePagesDir + " is not a directory."); + System.exit(2); + return; + } } new DownloadOneEdition().run(u, morePagesDir); From 4034dca84df4e17ccb5ac583fedff94c17546856 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Debora=20W=C3=B6pcke?= Date: Fri, 25 Jan 2019 18:25:07 +0100 Subject: [PATCH 164/180] Added function to specify number of simultaneous sessions. 
--- .../library/uploader/DownloadOneEdition.java | 402 +++++++++++++++--- 1 file changed, 352 insertions(+), 50 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DownloadOneEdition.java b/uploader/src/freenet/library/uploader/DownloadOneEdition.java index aa41549e..c88e210b 100644 --- a/uploader/src/freenet/library/uploader/DownloadOneEdition.java +++ b/uploader/src/freenet/library/uploader/DownloadOneEdition.java @@ -26,6 +26,7 @@ import java.util.Date; import java.util.Formatter; import java.util.HashSet; +import java.util.Queue; import java.util.Random; import java.util.Set; import java.util.SortedSet; @@ -197,8 +198,8 @@ class AvoidRecentFetchesQueue extends RotatingQueue { public Page pollNotDeferred() { int maxLaps = size(); - if (maxLaps > 20) { - maxLaps = 10; + if (maxLaps > 10) { + maxLaps = 6; } do { Page page = poll(); @@ -623,6 +624,30 @@ private boolean doUploadUnfetchable(Page page) { return result; } + private void doCopyAndUploadUnfetchable(Page page) { + File fromFile = new File(morePagesDirectory, page.getFile().getName()); + try { + Files.copy(fromFile.toPath(), page.getFile().toPath()); + boolean result = doUploadUnfetchable(page); + logger.finer("Uploaded Unfetchable" + (result ? 
"" : "failed") + "."); + } catch (UnsupportedOperationException uoe) { + logger.log(Level.SEVERE, "Could not copy file " + fromFile + " to " + page.getFile() + ".", uoe); + toRefetchUnfetchable.offer(page); + } catch (FileAlreadyExistsException faee) { + logger.log(Level.SEVERE, "Could not copy file " + fromFile + " to " + page.getFile() + ".", faee); + toUploadUnfetchable.offer(page); + } catch (IOException ioe) { + logger.log(Level.SEVERE, "Could not copy file " + fromFile + " to " + page.getFile() + ".", ioe); + if (page.getFile().exists()) { + page.getFile().delete(); + logger.info("Deleted partial copy " + page.getFile()); + } + toRefetchUnfetchable.offer(page); + } catch (SecurityException se) { + logger.log(Level.SEVERE, "Could not copy file " + fromFile + " to " + page.getFile() + ".", se); + toRefetchUnfetchable.offer(page); + } + } private void add(RotatingQueue whereto, Page p) { whereto.offer(p); @@ -711,6 +736,11 @@ private static class Unfetchables { private final static String UNFETCHABLES_FILENAME = "unfetchables.saved"; private File directory; + @SuppressWarnings("unchecked") + private Set extracted(ObjectInputStream ois) throws IOException, ClassNotFoundException { + return (Set) ois.readObject(); + } + void load(File dir) { directory = dir; File file = new File(directory, UNFETCHABLES_FILENAME); @@ -719,7 +749,7 @@ void load(File dir) { try { FileInputStream f = new FileInputStream(file); ObjectInputStream ois = new ObjectInputStream(f); - fromPreviousRun = (Set) ois.readObject(); + fromPreviousRun = extracted(ois); ois.close(); } catch (IOException e) { logger.warning("Could not read the file " + file); @@ -819,29 +849,8 @@ protected void process() { if (morePagesDirectory != null) { Page page = toRefetchUnfetchable.poll(); if (page != null) { - File fromFile = new File(morePagesDirectory, page.getFile().getName()); - try { - Files.copy(fromFile.toPath(), page.getFile().toPath()); - boolean result = doUploadUnfetchable(page); - 
logger.finer("Uploaded Unfetchable" + (result ? "" : "failed") + "."); - return; - } catch (UnsupportedOperationException uoe) { - logger.log(Level.SEVERE, "Could not copy file " + fromFile + " to " + page.getFile() + ".", uoe); - toRefetchUnfetchable.offer(page); - } catch (FileAlreadyExistsException faee) { - logger.log(Level.SEVERE, "Could not copy file " + fromFile + " to " + page.getFile() + ".", faee); - toUploadUnfetchable.offer(page); - } catch (IOException ioe) { - logger.log(Level.SEVERE, "Could not copy file " + fromFile + " to " + page.getFile() + ".", ioe); - if (page.getFile().exists()) { - page.getFile().delete(); - logger.info("Deleted partial copy " + page.getFile()); - } - toRefetchUnfetchable.offer(page); - } catch (SecurityException se) { - logger.log(Level.SEVERE, "Could not copy file " + fromFile + " to " + page.getFile() + ".", se); - toRefetchUnfetchable.offer(page); - } + doCopyAndUploadUnfetchable(page); + return; } } @@ -893,6 +902,29 @@ protected void process() { } } + void shutdown(FcpSession session) { + waitTermination(TimeUnit.SECONDS.toMillis(1)); + closingDown = true; + logger.info("Shutdown."); + FCPexecutors.shutdown(); + otherExecutors.shutdown(); + unfetchables.save(toUploadUnfetchable, toRefetchUnfetchable); + if (!waitTermination(TimeUnit.MINUTES.toMillis(1) + OPERATION_GIVE_UP_TIME)) { + logger.info("Shutdown now (after long wait)."); + FCPexecutors.shutdownNow(); + otherExecutors.shutdownNow(); + session.close(); + if (!waitTermination(TimeUnit.MINUTES.toMillis(1))) { + logger.info("Shutdown now did not succeed to stop all jobs"); + } + } + FCPexecutors = null; + otherExecutors = null; + session.close(); + session = null; + logger.info("Shutdown completed."); + } + private void run(FreenetURI u, File morePagesDir) { morePagesDirectory = morePagesDir; FCPexecutors = Executors.newScheduledThreadPool(10); @@ -931,32 +963,288 @@ public void run() { return; } try { - run2(session, u); + startAndBlockUntilUpdate(session, u); } 
finally { - waitTermination(TimeUnit.SECONDS.toMillis(1)); - closingDown = true; - logger.info("Shutdown."); - FCPexecutors.shutdown(); - otherExecutors.shutdown(); - unfetchables.save(toUploadUnfetchable, toRefetchUnfetchable); - if (!waitTermination(TimeUnit.MINUTES.toMillis(1) + OPERATION_GIVE_UP_TIME)) { - logger.info("Shutdown now (after long wait)."); - FCPexecutors.shutdownNow(); - otherExecutors.shutdownNow(); - session.close(); - if (!waitTermination(TimeUnit.MINUTES.toMillis(1))) { - logger.info("Shutdown now did not succeed to stop all jobs"); + shutdown(session); + } + } + + private class ParseQueues implements Runnable { + private long count = 0; + private long sum = 0; + + @Override + public void run() { + final Page page = toParse.poll(); + if (page != null) { + otherExecutors.execute(new Runnable() { + @Override + public void run() { + Date start = new Date(); + doParse(page); + count++; + sum += new Date().getTime() - start.getTime(); + } + }); + + otherExecutors.execute(this); + } else { + otherExecutors.schedule(this, 10, TimeUnit.SECONDS); + } + } + + long getMean() { + if (count == 0) { + return 1; + } + return sum / count; + } + } + + private class QueueQueues implements Runnable { + + private boolean startedFetch = false; + private boolean startedUpload = false; + + private Random random = new Random(); + + private long nextLong(long l) { + if (l <= 0) { + l = 1L; + } + return (random.nextLong() >>> 1) % l; + } + + private boolean shallUpload(int ql) { + if (!startedFetch) { + return true; + } + return nextLong(uploadTime.get() * (toFetch.size() + toRefetchUnfetchable.size()) / (1 + ql) + / fetchTime.get()) == 0; + } + + private class Mean { + private Queue whereToRead; + private long count; + private long sum; + + Mean(Queue w) { + whereToRead = w; + count = 0L; + sum = 0L; + } + + private void consume() { + boolean done = false; + do { + Long found = whereToRead.poll(); + if (found != null) { + count += 1; + sum += found.longValue(); + } 
else { + done = true; + } + } while (!done); + } + + long get() { + consume(); + if (count == 0) { + return 1; + } + return sum / count; + } + } + + private final Queue fetchTimes = new LinkedBlockingQueue(); + private final Mean fetchTime = new Mean(fetchTimes); + private final Queue uploadTimes = new LinkedBlockingQueue(); + private final Mean uploadTime = new Mean(uploadTimes); + + private class MeasureTime { + private Queue whereToPost; + private Date start; + + MeasureTime(Queue w) { + whereToPost = w; + start = new Date(); + } + + void done() { + whereToPost.offer(new Date().getTime() - start.getTime()); + } + } + + private void queueFetch() { + { + final Page page = toFetch.poll(); + if (page != null) { + FCPexecutors.execute(new Runnable() { + @Override + public void run() { + MeasureTime t = new MeasureTime(fetchTimes); + boolean result = doFetch(page); + t.done(); + logger.finest("Fetched Fetch" + (result ? "" : " failed") + "."); + } + }); + startedFetch = true; + } + } + + if (!startedFetch) { + final Page page = toRefetchUnfetchable.pollNotDeferred(); + if (page != null) { + FCPexecutors.execute(new Runnable() { + @Override + public void run() { + String log = page.logAttempts.toString(); + MeasureTime t = new MeasureTime(fetchTimes); + boolean result = doRefetchUnfetchable(page); + t.done(); + logger.finer(log + "Fetched RefetchUnfetchable" + (result ? "" : " failed") + "."); + } + }); + startedFetch = true; + } + } + + { + final Page page = toUploadUnfetchable.pollNotDeferred(); + if (page != null) { + FCPexecutors.execute(new Runnable() { + @Override + public void run() { + String log = page.logAttempts.toString(); + MeasureTime t = new MeasureTime(fetchTimes); + boolean result = doRefetchToUpload(page); + t.done(); + logger.finer(log + "Fetched ToUpload" + (result ? 
"" : " failed") + "."); + } + }); + startedFetch = true; + } + } + + if (!startedFetch) { + final Page page = toRefetch.poll(); + if (page != null) { + FCPexecutors.execute(new Runnable() { + @Override + public void run() { + MeasureTime t = new MeasureTime(fetchTimes); + boolean result = doRefetch(page); + t.done(); + logger.finer("Fetched Refetch" + (result ? "" : " failed") + "."); + } + }); } } - FCPexecutors = null; - otherExecutors = null; - session.close(); - session = null; - logger.info("Shutdown completed."); + } + + private void queueUpload() { + if (morePagesDirectory != null && shallUpload(toRefetchUnfetchable.size())) { + final Page page = toRefetchUnfetchable.poll(); + if (page != null) { + FCPexecutors.execute(new Runnable() { + @Override + public void run() { + MeasureTime t = new MeasureTime(uploadTimes); + doCopyAndUploadUnfetchable(page); + t.done(); + } + }); + startedUpload = true; + } + } + + if (shallUpload(toUploadUnfetchable.size())) { + final Page page = toUploadUnfetchable.poll(); + if (page != null) { + FCPexecutors.execute(new Runnable() { + @Override + public void run() { + MeasureTime t = new MeasureTime(uploadTimes); + boolean result = doUploadUnfetchable(page); + t.done(); + logger.finer("Uploaded Unfetchable" + (result ? 
"" : "failed") + "."); + } + }); + startedUpload = true; + } + } + } + + @Override + public void run() { + startedFetch = false; + startedUpload = false; + + queueFetch(); + queueUpload(); + + if (startedFetch || startedUpload) { + FCPexecutors.execute(this); + } else { + FCPexecutors.schedule(this, 10, TimeUnit.SECONDS); + } + } + + public Object getFetchMean() { + return fetchTime.get(); + } + + public Object getUploadMean() { + return uploadTime.get(); } } - private void run2(FcpSession session, FreenetURI uri) { + private void run2(int numThreads, FreenetURI u, File morePagesDir) { + morePagesDirectory = morePagesDir; + FCPexecutors = Executors.newScheduledThreadPool(numThreads); + otherExecutors = Executors.newScheduledThreadPool(1); + directory = new File("library-download-all-once-db"); + if (directory.exists()) { + unfetchables.load(directory); + cleanUp = new CleanupOldFiles(); + cleanUp.setHandle(FCPexecutors.scheduleWithFixedDelay(cleanUp, 500, 1, TimeUnit.MINUTES)); + } else { + directory.mkdir(); + } + + final ParseQueues pq = new ParseQueues(); + otherExecutors.execute(pq); + final QueueQueues qq = new QueueQueues(); + otherExecutors.scheduleWithFixedDelay(new Runnable() { + public void run() { + logStatistics(); + logger.log(Level.INFO, "Parse time: {0} Fetch time: {1} Upload time: {2}", + new Object[] { + pq.getMean(), qq.getFetchMean(), qq.getUploadMean() + }); + } + }, 1, 1, TimeUnit.MINUTES); + FCPexecutors.schedule(qq, 2, TimeUnit.SECONDS); + FCPexecutors.scheduleWithFixedDelay(new Runnable() { + public void run() { + unfetchables.save(toUploadUnfetchable, toRefetchUnfetchable); + } + }, 100, 20, TimeUnit.MINUTES); + FcpSession session; + try { + session = new FcpSession("DownloadOneEditionFor" + u); + } catch (IllegalStateException | IOException e1) { + logger.log(Level.SEVERE, "Exception", e1); + return; + } + try { + startAndBlockUntilUpdate(session, u); + } finally { + shutdown(session); + } + } + + private void 
startAndBlockUntilUpdate(FcpSession session, FreenetURI uri) { connection = session.getConnection(); if (connection == null) { throw new IllegalArgumentException("No connection."); @@ -1016,9 +1304,18 @@ private boolean waitTermination(long ms) { } public static void main(String[] argv) throws InterruptedException { + Integer numThreads; + try { + numThreads = new Integer(argv[0]); + } catch (NumberFormatException e) { + logger.log(Level.SEVERE, "First parameter must be a number, was " + argv[0] + ".", e); + System.exit(2); + return; + } + FreenetURI u; try { - u = new FreenetURI(argv[0]); + u = new FreenetURI(argv[1]); } catch (MalformedURLException e) { logger.log(Level.SEVERE, "Exception", e); System.exit(2); @@ -1026,8 +1323,8 @@ public static void main(String[] argv) throws InterruptedException { } File morePagesDir = null; - if (argv.length > 1) { - morePagesDir = new File(argv[1]); + if (argv.length > 2) { + morePagesDir = new File(argv[2]); if (!morePagesDir.exists()) { logger.severe("Directory " + morePagesDir + " does not exist."); System.exit(2); @@ -1040,6 +1337,11 @@ public static void main(String[] argv) throws InterruptedException { } } - new DownloadOneEdition().run(u, morePagesDir); + if (numThreads.intValue() == 0) { + new DownloadOneEdition().run(u, morePagesDir); + } else { + logger.info("Running with " + numThreads.intValue() + " threads."); + new DownloadOneEdition().run2(numThreads.intValue(), u, morePagesDir); + } } } From d6820dc848d0dd4e734e11410cfb91b8cafc3865 Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Tue, 10 Dec 2019 13:19:24 +0200 Subject: [PATCH 165/180] fix generic --- src/plugins/Library/util/SkeletonBTreeMap.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/plugins/Library/util/SkeletonBTreeMap.java b/src/plugins/Library/util/SkeletonBTreeMap.java index 84628770..aa326304 100644 --- a/src/plugins/Library/util/SkeletonBTreeMap.java +++ b/src/plugins/Library/util/SkeletonBTreeMap.java @@ -562,7 +562,7 
@@ public interface SkeletonMap final Queue nodequeue = new PriorityQueue(); - Map, ProgressTracker> ids = null; + Map, ProgressTracker> ids = null; ProgressTracker ntracker = null;; if (nsrl instanceof Serialiser.Trackable) { From 2f4d2103d2f2157fdb3309c4f133d815bb0cd9bc Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Tue, 10 Dec 2019 13:29:45 +0200 Subject: [PATCH 166/180] explanatory comment added --- src/plugins/Library/util/SkeletonTreeMap.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/plugins/Library/util/SkeletonTreeMap.java b/src/plugins/Library/util/SkeletonTreeMap.java index 866fb2bc..1200f5c1 100644 --- a/src/plugins/Library/util/SkeletonTreeMap.java +++ b/src/plugins/Library/util/SkeletonTreeMap.java @@ -321,10 +321,10 @@ public static Map app(SkeletonTreeMap map, Map SkeletonTreeMap rev(Map intm, SkeletonTreeMap map, Translator ktr) throws DataFormatException { - if (ktr == null) { + if (ktr == null) { // this case when K is String and therefore no keyTranslator is needed try { for (Map.Entry en: intm.entrySet()) { - map.putGhost((K)en.getKey(), en.getValue()); + map.putGhost((K) en.getKey(), en.getValue()); } } catch (ClassCastException e) { throw new DataFormatException("TreeMapTranslator: reverse translation failed. 
Try supplying a non-null key-translator.", e, intm, null, null); From 86a7d0a2fc972489d5988575ba5c9680f97ee730 Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Tue, 10 Dec 2019 15:31:34 +0200 Subject: [PATCH 167/180] update java version to 8 and snakeyaml to 1.25 --- build.xml | 20 +++++++------- src/plugins/Library/io/YamlReaderWriter.java | 27 +++++++------------ .../Library/io/serial/YamlMapTest.java | 14 +++------- 3 files changed, 24 insertions(+), 37 deletions(-) diff --git a/build.xml b/build.xml index fa7a6670..c0cee4e3 100644 --- a/build.xml +++ b/build.xml @@ -2,10 +2,10 @@ - + - - + + @@ -64,14 +64,14 @@ - - - + + + - - - + + + @@ -80,7 +80,7 @@ - + - + @@ -146,7 +146,7 @@ - + @@ -173,7 +173,7 @@ - + diff --git a/src/plugins/Library/io/YamlReaderWriter.java b/src/plugins/Library/io/YamlReaderWriter.java index a2b0977a..1d99dc6a 100644 --- a/src/plugins/Library/io/YamlReaderWriter.java +++ b/src/plugins/Library/io/YamlReaderWriter.java @@ -17,6 +17,8 @@ import org.yaml.snakeyaml.constructor.Constructor; import org.yaml.snakeyaml.constructor.AbstractConstruct; +import java.lang.reflect.InvocationTargetException; +import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.Arrays; import java.util.Map; @@ -35,20 +37,18 @@ import plugins.Library.index.TermTermEntry; import freenet.keys.FreenetURI; - /** -** Converts between an object and a stream containing a YAML document. By -** default, this uses a {@link Yaml} processor with additional object and tag -** definitions relevant to the Library plugin. -** -** (Ideally this would implement {@link java.io.ObjectInput} and {@link -** java.io.ObjectOutput} but they have too many methods to bother with...) -** -** @see Yaml -** @author infinity0 +* Converts between an object and a stream containing a YAML document. By +* default, this uses a {@link Yaml} processor with additional object and tag +* definitions relevant to the Library plugin. 
+* +* (Ideally this would implement {@link java.io.ObjectInput} and {@link +* java.io.ObjectOutput} but they have too many methods to bother with...) +* +* @see Yaml +* @author infinity0 */ -public class YamlReaderWriter -implements ObjectStreamReader, ObjectStreamWriter { +public class YamlReaderWriter implements ObjectStreamReader, ObjectStreamWriter { final public static String MIME_TYPE = "text/yaml"; final public static String FILE_EXTENSION = ".yml"; @@ -60,10 +60,11 @@ public class YamlReaderWriter public YamlReaderWriter() { } - /*@Override**/ public Object readObject(InputStream is) throws IOException { + @Override + public Object readObject(InputStream is) throws IOException { parallelLimiter.acquireUninterruptibly(); try { - return makeYAML().load(new InputStreamReader(is, "UTF-8")); + return makeYAML().load(new InputStreamReader(is, StandardCharsets.UTF_8)); } catch (YAMLException e) { throw new DataFormatException("Yaml could not process the stream: " + is, e, is, null, null); } finally { @@ -71,10 +72,11 @@ public YamlReaderWriter() { } } - /*@Override**/ public void writeObject(Object o, OutputStream os) throws IOException { + @Override + public void writeObject(Object o, OutputStream os) throws IOException { parallelLimiter.acquireUninterruptibly(); try { - makeYAML().dump(o, new OutputStreamWriter(os, "UTF-8")); + makeYAML().dump(o, new OutputStreamWriter(os, StandardCharsets.UTF_8)); } catch (YAMLException e) { throw new DataFormatException("Yaml could not process the object", e, o, null, null); } finally { @@ -82,9 +84,11 @@ public YamlReaderWriter() { } } - /** We do NOT keep this thread-local, because the Composer is only cleared after + /** + * We do NOT keep this thread-local, because the Composer is only cleared after * the next call to load(), so it can persist with a lot of useless data if we - * then use a different thread. So lets just construct them as needed. */ + * then use a different thread. So lets just construct them as needed. 
+ */ private Yaml makeYAML() { DumperOptions opt = new DumperOptions(); opt.setWidth(Integer.MAX_VALUE); @@ -97,37 +101,40 @@ private Yaml makeYAML() { final public static ObjectBlueprint tebp_page; static { try { - tebp_term = new ObjectBlueprint(TermTermEntry.class, Arrays.asList("subj", "rel", "term")); - tebp_index = new ObjectBlueprint(TermIndexEntry.class, Arrays.asList("subj", "rel", "index")); - tebp_page = new ObjectBlueprint(TermPageEntry.class, Arrays.asList("subj", "rel", "page", "title", "positions", "posFragments")); - } catch (NoSuchFieldException e) { - throw new AssertionError(e); - } catch (NoSuchMethodException e) { + tebp_term = new ObjectBlueprint<>(TermTermEntry.class, + Arrays.asList("subj", "rel", "term")); + tebp_index = new ObjectBlueprint<>(TermIndexEntry.class, + Arrays.asList("subj", "rel", "index")); + tebp_page = new ObjectBlueprint<>(TermPageEntry.class, + Arrays.asList("subj", "rel", "page", "title", "positions", "posFragments")); + } catch (NoSuchFieldException | NoSuchMethodException e) { throw new AssertionError(e); } } - /************************************************************************ - ** DOCUMENT + /** + * DOCUMENT */ public static class ExtendedRepresenter extends Representer { public ExtendedRepresenter() { this.representers.put(FreenetURI.class, new Represent() { - /*@Override**/ public Node representData(Object data) { - return representScalar(new Tag("!FreenetURI"), ((FreenetURI) data).toString()); + @Override + public Node representData(Object data) { + return representScalar(new Tag("!FreenetURI"), data.toString()); } }); this.representers.put(Packer.BinInfo.class, new Represent() { - /*@Override**/ public Node representData(Object data) { - Packer.BinInfo inf = (Packer.BinInfo)data; - Map map = Collections.singletonMap(inf.getID(), inf.getWeight()); + @Override + public Node representData(Object data) { + Packer.BinInfo inf = (Packer.BinInfo) data; + Map map = Collections.singletonMap(inf.getID(), 
inf.getWeight()); return representMapping(new Tag("!BinInfo"), map, DumperOptions.FlowStyle.FLOW); } }); - this.representers.put(TermTermEntry.class, new RepresentTermEntry(tebp_term)); - this.representers.put(TermIndexEntry.class, new RepresentTermEntry(tebp_index)); - this.representers.put(TermPageEntry.class, new RepresentTermEntry(tebp_page)); + this.representers.put(TermTermEntry.class, new RepresentTermEntry<>(tebp_term)); + this.representers.put(TermIndexEntry.class, new RepresentTermEntry<>(tebp_index)); + this.representers.put(TermPageEntry.class, new RepresentTermEntry<>(tebp_page)); } public class RepresentTermEntry implements Represent { @@ -140,45 +147,47 @@ public RepresentTermEntry(ObjectBlueprint bp) { tag = "!" + bp.getObjectClass().getSimpleName(); } - /*@Override**/ public Node representData(Object data) { - return representMapping(new Tag(tag), blueprint.objectAsMap((T)data), DumperOptions.FlowStyle.FLOW); + @Override + public Node representData(Object data) { + return representMapping(new Tag(tag), blueprint.objectAsMap((T) data), DumperOptions.FlowStyle.FLOW); } - } - } - - /************************************************************************ - ** DOCUMENT + /** + * DOCUMENT */ public static class ExtendedConstructor extends Constructor { public ExtendedConstructor() { this.yamlConstructors.put(new Tag("!FreenetURI"), new AbstractConstruct() { - /*@Override**/ public Object construct(Node node) { - String uri = (String) constructScalar((ScalarNode)node); + @Override + public Object construct(Node node) { + String uri = constructScalar((ScalarNode)node); try { return new FreenetURI(uri); } catch (java.net.MalformedURLException e) { - throw new ConstructorException("while constructing a FreenetURI", node.getStartMark(), "found malformed URI " + uri, null); + throw new ConstructorException("while constructing a FreenetURI", node.getStartMark(), + "found malformed URI " + uri, null); } } }); this.yamlConstructors.put(new Tag("!BinInfo"), new 
AbstractConstruct() { - /*@Override**/ public Object construct(Node node) { - Map map = (Map) constructMapping((MappingNode)node); + @Override + public Object construct(Node node) { + Map map = constructMapping((MappingNode)node); if (map.size() != 1) { - throw new ConstructorException("while constructing a Packer.BinInfo", node.getStartMark(), "found incorrectly sized map data " + map, null); + throw new ConstructorException("while constructing a Packer.BinInfo", node.getStartMark(), + "found incorrectly sized map data " + map, null); } - for (Map.Entry en: map.entrySet()) { - return new Packer.BinInfo(en.getKey(), (Integer)en.getValue()); + for (Map.Entry en: map.entrySet()) { + return new Packer.BinInfo(en.getKey(), (Integer) en.getValue()); } throw new AssertionError(); } }); - this.yamlConstructors.put(new Tag("!TermTermEntry"), new ConstructTermEntry(tebp_term)); - this.yamlConstructors.put(new Tag("!TermIndexEntry"), new ConstructTermEntry(tebp_index)); - this.yamlConstructors.put(new Tag("!TermPageEntry"), new ConstructTermEntry(tebp_page)); + this.yamlConstructors.put(new Tag("!TermTermEntry"), new ConstructTermEntry<>(tebp_term)); + this.yamlConstructors.put(new Tag("!TermIndexEntry"), new ConstructTermEntry<>(tebp_index)); + this.yamlConstructors.put(new Tag("!TermPageEntry"), new ConstructTermEntry<>(tebp_page)); } public class ConstructTermEntry extends AbstractConstruct { @@ -189,19 +198,17 @@ public ConstructTermEntry(ObjectBlueprint bp) { blueprint = bp; } - /*@Override**/ public Object construct(Node node) { - Map map = (Map)constructMapping((MappingNode)node); - map.put("rel", new Float(((Double)map.get("rel")).floatValue())); + @Override + public Object construct(Node node) { + Map map = constructMapping((MappingNode)node); + map.put("rel", ((Double) map.get("rel")).floatValue()); try { return blueprint.objectFromMap(map); - } catch (Exception e) { - //java.lang.InstantiationException - //java.lang.IllegalAccessException - 
//java.lang.reflect.InvocationTargetException - throw new ConstructorException("while constructing a " + blueprint.getObjectClass().getSimpleName(), node.getStartMark(), "could not instantiate map " + map, null, e); + } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { + throw new ConstructorException("while constructing a " + blueprint.getObjectClass().getSimpleName(), + node.getStartMark(), "could not instantiate map " + map, null, e); } } - } } @@ -214,6 +221,4 @@ public ConstructorException(String context, Mark contextMark, String problem, Ma super(context, contextMark, problem, problemMark, cause); } } - - } diff --git a/test/plugins/Library/io/serial/YamlMapTest.java b/test/plugins/Library/io/serial/YamlMapTest.java index 70ea7f30..7ef8eccc 100644 --- a/test/plugins/Library/io/serial/YamlMapTest.java +++ b/test/plugins/Library/io/serial/YamlMapTest.java @@ -24,7 +24,7 @@ public class YamlMapTest extends TestCase { public void testYamlMap() throws IOException { - Map data = new TreeMap(); + Map data = new TreeMap<>(); data.put("key1", new Bean()); data.put("key2", new Bean()); data.put("key3", new Custom("test")); @@ -38,15 +38,13 @@ public void testYamlMap() throws IOException { os.close(); FileInputStream is = new FileInputStream(file); - Object o = yaml.load(new InputStreamReader(is)); + Map map = yaml.load(new InputStreamReader(is)); is.close(); - assertTrue(o instanceof Map); - Map m = (Map)o; - assertTrue(m.get("key1") instanceof Bean); - assertTrue(m.get("key2") instanceof Bean); // NOTE these tests fail in snakeYAML 1.2 and below, fixed in 1.3 - assertTrue(m.get("key3") instanceof Custom); - assertTrue(m.get("key4") instanceof Wrapper); + assertTrue(map.get("key1") instanceof Bean); + assertTrue(map.get("key2") instanceof Bean); // NOTE these tests fail in snakeYAML 1.2 and below, fixed in 1.3 + assertTrue(map.get("key3") instanceof Custom); + assertTrue(map.get("key4") instanceof Wrapper); } public static class 
Bean { @@ -54,7 +52,6 @@ public static class Bean { public Bean() { a = ""; } public String getA() { return a; } public void setA(String s) { a = s; } - } public static class Wrapper { @@ -86,7 +83,6 @@ public Custom(Custom c) { public String toString() { return str; } } - public static class ExtendedRepresenter extends Representer { public ExtendedRepresenter() { this.representers.put(Custom.class, new RepresentCustom()); @@ -94,12 +90,11 @@ public ExtendedRepresenter() { private class RepresentCustom implements Represent { public Node representData(Object data) { - return representScalar(new Tag("!Custom"), ((Custom) data).toString()); + return representScalar(new Tag("!Custom"), data.toString()); } } } - public static class ExtendedConstructor extends Constructor { public ExtendedConstructor() { this.yamlConstructors.put(new Tag("!Custom"), new ConstructCustom()); @@ -107,11 +102,10 @@ public ExtendedConstructor() { private class ConstructCustom implements Construct { public Object construct(Node node) { - String str = (String) constructScalar((ScalarNode)node); + String str = constructScalar((ScalarNode)node); return new Custom(str); } public void construct2ndStep(Node node, Object object) { } } } - } From 07d43f9a9c5d69b6abd6ce3e0b0481383da2f8c8 Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Tue, 10 Dec 2019 17:52:19 +0200 Subject: [PATCH 170/180] refactoring: bypass for same bug in second place --- .../Library/index/ProtoIndexSerialiser.java | 36 ++++++++++--------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java b/src/plugins/Library/index/ProtoIndexSerialiser.java index fc0e64d2..3485950c 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexSerialiser.java @@ -9,7 +9,6 @@ import plugins.Library.util.SkeletonBTreeSet; import plugins.Library.util.exec.SimpleProgress; import plugins.Library.util.exec.TaskAbortException; -import 
plugins.Library.io.serial.Serialiser.*; import plugins.Library.io.serial.LiveArchiver; import plugins.Library.io.serial.Serialiser; import plugins.Library.io.serial.Translator; @@ -110,7 +109,7 @@ public static ProtoIndexSerialiser forIndex(File prefix) { serialisable.data.put("reqID", task.meta); } try { - task.data = trans.rev(serialisable.data); + task.data = trans.rev(cast(serialisable.data /* FIXME: there Double and String keys */)); } catch (DataFormatException e) { throw new TaskAbortException("Could not construct index from data", e); } @@ -123,6 +122,23 @@ public static ProtoIndexSerialiser forIndex(File prefix) { task.meta = serialisable.meta; } + // should be removed when the cause of the mixture of Double and String among the keys is eliminated + private static Map cast(Map map) { + List> wrongEntries = new ArrayList<>(); + for(Iterator> it = map.entrySet().iterator(); it.hasNext(); ) { + Map.Entry entry = it.next(); + if (entry.getKey() instanceof String) { + continue; + } + wrongEntries.add(entry); + it.remove(); + } + for (Map.Entry entry : wrongEntries) { + map.put(String.valueOf(entry.getKey()), entry.getValue()); + } + return (Map) map; + } + public static class IndexTranslator implements Translator> { @@ -200,20 +216,8 @@ public IndexTranslator(LiveArchiver, SimpleProgress> subsrl) SkeletonBTreeMap> utab = utrans.rev((Map)map.get("utab")); // FIXME: termTable has Double and String keys - Map termTable = (Map) map.get("ttab"); - List wrongEntries = new ArrayList<>(); - for(Iterator it = termTable.entrySet().iterator(); it.hasNext(); ) { - Map.Entry entry = it.next(); - if (entry.getKey() instanceof String) { - continue; - } - wrongEntries.add(entry); - it.remove(); - } - for (Map.Entry entry : wrongEntries) { - termTable.put(String.valueOf(entry.getKey()), entry.getValue()); - } - SkeletonBTreeMap> ttab = ttrans.rev((Map) termTable); + Map termTable = ProtoIndexSerialiser.cast((Map) map.get("ttab")); + SkeletonBTreeMap> ttab = 
ttrans.rev(termTable); return cmpsrl.setSerialiserFor(new ProtoIndex(reqID, name, ownerName, ownerEmail, totalPages, modified, extra, utab, ttab)); From bc7ab73b89a464207f95a0426a883fe5bdc3e813 Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Tue, 10 Dec 2019 22:08:45 +0200 Subject: [PATCH 171/180] disable snakeyaml implicit resolvers --- .../Library/index/ProtoIndexSerialiser.java | 40 +++++-------------- src/plugins/Library/io/YamlReaderWriter.java | 11 +++-- 2 files changed, 18 insertions(+), 33 deletions(-) diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java b/src/plugins/Library/index/ProtoIndexSerialiser.java index 3485950c..e6206530 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexSerialiser.java @@ -19,6 +19,8 @@ import freenet.keys.FreenetURI; +import java.text.ParseException; +import java.text.SimpleDateFormat; import java.util.*; import java.io.File; @@ -109,7 +111,7 @@ public static ProtoIndexSerialiser forIndex(File prefix) { serialisable.data.put("reqID", task.meta); } try { - task.data = trans.rev(cast(serialisable.data /* FIXME: there Double and String keys */)); + task.data = trans.rev(serialisable.data); } catch (DataFormatException e) { throw new TaskAbortException("Could not construct index from data", e); } @@ -122,23 +124,6 @@ public static ProtoIndexSerialiser forIndex(File prefix) { task.meta = serialisable.meta; } - // should be removed when the cause of the mixture of Double and String among the keys is eliminated - private static Map cast(Map map) { - List> wrongEntries = new ArrayList<>(); - for(Iterator> it = map.entrySet().iterator(); it.hasNext(); ) { - Map.Entry entry = it.next(); - if (entry.getKey() instanceof String) { - continue; - } - wrongEntries.add(entry); - it.remove(); - } - for (Map.Entry entry : wrongEntries) { - map.put(String.valueOf(entry.getKey()), entry.getValue()); - } - return (Map) map; - } - public static class IndexTranslator implements 
Translator> { @@ -204,20 +189,15 @@ public IndexTranslator(LiveArchiver, SimpleProgress> subsrl) String name = (String)map.get("name"); String ownerName = (String)map.get("ownerName"); String ownerEmail = (String)map.get("ownerEmail"); - // FIXME yaml idiocy??? It seems to give a Long if the number is big enough to need one, and an Integer otherwise. - long totalPages; - Object o = map.get("totalPages"); - if(o instanceof Long) - totalPages = (Long)o; - else // Integer - totalPages = (Integer)o; - Date modified = (Date)map.get("modified"); + long totalPages = Long.parseLong((String) map.get("totalPages")); + Date modified = null; + try { + modified = new SimpleDateFormat("yyyy-MM-dd").parse((String) map.get("modified")); + } catch (ParseException ignored) { + } Map extra = (Map)map.get("extra"); SkeletonBTreeMap> utab = utrans.rev((Map)map.get("utab")); - - // FIXME: termTable has Double and String keys - Map termTable = ProtoIndexSerialiser.cast((Map) map.get("ttab")); - SkeletonBTreeMap> ttab = ttrans.rev(termTable); + SkeletonBTreeMap> ttab = ttrans.rev((Map) map.get("ttab")); return cmpsrl.setSerialiserFor(new ProtoIndex(reqID, name, ownerName, ownerEmail, totalPages, modified, extra, utab, ttab)); diff --git a/src/plugins/Library/io/YamlReaderWriter.java b/src/plugins/Library/io/YamlReaderWriter.java index 1d99dc6a..4279f6fd 100644 --- a/src/plugins/Library/io/YamlReaderWriter.java +++ b/src/plugins/Library/io/YamlReaderWriter.java @@ -30,6 +30,7 @@ import java.io.IOException; /* class definitions added to the extended Yaml processor */ +import org.yaml.snakeyaml.resolver.Resolver; import plugins.Library.io.serial.Packer; import plugins.Library.index.TermEntry; import plugins.Library.index.TermPageEntry; @@ -93,7 +94,11 @@ private Yaml makeYAML() { DumperOptions opt = new DumperOptions(); opt.setWidth(Integer.MAX_VALUE); opt.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK); - return new Yaml(new ExtendedConstructor(), new ExtendedRepresenter(), opt); + 
return new Yaml(new ExtendedConstructor(), new ExtendedRepresenter(), opt, new Resolver() { + @Override + protected void addImplicitResolvers() { // disable implicit resolvers + } + }); } final public static ObjectBlueprint tebp_term; @@ -180,7 +185,7 @@ public Object construct(Node node) { "found incorrectly sized map data " + map, null); } for (Map.Entry en: map.entrySet()) { - return new Packer.BinInfo(en.getKey(), (Integer) en.getValue()); + return new Packer.BinInfo(en.getKey(), Integer.parseInt((String) en.getValue())); } throw new AssertionError(); } @@ -201,7 +206,7 @@ public ConstructTermEntry(ObjectBlueprint bp) { @Override public Object construct(Node node) { Map map = constructMapping((MappingNode)node); - map.put("rel", ((Double) map.get("rel")).floatValue()); + map.put("rel", Float.valueOf((String) map.get("rel"))); try { return blueprint.objectFromMap(map); } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { From 10d6ea2eb6962ae9eb0b964f5f1193e065c55e10 Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Thu, 12 Dec 2019 17:30:47 +0200 Subject: [PATCH 172/180] kludge related to bc7ab73 --- src/plugins/Library/SpiderIndexUploader.java | 2 +- .../Library/index/ProtoIndexSerialiser.java | 41 +++++++++--- src/plugins/Library/index/TermEntry.java | 3 +- src/plugins/Library/io/ObjectBlueprint.java | 3 + src/plugins/Library/io/YamlReaderWriter.java | 61 ++++++++++++++++-- .../Library/util/SkeletonBTreeMap.java | 62 ++++++++++++++----- 6 files changed, 141 insertions(+), 31 deletions(-) diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index 9b853337..29fc76dd 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -389,7 +389,7 @@ private Closure>, TaskAbortException> /*@Override**/ public void invoke(Map.Entry> entry) throws TaskAbortException { String key = entry.getKey(); SkeletonBTreeSet tree = 
entry.getValue(); - if(logMINOR) Logger.minor(this, "Processing: "+key+" : "+tree); + Logger.minor(this, "Processing: " + key + " : " + (tree != null ? tree : "new")); if(tree != null) Logger.debug(this, "Merging data (on disk) in term "+key); else diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java b/src/plugins/Library/index/ProtoIndexSerialiser.java index e6206530..0051da5a 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexSerialiser.java @@ -179,21 +179,48 @@ public IndexTranslator(LiveArchiver, SimpleProgress> subsrl) } /*@Override**/ public ProtoIndex rev(Map map) throws DataFormatException { - long magic = (Long)map.get("serialVersionUID"); + Object serialVersionUID = map.get("serialVersionUID"); + long magic; + if (serialVersionUID instanceof String) { // FIXME + magic = Long.parseLong((String) map.get("serialVersionUID")); + } else { + magic = (Long) serialVersionUID; + } if (magic == ProtoIndex.serialVersionUID) { try { // FIXME yet more hacks related to the lack of proper asynchronous FreenetArchiver... 
- ProtoIndexComponentSerialiser cmpsrl = ProtoIndexComponentSerialiser.get((Integer)map.get("serialFormatUID"), subsrl); + Object serialFormatUIDObj = map.get("serialFormatUID"); + int serialFormatUID; + if (serialFormatUIDObj instanceof String) { // FIXME + serialFormatUID = Integer.parseInt((String) map.get("serialFormatUID")); + } else { + serialFormatUID = (Integer) serialFormatUIDObj; + } + ProtoIndexComponentSerialiser cmpsrl = ProtoIndexComponentSerialiser.get(serialFormatUID, subsrl); FreenetURI reqID = (FreenetURI)map.get("reqID"); String name = (String)map.get("name"); String ownerName = (String)map.get("ownerName"); String ownerEmail = (String)map.get("ownerEmail"); - long totalPages = Long.parseLong((String) map.get("totalPages")); - Date modified = null; - try { - modified = new SimpleDateFormat("yyyy-MM-dd").parse((String) map.get("modified")); - } catch (ParseException ignored) { + Object totalPagesObj = map.get("totalPages"); + long totalPages; + if (totalPagesObj instanceof String) { // FIXME + totalPages = Long.parseLong((String) totalPagesObj); + } else if (totalPagesObj instanceof Long) { // FIXME yaml??? It seems to give a Long if the number + totalPages = (Long) totalPagesObj; // is big enough to need one, and an Integer otherwise. 
+ } else { + totalPages = (Integer) totalPagesObj; + } + Object modifiedObj = map.get("modified"); + Date modified; + if (modifiedObj instanceof String) { // FIXME + try { + modified = new SimpleDateFormat("yyyy-MM-dd").parse((String) modifiedObj); + } catch (ParseException ignored) { + modified = null; + } + } else { + modified = (Date) modifiedObj; } Map extra = (Map)map.get("extra"); SkeletonBTreeMap> utab = utrans.rev((Map)map.get("utab")); diff --git a/src/plugins/Library/index/TermEntry.java b/src/plugins/Library/index/TermEntry.java index e5832b5a..5e86727a 100644 --- a/src/plugins/Library/index/TermEntry.java +++ b/src/plugins/Library/index/TermEntry.java @@ -37,7 +37,8 @@ public enum EntryType { public TermEntry(String s, float r) { if (s == null) { - throw new IllegalArgumentException("can't have a null subject!"); +// throw new IllegalArgumentException("can't have a null subject!"); + s = "null"; // FIXME } if (r < 0/* || r > 1*/) { // FIXME: I don't see how our relevance algorithm can be guaranteed to produce relevance <1. throw new IllegalArgumentException("Relevance must be in the half-closed interval (0,1]. 
Supplied: " + r); diff --git a/src/plugins/Library/io/ObjectBlueprint.java b/src/plugins/Library/io/ObjectBlueprint.java index 36969847..771ad312 100644 --- a/src/plugins/Library/io/ObjectBlueprint.java +++ b/src/plugins/Library/io/ObjectBlueprint.java @@ -299,6 +299,9 @@ public T objectFromMap(Map map) throws InstantiationException, IllegalAcce String property = en.getKey(); Class type = en.getValue(); Object value = map.get(property); + if (value != null && value.equals("null")) { // FIXME: case when type Map and value String "null" + value = null; + } try { if (type.isPrimitive()) { value = boxCast(type, value); diff --git a/src/plugins/Library/io/YamlReaderWriter.java b/src/plugins/Library/io/YamlReaderWriter.java index 4279f6fd..f0a9eeae 100644 --- a/src/plugins/Library/io/YamlReaderWriter.java +++ b/src/plugins/Library/io/YamlReaderWriter.java @@ -3,6 +3,7 @@ * http://www.gnu.org/ for further details of the GPL. */ package plugins.Library.io; +import freenet.support.SortedIntSet; import org.yaml.snakeyaml.nodes.Tag; import org.yaml.snakeyaml.Yaml; @@ -19,9 +20,7 @@ import java.lang.reflect.InvocationTargetException; import java.nio.charset.StandardCharsets; -import java.util.Collections; -import java.util.Arrays; -import java.util.Map; +import java.util.*; import java.util.concurrent.Semaphore; import java.io.OutputStream; import java.io.OutputStreamWriter; @@ -185,7 +184,13 @@ public Object construct(Node node) { "found incorrectly sized map data " + map, null); } for (Map.Entry en: map.entrySet()) { - return new Packer.BinInfo(en.getKey(), Integer.parseInt((String) en.getValue())); + int w; // FIXME + if (en.getValue() instanceof String) { + w = Integer.parseInt((String) en.getValue()); + } else { + w = (Integer) en.getValue(); + } + return new Packer.BinInfo(en.getKey(), w); } throw new AssertionError(); } @@ -206,7 +211,53 @@ public ConstructTermEntry(ObjectBlueprint bp) { @Override public Object construct(Node node) { Map map = 
constructMapping((MappingNode)node); - map.put("rel", Float.valueOf((String) map.get("rel"))); + Object relObj = map.get("rel"); + float rel; + if (relObj instanceof Double) { // FIXME + rel = ((Double) relObj).floatValue(); + } else { + rel = Float.parseFloat((String) relObj); + } + map.put("rel", rel); + + // FIXME + Object posObj = map.get("positions"); + if (posObj != null) { + if ("null".equals(posObj)) { + map.put("positions", null); + } else { + Set pos = new SortedIntSet(); + for (Object p : (Set) posObj) { + if (p instanceof String) { + pos.add("null".equals(p) ? null : Integer.parseInt((String) p)); + } else { + pos.add((Integer) p); + } + } + map.put("positions", pos); + } + } + + // FIXME + Object posFragmentsObj = map.get("posFragments"); + if (posFragmentsObj != null) { + if ("null".equals(posFragmentsObj)) { + map.put("posFragments", null); + } else { + Map frags = new HashMap<>(); + for (Map.Entry entry : ((Map) posFragmentsObj).entrySet()) { + Integer key; + if (entry.getKey() instanceof String) { + key = "null".equals(entry.getKey()) ? null : Integer.parseInt((String) entry.getKey()); + } else { + key = (Integer) entry.getKey(); + } + frags.put(key, "null".equals(entry.getValue()) ? 
null : (String) entry.getValue()); + } + map.put("posFragments", frags); + } + } + try { return blueprint.objectFromMap(map); } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { diff --git a/src/plugins/Library/util/SkeletonBTreeMap.java b/src/plugins/Library/util/SkeletonBTreeMap.java index aa326304..450ee336 100644 --- a/src/plugins/Library/util/SkeletonBTreeMap.java +++ b/src/plugins/Library/util/SkeletonBTreeMap.java @@ -1333,19 +1333,17 @@ private void handleLocalRemove(SkeletonNode n, K key, TrackingSweeper 10)) { count = 0; -// if(ccount++ > 10) { - System.out.println(/*System.identityHashCode(this) + " " + */proc_val + " " + proc_pull + " " + proc_push+ " "+proc_deflate); -// ccount = 0; -// } + if (proc_val == null) { + Logger.debug(this, /*System.identityHashCode(this) + " " + */ + proc_val + " " + proc_pull + " " + proc_push + " " + proc_deflate); + } else { + Logger.minor(this, proc_val + " " + proc_pull + " " + proc_push + " " + proc_deflate); + } notifier.waitUpdate(1000); } progress = false; @@ -1508,17 +1506,35 @@ public NodeTranslator(Translator k, Translator, R> m boolean notleaf = map.containsKey("subnodes"); List gh = null; if (notleaf) { - Map subnodes = (Map)map.get("subnodes"); - gh = new ArrayList(subnodes.size()); - for (Map.Entry en: subnodes.entrySet()) { - GhostNode ghost = new GhostNode(null, null, null, en.getValue()); + Map subnodes = (Map) map.get("subnodes"); + gh = new ArrayList<>(subnodes.size()); + for (Map.Entry en: subnodes.entrySet()) { + Object sObj = en.getValue(); + int s; + if (sObj instanceof String) { // FIXME + s = Integer.parseInt((String) sObj); + } else { + s = (Integer) sObj; + } + GhostNode ghost = new GhostNode(null, null, null, s); ghost.setMeta(en.getKey()); gh.add(ghost); } } + + // FIXME + Object lkey = map.get("lkey"); + if ("null".equals(lkey)) { + lkey = null; + } + Object rkey = map.get("rkey"); + if ("null".equals(rkey)) { + rkey = null; + } + SkeletonNode node = new 
SkeletonNode( - (ktr == null)? (K)map.get("lkey"): ktr.rev((Q)map.get("lkey")), - (ktr == null)? (K)map.get("rkey"): ktr.rev((Q)map.get("rkey")), + (ktr == null) ? (K) lkey : ktr.rev((Q) lkey), + (ktr == null) ? (K) rkey : ktr.rev((Q) rkey), !notleaf, (mtr == null)? (SkeletonTreeMap)map.get("entries") : mtr.rev((R)map.get("entries")), @@ -1575,8 +1591,20 @@ public TreeTranslator(Translator k, Translator, ?> m /*@Override**/ public SkeletonBTreeMap rev(Map map) throws DataFormatException { try { - SkeletonBTreeMap tree = new SkeletonBTreeMap((Integer)map.get("node_min")); - tree.size = (Integer)map.get("size"); + Object nodeMinObj = map.get("node_min"); + int nodeMin; + if (nodeMinObj instanceof String) { // FIXME + nodeMin = Integer.parseInt((String) nodeMinObj); + } else { + nodeMin = (Integer) map.get("node_min"); + } + SkeletonBTreeMap tree = new SkeletonBTreeMap<>(nodeMin); + Object sizeObj = map.get("size"); + if (sizeObj instanceof String) { + tree.size = Integer.parseInt((String) sizeObj); + } else { + tree.size = (Integer) sizeObj; + } // map.put("lkey", null); // NULLNOTICE: get() gives null which matches // map.put("rkey", null); // NULLNOTICE: get() gives null which matches tree.root = tree.makeNodeTranslator(ktr, mtr).rev(map); From afee80553f175e5e4f15b979bb787c70c9acc107 Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Thu, 12 Dec 2019 21:31:26 +0200 Subject: [PATCH 173/180] clean --- src/plugins/Library/io/YamlReaderWriter.java | 21 ++++++++++---------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/src/plugins/Library/io/YamlReaderWriter.java b/src/plugins/Library/io/YamlReaderWriter.java index f0a9eeae..1fd6b07d 100644 --- a/src/plugins/Library/io/YamlReaderWriter.java +++ b/src/plugins/Library/io/YamlReaderWriter.java @@ -166,7 +166,7 @@ public ExtendedConstructor() { this.yamlConstructors.put(new Tag("!FreenetURI"), new AbstractConstruct() { @Override public Object construct(Node node) { - String uri = 
constructScalar((ScalarNode)node); + String uri = constructScalar((ScalarNode) node); try { return new FreenetURI(uri); } catch (java.net.MalformedURLException e) { @@ -178,21 +178,20 @@ public Object construct(Node node) { this.yamlConstructors.put(new Tag("!BinInfo"), new AbstractConstruct() { @Override public Object construct(Node node) { - Map map = constructMapping((MappingNode)node); + Map map = constructMapping((MappingNode) node); if (map.size() != 1) { throw new ConstructorException("while constructing a Packer.BinInfo", node.getStartMark(), "found incorrectly sized map data " + map, null); } - for (Map.Entry en: map.entrySet()) { - int w; // FIXME - if (en.getValue() instanceof String) { - w = Integer.parseInt((String) en.getValue()); - } else { - w = (Integer) en.getValue(); - } - return new Packer.BinInfo(en.getKey(), w); + + Map.Entry entry = map.entrySet().iterator().next(); + int w; // FIXME + if (entry.getValue() instanceof String) { + w = Integer.parseInt((String) entry.getValue()); + } else { + w = (Integer) entry.getValue(); } - throw new AssertionError(); + return new Packer.BinInfo(entry.getKey(), w); } }); this.yamlConstructors.put(new Tag("!TermTermEntry"), new ConstructTermEntry<>(tebp_term)); From 77ca196c88db950f0835243a8bcec46cd504e16d Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Mon, 16 Dec 2019 18:26:27 +0200 Subject: [PATCH 174/180] localized the place that led to the hang + temporary solution --- src/plugins/Library/SpiderIndexUploader.java | 8 ++++++-- src/plugins/Library/io/serial/Packer.java | 6 +++++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/plugins/Library/SpiderIndexUploader.java b/src/plugins/Library/SpiderIndexUploader.java index 29fc76dd..d4eaec9f 100644 --- a/src/plugins/Library/SpiderIndexUploader.java +++ b/src/plugins/Library/SpiderIndexUploader.java @@ -66,7 +66,7 @@ public class SpiderIndexUploader { } private final PluginRespirator pr; - private Object freenetMergeSync = new Object(); + 
private final Object freenetMergeSync = new Object(); private boolean freenetMergeRunning = false; private boolean diskMergeRunning = false; @@ -75,7 +75,8 @@ public class SpiderIndexUploader { // When pushing is broken, allow max handling to reach this level before stalling forever to prevent running out of disk space. private int PUSH_BROKEN_MAX_HANDLING_COUNT = 10; // Don't use too much disk space, take into account fact that Spider slows down over time. - + + // Flag for the Spider plugin to hang private boolean pushBroken; /** The temporary on-disk index. We merge stuff into this until it exceeds a threshold size, then @@ -877,6 +878,9 @@ public void run() { } } + /** + * Retrieving data from Spider + */ public void handlePushBuffer(SimpleFieldSet params, Bucket data) { if(data.size() == 0) { diff --git a/src/plugins/Library/io/serial/Packer.java b/src/plugins/Library/io/serial/Packer.java index 363155ec..510b01e9 100644 --- a/src/plugins/Library/io/serial/Packer.java +++ b/src/plugins/Library/io/serial/Packer.java @@ -551,7 +551,11 @@ protected void pullUnloaded(Map> tasks, Object meta) throws TaskA PullTask> bintask = en.getValue(); for (Map.Entry el: bintask.data.entrySet()) { if (tasks.containsKey(el.getKey())) { - throw new TaskAbortException("Packer found an extra unexpected element (" + el.getKey() + ") inside a bin (" + en.getKey() + "). Either the data is corrupt, or the child serialiser is buggy.", new Exception("internal error")); + Logger.error(this, "Skip '" + el.getKey() + "' don't know why"); + continue; +// throw new TaskAbortException("Packer found an extra unexpected element (" + el.getKey() + ") " + +// "inside a bin (" + en.getKey() + "). 
Either the data is corrupt, " + +// "or the child serialiser is buggy.", new Exception("internal error")); } PullTask task = new PullTask(scale.makeMeta(en.getKey(), scale.weigh(el.getValue()))); task.data = el.getValue(); From c950a7891a7a41c22d1a4d2d66013a803e56d457 Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Wed, 8 Jan 2020 13:08:07 +0200 Subject: [PATCH 175/180] merge uploader --- README | 11 ++---- .../library/uploader/AdHocDataReader.java | 9 +++-- .../library/uploader/DirectoryCreator.java | 20 +++++------ .../library/uploader/DirectoryUploader.java | 36 +++++++++---------- .../freenet/library/uploader/DownloadAll.java | 2 +- .../library/uploader/DownloadOneEdition.java | 3 +- .../freenet/library/uploader/FcpArchiver.java | 31 ++++++++-------- .../library/uploader/FetchAllOnce.java | 9 ++--- .../freenet/library/uploader/IndexPeeker.java | 8 ++--- .../src/freenet/library/uploader/Merger.java | 8 ++--- .../library/uploader/TermEntryFileWriter.java | 4 +-- .../uploader/TermEntryReaderIterator.java | 4 +-- .../library/uploader/UploaderLibrary.java | 21 ++++++----- 13 files changed, 81 insertions(+), 85 deletions(-) diff --git a/README b/README index c1cdeaee..8681abfb 100644 --- a/README +++ b/README @@ -4,6 +4,7 @@ Build: plugin-Library$ ant + == Javadoc == If you want to generate Javadocs, download bliki-doclet, which is a little @@ -56,15 +57,9 @@ this may change soon : + == Ongoing work to split == The plugin is in src, test (for historical reasons). -The uploader is in uploader/src and uploader/test depending on fcp and -shared parts. - -The shared parts are in shared/src and shared/test. shared means shared -between the plugin and the uploader. - -Plan: These are three separate eclipse project so eclipse will help -monitoring the dependencies. +The uploader (standalone program) is in uploader/src and uploader/test depending on fcp and plugin. 
diff --git a/uploader/src/freenet/library/uploader/AdHocDataReader.java b/uploader/src/freenet/library/uploader/AdHocDataReader.java index ec1f61db..90f80d0d 100644 --- a/uploader/src/freenet/library/uploader/AdHocDataReader.java +++ b/uploader/src/freenet/library/uploader/AdHocDataReader.java @@ -10,11 +10,10 @@ import java.util.logging.Level; import java.util.logging.Logger; -import freenet.library.io.FreenetURI; -import freenet.library.io.YamlReaderWriter; -import freenet.library.io.serial.Packer; -import freenet.library.io.serial.Packer.BinInfo; - +import plugins.Library.io.FreenetURI; +import plugins.Library.io.YamlReaderWriter; +import plugins.Library.io.serial.Packer; +import plugins.Library.io.serial.Packer.BinInfo; class AdHocDataReader { /** Logger. */ diff --git a/uploader/src/freenet/library/uploader/DirectoryCreator.java b/uploader/src/freenet/library/uploader/DirectoryCreator.java index 18a524dd..9e060022 100644 --- a/uploader/src/freenet/library/uploader/DirectoryCreator.java +++ b/uploader/src/freenet/library/uploader/DirectoryCreator.java @@ -8,16 +8,16 @@ import java.util.Map; import java.util.Map.Entry; -import freenet.library.index.ProtoIndex; -import freenet.library.index.ProtoIndexComponentSerialiser; -import freenet.library.index.ProtoIndexSerialiser; -import freenet.library.index.TermEntry; -import freenet.library.io.FreenetURI; -import freenet.library.io.serial.LiveArchiver; -import freenet.library.io.serial.Serialiser.PushTask; -import freenet.library.util.SkeletonBTreeSet; -import freenet.library.util.exec.SimpleProgress; -import freenet.library.util.exec.TaskAbortException; +import plugins.Library.index.ProtoIndex; +import plugins.Library.index.ProtoIndexComponentSerialiser; +import plugins.Library.index.ProtoIndexSerialiser; +import plugins.Library.index.TermEntry; +import plugins.Library.io.FreenetURI; +import plugins.Library.io.serial.LiveArchiver; +import plugins.Library.io.serial.Serialiser.PushTask; +import 
plugins.Library.util.SkeletonBTreeSet; +import plugins.Library.util.exec.SimpleProgress; +import plugins.Library.util.exec.TaskAbortException; class DirectoryCreator { private ProtoIndex idxDisk; diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index ca835bca..f3fa7d18 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -21,21 +21,21 @@ import java.util.TreeSet; import java.util.concurrent.TimeUnit; -import freenet.library.Priority; -import freenet.library.index.ProtoIndex; -import freenet.library.index.ProtoIndexComponentSerialiser; -import freenet.library.index.ProtoIndexSerialiser; -import freenet.library.index.TermEntry; -import freenet.library.io.FreenetURI; -import freenet.library.io.serial.LiveArchiver; -import freenet.library.io.serial.Serialiser.PullTask; -import freenet.library.io.serial.Serialiser.PushTask; -import freenet.library.util.SkeletonBTreeMap; -import freenet.library.util.SkeletonBTreeSet; -import freenet.library.util.TaskAbortExceptionConvertor; -import freenet.library.util.exec.SimpleProgress; -import freenet.library.util.exec.TaskAbortException; -import freenet.library.util.func.Closure; +import plugins.Library.Priority; +import plugins.Library.index.ProtoIndex; +import plugins.Library.index.ProtoIndexComponentSerialiser; +import plugins.Library.index.ProtoIndexSerialiser; +import plugins.Library.index.TermEntry; +import plugins.Library.io.FreenetURI; +import plugins.Library.io.serial.LiveArchiver; +import plugins.Library.io.serial.Serialiser.PullTask; +import plugins.Library.io.serial.Serialiser.PushTask; +import plugins.Library.util.SkeletonBTreeMap; +import plugins.Library.util.SkeletonBTreeSet; +import plugins.Library.util.TaskAbortExceptionConvertor; +import plugins.Library.util.exec.SimpleProgress; +import plugins.Library.util.exec.TaskAbortException; +import 
plugins.Library.util.func.Closure; import net.pterodactylus.fcp.ClientPut; import net.pterodactylus.fcp.FcpAdapter; @@ -46,7 +46,7 @@ import net.pterodactylus.fcp.UploadFrom; class DirectoryUploader implements Runnable { - + FcpConnection connection; File directory; boolean forceCreateUSK; @@ -56,7 +56,7 @@ class DirectoryUploader implements Runnable { directory = d; forceCreateUSK = fcu; } - + public void run() { mergeToFreenet(directory); } @@ -469,7 +469,7 @@ private void uploadUSKForFreenetIndex(FreenetURI uri) { String insertURI = readFileLine(PRIV_URI_FILENAME); String keyPart = insertURI.substring("freenet:SSK@".length()); int lastEdition = Integer.parseInt(readFileLine(EDITION_FILENAME)); - final ClientPut usk = new ClientPut("USK@" + keyPart + "/" + (lastEdition + 1), + final ClientPut usk = new ClientPut("USK@" + keyPart + "/" + (lastEdition + 1), "USKupload", UploadFrom.redirect); usk.setTargetURI(uri.toString()); diff --git a/uploader/src/freenet/library/uploader/DownloadAll.java b/uploader/src/freenet/library/uploader/DownloadAll.java index 0108b431..f229c487 100644 --- a/uploader/src/freenet/library/uploader/DownloadAll.java +++ b/uploader/src/freenet/library/uploader/DownloadAll.java @@ -13,7 +13,7 @@ import java.net.MalformedURLException; -import freenet.library.io.FreenetURI; +import plugins.Library.io.FreenetURI; /** * Class to download the entire index. 
diff --git a/uploader/src/freenet/library/uploader/DownloadOneEdition.java b/uploader/src/freenet/library/uploader/DownloadOneEdition.java index c88e210b..35bc433c 100644 --- a/uploader/src/freenet/library/uploader/DownloadOneEdition.java +++ b/uploader/src/freenet/library/uploader/DownloadOneEdition.java @@ -40,7 +40,8 @@ import java.util.logging.Level; import java.util.logging.Logger; -import freenet.library.io.FreenetURI; +import plugins.Library.io.FreenetURI; + import net.pterodactylus.fcp.AllData; import net.pterodactylus.fcp.ClientGet; import net.pterodactylus.fcp.ClientPut; diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index d45fbb12..88727ee9 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -12,6 +12,17 @@ import java.util.Map; import java.util.Set; +import freenet.support.Base64; +import freenet.crypt.SHA256; + +import plugins.Library.Priority; +import plugins.Library.io.FreenetURI; +import plugins.Library.io.ObjectStreamReader; +import plugins.Library.io.ObjectStreamWriter; +import plugins.Library.io.serial.LiveArchiver; +import plugins.Library.util.exec.SimpleProgress; +import plugins.Library.util.exec.TaskAbortException; + import net.pterodactylus.fcp.ClientPut; import net.pterodactylus.fcp.FcpAdapter; import net.pterodactylus.fcp.FcpConnection; @@ -20,19 +31,9 @@ import net.pterodactylus.fcp.PutSuccessful; import net.pterodactylus.fcp.URIGenerated; import net.pterodactylus.fcp.Verbosity; -import freenet.copied.Base64; -import freenet.copied.SHA256; -import freenet.library.Priority; -import freenet.library.io.FreenetURI; -import freenet.library.io.ObjectStreamReader; -import freenet.library.io.ObjectStreamWriter; -import freenet.library.io.serial.LiveArchiver; -import freenet.library.util.exec.SimpleProgress; -import freenet.library.util.exec.TaskAbortException; - public class FcpArchiver - 
implements LiveArchiver { + implements LiveArchiver { private FcpConnection connection; private File cacheDir; private ObjectStreamReader reader; @@ -72,13 +73,13 @@ private net.pterodactylus.fcp.Priority getPriority() { } @Override - public void pull(freenet.library.io.serial.Serialiser.PullTask task) + public void pull(plugins.Library.io.serial.Serialiser.PullTask task) throws TaskAbortException { pullLive(task, null); } @Override - public void push(freenet.library.io.serial.Serialiser.PushTask task) + public void push(plugins.Library.io.serial.Serialiser.PushTask task) throws TaskAbortException { pushLive(task, null); } @@ -88,7 +89,7 @@ public void push(freenet.library.io.serial.Serialiser.PushTask task) * that we cannot take over someone else's index. */ @Override - public void pullLive(freenet.library.io.serial.Serialiser.PullTask task, + public void pullLive(plugins.Library.io.serial.Serialiser.PullTask task, SimpleProgress progress) throws TaskAbortException { if (cacheDir.exists()) { String cacheKey = null; @@ -286,7 +287,7 @@ FreenetURI getURI() { private static int counter = 1; @Override - public void pushLive(freenet.library.io.serial.Serialiser.PushTask task, + public void pushLive(plugins.Library.io.serial.Serialiser.PushTask task, SimpleProgress progress) throws TaskAbortException { // Slow down the build up of the queue. 
try { diff --git a/uploader/src/freenet/library/uploader/FetchAllOnce.java b/uploader/src/freenet/library/uploader/FetchAllOnce.java index 389123d2..a5ee5ab1 100644 --- a/uploader/src/freenet/library/uploader/FetchAllOnce.java +++ b/uploader/src/freenet/library/uploader/FetchAllOnce.java @@ -41,6 +41,11 @@ import java.util.logging.Level; import java.util.logging.Logger; +import plugins.Library.io.FreenetURI; +import plugins.Library.io.YamlReaderWriter; +import plugins.Library.io.serial.Packer; +import plugins.Library.io.serial.Packer.BinInfo; + import net.pterodactylus.fcp.AllData; import net.pterodactylus.fcp.ClientGet; import net.pterodactylus.fcp.ClientPut; @@ -54,10 +59,6 @@ import net.pterodactylus.fcp.SubscribedUSKUpdate; import net.pterodactylus.fcp.URIGenerated; import net.pterodactylus.fcp.Verbosity; -import freenet.library.io.FreenetURI; -import freenet.library.io.YamlReaderWriter; -import freenet.library.io.serial.Packer; -import freenet.library.io.serial.Packer.BinInfo; /** * Class to download the entire index. 
diff --git a/uploader/src/freenet/library/uploader/IndexPeeker.java b/uploader/src/freenet/library/uploader/IndexPeeker.java index 09d4f7bb..f2b95d43 100644 --- a/uploader/src/freenet/library/uploader/IndexPeeker.java +++ b/uploader/src/freenet/library/uploader/IndexPeeker.java @@ -9,10 +9,10 @@ import java.util.List; import java.util.Set; -import freenet.library.index.TermEntry; -import freenet.library.io.YamlReaderWriter; -import freenet.library.util.SkeletonBTreeMap; -import freenet.library.util.SkeletonBTreeSet; +import plugins.Library.index.TermEntry; +import plugins.Library.io.YamlReaderWriter; +import plugins.Library.util.SkeletonBTreeMap; +import plugins.Library.util.SkeletonBTreeSet; class IndexPeeker { private File directory; diff --git a/uploader/src/freenet/library/uploader/Merger.java b/uploader/src/freenet/library/uploader/Merger.java index ea15c658..957dd79b 100644 --- a/uploader/src/freenet/library/uploader/Merger.java +++ b/uploader/src/freenet/library/uploader/Merger.java @@ -18,11 +18,11 @@ import java.util.Map; import java.util.Set; -import net.pterodactylus.fcp.FcpConnection; +import net.pterodactylus.fcp.FcpConnection; // https://github.com/Bombe/jFCPlib -import freenet.library.FactoryRegister; -import freenet.library.index.TermEntry; -import freenet.library.util.exec.TaskAbortException; +import plugins.Library.FactoryRegister; +import plugins.Library.index.TermEntry; +import plugins.Library.util.exec.TaskAbortException; /** * Standalone program to do the merging. 
diff --git a/uploader/src/freenet/library/uploader/TermEntryFileWriter.java b/uploader/src/freenet/library/uploader/TermEntryFileWriter.java index bf19668f..01d6fbc9 100644 --- a/uploader/src/freenet/library/uploader/TermEntryFileWriter.java +++ b/uploader/src/freenet/library/uploader/TermEntryFileWriter.java @@ -8,8 +8,8 @@ import java.util.Map; import java.util.Map.Entry; -import freenet.library.index.TermEntry; -import freenet.library.index.TermEntryReaderWriter; +import plugins.Library.index.TermEntry; +import plugins.Library.index.TermEntryReaderWriter; class TermEntryFileWriter { private DataOutputStream os; diff --git a/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java b/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java index c9bd2c6e..2c0845e5 100644 --- a/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java +++ b/uploader/src/freenet/library/uploader/TermEntryReaderIterator.java @@ -8,8 +8,8 @@ import java.util.Iterator; import java.util.Map; -import freenet.library.index.TermEntry; -import freenet.library.index.TermEntryReaderWriter; +import plugins.Library.index.TermEntry; +import plugins.Library.index.TermEntryReaderWriter; class TermEntryReaderIterator implements Iterable { private DataInputStream is; diff --git a/uploader/src/freenet/library/uploader/UploaderLibrary.java b/uploader/src/freenet/library/uploader/UploaderLibrary.java index 08b55786..2ab62494 100644 --- a/uploader/src/freenet/library/uploader/UploaderLibrary.java +++ b/uploader/src/freenet/library/uploader/UploaderLibrary.java @@ -6,16 +6,15 @@ import java.io.File; import java.security.MessageDigest; -import freenet.library.ArchiverFactory; -import freenet.library.io.ObjectStreamReader; -import freenet.library.io.ObjectStreamWriter; -import freenet.library.io.serial.LiveArchiver; -import freenet.library.util.exec.SimpleProgress; +import plugins.Library.ArchiverFactory; +import plugins.Library.Priority; +import 
plugins.Library.io.ObjectStreamReader; +import plugins.Library.io.ObjectStreamWriter; +import plugins.Library.io.serial.LiveArchiver; +import plugins.Library.util.exec.SimpleProgress; import net.pterodactylus.fcp.FcpConnection; - - /** * Library class is the api for others to use search facilities, it is used by the interfaces * @author MikeB @@ -46,9 +45,9 @@ public static UploaderLibrary getInstance() { } return lib; } - + public static FcpConnection fcpConnection; - + public synchronized static void init(FcpConnection connection) { fcpConnection = connection; } @@ -87,7 +86,7 @@ public static String MD5(String text) { public LiveArchiver newArchiver(S rw, String mime, int size, - freenet.library.Priority priorityLevel) { + Priority priorityLevel) { return new FcpArchiver(fcpConnection, new File(UploaderPaths.LIBRARY_CACHE), rw, @@ -99,7 +98,7 @@ public static String MD5(String text) { LiveArchiver newArchiver(S rw, String mime, int size, LiveArchiver archiver) { - freenet.library.Priority priorityLevel = freenet.library.Priority.Bulk; + Priority priorityLevel = Priority.Bulk; /* if (archiver != null && archiver isinstance ??) 
{ From a6a9a94445247954e57d332fa16a34d3c32d9906 Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Wed, 8 Jan 2020 18:47:40 +0200 Subject: [PATCH 176/180] first uploader launch --- .../src/freenet/library/uploader/DirectoryUploader.java | 5 ++++- .../src/freenet/library/uploader/DownloadOneEdition.java | 2 +- uploader/src/freenet/library/uploader/FetchAllOnce.java | 2 +- uploader/src/freenet/library/uploader/IndexPeeker.java | 8 ++++++++ 4 files changed, 14 insertions(+), 3 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index f3fa7d18..c2a485f4 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -299,7 +299,10 @@ protected void mergeToFreenet(ProtoIndex diskToMerge, ProtoIndex removeIdxDisk, assert diskToMerge != null || removeIdxDisk != null; if (lastUploadURI == null) { try { - lastUploadURI = new FreenetURI(readStringFrom(new File(LAST_URL_FILENAME))); + String lastURI = readStringFrom(new File(LAST_URL_FILENAME)); + if (lastURI != null) { // lastURI == null: case when the index is created for the first time + lastUploadURI = new FreenetURI(lastURI); + } } catch (MalformedURLException e) { throw new RuntimeException("File contents of " + LAST_URL_FILENAME + " invalid.", e); } diff --git a/uploader/src/freenet/library/uploader/DownloadOneEdition.java b/uploader/src/freenet/library/uploader/DownloadOneEdition.java index 35bc433c..34064faa 100644 --- a/uploader/src/freenet/library/uploader/DownloadOneEdition.java +++ b/uploader/src/freenet/library/uploader/DownloadOneEdition.java @@ -1266,7 +1266,7 @@ public void receivedSubscribedUSKUpdate(FcpConnection fcpConnection, } catch (MalformedURLException e) { throw new RuntimeException(e); } - if (subscribedUSKUpdate.isNewKnownGood() && !newUri.equals(newUris[0])) { + if (/*FIXME subscribedUSKUpdate.isNewKnownGood() &&*/ 
!newUri.equals(newUris[0])) { newUris[0] = newUri; editions[0] = subscribedUSKUpdate.getEdition(); synchronized (subscriber) { diff --git a/uploader/src/freenet/library/uploader/FetchAllOnce.java b/uploader/src/freenet/library/uploader/FetchAllOnce.java index a5ee5ab1..aae8887d 100644 --- a/uploader/src/freenet/library/uploader/FetchAllOnce.java +++ b/uploader/src/freenet/library/uploader/FetchAllOnce.java @@ -362,7 +362,7 @@ public USKUpdateAdapter(Object s) { @Override public void receivedSubscribedUSKUpdate(FcpConnection fcpConnection, SubscribedUSKUpdate subscribedUSKUpdate) { assert fcpConnection == connection; - if (subscribedUSKUpdate.isNewKnownGood() && + if (/*FIXME subscribedUSKUpdate.isNewKnownGood() &&*/ subscribedUSKUpdate.getEdition() > edition) { updated = true; try { diff --git a/uploader/src/freenet/library/uploader/IndexPeeker.java b/uploader/src/freenet/library/uploader/IndexPeeker.java index f2b95d43..3e7d151a 100644 --- a/uploader/src/freenet/library/uploader/IndexPeeker.java +++ b/uploader/src/freenet/library/uploader/IndexPeeker.java @@ -26,6 +26,14 @@ class IndexPeeker { IndexPeeker(File dir) { directory = dir; String lastCHK = DirectoryUploader.readStringFrom(new File(directory, UploaderPaths.LAST_URL_FILENAME)); + + if (lastCHK == null) { // case when the index is created for the first time + topTtab = new LinkedHashMap<>(); + topElements = new HashSet<>(); + activeSections = new LinkedList<>(); + return; + } + String rootFilename = directory + "/" + UploaderPaths.LIBRARY_CACHE + "/" + lastCHK; try { LinkedHashMap top = (LinkedHashMap) new YamlReaderWriter().readObject(new FileInputStream(new File(rootFilename))); From a013ef9a66249aaf59b47d1d10431c457e14943b Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Thu, 9 Jan 2020 20:26:08 +0200 Subject: [PATCH 177/180] continued testing --- .../library/uploader/DirectoryUploader.java | 6 +++- .../freenet/library/uploader/IndexPeeker.java | 28 ++++++++++--------- 2 files changed, 20 
insertions(+), 14 deletions(-) diff --git a/uploader/src/freenet/library/uploader/DirectoryUploader.java b/uploader/src/freenet/library/uploader/DirectoryUploader.java index c2a485f4..4710f052 100644 --- a/uploader/src/freenet/library/uploader/DirectoryUploader.java +++ b/uploader/src/freenet/library/uploader/DirectoryUploader.java @@ -471,7 +471,11 @@ protected void writeFileLine(String filename, String string) { private void uploadUSKForFreenetIndex(FreenetURI uri) { String insertURI = readFileLine(PRIV_URI_FILENAME); String keyPart = insertURI.substring("freenet:SSK@".length()); - int lastEdition = Integer.parseInt(readFileLine(EDITION_FILENAME)); + int lastEdition = 0; + try { + lastEdition = Integer.parseInt(readFileLine(EDITION_FILENAME)); + } catch (RuntimeException ignore) { } // FileNotFound + System.out.println("lastEdition: " + lastEdition); final ClientPut usk = new ClientPut("USK@" + keyPart + "/" + (lastEdition + 1), "USKupload", UploadFrom.redirect); diff --git a/uploader/src/freenet/library/uploader/IndexPeeker.java b/uploader/src/freenet/library/uploader/IndexPeeker.java index 3e7d151a..a86865c6 100644 --- a/uploader/src/freenet/library/uploader/IndexPeeker.java +++ b/uploader/src/freenet/library/uploader/IndexPeeker.java @@ -43,19 +43,21 @@ class IndexPeeker { e.printStackTrace(); System.exit(1); } - if (topTtab.size() < 1000) { - // So far the growth of the index and the growth of the elements - // in the top node has gone hand in hand keeping the amount of - // pages to update for each merger low. When the amount of terms - // will exceed 1500 x 2048 the B-tree index will suddenly be - // rebuilt with just two entries on top that will share all the - // terms between them. This means that this logic of splitting - // on the top level only will split into two piles instead of - // over a thousand and there is a risk that way too much will be - // included in each update. This code needs to be improved to - // handle this. 
- throw new IllegalArgumentException("This version of the script does not handle multi-level tree."); - } + + System.out.println("topTtab.size: " + topTtab.size()); +// if (topTtab.size() < 1000) { +// // So far the growth of the index and the growth of the elements +// // in the top node has gone hand in hand keeping the amount of +// // pages to update for each merger low. When the amount of terms +// // will exceed 1500 x 2048 the B-tree index will suddenly be +// // rebuilt with just two entries on top that will share all the +// // terms between them. This means that this logic of splitting +// // on the top level only will split into two piles instead of +// // over a thousand and there is a risk that way too much will be +// // included in each update. This code needs to be improved to +// // handle this. FIXME +// throw new IllegalArgumentException("This version of the script does not handle multi-level tree."); +// } topElements = new HashSet(topTtab.keySet()); activeSections = new LinkedList(); From 2ef742a089663bc9cb377e62bc10b09a10b2fe80 Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Tue, 14 Jan 2020 20:30:04 +0200 Subject: [PATCH 178/180] unclear null in url --- src/plugins/Library/io/FreenetURI.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/plugins/Library/io/FreenetURI.java b/src/plugins/Library/io/FreenetURI.java index ffe8d8b0..165929f1 100644 --- a/src/plugins/Library/io/FreenetURI.java +++ b/src/plugins/Library/io/FreenetURI.java @@ -29,6 +29,11 @@ public class FreenetURI implements Cloneable, Serializable { private String contents; public FreenetURI(String uri) throws MalformedURLException { + if (uri.endsWith("null")) { // FIXME + System.out.println("fixed uri in library (ends with 'null'): " + uri); + uri = uri.substring(0, uri.length() - 4); + } + contents = uri; if (contents.matches("^[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]-" + "[0-9a-f][0-9a-f][0-9a-f][0-9a-f]-" + @@ -140,7 
+145,7 @@ else if(type == KSK) for(int i = 0; i < metaStrings.length; i++) { b.append('/').append(URLEncoder.encode(metaStrings[i], "UTF-8")); } - + return new FreenetURI(b.toString()); } From 4cd7990a559b0e61431254d7eda42e6c9e2f73e4 Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Mon, 20 Jan 2020 14:03:51 +0200 Subject: [PATCH 179/180] try to fix hang `Outstanding 0 jobs (0/0)` but keeps waiting --- uploader/src/freenet/library/uploader/FcpArchiver.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/uploader/src/freenet/library/uploader/FcpArchiver.java b/uploader/src/freenet/library/uploader/FcpArchiver.java index 88727ee9..211a115f 100644 --- a/uploader/src/freenet/library/uploader/FcpArchiver.java +++ b/uploader/src/freenet/library/uploader/FcpArchiver.java @@ -11,6 +11,7 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; import freenet.support.Base64; import freenet.crypt.SHA256; @@ -45,10 +46,8 @@ public class FcpArchiver * Before synchronizing on stillRunning, be sure to synchronize * connection! 
*/ - private Map stillRunning = - new HashMap(); + private Map stillRunning = new HashMap<>(); private Thread cleanupThread; - public FcpArchiver(FcpConnection fcpConnection, File directory, @@ -227,6 +226,9 @@ public void receivedSimpleProgress(FcpConnection c, progressCompleted = sp.getSucceeded(); progressTotal = sp.getTotal(); printLeft(); + synchronized (stillRunning) { + stillRunning.notifyAll(); + } } public void receivedURIGenerated(FcpConnection c, URIGenerated uriGenerated) { @@ -422,7 +424,7 @@ public void waitForAsyncInserts() throws TaskAbortException { if (moreJobs) { synchronized (stillRunning) { try { - stillRunning.wait(); + stillRunning.wait(TimeUnit.HOURS.toMillis(1)); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); From a8b849026860aef4414aa9299d7c0e8567b8bead Mon Sep 17 00:00:00 2001 From: Oleh Shklyar Date: Thu, 30 Jan 2020 12:47:00 +0200 Subject: [PATCH 180/180] backward compatibility with previous indexes --- .../Library/index/ProtoIndexSerialiser.java | 37 +++++-------------- src/plugins/Library/io/YamlReaderWriter.java | 9 +---- .../index/ProtoIndexSerialiserTest.java | 4 +- test/plugins/Library/io/FreenetURITest.java | 22 +++++++++-- 4 files changed, 31 insertions(+), 41 deletions(-) diff --git a/src/plugins/Library/index/ProtoIndexSerialiser.java b/src/plugins/Library/index/ProtoIndexSerialiser.java index efb2f53e..1405c65b 100644 --- a/src/plugins/Library/index/ProtoIndexSerialiser.java +++ b/src/plugins/Library/index/ProtoIndexSerialiser.java @@ -3,8 +3,6 @@ * http://www.gnu.org/ for further details of the GPL. 
*/ package plugins.Library.index; -import plugins.Library.Library; -import plugins.Library.client.FreenetArchiver; import plugins.Library.util.SkeletonBTreeMap; import plugins.Library.util.SkeletonBTreeSet; import plugins.Library.util.exec.SimpleProgress; @@ -19,28 +17,15 @@ import java.text.ParseException; import java.text.SimpleDateFormat; -import java.util.*; import java.io.File; -import java.net.MalformedURLException; import java.util.Date; import java.util.LinkedHashMap; import java.util.Map; import plugins.Library.FactoryRegister; -import plugins.Library.io.DataFormatException; import plugins.Library.io.FreenetURI; -import plugins.Library.io.YamlReaderWriter; -import plugins.Library.io.serial.Archiver; -import plugins.Library.io.serial.FileArchiver; -import plugins.Library.io.serial.LiveArchiver; -import plugins.Library.io.serial.Serialiser; -import plugins.Library.io.serial.Translator; import plugins.Library.Priority; -import plugins.Library.util.SkeletonBTreeMap; -import plugins.Library.util.SkeletonBTreeSet; -import plugins.Library.util.exec.SimpleProgress; -import plugins.Library.util.exec.TaskAbortException; /** ** Serialiser for ProtoIndex @@ -209,20 +194,16 @@ public IndexTranslator(LiveArchiver, SimpleProgress> subsrl) if (magic == ProtoIndex.serialVersionUID) { try { // FIXME yet more hacks related to the lack of proper asynchronous FreenetArchiver... 
-//<<<<<<< HEAD:src/plugins/Library/index/ProtoIndexSerialiser.java -// Object serialFormatUIDObj = map.get("serialFormatUID"); -// int serialFormatUID; -// if (serialFormatUIDObj instanceof String) { // FIXME -// serialFormatUID = Integer.parseInt((String) map.get("serialFormatUID")); -// } else { -// serialFormatUID = (Integer) serialFormatUIDObj; -// } -// ProtoIndexComponentSerialiser cmpsrl = ProtoIndexComponentSerialiser.get(serialFormatUID, subsrl); -// FreenetURI reqID = (FreenetURI)map.get("reqID"); -//======= - ProtoIndexComponentSerialiser cmpsrl = ProtoIndexComponentSerialiser.get((Integer)map.get("serialFormatUID"), subsrl); + Object serialFormatUIDObj = map.get("serialFormatUID"); + int serialFormatUID; + if (serialFormatUIDObj instanceof String) { // FIXME + serialFormatUID = Integer.parseInt((String) map.get("serialFormatUID")); + } else { + serialFormatUID = (Integer) serialFormatUIDObj; + } + ProtoIndexComponentSerialiser cmpsrl = ProtoIndexComponentSerialiser.get(serialFormatUID, subsrl); + FreenetURI reqID = (FreenetURI) map.get("reqID"); -//>>>>>>> debbiedub/fcp-uploader:shared/src/freenet/library/index/ProtoIndexSerialiser.java String name = (String)map.get("name"); String ownerName = (String)map.get("ownerName"); String ownerEmail = (String)map.get("ownerEmail"); diff --git a/src/plugins/Library/io/YamlReaderWriter.java b/src/plugins/Library/io/YamlReaderWriter.java index 12bcc7a9..e0bb1ccc 100644 --- a/src/plugins/Library/io/YamlReaderWriter.java +++ b/src/plugins/Library/io/YamlReaderWriter.java @@ -36,11 +36,6 @@ import plugins.Library.index.TermPageEntry; import plugins.Library.index.TermIndexEntry; import plugins.Library.index.TermTermEntry; -import plugins.Library.index.TermEntry; -import plugins.Library.index.TermIndexEntry; -import plugins.Library.index.TermPageEntry; -import plugins.Library.index.TermTermEntry; -import plugins.Library.io.serial.Packer; /** * Converts between an object and a stream containing a YAML document. 
By @@ -130,7 +125,7 @@ public ExtendedRepresenter() { this.representers.put(FreenetURI.class, new Represent() { @Override public Node representData(Object data) { - return representScalar(new Tag("!plugins.Library.io.FreenetURI"), data.toString()); + return representScalar(new Tag("!FreenetURI"), data.toString()); } }); this.representers.put(Packer.BinInfo.class, new Represent() { @@ -168,7 +163,7 @@ public Node representData(Object data) { */ public static class ExtendedConstructor extends Constructor { public ExtendedConstructor() { - this.yamlConstructors.put(new Tag("!plugins.Library.io.FreenetURI"), new AbstractConstruct() { + this.yamlConstructors.put(new Tag("!FreenetURI"), new AbstractConstruct() { @Override public Object construct(Node node) { String uri = constructScalar((ScalarNode) node); diff --git a/test/plugins/Library/index/ProtoIndexSerialiserTest.java b/test/plugins/Library/index/ProtoIndexSerialiserTest.java index 9bcc2c02..0f3fa9dc 100644 --- a/test/plugins/Library/index/ProtoIndexSerialiserTest.java +++ b/test/plugins/Library/index/ProtoIndexSerialiserTest.java @@ -242,8 +242,8 @@ public void testPull() throws TaskAbortException, MalformedURLException { " entries:\n" + " adam: !BinInfo {? 
&id001 !!binary \"abcdef==\" : !!int '1'}\n" + " subnodes:\n" + - " !plugins.Library.io.FreenetURI 'CHK@123,456,A789': !!int '1234'\n" + - " !plugins.Library.io.FreenetURI 'CHK@456,678,A890': !!int '1235'\n" + + " !FreenetURI 'CHK@123,456,A789': !!int '1234'\n" + + " !FreenetURI 'CHK@456,678,A890': !!int '1235'\n" + "").getBytes(); task.data = mockProtoIndex; diff --git a/test/plugins/Library/io/FreenetURITest.java b/test/plugins/Library/io/FreenetURITest.java index b33c5526..f0e3c3ea 100644 --- a/test/plugins/Library/io/FreenetURITest.java +++ b/test/plugins/Library/io/FreenetURITest.java @@ -1,13 +1,27 @@ package plugins.Library.io; +import java.io.ByteArrayOutputStream; +import java.io.IOException; import java.net.MalformedURLException; -import junit.framework.TestCase; +import org.junit.Test; -public class FreenetURITest extends TestCase { - public void testGetUSKRoot() throws MalformedURLException { +import static org.junit.Assert.*; + +public class FreenetURITest { + + @Test + public void getUSKRootTest() throws MalformedURLException { FreenetURI to = new FreenetURI("USK@aa,bb,Acc/file/12345/meta"); - assertEquals("USK@aa,bb,Acc/file", to.getRoot()); } + + @Test + public void toYamlTest() throws IOException { + FreenetURI freenetURI = new FreenetURI("USK@aa,bb,Acc/file/12345/meta"); + YamlReaderWriter yamlReaderWriter = new YamlReaderWriter(); + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + yamlReaderWriter.writeObject(freenetURI, outputStream); + assertEquals("!FreenetURI 'USK@aa,bb,Acc/file/12345/meta'" + System.lineSeparator(), outputStream.toString()); + } }