From 3afe81045919b70a0a1f7c87d19abb2cb819da7a Mon Sep 17 00:00:00 2001 From: "Scott J. Miles" Date: Thu, 5 Nov 2020 23:01:44 -0800 Subject: [PATCH 01/31] modernize dev-shell --- shells/dev-shell/index.js | 153 +++++++++++++++++++------------------- 1 file changed, 76 insertions(+), 77 deletions(-) diff --git a/shells/dev-shell/index.js b/shells/dev-shell/index.js index d72140e03ad..27f6c766895 100644 --- a/shells/dev-shell/index.js +++ b/shells/dev-shell/index.js @@ -7,28 +7,20 @@ * subject to an additional IP rights grant found at * http://polymer.github.io/PATENTS.txt */ + import './file-pane.js'; import './output-pane.js'; import '../configuration/whitelisted.js'; import '../lib/platform/loglevel-web.js'; -import {Runtime} from '../../build/runtime/runtime.js'; -import {RamDiskStorageDriverProvider} from '../../build/runtime/storage/drivers/ramdisk.js'; -import {SimpleVolatileMemoryProvider} from '../../build/runtime/storage/drivers/volatile.js'; -import {DirectStorageEndpointManager} from '../../build/runtime/storage/direct-storage-endpoint-manager.js'; -import {Loader} from '../../build/platform/loader.js'; import {Arc} from '../../build/runtime/arc.js'; -import {IdGenerator} from '../../build/runtime/id.js'; -import {pecIndustry} from '../../build/platform/pec-industry-web.js'; +import {Runtime} from '../../build/runtime/runtime.js'; import {RecipeResolver} from '../../build/runtime/recipe-resolver.js'; import {devtoolsArcInspectorFactory} from '../../build/devtools-connector/devtools-arc-inspector.js'; -import {SlotComposer} from '../../build/runtime/slot-composer.js'; import {SlotObserver} from '../lib/xen-renderer.js'; -import '../../build/services/random-service.js'; - +// how to reach arcs root from our URL/PWD const root = '../..'; -const urlMap = Runtime.mapFromRootPath(root); // import DOM node references const { @@ -41,21 +33,19 @@ const { helpButton } = window; -let memoryProvider; init(); function init() { - memoryProvider = new 
SimpleVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - + // prepare ui filePane.init(execute, toggleFilesButton, exportFilesButton); executeButton.addEventListener('click', execute); helpButton.addEventListener('click', showHelp); popupContainer.addEventListener('click', () => popupContainer.style.display = 'none'); - + // scan window parameters const params = new URLSearchParams(window.location.search); + // set logLevel window.logLevel = (params.get('log') !== null) ? 1 : 0; - + // seed manifest as requested const manifestParam = params.get('m') || params.get('manifest'); if (manifestParam) { filePane.seedManifest(manifestParam.split(';').map(m => `import '${m}'`)); @@ -76,7 +66,6 @@ recipe h0: copy DataStore P data: reads h0`; - const exampleParticle = `\ defineParticle(({SimpleParticle, html, log}) => { return class extends SimpleParticle { @@ -89,88 +78,98 @@ defineParticle(({SimpleParticle, html, log}) => { } }; });`; - filePane.seedExample(exampleManifest, exampleParticle); } } function execute() { - wrappedExecute().catch(e => outputPane.showError('Unhandled exception', e.stack)); + wrappedExecute().catch(e => { + outputPane.showError('Unhandled exception', e.stack); + console.error(e); + }); } async function wrappedExecute() { + // clear ui document.dispatchEvent(new Event('clear-arcs-explorer')); outputPane.reset(); - - const loader = new Loader(urlMap, filePane.getFileMap()); - // TODO(sjmiles): should be a static method - loader.flushCaches(); - - const pecFactory = pecIndustry(loader); - - let manifest; + // establish a runtime using custom parameters + const runtime = await createRuntime(); + // attempt to parse the context manifest try { - const options = {loader, fileName: './manifest', throwImportErrors: true, memoryProvider}; - manifest = await Runtime.parseManifest(filePane.getManifest(), options); + runtime.context = await runtime.parse(filePane.getManifest(), {fileName: './manifest', throwImportErrors: 
true}); } catch (e) { outputPane.showError('Error in Manifest.parse', e); return; } - - if (manifest.allRecipes.length == 0) { + // check for existence of recipes + if (runtime.context.allRecipes.length == 0) { outputPane.showError('No recipes found in Manifest.parse'); } - + // instantiate an arc for each recipe in context let arcIndex = 1; - for (const recipe of manifest.allRecipes) { - const id = IdGenerator.newSession().newArcId('arc' + arcIndex++); - const arcPanel = outputPane.addArcPanel(id); + for (const recipe of runtime.context.allRecipes) { + executeArc(recipe, runtime, arcIndex++); + } +} - const errors = new Map(); - if (!recipe.normalize({errors})) { - arcPanel.showError('Error in recipe.normalize', [...errors.values()].join('\n')); - continue; - } +async function createRuntime(context) { + const runtime = Runtime.create({root, staticMap: filePane.getFileMap(), context}); + runtime.loader.flushCaches(); + return runtime; +} - const slotComposer = new SlotComposer(); - slotComposer.observeSlots(new SlotObserver(arcPanel.shadowRoot)); - - const arc = new Arc({ - id, - context: manifest, - pecFactories: [pecFactory], - slotComposer, - loader, - storageManager: new DirectStorageEndpointManager(), - inspectorFactory: devtoolsArcInspectorFactory - }); - arcPanel.attachArc(arc); - - recipe.normalize(); - - let resolvedRecipe = null; - if (recipe.isResolved()) { - resolvedRecipe = recipe; - } else { - const resolver = new RecipeResolver(arc); - const options = {errors: new Map()}; - resolvedRecipe = await resolver.resolve(recipe, options); - if (!resolvedRecipe) { - arcPanel.showError('Error in RecipeResolver', `${ - [...options.errors.entries()].join('\n') - }.\n${recipe.toString()}`); - continue; - } +const extraParams = { + inspectorFactory: devtoolsArcInspectorFactory +}; + +async function executeArc(recipe, runtime, index) { + // verify recipe is normalized + const errors = new Map(); + if (!recipe.normalize({errors})) { + arcPanel.showError('Error in 
recipe.normalize', [...errors.values()].join('\n')); + return; + } + // ask runtime to assemble arc parameter boilerplate (argument is the arc name) + const params = runtime.getArcParams(`arc${index}`); + // establish a UI Surface + const arcPanel = outputPane.addArcPanel(params.id); + const error = err => arcPanel.showError(err); + // attach a renderer (SlotObserver and a DOM node) to the composer + params.slotComposer.observeSlots(new SlotObserver(arcPanel.shadowRoot)); + // construct the arc + const arc = new Arc({...params, extraParams}); + // attach arc to bespoke shell ui + arcPanel.attachArc(arc); + // attempt to resolve recipe + let resolvedRecipe = null; + if (recipe.isResolved()) { + resolvedRecipe = recipe; + } else { + const errors = await resolveRecipe(arc, recipe); + if (!errors) { + error('Error in RecipeResolver', `${[...errors.entries()].join('\n')}.\n${recipe.toString()}`); + return; } + } + // instantiate recipe + try { + await arc.instantiate(resolvedRecipe); + } catch (e) { + error('Error in arc.instantiate', e); + return; + } + // display description + await arcPanel.arcInstantiated(await Runtime.getArcDescription(arc)); +} - try { - await arc.instantiate(resolvedRecipe); - } catch (e) { - arcPanel.showError('Error in arc.instantiate', e); - continue; +async function resolveRecipe(arc, recipe) { + if (!recipe.isResolved()) { + const resolver = new RecipeResolver(arc); + const errors = new Map(); + if (!await resolver.resolve(recipe, {errors})) { + return errors; } - const description = await Runtime.getArcDescription(arc); - await arcPanel.arcInstantiated(description); } } From a908f48f4c3fa6493f67c4cf57969c95a8efb64c Mon Sep 17 00:00:00 2001 From: "Scott J. 
Miles" Date: Thu, 5 Nov 2020 23:28:49 -0800 Subject: [PATCH 02/31] tweaks --- shells/dev-shell/index.js | 65 +++++++++++++++++++++++++++++---------- src/runtime/runtime.ts | 39 +++++++++++++++-------- 2 files changed, 75 insertions(+), 29 deletions(-) diff --git a/shells/dev-shell/index.js b/shells/dev-shell/index.js index 27f6c766895..73ecafd9aa4 100644 --- a/shells/dev-shell/index.js +++ b/shells/dev-shell/index.js @@ -19,7 +19,7 @@ import {RecipeResolver} from '../../build/runtime/recipe-resolver.js'; import {devtoolsArcInspectorFactory} from '../../build/devtools-connector/devtools-arc-inspector.js'; import {SlotObserver} from '../lib/xen-renderer.js'; -// how to reach arcs root from our URL/PWD +// how to reach arcs root from our URL/CWD const root = '../..'; // import DOM node references @@ -124,14 +124,8 @@ const extraParams = { }; async function executeArc(recipe, runtime, index) { - // verify recipe is normalized - const errors = new Map(); - if (!recipe.normalize({errors})) { - arcPanel.showError('Error in recipe.normalize', [...errors.values()].join('\n')); - return; - } // ask runtime to assemble arc parameter boilerplate (argument is the arc name) - const params = runtime.getArcParams(`arc${index}`); + const params = runtime.buildArcParams(`arc${index}`); // establish a UI Surface const arcPanel = outputPane.addArcPanel(params.id); const error = err => arcPanel.showError(err); @@ -141,26 +135,65 @@ async function executeArc(recipe, runtime, index) { const arc = new Arc({...params, extraParams}); // attach arc to bespoke shell ui arcPanel.attachArc(arc); + arc.arcPanel = arcPanel; + // + try { + // verify recipe is normalized + const errors = new Map(); + if (!recipe.normalize({errors})) { + throw (`Error in recipe.normalize: ${[...errors.values()].join('\n')}`); + } + await instantiateRecipe(arc, recipe); + } catch (x) { + arcPanel.showError(x); + return; + } + + // // attempt to resolve recipe + // let resolvedRecipe = null; + // if 
(recipe.isResolved()) { + // resolvedRecipe = recipe; + // } else { + // const resolver = new RecipeResolver(arc); + // const options = {errors: new Map()}; + // resolvedRecipe = await resolver.resolve(recipe, options); + // if (!resolvedRecipe) { + // arcPanel.showError('Error in RecipeResolver', `${ + // [...options.errors.entries()].join('\n') + // }.\n${recipe.toString()}`); + // return; + // } + // } + // // instantiate recipe + // try { + // await arc.instantiate(resolvedRecipe); + // } catch (e) { + // arcPanel.showError('Error in arc.instantiate', e); + // return; + // } + // display description + await arcPanel.arcInstantiated(await Runtime.getArcDescription(arc)); +} + +async function instantiateRecipe(arc, recipe) { // attempt to resolve recipe let resolvedRecipe = null; if (recipe.isResolved()) { resolvedRecipe = recipe; } else { - const errors = await resolveRecipe(arc, recipe); - if (!errors) { - error('Error in RecipeResolver', `${[...errors.entries()].join('\n')}.\n${recipe.toString()}`); - return; + const resolver = new RecipeResolver(arc); + const options = {errors: new Map()}; + resolvedRecipe = await resolver.resolve(recipe, options); + if (!resolvedRecipe) { + throw `Error in RecipeResolver: ${[...options.errors.entries()].join('\n')}.\n${recipe.toString()}`; } } // instantiate recipe try { await arc.instantiate(resolvedRecipe); } catch (e) { - error('Error in arc.instantiate', e); - return; + throw `Error in arc.instantiate: ${e}`; } - // display description - await arcPanel.arcInstantiated(await Runtime.getArcDescription(arc)); } async function resolveRecipe(arc, recipe) { diff --git a/src/runtime/runtime.ts b/src/runtime/runtime.ts index de8bd680c5a..c9ca134d8e3 100644 --- a/src/runtime/runtime.ts +++ b/src/runtime/runtime.ts @@ -73,22 +73,23 @@ export class Runtime { * arguments through numerous functions. * Some static methods on this class automatically use the default environment. 
*/ - static init(root?: string, urls?: {}): Runtime { - const map = {...Runtime.mapFromRootPath(root), ...urls}; - const loader = new Loader(map); + static init(root?: string, urlMap?: {}, staticMap?: {}, context?: {}): Runtime { + const map = {...Runtime.mapFromRootPath(root), ...urlMap}; + const loader = new Loader(map, staticMap); const pecFactory = pecIndustry(loader); - const memoryProvider = new SimpleVolatileMemoryProvider(); const runtime = new Runtime({ loader, composerClass: SlotComposer, pecFactory, memoryProvider }); - RamDiskStorageDriverProvider.register(memoryProvider); - VolatileStorageKey.register(); return runtime; } + static create({root, urlMap, staticMap, context}): Runtime { + return this.init(root, urlMap, staticMap, context); + } + static mapFromRootPath(root: string) { // TODO(sjmiles): this is a commonly-used map, but it's not generic enough to live here. // Shells that use this default should be provide it to `init` themselves. @@ -137,6 +138,17 @@ export class Runtime { this.context = context; } + buildArcParams(name?: string) { + const id = IdGenerator.newSession().newArcId(name); + const {loader, context} = this; + const pecFactories = [this.pecFactory]; + const slotComposer = this.composerClass ? new this.composerClass() : null; + const factories = [new VolatileStorageKeyFactory()]; + const storageService = this.storageService; + const capabilitiesResolver = new CapabilitiesResolver({arcId: id, factories}); + return {id, loader, pecFactories, slotComposer, storageService, capabilitiesResolver, context}; + } + // TODO(shans): Clean up once old storage is removed. // Note that this incorrectly assumes every storage key can be of the form `prefix` + `arcId`. // Should ids be provided to the Arc constructor, or should they be constructed by the Arc? 
@@ -157,7 +169,7 @@ export class Runtime { return new Arc({id, storageKey, capabilitiesResolver, loader, slotComposer, context, storageManager, ...options}); } - // Stuff the shell needs + // Stuff the shell(s) need /** * Given an arc name, return either: @@ -197,7 +209,9 @@ export class Runtime { * class for the options accepted. */ static async parseManifest(content: string, options?): Promise { - return Manifest.parse(content, options); + const runtime = this.getRuntime(); + const loader = runtime && runtime.loader; + return Manifest.parse(content, {loader, ...options}); } /** @@ -214,20 +228,19 @@ export class Runtime { // by other modules. Suggestions welcome. async parse(content: string, options?): Promise { - const {loader} = this; // TODO(sjmiles): this method of generating a manifest id is ad-hoc, // maybe should be using one of the id generators, or even better // we could eliminate it if the Manifest object takes care of this. - const id = `in-memory-${Math.floor((Math.random()+1)*1e6)}.manifest`; // TODO(sjmiles): this is a virtual manifest, the fileName is invented - const opts = {id, fileName: `./${id}`, loader, memoryProvider: this.memoryProvider, ...options}; + const id = `in-memory-${Math.floor((Math.random()+1)*1e6)}.manifest`; + const {loader, memoryProvider, storageService} = this; + const opts = {id, fileName: `./${id}`, loader, memoryProvider, storageService, ...options}; return Manifest.parse(content, opts); } async parseFile(path: string, options?): Promise { const content = await this.loader.loadResource(path); - const opts = {id: path, fileName: path, loader: this.loader, memoryProvider: this.memoryProvider, ...options}; - return this.parse(content, opts); + return this.parse(content, {id: path, fileName: path, ...options}); } static async resolveRecipe(arc: Arc, recipe: Recipe): Promise { From a0930360e05dd04513d966ba4e1033bcfb952eb4 Mon Sep 17 00:00:00 2001 From: "Scott J. 
Miles" Date: Thu, 5 Nov 2020 23:32:37 -0800 Subject: [PATCH 03/31] law of Demeter --- shells/dev-shell/index.js | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/shells/dev-shell/index.js b/shells/dev-shell/index.js index 73ecafd9aa4..9ded63a665c 100644 --- a/shells/dev-shell/index.js +++ b/shells/dev-shell/index.js @@ -22,6 +22,11 @@ import {SlotObserver} from '../lib/xen-renderer.js'; // how to reach arcs root from our URL/CWD const root = '../..'; +// extra params for created arcs +const extraArcParams = { + inspectorFactory: devtoolsArcInspectorFactory +}; + // import DOM node references const { filePane, @@ -119,10 +124,6 @@ async function createRuntime(context) { return runtime; } -const extraParams = { - inspectorFactory: devtoolsArcInspectorFactory -}; - async function executeArc(recipe, runtime, index) { // ask runtime to assemble arc parameter boilerplate (argument is the arc name) const params = runtime.buildArcParams(`arc${index}`); @@ -132,7 +133,7 @@ async function executeArc(recipe, runtime, index) { // attach a renderer (SlotObserver and a DOM node) to the composer params.slotComposer.observeSlots(new SlotObserver(arcPanel.shadowRoot)); // construct the arc - const arc = new Arc({...params, extraParams}); + const arc = new Arc({...params, extraArcParams}); // attach arc to bespoke shell ui arcPanel.attachArc(arc); arc.arcPanel = arcPanel; From 193c1a7a169850842905dcec2dae245cfba20ae8 Mon Sep 17 00:00:00 2001 From: "Scott J. Miles" Date: Wed, 11 Nov 2020 23:47:25 -0800 Subject: [PATCH 04/31] remove storageService --- src/runtime/runtime.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/runtime/runtime.ts b/src/runtime/runtime.ts index c9ca134d8e3..908a25137fb 100644 --- a/src/runtime/runtime.ts +++ b/src/runtime/runtime.ts @@ -144,9 +144,9 @@ export class Runtime { const pecFactories = [this.pecFactory]; const slotComposer = this.composerClass ? 
new this.composerClass() : null; const factories = [new VolatileStorageKeyFactory()]; - const storageService = this.storageService; + const storageManager = this.storageManager; const capabilitiesResolver = new CapabilitiesResolver({arcId: id, factories}); - return {id, loader, pecFactories, slotComposer, storageService, capabilitiesResolver, context}; + return {id, loader, pecFactories, slotComposer, storageManager, capabilitiesResolver, context}; } // TODO(shans): Clean up once old storage is removed. @@ -233,8 +233,8 @@ export class Runtime { // we could eliminate it if the Manifest object takes care of this. // TODO(sjmiles): this is a virtual manifest, the fileName is invented const id = `in-memory-${Math.floor((Math.random()+1)*1e6)}.manifest`; - const {loader, memoryProvider, storageService} = this; - const opts = {id, fileName: `./${id}`, loader, memoryProvider, storageService, ...options}; + const {loader, memoryProvider} = this; + const opts = {id, fileName: `./${id}`, loader, memoryProvider, ...options}; return Manifest.parse(content, opts); } From 4dad548cbde090f5c6f138fbd456ecd6110f8ad4 Mon Sep 17 00:00:00 2001 From: "Scott J. Miles" Date: Thu, 12 Nov 2020 00:08:02 -0800 Subject: [PATCH 05/31] cleanups --- shells/dev-shell/index.js | 85 +++++++++++++-------------------------- 1 file changed, 27 insertions(+), 58 deletions(-) diff --git a/shells/dev-shell/index.js b/shells/dev-shell/index.js index 9ded63a665c..00fd8125480 100644 --- a/shells/dev-shell/index.js +++ b/shells/dev-shell/index.js @@ -76,7 +76,7 @@ defineParticle(({SimpleParticle, html, log}) => { return class extends SimpleParticle { get template() { log(\`Add '?log' to the URL to enable particle logging\`); - return html\`{{num}} : {{str}}\`; + return \`
{{num}} : {{str}}
\`; } render({data}) { return data ? {num: data.num, str: data.txt} : {}; @@ -114,7 +114,7 @@ async function wrappedExecute() { // instantiate an arc for each recipe in context let arcIndex = 1; for (const recipe of runtime.context.allRecipes) { - executeArc(recipe, runtime, arcIndex++); + createRecipeArc(recipe, runtime, arcIndex++); } } @@ -124,87 +124,56 @@ async function createRuntime(context) { return runtime; } -async function executeArc(recipe, runtime, index) { +async function createRecipeArc(recipe, runtime, index) { // ask runtime to assemble arc parameter boilerplate (argument is the arc name) const params = runtime.buildArcParams(`arc${index}`); + // construct the arc + const arc = new Arc({...params, extraArcParams}); // establish a UI Surface const arcPanel = outputPane.addArcPanel(params.id); - const error = err => arcPanel.showError(err); // attach a renderer (SlotObserver and a DOM node) to the composer params.slotComposer.observeSlots(new SlotObserver(arcPanel.shadowRoot)); - // construct the arc - const arc = new Arc({...params, extraArcParams}); // attach arc to bespoke shell ui arcPanel.attachArc(arc); arc.arcPanel = arcPanel; - // try { - // verify recipe is normalized - const errors = new Map(); - if (!recipe.normalize({errors})) { - throw (`Error in recipe.normalize: ${[...errors.values()].join('\n')}`); - } - await instantiateRecipe(arc, recipe); + normalizeRecipe(arc, recipe); + const resolvedRecipe = await resolveRecipe(arc, recipe); + await instantiateRecipe(arc, resolvedRecipe); } catch (x) { - arcPanel.showError(x); + arcPanel.showError('recipe error', x); return; } - - // // attempt to resolve recipe - // let resolvedRecipe = null; - // if (recipe.isResolved()) { - // resolvedRecipe = recipe; - // } else { - // const resolver = new RecipeResolver(arc); - // const options = {errors: new Map()}; - // resolvedRecipe = await resolver.resolve(recipe, options); - // if (!resolvedRecipe) { - // arcPanel.showError('Error in RecipeResolver', 
`${ - // [...options.errors.entries()].join('\n') - // }.\n${recipe.toString()}`); - // return; - // } - // } - // // instantiate recipe - // try { - // await arc.instantiate(resolvedRecipe); - // } catch (e) { - // arcPanel.showError('Error in arc.instantiate', e); - // return; - // } // display description await arcPanel.arcInstantiated(await Runtime.getArcDescription(arc)); } -async function instantiateRecipe(arc, recipe) { - // attempt to resolve recipe - let resolvedRecipe = null; - if (recipe.isResolved()) { - resolvedRecipe = recipe; - } else { - const resolver = new RecipeResolver(arc); - const options = {errors: new Map()}; - resolvedRecipe = await resolver.resolve(recipe, options); - if (!resolvedRecipe) { - throw `Error in RecipeResolver: ${[...options.errors.entries()].join('\n')}.\n${recipe.toString()}`; - } - } - // instantiate recipe - try { - await arc.instantiate(resolvedRecipe); - } catch (e) { - throw `Error in arc.instantiate: ${e}`; +function normalizeRecipe(arc, recipe) { + const errors = new Map(); + if (!recipe.normalize({errors})) { + throw `Error in recipe.normalize: ${[...errors.values()].join('\n')}`; } } async function resolveRecipe(arc, recipe) { + let resolved = recipe; if (!recipe.isResolved()) { - const resolver = new RecipeResolver(arc); const errors = new Map(); - if (!await resolver.resolve(recipe, {errors})) { - return errors; + const resolver = new RecipeResolver(arc); + resolved = await resolver.resolve(recipe, {errors}); + if (!resolved) { + throw `Error in RecipeResolver: ${[...errors.entries()].join('\n')}.\n${recipe.toString()}`; } } + return resolved; +} + +async function instantiateRecipe(arc, recipe) { + try { + await arc.instantiate(recipe); + } catch (e) { + throw `Error in arc.instantiate: ${e}`; + } } function showHelp() { From 09fd6b6e4d88132f32c0d6b0f4315341828ca491 Mon Sep 17 00:00:00 2001 From: "Scott J. 
Miles" Date: Tue, 17 Nov 2020 01:24:35 -0800 Subject: [PATCH 06/31] surgically remove static (global) runtime object --- src/planning/planner.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/planning/planner.ts b/src/planning/planner.ts index a54191af784..eb9a16c5775 100644 --- a/src/planning/planner.ts +++ b/src/planning/planner.ts @@ -74,6 +74,8 @@ export interface PlannerInitOptions { runtime?: Runtime; } +const gRuntime = new Runtime(); + export class Planner implements InspectablePlanner { public arc: Arc; runtime: Runtime; From 32e20aec89c5344dcabcc5ab2778e750c627826a Mon Sep 17 00:00:00 2001 From: "Scott J. Miles" Date: Wed, 18 Nov 2020 15:03:18 -0800 Subject: [PATCH 07/31] clean ups --- src/planning/planner.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/planning/planner.ts b/src/planning/planner.ts index eb9a16c5775..a54191af784 100644 --- a/src/planning/planner.ts +++ b/src/planning/planner.ts @@ -74,8 +74,6 @@ export interface PlannerInitOptions { runtime?: Runtime; } -const gRuntime = new Runtime(); - export class Planner implements InspectablePlanner { public arc: Arc; runtime: Runtime; From 1987f50c9c6edb7dffd26d1b4e4b3170d9794c7e Mon Sep 17 00:00:00 2001 From: "Scott J. 
Miles" Date: Thu, 19 Nov 2020 18:50:24 -0800 Subject: [PATCH 08/31] big runtime usage refactor --- shells/tests/arcs/ts/runtime/arc-test.ts | 4 +- .../ts/runtime/hotreload-integration-test.ts | 2 +- .../runtime/multiplexer-integration-test.ts | 29 +- .../tests/arcs/ts/runtime/multiplexer-test.ts | 6 +- .../arcs/ts/runtime/particle-api-test.ts | 3 - ...ticle-interface-loading-with-slots-test.ts | 8 +- .../arcs/ts/runtime/plan-consumer-test.ts | 24 +- shells/tests/arcs/ts/runtime/products-test.ts | 27 +- .../arcs/ts/runtime/slot-composer-test.ts | 170 ++-- .../ts/runtime/transformation-slots-test.ts | 17 +- .../analysis/testing/flow-graph-testing.ts | 7 +- src/planning/plan/tests/plan-consumer-test.ts | 40 +- src/planning/plan/tests/plan-producer-test.ts | 23 +- src/planning/plan/tests/planificator-test.ts | 4 +- .../plan/tests/planning-result-test.ts | 46 +- .../plan/tests/test-environment-test.ts | 4 +- .../strategies/tests/coalesce-recipes-test.ts | 14 +- .../strategies/tests/resolve-recipe-test.ts | 50 +- .../tests/search-tokens-to-handles-test.ts | 24 +- src/planning/tests/planner-test.ts | 36 +- src/runtime/env.ts | 41 + src/runtime/recipe/tests/recipe-test.ts | 30 +- src/runtime/runtime.ts | 61 +- src/runtime/storage/drivers/driver-factory.ts | 7 - src/runtime/storage/drivers/firebase.ts | 4 +- .../storage/tests/active-store-test.ts | 5 +- .../storage/tests/direct-store-muxer-test.ts | 4 +- .../tests/firebase-store-integration-test.ts | 4 +- ...isk-direct-store-muxer-integration-test.ts | 7 +- .../tests/ramdisk-store-integration-test.ts | 9 +- .../reference-mode-store-integration-test.ts | 15 +- .../tests/reference-mode-store-test.ts | 5 +- src/runtime/storage/tests/storage-key-test.ts | 4 +- .../storage/tests/store-sequence-test.ts | 10 +- src/runtime/tests/arc-test.ts | 77 +- .../artifacts/Products/Interests.recipes | 2 +- .../artifacts/Products/Manufacturer.recipes | 2 +- .../artifacts/Products/ShowProducts.recipes | 2 +- 
.../tests/capabilities-resolver-test.ts | 104 +-- src/runtime/tests/manifest-test.ts | 809 +++++++++--------- src/runtime/tests/runtime-test.ts | 52 +- src/runtime/tests/test-environment-test.ts | 4 +- src/tests/arc-integration-test.ts | 16 +- src/tests/particles/common-test.ts | 10 +- src/tests/particles/dataflow-test.ts | 17 +- src/tests/particles/particles-test.ts | 15 +- src/tests/recipe-descriptions-test.ts | 32 +- src/tools/allocator-recipe-resolver.ts | 1 - .../tests/allocator-recipe-resolver-test.ts | 9 +- src/tools/tests/codegen-unit-test-base.ts | 4 +- src/tools/tests/manifest2proto-test.ts | 1 + src/tools/tests/recipe2plan-test.ts | 7 +- 52 files changed, 894 insertions(+), 1014 deletions(-) create mode 100644 src/runtime/env.ts diff --git a/shells/tests/arcs/ts/runtime/arc-test.ts b/shells/tests/arcs/ts/runtime/arc-test.ts index 116249a1b15..22693ffc721 100644 --- a/shells/tests/arcs/ts/runtime/arc-test.ts +++ b/shells/tests/arcs/ts/runtime/arc-test.ts @@ -16,7 +16,7 @@ import {Manifest} from '../../../../../build/runtime/manifest.js'; import {SlotComposer} from '../../../../../build/runtime/slot-composer.js'; import {Entity} from '../../../../../build/runtime/entity.js'; import {EntityType} from '../../../../../build/types/lib-types.js'; -import {DriverFactory} from '../../../../../build/runtime/storage/drivers/driver-factory.js'; +import {Runtime} from '../../../../../build/runtime/runtime.js'; import {VolatileStorageKey} from '../../../../../build/runtime/storage/drivers/volatile.js'; import {DirectStorageEndpointManager} from '../../../../../build/runtime/storage/direct-storage-endpoint-manager.js'; import {StoreInfo} from '../../../../../build/runtime/storage/store-info.js'; @@ -25,7 +25,7 @@ import '../../../../lib/arcs-ui/dist/install-ui-classes.js'; describe('Arc', () => { afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('deserializing a serialized arc with a Transformation produces that arc', 
async () => { diff --git a/shells/tests/arcs/ts/runtime/hotreload-integration-test.ts b/shells/tests/arcs/ts/runtime/hotreload-integration-test.ts index ea66b9cc68f..1869c272be5 100644 --- a/shells/tests/arcs/ts/runtime/hotreload-integration-test.ts +++ b/shells/tests/arcs/ts/runtime/hotreload-integration-test.ts @@ -35,8 +35,8 @@ describe('Hot Code Reload for JS Particle', async () => { }; });` }); - const runtime = new Runtime({loader, context}); + const arc = runtime.newArc('HotReload'); const [recipe] = arc.context.recipes; diff --git a/shells/tests/arcs/ts/runtime/multiplexer-integration-test.ts b/shells/tests/arcs/ts/runtime/multiplexer-integration-test.ts index 65fa923f32f..66b37b8405e 100644 --- a/shells/tests/arcs/ts/runtime/multiplexer-integration-test.ts +++ b/shells/tests/arcs/ts/runtime/multiplexer-integration-test.ts @@ -10,15 +10,11 @@ import {assert} from '../../../../../build/platform/chai-web.js'; import {Entity} from '../../../../../build/runtime/entity.js'; -import {Manifest} from '../../../../../build/runtime/manifest.js'; import {Runtime} from '../../../../../build/runtime/runtime.js'; import {SlotTestObserver} from '../../../../../build/runtime/testing/slot-test-observer.js'; import {Loader} from '../../../../../build/platform/loader.js'; -import {TestVolatileMemoryProvider} from '../../../../../build/runtime/testing/test-volatile-memory-provider.js'; import {storageKeyPrefixForTest} from '../../../../../build/runtime/testing/handle-for-test.js'; import {StrategyTestHelper} from '../../../../../build/planning/testing/strategy-test-helper.js'; -import {RamDiskStorageDriverProvider} from '../../../../../build/runtime/storage/drivers/ramdisk.js'; -import {DriverFactory} from '../../../../../build/runtime/storage/drivers/driver-factory.js'; import {handleForStoreInfo, CollectionEntityType} from '../../../../../build/runtime/storage/storage.js'; import {StoreInfo} from '../../../../../build/runtime/storage/store-info.js'; import 
{DirectStorageEndpointManager} from '../../../../../build/runtime/storage/direct-storage-endpoint-manager.js'; @@ -27,13 +23,10 @@ import '../../../../lib/arcs-ui/dist/install-ui-classes.js'; describe('Multiplexer', () => { it('renders polymorphic multiplexed slots', async () => { - const memoryProvider = new TestVolatileMemoryProvider(); - const storageManager = new DirectStorageEndpointManager(); - RamDiskStorageDriverProvider.register(memoryProvider); - const loader = new Loader(); - const manifest = './shells/tests/artifacts/polymorphic-muxing.recipes'; - const context = await Manifest.load(manifest, loader, {memoryProvider}); - + const runtime = new Runtime(); + const context = await runtime.parseFile('./shells/tests/artifacts/polymorphic-muxing.recipes'); + runtime.context = context; + // const showOneParticle = context.particles.find(p => p.name === 'ShowOne'); const showOneSpec = JSON.stringify(showOneParticle.toLiteral()); const recipeOne = @@ -56,7 +49,6 @@ describe('Multiplexer', () => { post: reads v1 item: consumes s1`; - const runtime = new Runtime({loader, context, memoryProvider}); const thePostsStore = context.stores.find(StoreInfo.isCollectionEntityStore); const postsHandle = await handleForStoreInfo(thePostsStore, {...context, storageManager: new DirectStorageEndpointManager()}); await postsHandle.add(Entity.identify( @@ -123,16 +115,13 @@ describe('Multiplexer', () => { await postsHandle2.add(entity); await arc.idle; - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); // TODO(sjmiles): probably should be in particles/tests/* because of Multiplexer.js // TODO(sjmiles): skipped because (in summary) plumbing data from the hostedParticle to the outer // arc is not this simple ... 
research is afoot it.skip('multiplexer can host non-slot-using particle', async () => { - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - // const canonMultiplexer = `./particles/List/source/Multiplexer.js`; const manifest = ` schema Foo @@ -172,15 +161,15 @@ describe('Multiplexer', () => { }; // const loader = new Loader(null, statics); - const context = await Manifest.parse(manifest, {fileName: './', loader, memoryProvider}); - const runtime = new Runtime({loader, context, memoryProvider}); - const arc = runtime.newArc('fooTest', storageKeyPrefixForTest()); + const runtime = new Runtime({loader}); + const context = await runtime.parseFile('./shells/tests/artifacts/polymorphic-muxing.recipes'); // + const arc = runtime.newArc('fooTest', storageKeyPrefixForTest()); const recipe = context.recipes[0]; const plan = await Runtime.resolveRecipe(arc, recipe); await arc.instantiate(plan); await arc.idle; - + // // NOTE: a direct translation of this to new storage is unlikely to work as // the store map inside arcs is different now. 
// diff --git a/shells/tests/arcs/ts/runtime/multiplexer-test.ts b/shells/tests/arcs/ts/runtime/multiplexer-test.ts index a24446b10bb..09a747db406 100644 --- a/shells/tests/arcs/ts/runtime/multiplexer-test.ts +++ b/shells/tests/arcs/ts/runtime/multiplexer-test.ts @@ -19,7 +19,8 @@ import '../../../../lib/arcs-ui/dist/install-ui-classes.js'; describe('Multiplexer', () => { it('processes multiple inputs', async () => { - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` import 'shells/tests/artifacts/Common/Multiplexer.manifest' import 'shells/tests/artifacts/test-particles.manifest' @@ -30,12 +31,11 @@ describe('Multiplexer', () => { hostedParticle: ConsumerParticle annotation: consumes slot0 list: reads handle0 - `, {loader: new Loader(), fileName: ''}); + `); const recipe = manifest.recipes[0]; const barType = checkDefined(manifest.findTypeByName('Bar')) as EntityType; - const runtime = new Runtime({context: manifest, loader: new Loader()}); const arc = runtime.newArc('test'); const barStore = await arc.createStore(barType.collectionOf(), null, 'test:1'); const barHandle = await handleForStoreInfo(barStore, arc); diff --git a/shells/tests/arcs/ts/runtime/particle-api-test.ts b/shells/tests/arcs/ts/runtime/particle-api-test.ts index 467d8f73538..c9de100bf2d 100644 --- a/shells/tests/arcs/ts/runtime/particle-api-test.ts +++ b/shells/tests/arcs/ts/runtime/particle-api-test.ts @@ -15,8 +15,6 @@ import {Runtime} from '../../../../../build/runtime/runtime.js'; import '../../../../lib/arcs-ui/dist/install-ui-classes.js'; describe('particle-api', () => { - - // TODO(sjmiles): uses xen particle it('loadRecipe returns ids of provided slots', async () => { const context = await Manifest.parse(` particle TransformationParticle in 'TransformationParticle.js' @@ -26,7 +24,6 @@ describe('particle-api', () => { slot0: slot 'rootslotid-root' TransformationParticle root: consumes slot0`); - const loader = new 
Loader(null, { 'TransformationParticle.js': `defineParticle(({UiParticle}) => { return class extends UiParticle { diff --git a/shells/tests/arcs/ts/runtime/particle-interface-loading-with-slots-test.ts b/shells/tests/arcs/ts/runtime/particle-interface-loading-with-slots-test.ts index afdb7c28add..f82301d896d 100644 --- a/shells/tests/arcs/ts/runtime/particle-interface-loading-with-slots-test.ts +++ b/shells/tests/arcs/ts/runtime/particle-interface-loading-with-slots-test.ts @@ -23,7 +23,7 @@ import '../../../../lib/arcs-ui/dist/install-ui-classes.js'; describe('particle interface loading with slots', () => { async function initializeManifestAndArc(contextContainer?): Promise<{manifest: Manifest, recipe: Recipe, observer: SlotTestObserver, arc: Arc}> { - const loader = new Loader(); + //const loader = new Loader(); const manifestText = ` import './shells/tests/artifacts/transformations/test-slots-particles.manifest' recipe @@ -34,12 +34,14 @@ describe('particle interface loading with slots', () => { foos: reads handle0 annotationsSet: consumes slot0 `; - const manifest = await Manifest.parse(manifestText, {loader, fileName: ''}); + const runtime = new Runtime(); + const manifest = await runtime.parse(manifestText); + + //const manifest = await Manifest.parse(manifestText/*, {loader, fileName: ''}*/); const recipe = manifest.recipes[0]; assert(recipe.normalize(), `can't normalize recipe`); assert(recipe.isResolved(), `recipe isn't resolved`); - const runtime = new Runtime({loader, context: manifest}); const arc = runtime.newArc('test'); const observer = new SlotTestObserver(); arc.peh.slotComposer.observeSlots(observer); diff --git a/shells/tests/arcs/ts/runtime/plan-consumer-test.ts b/shells/tests/arcs/ts/runtime/plan-consumer-test.ts index cc0690f86d5..a6e4cb8355f 100644 --- a/shells/tests/arcs/ts/runtime/plan-consumer-test.ts +++ b/shells/tests/arcs/ts/runtime/plan-consumer-test.ts @@ -8,18 +8,13 @@ * http://polymer.github.io/PATENTS.txt */ import {assert} from 
'../../../../../build/platform/chai-web.js'; -import {Manifest} from '../../../../../build/runtime/manifest.js'; import {Runtime} from '../../../../../build/runtime/runtime.js'; import {storageKeyPrefixForTest} from '../../../../../build/runtime/testing/handle-for-test.js'; -import {Loader} from '../../../../../build/platform/loader.js'; import {PlanConsumer} from '../../../../../build/planning/plan/plan-consumer.js'; import {Planificator} from '../../../../../build/planning/plan/planificator.js'; import {PlanningResult} from '../../../../../build/planning/plan/planning-result.js'; import {Suggestion} from '../../../../../build/planning/plan/suggestion.js'; import {StrategyTestHelper} from '../../../../../build/planning/testing/strategy-test-helper.js'; -import {RamDiskStorageDriverProvider} from '../../../../../build/runtime/storage/drivers/ramdisk.js'; -import {TestVolatileMemoryProvider} from '../../../../../build/runtime/testing/test-volatile-memory-provider.js'; -import {DriverFactory} from '../../../../../build/runtime/storage/drivers/driver-factory.js'; import {Arc} from '../../../../../build/runtime/arc.js'; import {ActiveSingletonEntityStore} from '../../../../../build/runtime/storage/storage.js'; import '../../../../lib/arcs-ui/dist/install-ui-classes.js'; @@ -37,21 +32,17 @@ async function storeResults(consumer: PlanConsumer, suggestions: Suggestion[]) { await new Promise(resolve => setTimeout(resolve, 100)); } -describe('plan consumer', () => { +describe('planFOOB consumer', () => { beforeEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); - // TODO(sjmiles): uses xen particle it('consumes', async () => { - const loader = new Loader(); - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - const context = await Manifest.parse(` + const manifestText = ` import 
'./shells/tests/artifacts/Products/Products.recipes' particle Test1 in './shells/tests/artifacts/consumer-particle.js' @@ -70,8 +61,11 @@ describe('plan consumer', () => { Test2 other: consumes other description \`Test Recipe\` - `, {loader, fileName: '', memoryProvider}); - const runtime = new Runtime({loader, context, memoryProvider}); + `; + + const runtime = new Runtime(); + runtime.context = await runtime.parse(manifestText); + const arc = runtime.newArc('demo', storageKeyPrefixForTest()); let suggestions = await StrategyTestHelper.planForArc(runtime, arc); diff --git a/shells/tests/arcs/ts/runtime/products-test.ts b/shells/tests/arcs/ts/runtime/products-test.ts index 99601f545db..66867b00353 100644 --- a/shells/tests/arcs/ts/runtime/products-test.ts +++ b/shells/tests/arcs/ts/runtime/products-test.ts @@ -9,16 +9,10 @@ */ import {assert} from '../../../../../build/platform/chai-web.js'; -import {Loader} from '../../../../../build/platform/loader.js'; import {Arc} from '../../../../../build/runtime/arc.js'; -import {IdGenerator} from '../../../../../build/runtime/id.js'; -import {Manifest} from '../../../../../build/runtime/manifest.js'; import {Runtime} from '../../../../../build/runtime/runtime.js'; import {SlotTestObserver} from '../../../../../build/runtime/testing/slot-test-observer.js'; -import {DriverFactory} from '../../../../../build/runtime/storage/drivers/driver-factory.js'; -import {RamDiskStorageDriverProvider} from '../../../../../build/runtime/storage/drivers/ramdisk.js'; -import {storageKeyForTest, storageKeyPrefixForTest} from '../../../../../build/runtime/testing/handle-for-test.js'; -import {TestVolatileMemoryProvider} from '../../../../../build/runtime/testing/test-volatile-memory-provider.js'; +import {storageKeyPrefixForTest} from '../../../../../build/runtime/testing/handle-for-test.js'; import {CollectionEntityHandle, CollectionEntityType, handleForStoreInfo} from '../../../../../build/runtime/storage/storage.js'; import {StoreInfo} from 
'../../../../../build/runtime/storage/store-info.js'; import '../../../../lib/arcs-ui/dist/install-ui-classes.js'; @@ -26,7 +20,7 @@ import '../../../../lib/arcs-ui/dist/install-ui-classes.js'; describe('products test', () => { afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); const manifestFilename = './shells/tests/artifacts/ProductsTestNg.arcs'; @@ -41,14 +35,8 @@ describe('products test', () => { }; it('filters', async () => { - const loader = new Loader(); - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - const runtime = new Runtime({ - loader, - context: await Manifest.load(manifestFilename, loader, {memoryProvider}), - memoryProvider - }); + const runtime = new Runtime(); + runtime.context = await runtime.parseFile(manifestFilename); const arc = runtime.newArc('demo', storageKeyPrefixForTest()); const recipe = arc.context.recipes.find(r => r.name === 'FilterBooks'); assert.isTrue(recipe.normalize() && recipe.isResolved()); @@ -58,11 +46,8 @@ describe('products test', () => { }); it('filters and displays', async () => { - const loader = new Loader(); - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - const id = IdGenerator.newSession().newArcId('demo'); - const runtime = new Runtime({loader, context: await Manifest.load(manifestFilename, loader, {memoryProvider})}); + const runtime = new Runtime(); + runtime.context = await runtime.parseFile(manifestFilename); const arc = runtime.newArc('demo'); const recipe = arc.context.recipes.find(r => r.name === 'FilterAndDisplayBooks'); assert.isTrue(recipe.normalize() && recipe.isResolved()); diff --git a/shells/tests/arcs/ts/runtime/slot-composer-test.ts b/shells/tests/arcs/ts/runtime/slot-composer-test.ts index f3c7223a382..675ea4c6267 100644 --- a/shells/tests/arcs/ts/runtime/slot-composer-test.ts +++ 
b/shells/tests/arcs/ts/runtime/slot-composer-test.ts @@ -14,12 +14,8 @@ import {Loader} from '../../../../../build/platform/loader.js'; import {SlotComposer} from '../../../../../build/runtime/slot-composer.js'; import {SlotTestObserver} from '../../../../../build/runtime/testing/slot-test-observer.js'; import {StrategyTestHelper} from '../../../../../build/planning/testing/strategy-test-helper.js'; -import {Manifest} from '../../../../../build/runtime/manifest.js'; import {Runtime} from '../../../../../build/runtime/runtime.js'; import {storageKeyPrefixForTest} from '../../../../../build/runtime/testing/handle-for-test.js'; -import {TestVolatileMemoryProvider} from '../../../../../build/runtime/testing/test-volatile-memory-provider.js'; -import {RamDiskStorageDriverProvider} from '../../../../../build/runtime/storage/drivers/ramdisk.js'; -import {DriverFactory} from '../../../../../build/runtime/storage/drivers/driver-factory.js'; import '../../../../lib/arcs-ui/dist/install-ui-classes.js'; class TestSlotComposer extends SlotComposer { @@ -32,7 +28,6 @@ class TestSlotComposer extends SlotComposer { } async function initSlotComposer(recipeStr) { - const manifest = await Manifest.parse(recipeStr); const loader = new Loader(null, { '*': ` defineParticle(({UiParticle}) => { @@ -44,7 +39,9 @@ async function initSlotComposer(recipeStr) { }); ` }); - const runtime = new Runtime({loader, context: manifest, composerClass: TestSlotComposer}); + const runtime = new Runtime({loader, composerClass: TestSlotComposer}); + runtime.context = await runtime.parse(recipeStr); + const arc = runtime.newArc('test-arc'); const planner = new Planner(); @@ -54,15 +51,16 @@ async function initSlotComposer(recipeStr) { await planner.strategizer.generate(); assert.lengthOf(planner.strategizer.population, 1); + const observer = (arc.peh.slotComposer as TestSlotComposer).observer; const plan = planner.strategizer.population[0].result; - return {arc, observer: (arc.peh.slotComposer as 
TestSlotComposer).observer, plan}; + return {arc, observer, plan}; } describe('slot composer', () => { afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('initialize recipe and render slots', async () => { @@ -117,13 +115,9 @@ recipe // in the render expectations; rendering uses _eventual correctness_ so it's not necessarily // deterministic: we may need to update the expectations system to take this into account. it.skip('initialize recipe and render hosted slots', async () => { - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - const loader = new Loader(); - const file = 'ProductsTestNg.arcs'; - const manifest = `./shells/tests/artifacts/${file}`; - const context = await Manifest.load(manifest, loader, {memoryProvider}); - const runtime = new Runtime({loader, context, memoryProvider}); + const manifest = `./shells/tests/artifacts/ProductsTestNg.arcs`; + const runtime = new Runtime(); + runtime.context = await runtime.parseFile(manifest); const arc = runtime.newArc('demo', storageKeyPrefixForTest()); const suggestions = await StrategyTestHelper.planForArc(runtime, arc); @@ -183,84 +177,84 @@ recipe }); it('renders inner slots in transformations without intercepting', async () => { - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - - const loader = new Loader(null, { - 'TransformationParticle.js': `defineParticle(({UiParticle}) => { - return class extends UiParticle { - async setHandles(handles) { - super.setHandles(handles); - - const innerArc = await this.constructInnerArc(); - const hostedSlotId = await innerArc.createSlot(this, 'root'); - - await innerArc.loadRecipe(\` - particle A in 'A.js' - content: consumes - detail: provides - - particle B in 'B.js' - detail: consumes - - recipe - hosted: slot '\` + hostedSlotId + \`' - A - content: consumes hosted - detail: provides detail - B 
- detail: consumes detail - \`); - } - renderHostedSlot(slotName, hostedSlotId, content) { - this.setState(content); - } - shouldRender() { - return Boolean(this.state.template); - } - getTemplate() { - return '
intercepted-template' + this.state.template + '
'; - } - getTemplateName() { - return this.state.templateName + '/intercepted'; - } - render() { - return Object.assign({}, this.state.model, {a: this.state.model.a + '/intercepted-model'}); - } - }; - });`, - 'A.js': `defineParticle(({UiParticle}) => { - return class extends UiParticle { - get template() { - return '
{{a}}
'; - } - render() { - return {a: 'A content'}; - } - }; - });`, - 'B.js': `defineParticle(({UiParticle}) => { - return class extends UiParticle { - get template() { - return '
{{b}}
'; - } - render() { - return {b: 'B content'}; - } - }; - });` - }); - - const context = await Manifest.parse(` - particle TransformationParticle in 'TransformationParticle.js' + const staticFiles = { + './TransformationParticle.js': `defineParticle(({UiParticle}) => { + return class extends UiParticle { + async setHandles(handles) { + super.setHandles(handles); + + const innerArc = await this.constructInnerArc(); + const hostedSlotId = await innerArc.createSlot(this, 'root'); + + await innerArc.loadRecipe(\` + particle A in './A.js' + content: consumes + detail: provides + + particle B in './B.js' + detail: consumes + + recipe + hosted: slot '\` + hostedSlotId + \`' + A + content: consumes hosted + detail: provides detail + B + detail: consumes detail + \`); + } + renderHostedSlot(slotName, hostedSlotId, content) { + this.setState(content); + } + shouldRender() { + return Boolean(this.state.template); + } + getTemplate() { + return '
intercepted-template' + this.state.template + '
'; + } + getTemplateName() { + return this.state.templateName + '/intercepted'; + } + render() { + return Object.assign({}, this.state.model, {a: this.state.model.a + '/intercepted-model'}); + } + }; + });`, + './A.js': `defineParticle(({UiParticle}) => { + return class extends UiParticle { + get template() { + return '
{{a}}
'; + } + render() { + return {a: 'A content'}; + } + }; + });`, + './B.js': `defineParticle(({UiParticle}) => { + return class extends UiParticle { + get template() { + return '
{{b}}
'; + } + render() { + return {b: 'B content'}; + } + }; + });` + }; + + const contextText = ` + particle TransformationParticle in './TransformationParticle.js' root: consumes recipe slot0: slot 'rootslotid-root' TransformationParticle - root: consumes slot0`, {loader, fileName: '', memoryProvider} - ); + root: consumes slot0 + `; + + const loader = new Loader(null, staticFiles); + const runtime = new Runtime({loader}); + runtime.context = await runtime.parse(contextText); - const runtime = new Runtime({loader, context, memoryProvider}); const arc = runtime.newArc('demo', storageKeyPrefixForTest()); const [recipe] = arc.context.recipes; recipe.normalize(); diff --git a/shells/tests/arcs/ts/runtime/transformation-slots-test.ts b/shells/tests/arcs/ts/runtime/transformation-slots-test.ts index 08d22f15598..d003760fd44 100644 --- a/shells/tests/arcs/ts/runtime/transformation-slots-test.ts +++ b/shells/tests/arcs/ts/runtime/transformation-slots-test.ts @@ -9,30 +9,21 @@ */ import {assert} from '../../../../../build/platform/chai-web.js'; -import {Manifest} from '../../../../../build/runtime/manifest.js'; import {Runtime} from '../../../../../build/runtime/runtime.js'; import {storageKeyPrefixForTest} from '../../../../../build/runtime/testing/handle-for-test.js'; import {SlotTestObserver} from '../../../../../build/runtime/testing/slot-test-observer.js'; -import {Loader} from '../../../../../build/platform/loader.js'; -import {TestVolatileMemoryProvider} from '../../../../../build/runtime/testing/test-volatile-memory-provider.js'; import {StrategyTestHelper} from '../../../../../build/planning/testing/strategy-test-helper.js'; -import {RamDiskStorageDriverProvider} from '../../../../../build/runtime/storage/drivers/ramdisk.js'; -import {DriverFactory} from '../../../../../build/runtime/storage/drivers/driver-factory.js'; import '../../../../lib/arcs-ui/dist/install-ui-classes.js'; describe('transformation slots', () => { afterEach(() => { - 
DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('combines hosted particles provided singleton slots into transformation provided set slot', async () => { - const loader = new Loader(); - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - const context = await Manifest.load( - './shells/tests/artifacts/provide-hosted-particle-slots.manifest', loader, {memoryProvider}); - const runtime = new Runtime({ - loader, context, memoryProvider}); + const runtime = new Runtime(); + runtime.context = await runtime.parseFile('./shells/tests/artifacts/provide-hosted-particle-slots.manifest'); + const arc = runtime.newArc('demo', storageKeyPrefixForTest()); const slotComposer = arc.peh.slotComposer; diff --git a/src/dataflow/analysis/testing/flow-graph-testing.ts b/src/dataflow/analysis/testing/flow-graph-testing.ts index 425a9bf0f9e..562399dfd57 100644 --- a/src/dataflow/analysis/testing/flow-graph-testing.ts +++ b/src/dataflow/analysis/testing/flow-graph-testing.ts @@ -9,16 +9,15 @@ */ import {assert} from '../../../platform/chai-web.js'; -import {Manifest} from '../../../runtime/manifest.js'; +import {Runtime} from '../../../runtime/runtime.js'; import {FlowGraph} from '../flow-graph.js'; import {CheckCondition} from '../../../runtime/arcs-types/check.js'; import {Edge, Node, FlowModifier} from '../graph-internals.js'; -import {TestVolatileMemoryProvider} from '../../../runtime/testing/test-volatile-memory-provider.js'; /** Constructs a FlowGraph from the recipe in the given manifest. 
*/ export async function buildFlowGraph(manifestContent: string): Promise { - const memoryProvider = new TestVolatileMemoryProvider(); - const manifest = await Manifest.parse(manifestContent, {memoryProvider}); + const runtime = new Runtime(); + const manifest = await runtime.parse(manifestContent); assert.lengthOf(manifest.recipes, 1); const recipe = manifest.recipes[0]; assert(recipe.normalize(), 'Failed to normalize recipe.'); diff --git a/src/planning/plan/tests/plan-consumer-test.ts b/src/planning/plan/tests/plan-consumer-test.ts index 0d72469244a..85aab2d3a72 100644 --- a/src/planning/plan/tests/plan-consumer-test.ts +++ b/src/planning/plan/tests/plan-consumer-test.ts @@ -8,27 +8,23 @@ * http://polymer.github.io/PATENTS.txt */ import {assert} from '../../../platform/chai-web.js'; -import {Manifest} from '../../../runtime/manifest.js'; import {Modality} from '../../../runtime/arcs-types/modality.js'; import {Relevance} from '../../../runtime/relevance.js'; import {Runtime} from '../../../runtime/runtime.js'; import {storageKeyPrefixForTest} from '../../../runtime/testing/handle-for-test.js'; -import {Loader} from '../../../platform/loader.js'; import {PlanConsumer} from '../../plan/plan-consumer.js'; import {Planificator} from '../../plan/planificator.js'; import {PlanningResult} from '../../plan/planning-result.js'; import {Suggestion} from '../../plan/suggestion.js'; import {SuggestFilter} from '../../plan/suggest-filter.js'; -import {RamDiskStorageDriverProvider} from '../../../runtime/storage/drivers/ramdisk.js'; -import {TestVolatileMemoryProvider} from '../../../runtime/testing/test-volatile-memory-provider.js'; -import {DriverFactory} from '../../../runtime/storage/drivers/driver-factory.js'; import {Arc} from '../../../runtime/arc.js'; +import {Manifest} from '../../../runtime/manifest.js'; import {ActiveSingletonEntityStore} from '../../../runtime/storage/storage.js'; -async function createPlanConsumer(arc: Arc) { +async function 
createPlanConsumer(arc: Arc, context: Manifest) { const store: ActiveSingletonEntityStore = await Planificator['_initSuggestStore'](arc); assert.isNotNull(store); - const result = new PlanningResult({context: arc.context, loader: arc.loader, storageManager: arc.storageManager}, store); + const result = new PlanningResult({context, loader: arc.loader, storageManager: arc.storageManager}, store); return new PlanConsumer(arc, result); } @@ -41,11 +37,11 @@ async function storeResults(consumer: PlanConsumer, suggestions: Suggestion[]) { describe('plan consumer', () => { beforeEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('filters suggestions by modality', async () => { @@ -60,10 +56,8 @@ describe('plan consumer', () => { `).join('')} `; }; - const loader = new Loader(); - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - const context = await Manifest.parse(` + const runtime = new Runtime(); + const context = await runtime.parse(` particle ParticleDom in './src/runtime/tests/artifacts/consumer-particle.js' root: consumes Slot particle ParticleTouch in './src/runtime/tests/artifacts/consumer-particle.js' @@ -77,19 +71,23 @@ ${addRecipe(['ParticleDom'])} ${addRecipe(['ParticleTouch'])} ${addRecipe(['ParticleDom', 'ParticleBoth'])} ${addRecipe(['ParticleTouch', 'ParticleBoth'])} - `, {loader, fileName: '', memoryProvider}); - const runtime = new Runtime({loader, context, memoryProvider}); + `); + //runtime.context = context; + const arc = runtime.newArc('demo', storageKeyPrefixForTest(), {modality}); - assert.lengthOf(arc.context.allRecipes, 4); - const consumer = await createPlanConsumer(arc); + assert.lengthOf(context.allRecipes, 4); + + const consumer = await createPlanConsumer(arc, context); assert.isNotNull(consumer); - await storeResults(consumer, 
arc.context.allRecipes.map((plan, index) => { + + await storeResults(consumer, context.allRecipes.map((plan, index) => { const suggestion = Suggestion.create(plan, /* hash */`${index}`, Relevance.create(arc, plan)); suggestion.descriptionByModality['text'] = `${plan.name}`; return suggestion; })); assert.lengthOf(consumer.result.suggestions, 4); assert.isEmpty(consumer.getCurrentSuggestions()); + consumer.suggestFilter = new SuggestFilter(true); return consumer; }; @@ -99,17 +97,17 @@ ${addRecipe(['ParticleTouch', 'ParticleBoth'])} assert.lengthOf(domSuggestions, 2); assert.deepEqual(domSuggestions.map(s => s.plan.particles.map(p => p.name)), [['ParticleDom'], ['ParticleDom', 'ParticleBoth']]); - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); const consumerVr = await initConsumer(Modality.vr); assert.isEmpty(consumerVr.getCurrentSuggestions()); - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); const consumerTouch = await initConsumer(Modality.domTouch); const touchSuggestions = consumerTouch.getCurrentSuggestions(); assert.lengthOf(touchSuggestions, 2); assert.deepEqual(touchSuggestions.map(s => s.plan.particles.map(p => p.name)), [['ParticleTouch'], ['ParticleTouch', 'ParticleBoth']]); - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); }); diff --git a/src/planning/plan/tests/plan-producer-test.ts b/src/planning/plan/tests/plan-producer-test.ts index 88951410056..3fc39bd95ac 100644 --- a/src/planning/plan/tests/plan-producer-test.ts +++ b/src/planning/plan/tests/plan-producer-test.ts @@ -13,17 +13,13 @@ import {ArcId} from '../../../runtime/id.js'; import {Loader} from '../../../platform/loader.js'; import {Manifest} from '../../../runtime/manifest.js'; import {Runtime} from '../../../runtime/runtime.js'; -import {SlotComposer} from '../../../runtime/slot-composer.js'; import {storageKeyPrefixForTest, storageKeyForTest} from '../../../runtime/testing/handle-for-test.js'; -import 
{TestVolatileMemoryProvider} from '../../../runtime/testing/test-volatile-memory-provider.js'; import {PlanProducer} from '../../plan/plan-producer.js'; import {Planificator} from '../../plan/planificator.js'; import {PlanningResult} from '../../plan/planning-result.js'; import {Suggestion} from '../../plan/suggestion.js'; import {StrategyTestHelper} from '../../testing/strategy-test-helper.js'; -import {RamDiskStorageDriverProvider} from '../../../runtime/storage/drivers/ramdisk.js'; import {ActiveSingletonEntityStore, handleForActiveStore} from '../../../runtime/storage/storage.js'; -import {DriverFactory} from '../../../runtime/storage/drivers/driver-factory.js'; class TestPlanProducer extends PlanProducer { options; @@ -84,19 +80,16 @@ class TestPlanProducer extends PlanProducer { // Run test suite for each storageKeyBase describe('plan producer', () => { beforeEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); async function createProducer() { - const loader = new Loader(); - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - const context = await Manifest.load('./src/runtime/tests/artifacts/Products/Products.recipes', loader, {memoryProvider}); - const runtime = new Runtime({loader, context, memoryProvider}); + const runtime = new Runtime(); + runtime.context = await runtime.parseFile('./src/runtime/tests/artifacts/Products/Products.recipes'); const arc = runtime.newArc('demo', storageKeyPrefixForTest()); const suggestions = await StrategyTestHelper.planForArc( runtime, @@ -178,13 +171,11 @@ describe('plan producer - search', () => { } async function init(): Promise { - const loader = new Loader(); - const memoryProvider = new TestVolatileMemoryProvider(); - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + runtime.context = await 
runtime.parse(` schema Bar value: Text - `, {memoryProvider}); - const runtime = new Runtime({loader, context: manifest, memoryProvider}); + `); const arc = runtime.newArc('test', storageKeyForTest, {id: ArcId.newForTest('test')}); const searchStore = await Planificator['_initSearchStore'](arc); diff --git a/src/planning/plan/tests/planificator-test.ts b/src/planning/plan/tests/planificator-test.ts index a98a4654caf..055187b2d2b 100644 --- a/src/planning/plan/tests/planificator-test.ts +++ b/src/planning/plan/tests/planificator-test.ts @@ -51,14 +51,14 @@ describe.skip('remote planificator', () => { let memoryProvider; beforeEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); arcStorageKey = storageKeyPrefixForTest(); memoryProvider = new TestVolatileMemoryProvider(); RamDiskStorageDriverProvider.register(memoryProvider); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); async function createArc(options, storageKey) { diff --git a/src/planning/plan/tests/planning-result-test.ts b/src/planning/plan/tests/planning-result-test.ts index b8860859b9e..8079e777570 100644 --- a/src/planning/plan/tests/planning-result-test.ts +++ b/src/planning/plan/tests/planning-result-test.ts @@ -19,50 +19,55 @@ import {TestVolatileMemoryProvider} from '../../../runtime/testing/test-volatile import {storageKeyPrefixForTest} from '../../../runtime/testing/handle-for-test.js'; import {Loader} from '../../../platform/loader.js'; import {StrategyTestHelper} from '../../testing/strategy-test-helper.js'; -import {DriverFactory} from '../../../runtime/storage/drivers/driver-factory.js'; import {VolatileStorageDriverProvider} from '../../../runtime/storage/drivers/volatile.js'; describe('planning result', () => { let memoryProvider; beforeEach(() => { - DriverFactory.clearRegistrationsForTesting(); - memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); + 
Runtime.resetDrivers(); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('serializes and deserializes Products recipes', async () => { - const loader = new Loader(); - const context = await Manifest.load('./src/runtime/tests/artifacts/Products/Products.recipes', loader, {memoryProvider}); - const runtime = new Runtime({loader, context, memoryProvider}); + const runtime = new Runtime(); + runtime.context = await runtime.parseFile('./src/runtime/tests/artifacts/Products/Products.recipes'); + const arc = runtime.newArc('demo', storageKeyPrefixForTest()); VolatileStorageDriverProvider.register(arc); - const storageManager = arc.storageManager; - const suggestions = await StrategyTestHelper.planForArc(runtime, arc); + const suggestions = await StrategyTestHelper.planForArc(runtime, arc); assert.isNotEmpty(suggestions); + + const {loader, context} = runtime; + const {storageManager} = arc; + const result = new PlanningResult({context, loader, storageManager}); result.merge({suggestions}, arc); const serialization = result.toLiteral(); assert(serialization.suggestions); + const resultNew = new PlanningResult({context, loader, storageManager}); assert.isEmpty(resultNew.suggestions); + await resultNew.fromLiteral({suggestions: serialization.suggestions}); assert.isTrue(resultNew.isEquivalent(suggestions)); }); it('appends search suggestions', async () => { - const loader = new Loader(); - const context = await Manifest.load('./src/runtime/tests/artifacts/Products/Products.recipes', loader, {memoryProvider}); - const runtime = new Runtime({loader, context, memoryProvider}); + const runtime = new Runtime(); + runtime.context = await runtime.parseFile('./src/runtime/tests/artifacts/Products/Products.recipes'); + const arc = runtime.newArc('demo', storageKeyPrefixForTest()); - const storageManager = arc.storageManager; const suggestions = await StrategyTestHelper.planForArc(runtime, arc); + const {loader, context} = runtime; 
+ const {storageManager} = arc; + const result = new PlanningResult({loader, context, storageManager}); + // Appends new suggestion. assert.isTrue(result.merge({suggestions}, arc)); assert.lengthOf(result.suggestions, 1); @@ -91,13 +96,11 @@ describe('planning result', () => { describe('planning result merge', () => { let memoryProvider; beforeEach(() => { - DriverFactory.clearRegistrationsForTesting(); - memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); + Runtime.resetDrivers(); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); const commonManifestStr = ` @@ -133,8 +136,7 @@ recipe R3 thing: reads thingHandle `; async function prepareMerge(manifestStr1, manifestStr2) { - const loader = new Loader(); - const runtime = new Runtime({loader}); + const runtime = new Runtime(); const arc = runtime.newArc('demo', storageKeyPrefixForTest()); const planToSuggestion = async (plan: Recipe): Promise => { @@ -148,8 +150,8 @@ recipe R3 return suggestion; }; const manifestToResult = async (manifestStr) => { - const manifest = await Manifest.parse(manifestStr, {loader, fileName: '', memoryProvider}); - const result = new PlanningResult({context: arc.context, loader, storageManager: arc.storageManager}); + const manifest = await runtime.parse(manifestStr); + const result = new PlanningResult({context: arc.context, loader: runtime.loader, storageManager: arc.storageManager}); const suggestions: Suggestion[] = await Promise.all( manifest.recipes.map(async plan => planToSuggestion(plan)) as Promise[] diff --git a/src/planning/plan/tests/test-environment-test.ts b/src/planning/plan/tests/test-environment-test.ts index 9812dda8b64..04efd83b5af 100644 --- a/src/planning/plan/tests/test-environment-test.ts +++ b/src/planning/plan/tests/test-environment-test.ts @@ -8,7 +8,7 @@ * http://polymer.github.io/PATENTS.txt */ import {registerSystemExceptionHandler, 
removeSystemExceptionHandler, defaultSystemExceptionHandler} from '../../../runtime/arc-exceptions.js'; -import {DriverFactory} from '../../../runtime/storage/drivers/driver-factory.js'; +import {Runtime} from '../../../runtime/runtime.js'; let exceptions: Error[] = []; @@ -24,5 +24,5 @@ afterEach(function() { // Error function not yet included in mocha typescript declarations... this.test['error'](exception); } - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); diff --git a/src/planning/strategies/tests/coalesce-recipes-test.ts b/src/planning/strategies/tests/coalesce-recipes-test.ts index 3d59e83b08f..fd5a726ace0 100644 --- a/src/planning/strategies/tests/coalesce-recipes-test.ts +++ b/src/planning/strategies/tests/coalesce-recipes-test.ts @@ -8,28 +8,22 @@ * http://polymer.github.io/PATENTS.txt */ - import {assert} from '../../../platform/chai-web.js'; -import {Manifest} from '../../../runtime/manifest.js'; -import {TestVolatileMemoryProvider} from '../../../runtime/testing/test-volatile-memory-provider.js'; -import {RamDiskStorageDriverProvider} from '../../../runtime/storage/drivers/ramdisk.js'; import {CoalesceRecipes} from '../../strategies/coalesce-recipes.js'; - import {StrategyTestHelper} from '../../testing/strategy-test-helper.js'; -import {DriverFactory} from '../../../runtime/storage/drivers/driver-factory.js'; +import {Runtime} from '../../../runtime/runtime.js'; describe('CoalesceRecipes', () => { let memoryProvider; beforeEach(() => { - memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); async function tryCoalesceRecipes(manifestStr: string) { - const manifest = await Manifest.parse(manifestStr, {memoryProvider}); + const runtime = new Runtime(); + const manifest = await runtime.parse(manifestStr); const recipes = manifest.recipes; assert.isTrue(recipes.every(recipe => 
recipe.normalize())); assert.isFalse(recipes.every(recipe => recipe.isResolved())); diff --git a/src/planning/strategies/tests/resolve-recipe-test.ts b/src/planning/strategies/tests/resolve-recipe-test.ts index 7c9e8dbf0e9..ae2e54de3d4 100644 --- a/src/planning/strategies/tests/resolve-recipe-test.ts +++ b/src/planning/strategies/tests/resolve-recipe-test.ts @@ -11,22 +11,19 @@ import {assert} from '../../../platform/chai-web.js'; import {Manifest} from '../../../runtime/manifest.js'; import {ResolveRecipe} from '../../strategies/resolve-recipe.js'; import {StrategyTestHelper} from '../../testing/strategy-test-helper.js'; -import {TestVolatileMemoryProvider} from '../../../runtime/testing/test-volatile-memory-provider.js'; -import {RamDiskStorageDriverProvider} from '../../../runtime/storage/drivers/ramdisk.js'; import {Entity} from '../../../runtime/entity.js'; import {SingletonType} from '../../../types/lib-types.js'; +import {Runtime} from '../../../runtime/runtime.js'; const {createTestArc, onlyResult, theResults, noResult} = StrategyTestHelper; describe('resolve recipe', () => { - let memoryProvider; beforeEach(() => { - memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); }); it('does not resolve a mapping of a handle with an invalid type', async () => { - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` schema Car doors: Number schema Tesla extends Car @@ -44,7 +41,7 @@ describe('resolve recipe', () => { resource EmptyListJson start [] - `, {memoryProvider}); + `); const [recipe] = manifest.recipes; assert.isTrue(recipe.normalize()); @@ -53,7 +50,8 @@ describe('resolve recipe', () => { }); it('resolves a mapping of a handle with a less specific entity type', async () => { - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` schema Car doors: Number schema Tesla extends Car @@ 
-71,7 +69,7 @@ describe('resolve recipe', () => { resource EmptyListJson start [] - `, {memoryProvider}); + `); let [recipe] = manifest.recipes; assert.isTrue(recipe.normalize()); @@ -81,7 +79,8 @@ describe('resolve recipe', () => { }); it('resolves a mapping of a handle with a more specific entity type', async () => { - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` schema Car doors: Number schema Tesla extends Car @@ -99,7 +98,7 @@ describe('resolve recipe', () => { resource EmptyListJson start [] - `, {memoryProvider}); + `); let [recipe] = manifest.recipes; assert.isTrue(recipe.normalize()); @@ -109,7 +108,8 @@ describe('resolve recipe', () => { }); it('resolves a mapping of a handle with an equivalent entity type', async () => { - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` schema Car doors: Number schema Tesla extends Car @@ -127,7 +127,7 @@ describe('resolve recipe', () => { resource EmptyListJson start [] - `, {memoryProvider}); + `); let [recipe] = manifest.recipes; assert.isTrue(recipe.normalize()); @@ -137,14 +137,15 @@ describe('resolve recipe', () => { }); it('maps slots by tags', async () => { - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` particle A in 'A.js' master: consumes Slot #parent recipe s0: slot 'id0' #parent A - `, {memoryProvider}); + `); let [recipe] = manifest.recipes; assert.isTrue(recipe.normalize()); @@ -153,7 +154,8 @@ describe('resolve recipe', () => { }); it('map slots by slot connection tags', async () => { - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` particle A in 'A.js' master: consumes Slot #root detail: provides? 
Slot #info #detail @@ -164,7 +166,7 @@ describe('resolve recipe', () => { master: consumes #root B info: consumes #detail - `, {memoryProvider}); + `); const strategy = new ResolveRecipe(createTestArc(manifest)); const results = await strategy.generateFrom([{result: manifest.recipes[0], score: 1}]); @@ -177,7 +179,8 @@ describe('resolve recipe', () => { }); it(`maps 'map' handles specified by id to storage`, async () => { - const context = await Manifest.parse(` + const runtime = new Runtime(); + const context = await runtime.parse(` schema Car doors: Number @@ -185,11 +188,11 @@ describe('resolve recipe', () => { resource EmptyListJson start [] - `, {memoryProvider}); + `); // Separating context from the recipe as otherwise // manifest parser maps to storage all by itself itself. - const recipe = (await Manifest.parse(` + const recipe = (await runtime.parse(` schema Car doors: Number @@ -200,7 +203,7 @@ describe('resolve recipe', () => { h0: map 'batmobile' P param: reads h0 - `, {memoryProvider})).recipes[0]; + `)).recipes[0]; recipe.normalize(); assert.isUndefined(recipe.handles[0].storageKey); @@ -216,7 +219,8 @@ describe('resolve recipe', () => { }); it(`maps 'use' handles specified by id to storage`, async () => { - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` schema Car doors: Number @@ -227,7 +231,7 @@ describe('resolve recipe', () => { h0: use 'batmobile' P param: reads h0 - `, {memoryProvider}); + `); const arc = createTestArc(manifest); diff --git a/src/planning/strategies/tests/search-tokens-to-handles-test.ts b/src/planning/strategies/tests/search-tokens-to-handles-test.ts index 1d11201950c..f108c13ac9a 100644 --- a/src/planning/strategies/tests/search-tokens-to-handles-test.ts +++ b/src/planning/strategies/tests/search-tokens-to-handles-test.ts @@ -11,24 +11,21 @@ import {assert} from '../../../platform/chai-web.js'; import {Loader} from '../../../platform/loader.js'; import 
{Manifest} from '../../../runtime/manifest.js'; -import {RamDiskStorageDriverProvider} from '../../../runtime/storage/drivers/ramdisk.js'; -import {TestVolatileMemoryProvider} from '../../../runtime/testing/test-volatile-memory-provider.js'; import {SearchTokensToHandles} from '../../strategies/search-tokens-to-handles.js'; import {StrategyTestHelper} from '../../testing/strategy-test-helper.js'; -import {DriverFactory} from '../../../runtime/storage/drivers/driver-factory.js'; +import {Runtime} from '../../../runtime/runtime.js'; describe('SearchTokensToHandles', () => { - let memoryProvider; + let runtime; beforeEach(() => { - memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); + runtime = new Runtime(); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('finds local handle by tags', async () => { - const manifest = (await Manifest.parse(` + const manifest = await runtime.parse(` schema Thing particle ShowThing &show in 'A.js' inThing: reads Thing @@ -42,7 +39,7 @@ describe('SearchTokensToHandles', () => { resource ThingsJson start {"root": {}, "locations": {}} - `, {memoryProvider})); + `); const arc = StrategyTestHelper.createTestArc(manifest); await arc._registerStore(arc.context.findStoreById('thing-id'), ['mything']); @@ -62,16 +59,15 @@ describe('SearchTokensToHandles', () => { }); it('finds remote handle by tags', async () => { - const loader = new Loader(); - const storeManifest = (await Manifest.parse(` + const storeManifest = (await runtime.parse(` import 'src/runtime/tests/artifacts/test-particles.manifest' store Things of Foo #mything in ThingsJson store Things of [Foo] #manythings in ThingsJson resource ThingsJson start [{}] - `, {loader, fileName: '', memoryProvider})); - const manifest = (await Manifest.parse(` + `)); + const manifest = (await runtime.parse(` import 'src/runtime/tests/artifacts/test-particles.manifest' particle ChooseFoo 
&choose in 'A.js' inFoos: reads [Foo] @@ -84,7 +80,7 @@ recipe ChooseFoo inFoos: reads h0 outFoo: writes h1 - `, {loader, fileName: '', memoryProvider})); + `)); const arc = StrategyTestHelper.createTestArc(manifest); arc.context.imports.push(storeManifest); const recipe = manifest.recipes[0]; diff --git a/src/planning/tests/planner-test.ts b/src/planning/tests/planner-test.ts index 0c6fb241200..57833f5e74e 100644 --- a/src/planning/tests/planner-test.ts +++ b/src/planning/tests/planner-test.ts @@ -9,7 +9,6 @@ */ import {assert} from '../../platform/chai-web.js'; -import {Arc} from '../../runtime/arc.js'; import {Particle} from '../../runtime/particle.js'; import {Loader} from '../../platform/loader.js'; import {Manifest} from '../../runtime/manifest.js'; @@ -17,34 +16,29 @@ import {Planner} from '../planner.js'; import {Speculator} from '../speculator.js'; import {assertThrowsAsync, ConCap} from '../../testing/test-util.js'; import {StrategyTestHelper} from '../testing/strategy-test-helper.js'; -import {ArcId} from '../../runtime/id.js'; -import {RamDiskStorageDriverProvider, RamDiskStorageKey} from '../../runtime/storage/drivers/ramdisk.js'; +import {RamDiskStorageKey} from '../../runtime/storage/drivers/ramdisk.js'; import {TestVolatileMemoryProvider} from '../../runtime/testing/test-volatile-memory-provider.js'; import {EntityType, SingletonType} from '../../types/lib-types.js'; import {Entity} from '../../runtime/entity.js'; -import {DriverFactory} from '../../runtime/storage/drivers/driver-factory.js'; import {Runtime} from '../../runtime/runtime.js'; async function planFromManifest(manifest, {arcFactory, testSteps}: {arcFactory?, testSteps?} = {}) { - const loader = new Loader(); - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); + const runtime = new Runtime(); if (typeof manifest === 'string') { - const fileName = './test.manifest'; - manifest = await Manifest.parse(manifest, {loader, 
fileName, memoryProvider}); + manifest = await runtime.parse(manifest); } - arcFactory = arcFactory || ((manifest) => StrategyTestHelper.createTestArc(manifest)); - testSteps = testSteps || ((planner) => planner.plan(Infinity, [])); + arcFactory = arcFactory || (manifest => StrategyTestHelper.createTestArc(manifest)); + testSteps = testSteps || (planner => planner.plan(Infinity, [])); const arc = await arcFactory(manifest); + const planner = new Planner(); const options = {strategyArgs: StrategyTestHelper.createTestStrategyArgs(arc)}; planner.init(arc, options); - const result = await testSteps(planner); - - DriverFactory.clearRegistrationsForTesting(); + const result = await testSteps(planner); + Runtime.resetDrivers(); return result; } @@ -571,20 +565,15 @@ ${recipeManifest} }); describe('Type variable resolution', () => { - let memoryProvider; - beforeEach(() => { - memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); }); - afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); - const loadAndPlan = async (manifestStr) => { - const loader = new NullLoader(); - const manifest = (await Manifest.parse(manifestStr, {loader, memoryProvider})); + const loadAndPlan = async manifestStr => { + const runtime = new Runtime({loader: new NullLoader()}); + const manifest = await runtime.parse(manifestStr); const arc = StrategyTestHelper.createTestArc(manifest); const planner = new Planner(); const options = {strategyArgs: StrategyTestHelper.createTestStrategyArgs(arc)}; @@ -594,7 +583,6 @@ describe('Type variable resolution', () => { const verifyResolvedPlan = async (manifestStr) => { const plans = await loadAndPlan(manifestStr); assert.lengthOf(plans, 1); - const recipe = plans[0]; recipe.normalize(); assert.isTrue(recipe.isResolved()); diff --git a/src/runtime/env.ts b/src/runtime/env.ts new file mode 100644 index 00000000000..2e31ef97f0c --- /dev/null +++ b/src/runtime/env.ts @@ 
-0,0 +1,41 @@ +/** + * @license + * Copyright (c) 2020 Google Inc. All rights reserved. + * This code may only be used under the BSD style license found at + * http://polymer.github.io/LICENSE.txt + * Code distributed by Google as part of this project is also + * subject to an additional IP rights grant found at + * http://polymer.github.io/PATENTS.txt + */ + +import {Loader} from './loader.js'; + +export class Env { + static loader: Loader; + /** + * Call `init` to establish a default loader environment. + */ + static init(root?: string, urls?: {}) { + this.loader = new Loader({...this.mapFromRootPath(root), ...urls}); + } + static mapFromRootPath(root: string) { + // TODO(sjmiles): the map below is commonly-used, but it's not generic enough to live here. + // Shells that use this default should be provide it to `init` themselves. + return { + // important: path to `worker.js` + 'https://$worker/': `${root}/shells/lib/worker/dist/`, + // TODO(sjmiles): for backward compat + 'https://$build/': `${root}/shells/lib/worker/dist/`, + // these are optional (?) 
+ 'https://$arcs/': `${root}/`, + 'https://$shells': `${root}/shells`, + 'https://$particles/': { + root, + path: '/particles/', + buildDir: '/bazel-bin', + buildOutputRegex: /\.wasm$/.source + } + }; + + } +} diff --git a/src/runtime/recipe/tests/recipe-test.ts b/src/runtime/recipe/tests/recipe-test.ts index dac629db437..cabdf04e027 100644 --- a/src/runtime/recipe/tests/recipe-test.ts +++ b/src/runtime/recipe/tests/recipe-test.ts @@ -17,21 +17,20 @@ import {Entity} from '../../entity.js'; import {Recipe} from '../lib-recipe.js'; import {TestVolatileMemoryProvider} from '../../testing/test-volatile-memory-provider.js'; import {RamDiskStorageDriverProvider} from '../../storage/drivers/ramdisk.js'; -import {DriverFactory} from '../../storage/drivers/driver-factory.js'; +import {Runtime} from '../../runtime.js'; describe('recipe', () => { - let memoryProvider; + let runtime; beforeEach(() => { - memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); + runtime = new Runtime(); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('normalize errors', async () => { - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` schema S1 schema S2 particle P1 @@ -58,7 +57,7 @@ describe('recipe', () => { options.errors.has(recipe.slots[1]); }); it('clones recipe', async () => { - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` particle Particle1 recipe MyRecipe Particle1 @@ -68,7 +67,7 @@ describe('recipe', () => { assert.strictEqual(recipe.toString(), clonedRecipe.toString()); }); it('clones recipe with require section', async () => { - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` particle P1 details: consumes Slot recipe MyRecipe @@ -85,7 +84,7 @@ describe('recipe', () => { assert.isTrue(clonedRecipe.slots[0] === 
clonedRecipe.requires[0].particles[0].getSlotConnectionByName('root').providedSlots['details'], 'cloned recipe slots don\'t match'); }); it('validate handle connection types', async () => { - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` schema MyType schema MySubType extends MyType schema OtherType @@ -236,9 +235,7 @@ describe('recipe', () => { }); const getFirstRecipeHash = async manifestContent => { - const loader = new Loader(); - const manifest = await Manifest.parse(manifestContent, - {loader, fileName: './manifest.manifest', memoryProvider}); + const manifest = await runtime.parse(manifestContent, {fileName: 'test.file'}); const [recipe] = manifest.recipes; assert.isTrue(recipe.normalize()); return recipe.digest(); @@ -406,7 +403,7 @@ describe('recipe', () => { assert.isFalse(recipe.isResolved()); }); it('considers type resolution as recipe update', async () => { - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` schema Thing particle Generic anyA: reads ~a @@ -422,7 +419,7 @@ describe('recipe', () => { resource ThingsJson start [{}] - `, {memoryProvider}); + `); assert.lengthOf(manifest.recipes, 1); const recipe = manifest.recipes[0]; recipe.handles[0].id = 'my-things'; @@ -740,7 +737,7 @@ describe('recipe', () => { }); it('clones connections with type variables', async () => { - const recipe = (await Manifest.parse(` + const manifest = await runtime.parse(` schema Thing resource ThingResource start @@ -757,7 +754,8 @@ describe('recipe', () => { P inThing: handle0 outThing: handle1 - `, {memoryProvider})).recipes[0]; + `); + const recipe = manifest.recipes[0]; const verifyRecipe = (recipe, errorPrefix) => { const errors: string[] = []; const resolvedType = recipe.handleConnections[0].type.resolvedType(); diff --git a/src/runtime/runtime.ts b/src/runtime/runtime.ts index 908a25137fb..2b9f8ff521f 100644 --- a/src/runtime/runtime.ts +++ b/src/runtime/runtime.ts @@ -28,10 +28,13 @@ import 
{workerPool} from './worker-pool.js'; import {Modality} from './arcs-types/modality.js'; import {StorageKey} from './storage/storage-key.js'; import {StorageKeyFactory} from './storage-key-factory.js'; +import {StorageKeyParser} from './storage/storage-key-parser.js'; +import {DriverFactory} from './storage/drivers/driver-factory.js'; import {RamDiskStorageDriverProvider} from './storage/drivers/ramdisk.js'; import {SimpleVolatileMemoryProvider, VolatileMemoryProvider, VolatileStorageKey, VolatileStorageKeyFactory} from './storage/drivers/volatile.js'; import {StorageEndpointManager} from './storage/storage-manager.js'; import {DirectStorageEndpointManager} from './storage/direct-storage-endpoint-manager.js'; +import {Env} from './env.js'; const {warn} = logsFactory('Runtime', 'orange'); @@ -56,17 +59,37 @@ export type RuntimeArcOptions = Readonly<{ modality?: Modality; }>; +let staticMemoryProvider; + +// TODO(sjmiles): weird layering here due to dancing around global state +const initDrivers = () => { + VolatileStorageKey.register(); + staticMemoryProvider = new SimpleVolatileMemoryProvider(); + RamDiskStorageDriverProvider.register(staticMemoryProvider); +}; + +initDrivers(); + @SystemTrace export class Runtime { public context: Manifest; public readonly pecFactory: PecFactory; + public readonly loader: Loader | null; private cacheService: RuntimeCacheService; - private loader: Loader | null; private composerClass: typeof SlotComposer | null; private memoryProvider: VolatileMemoryProvider; readonly storageManager: StorageEndpointManager; readonly arcById = new Map(); + static resetDrivers(noDefault?: true) { + DriverFactory.providers = new Set(); + StorageKeyParser.reset(); + CapabilitiesResolver.reset(); + if (!noDefault) { + initDrivers(); + } + } + /** * Call `init` to establish a default Runtime environment (capturing the return value is optional). 
* Systems can use `Runtime.getRuntime()` to access this environment instead of plumbing `runtime` @@ -81,7 +104,7 @@ export class Runtime { loader, composerClass: SlotComposer, pecFactory, - memoryProvider + memoryProvider: staticMemoryProvider }); return runtime; } @@ -112,11 +135,11 @@ export class Runtime { constructor({loader, composerClass, context, pecFactory, memoryProvider, storageManager}: RuntimeOptions = {}) { this.cacheService = new RuntimeCacheService(); - this.pecFactory = pecFactory; this.loader = loader || new Loader(); + this.pecFactory = pecFactory || pecIndustry(loader); this.composerClass = composerClass || SlotComposer; this.context = context || new Manifest({id: 'manifest:default'}); - this.memoryProvider = memoryProvider || new SimpleVolatileMemoryProvider(); + this.memoryProvider = memoryProvider || staticMemoryProvider; // || new SimpleVolatileMemoryProvider(); this.storageManager = storageManager || new DirectStorageEndpointManager(); // user information. One persona per runtime for now. } @@ -134,7 +157,7 @@ export class Runtime { } // Allow dynamic context binding to this runtime. - bindContext(context: Manifest) { + setContext(context: Manifest) { this.context = context; } @@ -154,18 +177,12 @@ export class Runtime { // Should ids be provided to the Arc constructor, or should they be constructed by the Arc? // How best to provide default storage to an arc given whatever we decide? newArc(name: string, storageKeyPrefix?: ((arcId: ArcId) => StorageKey), options?: RuntimeArcOptions): Arc { - const {loader, context} = this; const id = (options && options.id) || IdGenerator.newSession().newArcId(name); const slotComposer = this.composerClass ? new this.composerClass() : null; - let storageKey : StorageKey; - if (storageKeyPrefix == null) { - storageKey = new VolatileStorageKey(id, ''); - } else { - storageKey = storageKeyPrefix(id); - } + const storageKey = storageKeyPrefix ? 
storageKeyPrefix(id) : new VolatileStorageKey(id, ''); const factories = (options && options.storargeKeyFactories) || [new VolatileStorageKeyFactory()]; const capabilitiesResolver = new CapabilitiesResolver({arcId: id, factories}); - const storageManager = this.storageManager; + const {loader, context, storageManager} = this; return new Arc({id, storageKey, capabilitiesResolver, loader, slotComposer, context, storageManager, ...options}); } @@ -219,9 +236,9 @@ export class Runtime { * a Manifest object. The loader determines the semantics of the fileName. See * the Manifest class for details. */ - static async loadManifest(fileName, loader, options) : Promise { - return Manifest.load(fileName, loader, options); - } + // static async loadManifest(fileName, loader, options) : Promise { + // return Manifest.load(fileName, loader, options); + // } // TODO(sjmiles): These methods represent boilerplate factored out of // various shells.These needs could be filled other ways or represented @@ -273,16 +290,4 @@ export class Runtime { static isNormalized(recipe: Recipe): boolean { return Object.isFrozen(recipe); } - - // static interface for the default runtime environment - - static async parse(content: string, options?): Promise { - return staticRuntime.parse(content, options); - } - - static async parseFile(path: string, options?): Promise { - return staticRuntime.parseFile(path, options); - } } - -const staticRuntime = new Runtime(); diff --git a/src/runtime/storage/drivers/driver-factory.ts b/src/runtime/storage/drivers/driver-factory.ts index c77dd0fee66..99480173173 100644 --- a/src/runtime/storage/drivers/driver-factory.ts +++ b/src/runtime/storage/drivers/driver-factory.ts @@ -9,9 +9,7 @@ */ import {StorageKey} from '../storage-key.js'; -import {StorageKeyParser} from '../storage-key-parser.js'; import {Exists, Driver} from './driver.js'; -import {CapabilitiesResolver} from '../../capabilities-resolver.js'; export interface StorageDriverProvider { // information on 
the StorageDriver and characteristics @@ -21,11 +19,6 @@ export interface StorageDriverProvider { } export class DriverFactory { - static clearRegistrationsForTesting() { - this.providers = new Set(); - StorageKeyParser.reset(); - CapabilitiesResolver.reset(); - } static providers: Set = new Set(); static async driverInstance(storageKey: StorageKey, exists: Exists) { for (const provider of this.providers) { diff --git a/src/runtime/storage/drivers/firebase.ts b/src/runtime/storage/drivers/firebase.ts index 96943d774d2..41acba8b022 100644 --- a/src/runtime/storage/drivers/firebase.ts +++ b/src/runtime/storage/drivers/firebase.ts @@ -255,7 +255,7 @@ export class FirebaseStorageKeyFactory extends StorageKeyFactory { // If you want to test using the firebase driver you have three options. // (1) for (_slow_) manual testing, call FirebaseStorageDriverProvider.register() // somewhere at the beginning of your test; if you want to be hermetic, -// call DriverFactory.clearRegistrationsForTesting() at the end. +// call Runtime.resetDrivers() at the end. // (2) to use a mock firebase implementation and directly test the driver, // construct your driver using // FakeFirebaseStorageDriverProvider.newDriverForTesting(key, exists); @@ -263,4 +263,4 @@ export class FirebaseStorageKeyFactory extends StorageKeyFactory { // (3) you can also register the FakeFirebaseStorageDriverProvider with // the DriverFactory by calling FakeFirebaseStorageDriverProvider.register(); // again your storageKey databaseURLs must be test-url and don't forget -// to clean up with DriverFactory.clearRegistrationsForTesting(). +// to clean up with Runtime.resetDrivers(). 
diff --git a/src/runtime/storage/tests/active-store-test.ts b/src/runtime/storage/tests/active-store-test.ts index c28d6f990e9..163fb413474 100644 --- a/src/runtime/storage/tests/active-store-test.ts +++ b/src/runtime/storage/tests/active-store-test.ts @@ -21,6 +21,7 @@ import {noAwait} from '../../../utils/lib-utils.js'; import {StoreInfo} from '../store-info.js'; import {ActiveStore} from '../active-store.js'; import {DirectStorageEndpointManager} from '../direct-storage-endpoint-manager.js'; +import {Runtime} from '../../runtime.js'; let testKey: StorageKey; @@ -33,11 +34,11 @@ describe('Store', async () => { beforeEach(() => { testKey = new MockStorageKey(); - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); after(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it(`will throw an exception if an appropriate driver can't be found`, async () => { diff --git a/src/runtime/storage/tests/direct-store-muxer-test.ts b/src/runtime/storage/tests/direct-store-muxer-test.ts index 62af3bc201d..5d96a34b002 100644 --- a/src/runtime/storage/tests/direct-store-muxer-test.ts +++ b/src/runtime/storage/tests/direct-store-muxer-test.ts @@ -21,7 +21,7 @@ import {DirectStore} from '../direct-store.js'; import {StoreInfo} from '../store-info.js'; import {StorageEndpointManager} from '../storage-manager.js'; import {DirectStorageEndpointManager} from '../direct-storage-endpoint-manager.js'; - +import {Runtime} from '../../runtime.js'; /* eslint-disable no-async-promise-executor */ @@ -33,7 +33,7 @@ let storageManager: StorageEndpointManager; describe('Direct Store Muxer', async () => { beforeEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); DriverFactory.register(new MockStorageDriverProvider()); storageManager = new DirectStorageEndpointManager(); }); diff --git a/src/runtime/storage/tests/firebase-store-integration-test.ts 
b/src/runtime/storage/tests/firebase-store-integration-test.ts index fe9a00f4b8b..1daae0bbdbd 100644 --- a/src/runtime/storage/tests/firebase-store-integration-test.ts +++ b/src/runtime/storage/tests/firebase-store-integration-test.ts @@ -29,13 +29,13 @@ async function createStore(storageKey: StorageKey, exists: Exists): Promise { let runtime; beforeEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); runtime = new Runtime(); MockFirebaseStorageDriverProvider.register(runtime.getCacheService()); }); after(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('will store a sequence of model and operation updates as models', async () => { diff --git a/src/runtime/storage/tests/ramdisk-direct-store-muxer-integration-test.ts b/src/runtime/storage/tests/ramdisk-direct-store-muxer-integration-test.ts index 585cc9efb92..dae56181491 100644 --- a/src/runtime/storage/tests/ramdisk-direct-store-muxer-integration-test.ts +++ b/src/runtime/storage/tests/ramdisk-direct-store-muxer-integration-test.ts @@ -9,9 +9,8 @@ */ import {assert} from '../../../platform/chai-web.js'; -import {StorageMode, ProxyMessageType, ProxyMessage} from '../store-interface.js'; +import {ProxyMessageType, ProxyMessage} from '../store-interface.js'; import {RamDiskStorageKey, RamDiskStorageDriverProvider} from '../drivers/ramdisk.js'; -import {DriverFactory} from '../drivers/driver-factory.js'; import {Exists} from '../drivers/driver.js'; import {Runtime} from '../../runtime.js'; import {DirectStoreMuxer} from '../direct-store-muxer.js'; @@ -31,7 +30,7 @@ function assertHasModel(message: ProxyMessage, model: CRDTEntity< describe('RamDisk + Direct Store Muxer Integration', async () => { afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('will allow storage of a number of objects', async () => { @@ -42,7 +41,7 @@ describe('RamDisk + Direct Store Muxer Integration', async () => { const 
simpleSchema = manifest.schemas.Simple; const runtime = new Runtime(); - RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); + //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new RamDiskStorageKey('unique'); const type = new MuxType(new EntityType(simpleSchema)); const storeInfo = new StoreInfo>({ diff --git a/src/runtime/storage/tests/ramdisk-store-integration-test.ts b/src/runtime/storage/tests/ramdisk-store-integration-test.ts index 138fb0072ea..ef31c07ad1e 100644 --- a/src/runtime/storage/tests/ramdisk-store-integration-test.ts +++ b/src/runtime/storage/tests/ramdisk-store-integration-test.ts @@ -12,7 +12,6 @@ import {assert} from '../../../platform/chai-web.js'; import {ProxyMessageType} from '../store-interface.js'; import {CRDTCountTypeRecord, CRDTCount, CountOpTypes} from '../../../crdt/lib-crdt.js'; import {RamDiskStorageKey, RamDiskStorageDriverProvider} from '../drivers/ramdisk.js'; -import {DriverFactory} from '../drivers/driver-factory.js'; import {Exists} from '../drivers/driver.js'; import {Runtime} from '../../runtime.js'; import {CountType} from '../../../types/lib-types.js'; @@ -29,12 +28,12 @@ async function createStore(storageKey: StorageKey, exists: Exists): Promise { afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('will store a sequence of model and operation updates as models', async () => { const runtime = new Runtime(); - RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); + //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new RamDiskStorageKey('unique'); const activeStore = await createStore(storageKey, Exists.ShouldCreate); @@ -56,7 +55,7 @@ describe('RamDisk + Store Integration', async () => { it('will store operation updates from multiple sources', async () => { const runtime = new Runtime(); - RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); + 
//RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new RamDiskStorageKey('unique'); const activeStore1 = await createStore(storageKey, Exists.ShouldCreate); const activeStore2 = await createStore(storageKey, Exists.ShouldExist); @@ -94,7 +93,7 @@ describe('RamDisk + Store Integration', async () => { it('will store operation updates from multiple sources with some timing delays', async () => { // store1.onProxyMessage, DELAY, DELAY, DELAY, store1.onProxyMessage, store2.onProxyMessage, DELAY, DELAY, DELAY, store2.onProxyMessage, DELAY, DELAY, DELAY, DELAY, DELAY const runtime = new Runtime(); - RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); + //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new RamDiskStorageKey('unique'); const activeStore1 = await createStore(storageKey, Exists.ShouldCreate); const activeStore2 = await createStore(storageKey, Exists.ShouldExist); diff --git a/src/runtime/storage/tests/reference-mode-store-integration-test.ts b/src/runtime/storage/tests/reference-mode-store-integration-test.ts index 4e7900b7edd..fd6ff96b5e2 100644 --- a/src/runtime/storage/tests/reference-mode-store-integration-test.ts +++ b/src/runtime/storage/tests/reference-mode-store-integration-test.ts @@ -9,8 +9,7 @@ */ import {assert} from '../../../platform/chai-web.js'; -import {RamDiskStorageKey, RamDiskStorageDriverProvider} from '../drivers/ramdisk.js'; -import {DriverFactory} from '../drivers/driver-factory.js'; +import {RamDiskStorageKey} from '../drivers/ramdisk.js'; import {Runtime} from '../../runtime.js'; import {EntityType, Schema} from '../../../types/lib-types.js'; import {ReferenceModeStorageKey} from '../reference-mode-storage-key.js'; @@ -25,12 +24,12 @@ import {StoreInfo} from '../store-info.js'; describe('ReferenceModeStore Integration', async () => { afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('will store 
and retrieve entities through referenceModeStores (separate stores)', async () => { const runtime = new Runtime(); - RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); + //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new ReferenceModeStorageKey(new RamDiskStorageKey('backing'), new RamDiskStorageKey('container')); const type = new EntityType(new Schema(['AnEntity'], {foo: 'Text'})).collectionOf(); @@ -66,7 +65,7 @@ describe('ReferenceModeStore Integration', async () => { it('will store and retrieve entities through referenceModeStores (shared stores)', async () => { const runtime = new Runtime(); - RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); + //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new ReferenceModeStorageKey(new RamDiskStorageKey('backing'), new RamDiskStorageKey('container')); const arc = new Runtime().newArc('testArc'); @@ -103,7 +102,7 @@ describe('ReferenceModeStore Integration', async () => { it('will store and retrieve entities through referenceModeStores (shared proxies)', async () => { const runtime = new Runtime(); - RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); + //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new ReferenceModeStorageKey(new RamDiskStorageKey('backing'), new RamDiskStorageKey('container')); const arc = new Runtime().newArc('testArc'); @@ -141,7 +140,7 @@ describe('ReferenceModeStore Integration', async () => { it('will send an ordered list from one handle to another (separate store)', async () => { const runtime = new Runtime(); - RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); + //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new ReferenceModeStorageKey(new RamDiskStorageKey('backing'), new RamDiskStorageKey('container')); const type = new EntityType(new Schema(['AnEntity'], { @@ 
-179,7 +178,7 @@ describe('ReferenceModeStore Integration', async () => { it('will send an ordered list from one handle to another (shared store)', async () => { const runtime = new Runtime(); - RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); + //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new ReferenceModeStorageKey(new RamDiskStorageKey('backing'), new RamDiskStorageKey('container')); const arc = new Runtime().newArc('testArc'); diff --git a/src/runtime/storage/tests/reference-mode-store-test.ts b/src/runtime/storage/tests/reference-mode-store-test.ts index a3f4d4568cb..51ea36393f0 100644 --- a/src/runtime/storage/tests/reference-mode-store-test.ts +++ b/src/runtime/storage/tests/reference-mode-store-test.ts @@ -9,6 +9,7 @@ */ import {assert} from '../../../platform/chai-web.js'; +import {Runtime} from '../../runtime.js'; import {ProxyMessageType} from '../store-interface.js'; import {DriverFactory} from '../drivers/driver-factory.js'; import {Exists} from '../drivers/driver.js'; @@ -90,12 +91,12 @@ describe('Reference Mode Store', async () => { testKey = new ReferenceModeStorageKey(new MockHierarchicalStorageKey(), new MockHierarchicalStorageKey()); storeInfo = new StoreInfo({ storageKey: testKey, type: collectionType, exists: Exists.ShouldCreate, id: 'base-store-id'}); - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); storageManager = new DirectStorageEndpointManager(); }); after(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it(`will throw an exception if an appropriate driver can't be found`, async () => { diff --git a/src/runtime/storage/tests/storage-key-test.ts b/src/runtime/storage/tests/storage-key-test.ts index ebc7bd3fe2b..a2fdff9b8a4 100644 --- a/src/runtime/storage/tests/storage-key-test.ts +++ b/src/runtime/storage/tests/storage-key-test.ts @@ -14,7 +14,6 @@ import {VolatileStorageKey} from '../drivers/volatile.js'; import 
{FirebaseStorageKey, FirebaseStorageDriverProvider} from '../drivers/firebase.js'; import {RamDiskStorageKey, RamDiskStorageDriverProvider} from '../drivers/ramdisk.js'; import {ReferenceModeStorageKey} from '../reference-mode-storage-key.js'; -import {DriverFactory} from '../drivers/driver-factory.js'; import {Runtime} from '../../runtime.js'; import {mockFirebaseStorageKeyOptions} from '../testing/mock-firebase.js'; @@ -22,12 +21,11 @@ describe('StorageKey', () => { beforeEach(() => { const runtime = new Runtime(); - RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); FirebaseStorageDriverProvider.register(runtime.getCacheService(), mockFirebaseStorageKeyOptions); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('can round-trip VolatileStorageKey', () => { diff --git a/src/runtime/storage/tests/store-sequence-test.ts b/src/runtime/storage/tests/store-sequence-test.ts index df7b60b6586..cca6cf3f354 100644 --- a/src/runtime/storage/tests/store-sequence-test.ts +++ b/src/runtime/storage/tests/store-sequence-test.ts @@ -62,7 +62,7 @@ describe('Store Sequence', async () => { it('services a model request and applies 2 models', async () => { const sequenceTest = new SequenceTest>(); sequenceTest.setTestConstructor(async () => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); DriverFactory.register(new MockStorageDriverProvider()); return createStore(testKey, Exists.ShouldCreate); @@ -127,7 +127,7 @@ describe('Store Sequence', async () => { const sequenceTest = new SequenceTest>(); sequenceTest.setTestConstructor(async () => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); DriverFactory.register(new MockStorageDriverProvider()); return createStore(testKey, Exists.ShouldCreate); @@ -178,7 +178,7 @@ describe('Store Sequence', async () => { sequenceTest.setTestConstructor(async () => { const runtime = new Runtime(); const arc = runtime.newArc('arc', 
null); - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); VolatileStorageDriverProvider.register(arc); const storageKey = new VolatileStorageKey(arc.id, 'unique'); const activeStore1 = await createStore(storageKey, Exists.ShouldCreate); @@ -228,7 +228,7 @@ describe('Store Sequence', async () => { const sequenceTest = new SequenceTest(); sequenceTest.setTestConstructor(async () => { const runtime = new Runtime(); - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); MockFirebaseStorageDriverProvider.register(runtime.getCacheService()); const storageKey = new FirebaseStorageKey('test', 'test.domain', 'testKey', 'foo'); const activeStore1 = await createStore(storageKey, Exists.ShouldCreate); @@ -278,7 +278,7 @@ describe('Store Sequence', async () => { sequenceTest.setTestConstructor(async () => { const runtime = new Runtime(); const arc = runtime.newArc('arc', id => new VolatileStorageKey(id, '')); - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); VolatileStorageDriverProvider.register(arc); const storageKey = new VolatileStorageKey(arc.id, 'unique'); const activeStore1 = await createStore(storageKey, Exists.ShouldCreate); diff --git a/src/runtime/tests/arc-test.ts b/src/runtime/tests/arc-test.ts index bfdfa86a8bd..14b240c9f20 100644 --- a/src/runtime/tests/arc-test.ts +++ b/src/runtime/tests/arc-test.ts @@ -65,17 +65,15 @@ async function setup(storageKeyPrefix: (arcId: ArcId) => StorageKey) { describe('Arc new storage', () => { afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('preserves data when round-tripping through serialization', async () => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); // TODO(shans): deserialization currently uses a RamDisk store to deserialize into because we don't differentiate // between parsing a manifest for public consumption (e.g. 
with RamDisk resources in it) and parsing a serialized // arc (with an @activeRecipe). We'll fix this by adding a 'private' keyword to store serializations which will // be used when serializing arcs. Once that is working then the following registration should be removed. - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); const loader = new Loader(null, { './manifest': ` schema Data @@ -100,7 +98,8 @@ describe('Arc new storage', () => { defineParticle(({Particle}) => class Noop extends Particle {}); ` }); - const manifest = await Manifest.load('./manifest', loader, {memoryProvider}); + const runtime = new Runtime({loader}); + const manifest = await runtime.parseFile('./manifest'); const dataClass = Entity.createEntityClass(manifest.findSchemaByName('Data'), null); const id = ArcId.fromString('test'); const storageKey = new VolatileStorageKey(id, 'unique'); @@ -161,16 +160,14 @@ describe('Arc new storage', () => { }); it('supports capabilities - storage protocol', Flags.withDefaultReferenceMode(async () => { - DriverFactory.clearRegistrationsForTesting(); const loader = new Loader(null, { '*': ` defineParticle(({Particle}) => { return class extends Particle {} }); `}); - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - const manifest = await Manifest.parse(` + const runtime = new Runtime({loader}); + const manifestText = ` schema Thing particle MyParticle in 'MyParticle.js' thing: writes Thing @@ -178,10 +175,11 @@ describe('Arc new storage', () => { handle0: create @tiedToArc MyParticle thing: handle0 - `, {loader, memoryProvider, fileName: process.cwd() + '/input.manifest'}); + `; + const manifest = await runtime.parse(manifestText, {fileName: process.cwd() + '/input.manifest'}); + runtime.context = manifest; const recipe = manifest.recipes[0]; assert.isTrue(recipe.normalize() && recipe.isResolved()); - const runtime = new 
Runtime({loader, context: manifest, memoryProvider}); const arc = runtime.newArc('test', ramDiskStorageKeyPrefixForTest()); await arc.instantiate(recipe); await arc.idle; @@ -201,7 +199,7 @@ const doSetup = async () => setup(arcId => new VolatileStorageKey(arcId, '')); describe('Arc', () => { afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('idle can safely be called multiple times ', async () => { @@ -296,13 +294,17 @@ describe('Arc', () => { assert.isNull(await dHandle.fetch()); }); - it('instantiates recipes only if fate is correct', async () => { + it('FOOBL instantiates recipes only if fate is correct', async () => { + const loader = new Loader(null, { + './a.js': ` + defineParticle(({Particle}) => class Noop extends Particle {}); + ` + }); + const runtime = new Runtime({loader}); + const data = '{"root": {"values": {}, "version": {}}, "locations": {}}'; const type = '![Thing]'; - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` schema Thing particle A in 'a.js' thing: reads Thing @@ -327,24 +329,19 @@ describe('Arc', () => { ${data} store ThingStore of ${type} 'storeInContext' in MyThing - `, {memoryProvider}); + `); assert.isTrue(manifest.recipes.every(r => r.normalize())); assert.isTrue(manifest.recipes[0].isResolved()); assert.isTrue(manifest.recipes[1].isResolved()); - const loader = new Loader(null, { - 'a.js': ` - defineParticle(({Particle}) => class Noop extends Particle {}); - ` - }); - const runtime = new Runtime({loader, context: manifest, memoryProvider}); - // Successfully instantiates a recipe with 'copy' handle for store in a context. + runtime.context = manifest; await runtime.newArc('test0').instantiate(manifest.recipes[0]); // Fails instantiating a recipe with 'use' handle for store in a context. 
try { - await runtime.newArc('test1').instantiate(manifest.recipes[1]); + const arc1 = runtime.newArc('test1'); + await arc1.instantiate(manifest.recipes[1]); assert.fail(); } catch (e) { assert.isTrue(e.toString().includes('store \'storeInContext\'')); // with "use" fate was not found')); @@ -463,8 +460,8 @@ describe('Arc', () => { it('required provided handles cannot resolve without parent', async () => { await assertThrowsAsync(async () => { - const loader = new Loader(); - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const context = await runtime.parse(` schema Thing value: Text @@ -484,20 +481,22 @@ describe('Arc', () => { a: reads thingA b: writes thingB d: writes maybeThingD - `, {loader, fileName: process.cwd() + '/input.manifest'}); + `, {fileName: process.cwd() + '/input.manifest'}); const id = ArcId.newForTest('test'); const storageKey = new VolatileStorageKey(id, ''); const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager}); + const loader = runtime.loader; + const slotComposer = new SlotComposer(); + const arc = new Arc({loader, context, id, storageKey, storageManager, slotComposer}); - const thingClass = Entity.createEntityClass(manifest.findSchemaByName('Thing'), null); + const thingClass = Entity.createEntityClass(context.findSchemaByName('Thing'), null); const aStore = await arc.createStore(new SingletonType(thingClass.type), 'aStore', 'test:1'); const bStore = await arc.createStore(new SingletonType(thingClass.type), 'bStore', 'test:2'); const cStore = await arc.createStore(new SingletonType(thingClass.type), 'cStore', 'test:3'); const dStore = await arc.createStore(new SingletonType(thingClass.type), 'dStore', 'test:4'); - const recipe = manifest.recipes[0]; + const recipe = context.recipes[0]; recipe.handles[0].mapToStorage(aStore); recipe.handles[1].mapToStorage(bStore); 
recipe.handles[2].mapToStorage(cStore); // These might not be needed? @@ -822,21 +821,22 @@ describe('Arc', () => { const storageKey1 = new VolatileStorageKey(id1, ''); const storageKey2 = new VolatileStorageKey(id2, ''); - DriverFactory.clearRegistrationsForTesting(); - assert.isEmpty(DriverFactory.providers); + Runtime.resetDrivers(); + // runtime creates a default RamDisk with SimpleVolatileMemoryProvider + assert.equal(DriverFactory.providers.size, 1); const storageManager = new DirectStorageEndpointManager(); const arc1 = new Arc({id: id1, storageKey: storageKey1, loader: new Loader(), context: new Manifest({id: id1}), storageManager}); - assert.strictEqual(DriverFactory.providers.size, 1); + assert.strictEqual(DriverFactory.providers.size, 2); const arc2 = new Arc({id: id2, storageKey: storageKey2, loader: new Loader(), context: new Manifest({id: id2}), storageManager}); - assert.strictEqual(DriverFactory.providers.size, 2); + assert.strictEqual(DriverFactory.providers.size, 3); arc1.dispose(); - assert.strictEqual(DriverFactory.providers.size, 1); + assert.strictEqual(DriverFactory.providers.size, 2); arc2.dispose(); - assert.isEmpty(DriverFactory.providers); + assert.equal(DriverFactory.providers.size, 1); }); it('preserves create handle ids if specified', Flags.withDefaultReferenceMode(async () => { @@ -878,7 +878,6 @@ describe('Arc', () => { describe('Arc storage migration', () => { afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); }); it('supports new StorageKey type', Flags.withDefaultReferenceMode(async () => { diff --git a/src/runtime/tests/artifacts/Products/Interests.recipes b/src/runtime/tests/artifacts/Products/Interests.recipes index 6b058fe9e13..cb99de50914 100644 --- a/src/runtime/tests/artifacts/Products/Interests.recipes +++ b/src/runtime/tests/artifacts/Products/Interests.recipes @@ -11,7 +11,7 @@ import 'Product.schema' particle Interests in 'source/Interests.js' list: reads [Product] - person: reads Person + person: reads 
Person {} postamble: consumes? Slot description `find out about ${person.name}'s interests` diff --git a/src/runtime/tests/artifacts/Products/Manufacturer.recipes b/src/runtime/tests/artifacts/Products/Manufacturer.recipes index b53d61ee40f..c1b3da74ec7 100644 --- a/src/runtime/tests/artifacts/Products/Manufacturer.recipes +++ b/src/runtime/tests/artifacts/Products/Manufacturer.recipes @@ -11,7 +11,7 @@ import '../Common/Multiplexer.manifest' import 'Product.schema' particle ManufacturerInfo in 'source/ManufacturerInfo.js' - product: reads Product + product: reads Product {} annotation: consumes Slot description `check manufacturer information` diff --git a/src/runtime/tests/artifacts/Products/ShowProducts.recipes b/src/runtime/tests/artifacts/Products/ShowProducts.recipes index aed95018916..905da462917 100644 --- a/src/runtime/tests/artifacts/Products/ShowProducts.recipes +++ b/src/runtime/tests/artifacts/Products/ShowProducts.recipes @@ -10,7 +10,7 @@ import '../Common/List.manifest' import 'Product.schema' particle ShowProduct in 'source/ShowProduct.js' - product: reads Product + product: reads Product item: consumes Slot recipe MuxedProductItem &muxProduct diff --git a/src/runtime/tests/capabilities-resolver-test.ts b/src/runtime/tests/capabilities-resolver-test.ts index f9f3a29981e..917e308bce0 100644 --- a/src/runtime/tests/capabilities-resolver-test.ts +++ b/src/runtime/tests/capabilities-resolver-test.ts @@ -19,16 +19,15 @@ import {CapabilitiesResolver} from '../capabilities-resolver.js'; import {ArcId} from '../id.js'; import {Capabilities, Persistence, Ttl, Shareable, DeletePropagation} from '../capabilities.js'; import {assertThrowsAsync} from '../../testing/test-util.js'; -import {DriverFactory} from '../storage/drivers/driver-factory.js'; +import {Runtime} from '../runtime.js'; import {Manifest} from '../manifest.js'; import {TestVolatileMemoryProvider} from '../testing/test-volatile-memory-provider.js'; describe('Capabilities Resolver New', () => { 
afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); - type StorageKeyType = typeof VolatileStorageKey|typeof RamDiskStorageKey|typeof DatabaseStorageKey; function verifyReferenceModeStorageKey(key: StorageKey, expectedType: StorageKeyType) { assert.isTrue(key instanceof ReferenceModeStorageKey); @@ -52,94 +51,65 @@ describe('Capabilities Resolver New', () => { it('fails creating keys with no factories', Flags.withDefaultReferenceMode(async () => { const resolver = new CapabilitiesResolver({arcId: ArcId.newForTest('test')}); // Verify storage keys for none of the capabilities cannot be created. - await assertThrowsAsync(async () => resolver.createStorageKey( - unspecified, entityType, handleId)); - await assertThrowsAsync(async () => resolver.createStorageKey( - inMemory, entityType, handleId)); - await assertThrowsAsync(async () => resolver.createStorageKey( - inMemoryWithTtls, entityType, handleId)); - await assertThrowsAsync(async () => resolver.createStorageKey( - onDisk, entityType, handleId)); - await assertThrowsAsync(async () => resolver.createStorageKey( - onDiskWithTtl, entityType, handleId)); + await assertThrowsAsync(async () => resolver.createStorageKey(unspecified, entityType, handleId)); + await assertThrowsAsync(async () => resolver.createStorageKey(inMemory, entityType, handleId)); + await assertThrowsAsync(async () => resolver.createStorageKey(inMemoryWithTtls, entityType, handleId)); + await assertThrowsAsync(async () => resolver.createStorageKey(onDisk, entityType, handleId)); + await assertThrowsAsync(async () => resolver.createStorageKey(onDiskWithTtl, entityType, handleId)); })); it('creates volatile keys', Flags.withDefaultReferenceMode(async () => { // Register volatile storage key factory. // Verify only volatile (in-memory, no ttl) storage key can be created. 
- VolatileStorageKey.register(); + //VolatileStorageKey.register(); const resolver = new CapabilitiesResolver({arcId: ArcId.newForTest('test')}); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - unspecified, entityType, handleId), VolatileStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - inMemory, entityType, handleId), VolatileStorageKey); - await assertThrowsAsync(async () => resolver.createStorageKey( - inMemoryWithTtls, entityType, handleId)); - await assertThrowsAsync(async () => resolver.createStorageKey( - onDisk, entityType, handleId)); - await assertThrowsAsync(async () => resolver.createStorageKey( - onDiskWithTtl, entityType, handleId)); - await assertThrowsAsync(async () => resolver.createStorageKey( - inMemoryWithDeleteProp, entityType, handleId)); + const createKey = resolver.createStorageKey.bind(resolver); + verifyReferenceModeStorageKey(await createKey(unspecified, entityType, handleId), VolatileStorageKey); + verifyReferenceModeStorageKey(await createKey(inMemory, entityType, handleId), VolatileStorageKey); + await assertThrowsAsync(async () => createKey(inMemoryWithTtls, entityType, handleId)); + await assertThrowsAsync(async () => createKey(onDisk, entityType, handleId)); + await assertThrowsAsync(async () => createKey(onDiskWithTtl, entityType, handleId)); + await assertThrowsAsync(async () => createKey(inMemoryWithDeleteProp, entityType, handleId)); })); it('creates keys with db only factories', Flags.withDefaultReferenceMode(async () => { + Runtime.resetDrivers(true); DatabaseStorageKey.register(); const resolver = new CapabilitiesResolver({arcId: ArcId.newForTest('test')}); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - unspecified, entityType, handleId), MemoryDatabaseStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - inMemory, entityType, handleId), MemoryDatabaseStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - 
inMemoryWithTtls, entityType, handleId), MemoryDatabaseStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - inMemoryWithDeleteProp, entityType, handleId), MemoryDatabaseStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - onDisk, entityType, handleId), PersistentDatabaseStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - onDiskWithTtl, entityType, handleId), PersistentDatabaseStorageKey); + const createKey = resolver.createStorageKey.bind(resolver); + verifyReferenceModeStorageKey(await createKey(unspecified, entityType, handleId), MemoryDatabaseStorageKey); + verifyReferenceModeStorageKey(await createKey(inMemory, entityType, handleId), MemoryDatabaseStorageKey); + verifyReferenceModeStorageKey(await createKey(inMemoryWithTtls, entityType, handleId), MemoryDatabaseStorageKey); + verifyReferenceModeStorageKey(await createKey(inMemoryWithDeleteProp, entityType, handleId), MemoryDatabaseStorageKey); + verifyReferenceModeStorageKey(await createKey(onDisk, entityType, handleId), PersistentDatabaseStorageKey); + verifyReferenceModeStorageKey(await createKey(onDiskWithTtl, entityType, handleId), PersistentDatabaseStorageKey); })); it('creates keys with volatile and db factories', Flags.withDefaultReferenceMode(async () => { // Register database storage key factories. Verify all storage keys created as expected. 
- VolatileStorageKey.register(); - RamDiskStorageDriverProvider.register(new TestVolatileMemoryProvider()); + Runtime.resetDrivers(); DatabaseStorageKey.register(); const resolver = new CapabilitiesResolver({arcId: ArcId.newForTest('test')}); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - unspecified, entityType, handleId), VolatileStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - Capabilities.create([new Shareable(false)]), entityType, handleId), VolatileStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - Capabilities.create([new Shareable(true)]), entityType, handleId), RamDiskStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - inMemory, entityType, handleId), VolatileStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - inMemoryWithTtls, entityType, handleId), MemoryDatabaseStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - onDisk, entityType, handleId), PersistentDatabaseStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - onDiskWithTtl, entityType, handleId), PersistentDatabaseStorageKey); + const verify = async (a, b, c, d) => verifyReferenceModeStorageKey(await resolver.createStorageKey(a, b, c), d); + await verify(unspecified, entityType, handleId, VolatileStorageKey); + await verify(Capabilities.create([new Shareable(false)]), entityType, handleId, VolatileStorageKey); + await verify(Capabilities.create([new Shareable(true)]), entityType, handleId, RamDiskStorageKey); + await verify(inMemory, entityType, handleId, VolatileStorageKey); + await verify(inMemoryWithTtls, entityType, handleId, MemoryDatabaseStorageKey); + await verify(onDisk, entityType, handleId, PersistentDatabaseStorageKey); + await verify(onDiskWithTtl, entityType, handleId, PersistentDatabaseStorageKey); })); it('creates keys with custom factory', Flags.withDefaultReferenceMode(async () => { - // Register 
volatile factory, pass in-memory database in constructor. - VolatileStorageKey.register(); const resolver = new CapabilitiesResolver({arcId: ArcId.newForTest('test'), factories: [new MemoryDatabaseStorageKeyFactory()]}); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - unspecified, entityType, handleId), VolatileStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - inMemory, entityType, handleId), VolatileStorageKey); - verifyReferenceModeStorageKey(await resolver.createStorageKey( - inMemoryWithTtls, entityType, handleId), MemoryDatabaseStorageKey); - await assertThrowsAsync(async () => resolver.createStorageKey( - onDisk, entityType, handleId)); - await assertThrowsAsync(async () => resolver.createStorageKey( - onDiskWithTtl, entityType, handleId)); + verifyReferenceModeStorageKey(await resolver.createStorageKey(unspecified, entityType, handleId), VolatileStorageKey); + verifyReferenceModeStorageKey(await resolver.createStorageKey(inMemory, entityType, handleId), VolatileStorageKey); + verifyReferenceModeStorageKey(await resolver.createStorageKey(inMemoryWithTtls, entityType, handleId), MemoryDatabaseStorageKey); + await assertThrowsAsync(async () => resolver.createStorageKey(onDisk, entityType, handleId)); + await assertThrowsAsync(async () => resolver.createStorageKey(onDiskWithTtl, entityType, handleId)); })); it('creates keys for recipe with volatile and db factories', Flags.withDefaultReferenceMode(async () => { - VolatileStorageKey.register(); DatabaseStorageKey.register(); const manifestStr = ` recipe diff --git a/src/runtime/tests/manifest-test.ts b/src/runtime/tests/manifest-test.ts index e1f421f086b..965d3a4134a 100644 --- a/src/runtime/tests/manifest-test.ts +++ b/src/runtime/tests/manifest-test.ts @@ -47,25 +47,20 @@ function verifyPrimitiveType(field, type) { describe('manifest', async () => { - let memoryProvider; + let runtime; let storageManager; beforeEach(() => { - 
DriverFactory.clearRegistrationsForTesting(); - memoryProvider = new TestVolatileMemoryProvider(); + Runtime.resetDrivers(); + runtime = new Runtime(); storageManager = new DirectStorageEndpointManager(); - RamDiskStorageDriverProvider.register(memoryProvider); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); - const parseManifest = async (content: string, options: ManifestParseOptions = {memoryProvider}): Promise => { - return Manifest.parse(content, options); - }; - it('can parse a manifest containing a recipe', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema S t: Text @@ -113,7 +108,7 @@ describe('manifest', async () => { const type = manifest.recipes[0].handleConnections[0].type; assert.strictEqual('one-s', type.toPrettyString()); assert.strictEqual('many-ses', type.collectionOf().toPrettyString()); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('can parse a manifest containing a particle specification', async () => { const schemaStr = ` @@ -121,7 +116,7 @@ schema Product schema Person `; const particleStr0 = -`particle TestParticle in 'testParticle.js' +`particle TestParticle in './testParticle.js' list: reads [Product {}] person: writes Person {} modality dom @@ -137,9 +132,9 @@ schema Person list \`my special list\``; const particleStr1 = -`particle NoArgsParticle in 'noArgsParticle.js' +`particle NoArgsParticle in './noArgsParticle.js' modality dom`; - const manifest = await parseManifest(` + const manifest = await runtime.parse(` ${schemaStr} ${particleStr0} ${particleStr1} @@ -150,7 +145,7 @@ ${particleStr1} assert.strictEqual(particleStr1, manifest.particles[1].toString()); }; verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('SLANDLES can parse a manifest containing a particle specification', async () => { 
const schemaStr = ` @@ -158,7 +153,7 @@ schema Product schema Person `; const particleStr0 = -`particle TestParticle in 'testParticle.js' +`particle TestParticle in './testParticle.js' list: reads [Product {}] person: writes Person {} root: \`consumes Slot {formFactor:big} #master #main @@ -174,9 +169,9 @@ schema Person list \`my special list\``; const particleStr1 = -`particle NoArgsParticle in 'noArgsParticle.js' +`particle NoArgsParticle in './noArgsParticle.js' modality dom`; - const manifest = await parseManifest(` + const manifest = await runtime.parse(` ${schemaStr} ${particleStr0} ${particleStr1} @@ -187,11 +182,11 @@ ${particleStr1} assert.strictEqual(particleStr1, manifest.particles[1].toString()); }; verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('can parse a manifest containing a particle with an argument list', async () => { - const manifest = await parseManifest(` - particle TestParticle in 'a.js' + const manifest = await runtime.parse(` + particle TestParticle in './a.js' list: reads [Product {}] person: writes Person {} thing: consumes @@ -201,8 +196,8 @@ ${particleStr1} assert.lengthOf(manifest.particles[0].handleConnections, 2); }); it('SLANDLES can parse a manifest containing a particle with an argument list', async () => { - const manifest = await parseManifest(` - particle TestParticle in 'a.js' + const manifest = await runtime.parse(` + particle TestParticle in './a.js' list: reads [Product {}] person: writes Person {} thing: \`consumes Slot @@ -212,8 +207,8 @@ ${particleStr1} assert.lengthOf(manifest.particles[0].handleConnections, 4); }); it('can parse a manifest with dependent handles', async () => { - const manifest = await parseManifest(` - particle TestParticle in 'a.js' + const manifest = await runtime.parse(` + particle TestParticle in './a.js' input: reads [Product {}] output: writes [Product {}] thing: consumes @@ -223,8 +218,8 @@ ${particleStr1} 
assert.lengthOf(manifest.particles[0].handleConnections, 2); }); it('SLANDLES can parse a manifest with dependent handles', async () => { - const manifest = await parseManifest(` - particle TestParticle in 'a.js' + const manifest = await runtime.parse(` + particle TestParticle in './a.js' input: reads [Product {}] output: writes [Product {}] thing: \`consumes Slot @@ -234,58 +229,58 @@ ${particleStr1} assert.lengthOf(manifest.particles[0].handleConnections, 4); }); it('can round-trip particles with dependent handles', async () => { - const manifestString = `particle TestParticle in 'a.js' + const manifestString = `particle TestParticle in './a.js' input: reads [Product {}] output: writes [Product {}] modality dom thing: consumes Slot otherThing: provides? Slot`; - const manifest = await parseManifest(manifestString); + const manifest = await runtime.parse(manifestString); assert.lengthOf(manifest.particles, 1); assert.strictEqual(manifestString, manifest.particles[0].toString()); }); it('SLANDLES can round-trip particles with dependent handles', async () => { - const manifestString = `particle TestParticle in 'a.js' + const manifestString = `particle TestParticle in './a.js' input: reads [Product {}] output: writes [Product {}] thing: \`consumes? Slot otherThing: \`provides? 
Slot modality dom`; - const manifest = await parseManifest(manifestString); + const manifest = await runtime.parse(manifestString); assert.lengthOf(manifest.particles, 1); assert.strictEqual(manifestString, manifest.particles[0].toString()); }); it('can parse a manifest containing a schema', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Bar value: Text`); const verify = (manifest: Manifest) => verifyPrimitiveType(manifest.schemas.Bar.fields.value, 'Text'); verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('can parse a manifest containing an extended schema', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo value: Text schema Bar extends Foo`); const verify = (manifest: Manifest) => verifyPrimitiveType(manifest.schemas.Bar.fields.value, 'Text'); verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('can parse a manifest containing an inline schema', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo value: Text particle Fooer foo: reads Foo {value}`); const verify = (manifest: Manifest) => verifyPrimitiveType(manifest.schemas.Foo.fields.value, 'Text'); verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('can parse a manifest containing an inline schema with line breaks and a trailing comma', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Fooer foo: reads Foo { // Comments can go here @@ -300,7 +295,7 @@ ${particleStr1} verifyPrimitiveType(connectionEntity.fields.other, 'Number'); }; verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('two 
manifests with stores with the same filename, store name and data have the same ids', async () => { const manifestText = ` @@ -315,10 +310,8 @@ ${particleStr1} "locations": {} } `; - const manifestA = await parseManifest(manifestText, {fileName: 'the.manifest', memoryProvider}); - - const manifestB = await parseManifest(manifestText, {fileName: 'the.manifest', memoryProvider}); - + const manifestA = await runtime.parse(manifestText, {fileName: 'test.arcs'}); + const manifestB = await runtime.parse(manifestText, {fileName: 'test.arcs'}); assert.strictEqual(manifestA.stores[0].id.toString(), manifestB.stores[0].id.toString()); }); it('two manifests with stores with the same filename and store name but different data have different ids', async () => { @@ -334,14 +327,12 @@ ${particleStr1} "locations": {} } `; - const manifestA = await parseManifest(manifestText(12345), {fileName: 'the.manifest', memoryProvider}); - - const manifestB = await parseManifest(manifestText(67890), {fileName: 'the.manifest', memoryProvider}); - + const manifestA = await runtime.parse(manifestText(12345)); + const manifestB = await runtime.parse(manifestText(67890)); assert.notStrictEqual(manifestA.stores[0].id.toString(), manifestB.stores[0].id.toString()); }); it('can parse a recipe with a synthetic join handle', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` recipe people: map #folks other: map #products @@ -362,11 +353,11 @@ ${particleStr1} assert.include(pairs.joinedHandles, places); }; verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('fails to parse a recipe with an invalid synthetic join handle', async () => { try { - await parseManifest(` + await runtime.parse(` recipe people: map #folks things: map #products @@ -377,7 +368,7 @@ ${particleStr1} } }); it('can resolve a recipe with a synthetic join handle', async () => { - const manifest = await 
parseManifest(` + const manifest = await runtime.parse(` particle JoinReader data: reads [( Person {name: Text}, @@ -403,7 +394,7 @@ ${particleStr1} assert.isTrue(recipe.isResolved()); }); it('can resolve a recipe with a synthetic join handle and multiple readers', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle PeopleReader people: reads [Person {age: Number, name: Text}] particle PlaceReader @@ -444,7 +435,7 @@ ${particleStr1} assert.isTrue(recipe.isResolved()); }); it('does not allow writing to a synthetic join handle', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle JoinReader data: writes [( Person {name: Text}, @@ -463,7 +454,7 @@ ${particleStr1} assert.sameMembers(errors, [`Invalid fate 'join' for handle 'pairs: join (people, places)'; it is used for 'writes' JoinReader::data connection`]); }); it('supports recipes with constraints', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema S particle A a: reads S @@ -483,10 +474,10 @@ ${particleStr1} assert.strictEqual(constraint.to.connection, 'b'); }; verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('supports recipes with constraints that reference handles', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A a: writes S {} @@ -503,10 +494,10 @@ ${particleStr1} assert.strictEqual(constraint.to.handle.localName, 'localThing'); }; verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('supports recipes with local names', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema S particle P1 x: writes S @@ -521,11 +512,11 @@ ${particleStr1} x: writes thingHandle P2 x: writes thingHandle`); - const 
deserializedManifest = (await parseManifest(manifest.toString())); + const deserializedManifest = (await runtime.parse(manifest.toString())); }); // TODO: move these tests to new-recipe tests. it('can normalize simple recipes', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema S particle P1 x: writes S @@ -550,10 +541,10 @@ ${particleStr1} assert.deepEqual(recipe1.toString(), recipe2.toString()); assert.strictEqual(await recipe1.digest(), await recipe2.digest()); - const deserializedManifest = await parseManifest(manifest.toString()); + const deserializedManifest = await runtime.parse(manifest.toString()); }); it('can normalize recipes with interdependent ordering of handles and particles', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema S particle P1 x: writes S @@ -579,33 +570,33 @@ ${particleStr1} assert.deepEqual(recipe1.toString(), recipe2.toString()); }); it('can resolve recipe particles defined in the same manifest', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Something schema Someother - particle Thing in 'thing.js' + particle Thing in './thing.js' someThings: reads [Something] someOthers: writes [Someother] recipe Thing`); const verify = (manifest: Manifest) => assert(manifest.recipes[0].particles[0].spec); verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('treats a failed import as non-fatal', async () => { // TODO(cypher1): Review this. 
const loader = new Loader(null, { './a': `import 'b'`, './b': `lol what is this`, }); - const cc = await ConCap.capture(() => Manifest.load('./a', loader, {memoryProvider})); + const cc = await ConCap.capture(() => runtime.parseFile('./a', {loader})); assert.lengthOf(cc.warn, 2); assert.match(cc.warn[0][0], /Parse error in '\.\/b' line 1/); assert.match(cc.warn[1][0], /Error importing '\.\/b'/); }); it('throws an error when a particle has invalid description', async () => { try { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo - particle Thing in 'thing.js' + particle Thing in './thing.js' foo: reads Foo description \`Does thing\` bar \`my-bar\``); @@ -616,20 +607,20 @@ ${particleStr1} }); it('can load a manifest via a loader', async () => { const registry: Dictionary> = {}; - const loader = new Loader(null, { - '*': 'recipe'}); - const manifest = await Manifest.load('some-path', loader, {registry, memoryProvider}); + '*': 'recipe' + }); + const manifest = await runtime.parseFile('some-path', {loader, registry}); assert(manifest.recipes[0]); assert.strictEqual(manifest, await registry['some-path']); }); it('can load a manifest with imports', async () => { const registry: Dictionary> = {}; const loader = new Loader(null, { - './a': `import 'b'`, + './a': `import './b'`, './b': `recipe`, }); - const manifest = await Manifest.load('./a', loader, {registry, memoryProvider}); + const manifest = await runtime.parseFile('./a', {loader, registry}); assert.strictEqual(await registry['./a'], manifest); assert.strictEqual(manifest.imports[0], await registry['./b']); }); @@ -642,10 +633,10 @@ ${particleStr1} ParticleB`, './b': ` schema Thing - particle ParticleB in 'b.js' + particle ParticleB in './b.js' thing: reads Thing` }); - const manifest = await Manifest.load('./a', loader, {registry, memoryProvider}); + const manifest = await runtime.parseFile('./a', {loader, registry}); const particleLit = (await 
registry['./b']).findParticleByName('ParticleB').toLiteral(); deleteFieldRecursively(particleLit, 'location'); assert.deepEqual( @@ -663,7 +654,7 @@ ${particleStr1} value: Text` }); const registry = {}; - const manifest = await Manifest.load('./a', loader, {registry, memoryProvider}); + const manifest = await runtime.parseFile('./a', {loader, registry}); verifyPrimitiveType(manifest.schemas.Bar.fields.value, 'Text'); }); it('can find all imported recipes', async () => { @@ -677,11 +668,11 @@ ${particleStr1} recipe`, './c': `recipe`, }); - const manifest = await Manifest.load('./a', loader, {memoryProvider}); + const manifest = await runtime.parseFile('./a', {loader}); assert.lengthOf(manifest.allRecipes, 3); }); it('can parse a schema with union typing', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo u: (Text or URL) test: Text @@ -697,14 +688,14 @@ ${particleStr1} verifyPrimitiveType(opt.t.getFieldTypes()[2], 'Boolean'); }; verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('generates helpful type errors in recipes', async () => { - const manifest = await parseManifest(` - particle P in 'a.js' + const manifest = await runtime.parse(` + particle P in './a.js' outV: writes * {name: Text, age: Number} - particle Q in 'a.js' + particle Q in './a.js' inV: reads * {first_name: Text, age: Number} recipe @@ -721,10 +712,10 @@ ${particleStr1} assert.sameMembers(errors, [`Type validations failed for handle 'val: create': could not guarantee variable ~ meets read requirements * {first_name: Text, age: Number} with write guarantees * {name: Text, age: Number}`], 'expected type error'); }; verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('parses meta namespace section', async () => { - const manifest = await parseManifest(` + const manifest = await 
runtime.parse(` meta name: 'Awesome Arc' namespace: com.some.namespace @@ -734,11 +725,11 @@ ${particleStr1} assert.equal(manifest.meta.namespace, 'com.some.namespace'); }); it('parses and type checks a recipe with nested schemas', async () => { - const manifest = await parseManifest(` - particle P in 'a.js' + const manifest = await runtime.parse(` + particle P in './a.js' writer: writes * {inner: inline {x: Text, y: List}, z: Text} - particle Q in 'b.js' + particle Q in './b.js' reader: reads * {inner: inline {y: List}, z: Text} recipe @@ -753,7 +744,7 @@ ${particleStr1} }); describe('refinement types', async () => { it('can construct manifest containing schema with refinement types', Flags.withFieldRefinementsAllowed(async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo num: Number [num < 5]`); const verify = (manifest: Manifest) => { @@ -766,17 +757,17 @@ ${particleStr1} assert.strictEqual(binaryExpression.operator.op, '<'); }; verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); })); it('can construct manifest with particles using already defined schema (with refinements)', Flags.withFieldRefinementsAllowed(async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Person name: Text id: Text age: Number [age > 0] schema Ordered index: Number [index >= 0] - particle OrderPeople in 'OrderPeople.js' + particle OrderPeople in './OrderPeople.js' orderedPeople: writes [Ordered Person {name, id, index}]`); const verify = (manifest: Manifest) => { const entity = manifest.particles[0].handleConnectionMap.get('orderedPeople').type['collectionType']; @@ -797,7 +788,7 @@ ${particleStr1} verify(manifest); })); it('can construct manifest with particles using already defined schema (with refinements and double quotes)', Flags.withFieldRefinementsAllowed(async () => { - const manifest = await parseManifest(` + 
const manifest = await runtime.parse(` schema Person name: Text id: Text @@ -825,7 +816,7 @@ ${particleStr1} verify(manifest); })); it('can construct manifest containing a particle with refinement types', Flags.withFieldRefinementsAllowed(async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Foo input: reads Something {value: Number [value > 0], price: Number [price > 0]} [value > 10 and price < 2]`); const verify = (manifest: Manifest) => { @@ -847,7 +838,7 @@ ${particleStr1} assert.strictEqual(ref.expression.rightExpr.rightExpr.value, 2); }; verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); })); describe('refinement type checking', async () => { @@ -864,7 +855,7 @@ ${particleStr1} it('checks refinement expressions with out of scope variables', Flags.withFieldRefinementsAllowed(async () => { assertThrowsAsync(async () => { - await parseManifest(` + await runtime.parse(` particle Writer output: writes Something {num: Number [ foo > 5 ] } particle Reader @@ -879,7 +870,7 @@ ${particleStr1} })); it('checks refinement expressions with out of scope field names', Flags.withFieldRefinementsAllowed(async () => { assertThrowsAsync(async () => { - await parseManifest(` + await runtime.parse(` particle Writer output: writes Something {num: Number [ other_num > 5 ], other_num: Number } particle Reader @@ -893,7 +884,7 @@ ${particleStr1} }, 'Unresolved field name \'other_num\' in the refinement expression.'); })); it('checks refinement expressions', Flags.withFieldRefinementsAllowed(async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Writer output: writes Something {num: Number [ num > 5 ] } particle Reader @@ -907,7 +898,7 @@ ${particleStr1} verify(manifest, true, []); })); it('checks for unsafe refinement expressions', Flags.withFieldRefinementsAllowed(async () => { - const manifest = await 
parseManifest(` + const manifest = await runtime.parse(` particle BadWriter output: writes Something {num: Number [ num > 3 ] } particle Reader @@ -922,7 +913,7 @@ ${particleStr1} verify(manifest, false, [refinementError]); })); it('ignores impossible refinement expressions', Flags.withFlags({fieldRefinementsAllowed: true, warnOnUnsafeRefinement: true}, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Impossible output: writes Something {num: Number [ (num < 3) and (num > 3) ] } particle Reader @@ -938,7 +929,7 @@ ${particleStr1} })); it('ignores dynamic query refinement expressions on fields', Flags.withFlags({fieldRefinementsAllowed: true, warnOnUnsafeRefinement: true}, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Impossible output: writes Something {num: Number [ (num > 3) ] } particle Reader @@ -957,7 +948,7 @@ ${particleStr1} })); it('ignores dynamic query refinement expressions on fields (and warns when wall turned on', Flags.withFlags({fieldRefinementsAllowed: true, warnOnUnsafeRefinement: true}, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Impossible output: writes Something {num: Number [ (num > 3) ] } particle Reader @@ -976,7 +967,7 @@ ${particleStr1} })); it('ignores dynamic query refinement expressions', Flags.withFlags({warnOnUnsafeRefinement: true}, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Impossible output: writes Something {num: Number} [ (num > 3) ] particle Reader @@ -994,7 +985,7 @@ ${particleStr1} })); it('applies refinements', Flags.withFieldRefinementsAllowed(async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Impossible output: writes Something {num: Number [ (num > 3) ] } particle Reader @@ -1008,7 +999,7 @@ ${particleStr1} verify(manifest, false, 
['Type validations failed for handle \'data: create\': could not guarantee variable ~ meets read requirements Something {num: Number[(num > 5)]} with write guarantees Something {num: Number[(num > 3)]}']); })); it('ignores dynamic query refinement expressions and-ed with refinements', Flags.withFlags({fieldRefinementsAllowed: true, warnOnUnsafeRefinement: true}, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Impossible output: writes Something {num: Number [ (num > 5) ] } particle Reader @@ -1026,7 +1017,7 @@ ${particleStr1} assert.match(cc.warn[1], /Unable to ascertain if .* is at least as specific as .*/); })); it('ignores refinements or-ed with dynamic query refinement expressions', Flags.withFlags({fieldRefinementsAllowed: true, warnOnUnsafeRefinement: true}, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Impossible output: writes Something {num: Number } particle Reader @@ -1045,7 +1036,7 @@ ${particleStr1} assert.lengthOf(cc.log, 0); })); it('catches unsafe schema level refinements', Flags.withFieldRefinementsAllowed(async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Impossible output: writes Something {num: Number } [num > 0] particle Reader @@ -1059,7 +1050,7 @@ ${particleStr1} verify(manifest, false, ['Type validations failed for handle \'data: create\': could not guarantee variable ~ meets read requirements Something {num: Number[(num > 3)]} with write guarantees Something {num: Number[(num > 0)]}']); })); it('allows safe schema level refinements', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Impossible output: writes Something {num: Number } [num > 5] particle Reader @@ -1077,7 +1068,7 @@ ${particleStr1} describe('relaxed reads and writes', async () => { it('can parse a manifest containing relaxed reads', async () => { - const 
manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Thing schema NotAThing @@ -1138,15 +1129,15 @@ recipe SomeRecipe foo: writes myFoo PC as particle2 foo: reads someof myFoo`; - const manifest = await parseManifest(manifestStr); + const manifest = await runtime.parse(manifestStr); assert.strictEqual(manifest.toString(), manifestStr, 'round trip failed'); }); }); it('can parse a manifest containing a recipe with slots', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Thing - particle SomeParticle in 'some-particle.js' + particle SomeParticle in './some-particle.js' someParam: reads Thing mySlot: consumes Slot {formFactor: big} otherSlot: provides Slot {handle: someParam} @@ -1188,12 +1179,12 @@ recipe SomeRecipe assert.strictEqual(mySlot.providedSlots['oneMoreSlot'], recipe.particles[0].getSlotConnectionByName('oneMoreSlot').targetSlot); }; verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('SLANDLES can parse a manifest containing a recipe with slots', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Thing - particle SomeParticle in 'some-particle.js' + particle SomeParticle in './some-particle.js' someParam: reads Thing mySlot: \`consumes Slot {formFactor: big} otherSlot: \`provides Slot {handle: someParam} @@ -1230,13 +1221,13 @@ recipe SomeRecipe assert.strictEqual(mySlot.connections[0], recipe.particles[0].connections['mySlot']); }; verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('unnamed consume slots', async () => { - const manifest = await parseManifest(` - particle SomeParticle &work in 'some-particle.js' + const manifest = await runtime.parse(` + particle SomeParticle &work in './some-particle.js' slotA: consumes - particle SomeParticle1 &rest in 
'some-particle.js' + particle SomeParticle1 &rest in './some-particle.js' slotC: consumes recipe @@ -1250,10 +1241,10 @@ recipe SomeRecipe assert.isEmpty(recipe.slots); }); it('unnamed consume set slots', async () => { - const manifest = await parseManifest(` - particle SomeParticle &work in 'some-particle.js' + const manifest = await runtime.parse(` + particle SomeParticle &work in './some-particle.js' slotA: consumes [Slot] - particle SomeParticle1 &rest in 'some-particle.js' + particle SomeParticle1 &rest in './some-particle.js' slotC: consumes [Slot] recipe @@ -1267,10 +1258,10 @@ recipe SomeRecipe assert.isEmpty(recipe.slots); }); it('unnamed consume set slots', async () => { - const manifest = await parseManifest(` - particle SomeParticle &work in 'some-particle.js' + const manifest = await runtime.parse(` + particle SomeParticle &work in './some-particle.js' slotA: consumes [Slot] - particle SomeParticle1 &rest in 'some-particle.js' + particle SomeParticle1 &rest in './some-particle.js' slotC: consumes [Slot] recipe @@ -1284,10 +1275,10 @@ recipe SomeRecipe assert.isEmpty(recipe.slots); }); it('SLANDLES unnamed consume slots', async () => { - const manifest = await parseManifest(` - particle SomeParticle &work in 'some-particle.js' + const manifest = await runtime.parse(` + particle SomeParticle &work in './some-particle.js' slotA: \`consumes Slot - particle SomeParticle1 &rest in 'some-particle.js' + particle SomeParticle1 &rest in './some-particle.js' slotC: \`consumes Slot recipe @@ -1301,10 +1292,10 @@ recipe SomeRecipe assert.isEmpty(recipe.handles); }); it('SLANDLES unnamed consume set slots', async () => { - const manifest = await parseManifest(` - particle SomeParticle &work in 'some-particle.js' + const manifest = await runtime.parse(` + particle SomeParticle &work in './some-particle.js' slotA: \`consumes [Slot] - particle SomeParticle1 &rest in 'some-particle.js' + particle SomeParticle1 &rest in './some-particle.js' slotC: \`consumes [Slot] 
recipe @@ -1318,10 +1309,10 @@ recipe SomeRecipe assert.isEmpty(recipe.handles); }); it('SLANDLES unnamed consume set slots', async () => { - const manifest = await parseManifest(` - particle SomeParticle &work in 'some-particle.js' + const manifest = await runtime.parse(` + particle SomeParticle &work in './some-particle.js' slotA: \`consumes [Slot] - particle SomeParticle1 &rest in 'some-particle.js' + particle SomeParticle1 &rest in './some-particle.js' slotC: \`consumes [Slot] recipe @@ -1336,8 +1327,8 @@ recipe SomeRecipe }); it('resolves in context with multiple consumed slots', async () => { const parseRecipe = async (arg: {label: string, isRequiredSlotA: boolean, isRequiredSlotB: boolean, expectedIsResolved: boolean}) => { - const recipe = (await parseManifest(` - particle SomeParticle in 'some-particle.js' + const recipe = (await runtime.parse(` + particle SomeParticle in './some-particle.js' slotA: consumes${arg.isRequiredSlotA ? '' : '?'} Slot slotB: consumes${arg.isRequiredSlotB ? '' : '?'} Slot @@ -1357,8 +1348,8 @@ recipe SomeRecipe }); it('SLANDLES resolves in context with multiple consumed slots', async () => { const parseRecipe = async (arg: {label: string, isRequiredSlotA: boolean, isRequiredSlotB: boolean, expectedIsResolved: boolean}) => { - const recipe = (await parseManifest(` - particle SomeParticle in 'some-particle.js' + const recipe = (await runtime.parse(` + particle SomeParticle in './some-particle.js' slotA: \`consumes${arg.isRequiredSlotA ? '' : '?'} Slot slotB: \`consumes${arg.isRequiredSlotB ? 
'' : '?'} Slot @@ -1378,8 +1369,8 @@ recipe SomeRecipe }); it('SLANDLES resolves & consumes in context with multiple set slots', async () => { const parseRecipe = async (arg: {label: string, isRequiredSlotA: boolean, isRequiredSlotB: boolean, expectedIsResolved: boolean}) => { - const recipe = (await parseManifest(` - particle SomeParticle in 'some-particle.js' + const recipe = (await runtime.parse(` + particle SomeParticle in './some-particle.js' slotA: \`consumes${arg.isRequiredSlotA ? '' : '?'} [Slot] slotB: \`consumes${arg.isRequiredSlotB ? '' : '?'} [Slot] @@ -1399,12 +1390,12 @@ recipe SomeRecipe }); it('SLANDLES resolves with dependent slandles', async () => { - const manifest = await parseManifest(` - particle Parent in 'parent.js' + const manifest = await runtime.parse(` + particle Parent in './parent.js' root: \`consumes Slot mySlot: \`provides Slot - particle Child in 'child.js' + particle Child in './child.js' childSlot: \`consumes Slot recipe SlandleRenderSlotsRecipe @@ -1424,12 +1415,12 @@ recipe SomeRecipe }); it('SLANDLES doesn\'t resolve mismatching dependencies dependent slandles', async () => { - const manifest = await parseManifest(` - particle Parent in 'parent.js' + const manifest = await runtime.parse(` + particle Parent in './parent.js' root: \`consumes Slot mySlot: \`provides Slot - particle Child in 'child.js' + particle Child in './child.js' childSlot: \`consumes Slot recipe SlandleRenderSlotsRecipe @@ -1448,8 +1439,8 @@ recipe SomeRecipe }); it('recipe slots with tags', async () => { - const manifest = await parseManifest(` - particle SomeParticle in 'some-particle.js' + const manifest = await runtime.parse(` + particle SomeParticle in './some-particle.js' slotA: consumes #aaa slotB: provides #bbb recipe @@ -1481,8 +1472,8 @@ recipe SomeRecipe assert.lengthOf(Object.keys(slotConn.providedSlots), 1); }); it('SLANDLES recipe slots with tags', async () => { - const manifest = await parseManifest(` - particle SomeParticle in 
'some-particle.js' + const manifest = await runtime.parse(` + particle SomeParticle in './some-particle.js' slotA: \`consumes Slot #aaa slotB: \`provides Slot #bbb recipe @@ -1512,10 +1503,10 @@ recipe SomeRecipe assert.deepEqual(['aa', 'hello'], slotConn.tags); }); it('recipe slots with different names', async () => { - const manifest = await parseManifest(` - particle ParticleA in 'some-particle.js' + const manifest = await runtime.parse(` + particle ParticleA in './some-particle.js' slotA: consumes - particle ParticleB in 'some-particle.js' + particle ParticleB in './some-particle.js' slotB1: consumes slotB2: provides recipe @@ -1536,10 +1527,10 @@ recipe SomeRecipe assert.isTrue(recipe.isResolved()); }); it('SLANDLES recipe slots with different names', async () => { - const manifest = await parseManifest(` - particle ParticleA in 'some-particle.js' + const manifest = await runtime.parse(` + particle ParticleA in './some-particle.js' slotA: \`consumes Slot - particle ParticleB in 'some-particle.js' + particle ParticleB in './some-particle.js' slotB1: \`consumes Slot slotB2: \`provides Slot recipe @@ -1563,10 +1554,10 @@ recipe SomeRecipe assert.isTrue(recipe.isResolved(options), `Expected recipe to be resolved.\n\t ${JSON.stringify([...options.errors])}`); }); it('SLANDLES recipe set slots with different names (passing a single slot to a set slot)', async () => { - const manifest = await parseManifest(` - particle ParticleA in 'some-particle.js' + const manifest = await runtime.parse(` + particle ParticleA in './some-particle.js' slotA: \`consumes [Slot] - particle ParticleB in 'some-particle.js' + particle ParticleB in './some-particle.js' slotB1: \`consumes Slot slotB2: \`provides Slot recipe @@ -1587,10 +1578,10 @@ recipe SomeRecipe assert.isFalse(recipe.normalize(), 'does not normalize'); }); it('SLANDLES recipe set slots with different names (passing a slot as a set slot)', async () => { - const manifest = await parseManifest(` - particle ParticleA in 
'some-particle.js' + const manifest = await runtime.parse(` + particle ParticleA in './some-particle.js' slotA: \`consumes [Slot] - particle ParticleB in 'some-particle.js' + particle ParticleB in './some-particle.js' slotB1: \`consumes Slot slotB2: \`provides [Slot] recipe @@ -1614,10 +1605,10 @@ recipe SomeRecipe assert.isTrue(recipe.isResolved(options), `Expected recipe to be resolved.\n\t ${JSON.stringify([...options.errors])}`); }); it('SLANDLES recipe set slots with different names (passing set slots)', async () => { - const manifest = await parseManifest(` - particle ParticleA in 'some-particle.js' + const manifest = await runtime.parse(` + particle ParticleA in './some-particle.js' slotA: \`consumes [Slot] - particle ParticleB in 'some-particle.js' + particle ParticleB in './some-particle.js' slotB1: \`consumes [Slot] slotB2: \`provides [Slot] recipe @@ -1641,10 +1632,10 @@ recipe SomeRecipe assert.isTrue(recipe.isResolved(options), `Expected recipe to be resolved.\n\t ${JSON.stringify([...options.errors])}`); }); it('SLANDLES recipe set slots with different names (passing a single slot to a set slot)', async () => { - const manifest = await parseManifest(` - particle ParticleA in 'some-particle.js' + const manifest = await runtime.parse(` + particle ParticleA in './some-particle.js' slotA: \`consumes [Slot] - particle ParticleB in 'some-particle.js' + particle ParticleB in './some-particle.js' slotB1: \`consumes Slot slotB2: \`provides Slot recipe @@ -1665,10 +1656,10 @@ recipe SomeRecipe assert.isFalse(recipe.normalize(), 'does not normalize'); }); it('SLANDLES recipe set slots with different names (passing a slot as a set slot)', async () => { - const manifest = await parseManifest(` - particle ParticleA in 'some-particle.js' + const manifest = await runtime.parse(` + particle ParticleA in './some-particle.js' slotA: \`consumes [Slot] - particle ParticleB in 'some-particle.js' + particle ParticleB in './some-particle.js' slotB1: \`consumes Slot slotB2: 
\`provides [Slot] recipe @@ -1692,10 +1683,10 @@ recipe SomeRecipe assert.isTrue(recipe.isResolved(options), `Expected recipe to be resolved.\n\t ${JSON.stringify([...options.errors])}`); }); it('SLANDLES recipe set slots with different names (passing set slots)', async () => { - const manifest = await parseManifest(` - particle ParticleA in 'some-particle.js' + const manifest = await runtime.parse(` + particle ParticleA in './some-particle.js' slotA: \`consumes [Slot] - particle ParticleB in 'some-particle.js' + particle ParticleB in './some-particle.js' slotB1: \`consumes [Slot] slotB2: \`provides [Slot] recipe @@ -1719,8 +1710,8 @@ recipe SomeRecipe assert.isTrue(recipe.isResolved(options), `Expected recipe to be resolved.\n\t ${JSON.stringify([...options.errors])}`); }); it('recipe provided slot with no local name', async () => { - const manifest = await parseManifest(` - particle ParticleA in 'some-particle.js' + const manifest = await runtime.parse(` + particle ParticleA in './some-particle.js' slotA1: consumes slotA2: provides recipe @@ -1738,8 +1729,8 @@ recipe SomeRecipe assert.isFalse(recipe.isResolved()); }); it('SLANDLES recipe provided slot with no local name', async () => { - const manifest = await parseManifest(` - particle ParticleA in 'some-particle.js' + const manifest = await runtime.parse(` + particle ParticleA in './some-particle.js' slotA1: \`consumes Slot slotA2: \`provides Slot recipe @@ -1768,8 +1759,8 @@ recipe SomeRecipe assert.isFalse(recipe.isResolved()); }); it('SLANDLES recipe provided set slots with no local name', async () => { - const manifest = await parseManifest(` - particle ParticleA in 'some-particle.js' + const manifest = await runtime.parse(` + particle ParticleA in './some-particle.js' slotA1: \`consumes [Slot] slotA2: \`provides [Slot] recipe @@ -1798,8 +1789,8 @@ recipe SomeRecipe assert.isFalse(recipe.isResolved()); }); it('SLANDLES recipe provided set slots with no local name', async () => { - const manifest = await 
parseManifest(` - particle ParticleA in 'some-particle.js' + const manifest = await runtime.parse(` + particle ParticleA in './some-particle.js' slotA1: \`consumes [Slot] slotA2: \`provides [Slot] recipe @@ -1828,11 +1819,11 @@ recipe SomeRecipe assert.isFalse(recipe.isResolved()); }); it('incomplete aliasing', async () => { - const recipe = (await parseManifest(` - particle P1 in 'some-particle.js' + const recipe = (await runtime.parse(` + particle P1 in './some-particle.js' slotA: consumes slotB: provides - particle P2 in 'some-particle.js' + particle P2 in './some-particle.js' slotB: consumes recipe P1 @@ -1856,11 +1847,11 @@ recipe SomeRecipe assert.strictEqual(slotB.sourceConnection, slotConnA); }); it('SLANDLES incomplete aliasing', async () => { - const recipe = (await parseManifest(` - particle P1 in 'some-particle.js' + const recipe = (await runtime.parse(` + particle P1 in './some-particle.js' slotA: \`consumes Slot slotB: \`provides Slot - particle P2 in 'some-particle.js' + particle P2 in './some-particle.js' slotB: \`consumes Slot recipe P1 @@ -1888,11 +1879,11 @@ recipe SomeRecipe assert.include(directions, '`consumes'); }); it('parses local slots with IDs', async () => { - const recipe = (await parseManifest(` - particle P1 in 'some-particle.js' + const recipe = (await runtime.parse(` + particle P1 in './some-particle.js' slotA: consumes slotB: provides - particle P2 in 'some-particle.js' + particle P2 in './some-particle.js' slotB: consumes recipe slot0: slot 'rootslot-0' @@ -1907,11 +1898,11 @@ recipe SomeRecipe assert.lengthOf(recipe.slots, 2); }); it('SLANDLES parses local slots with IDs', async () => { - const recipe = (await parseManifest(` - particle P1 in 'some-particle.js' + const recipe = (await runtime.parse(` + particle P1 in './some-particle.js' slotA: \`consumes Slot slotB: \`provides Slot - particle P2 in 'some-particle.js' + particle P2 in './some-particle.js' slotB: \`consumes Slot recipe slot0: \`slot 'rootslot-0' @@ -1942,7 +1933,7 
@@ recipe SomeRecipe } }(); - await Manifest.load('./somewhere/a', loader, {registry, memoryProvider}); + await runtime.parseFile('./somewhere/a', {loader, registry}); assert(registry['./somewhere/a path/b']); }); it('parses all particles manifests', async () => { @@ -1985,7 +1976,7 @@ recipe SomeRecipe const manifestSource = ` schema Thing someProp: Text - store Store0 of [Thing] in 'entities.json'`; + store Store0 of [Thing] in './entities.json'`; const entitySource = JSON.stringify( {root: {values: { @@ -1997,7 +1988,7 @@ recipe SomeRecipe './the.manifest': manifestSource, './entities.json': entitySource }); - const manifest = await Manifest.load('./the.manifest', loader, {memoryProvider}); + const manifest = await runtime.parseFile('./the.manifest', {loader}); const storageStub = manifest.findStoreByName('Store0') as StoreInfo; assert(storageStub); const handle = await handleForStoreInfo(storageStub, {...manifest, storageManager}); @@ -2013,17 +2004,16 @@ recipe SomeRecipe rawData: {someProp: 'someValue2'}, } ]); - DriverFactory.clearRegistrationsForTesting(); }); it('throws an error when a store has invalid json', async () => { try { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Thing resource EntityList start this is not json? - store Store0 of [Thing] in EntityList`); + store Store0 of [Thing] in EntityList`, {fileName: 'foo.arcs'}); assert(false); } catch (e) { assert.deepEqual(e.message, `Post-parse processing error caused by line 7. 
@@ -2035,7 +2025,7 @@ Error parsing JSON from 'EntityList' (Unexpected token h in JSON at position 1)' it('loads entities from a resource section', async () => { const now = new Date().getTime(); const later = new Date(now + 1000).getTime(); - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Thing someProp: Text @@ -2053,7 +2043,7 @@ Error parsing JSON from 'EntityList' (Unexpected token h in JSON at position 1)' } store Store0 of [Thing] in EntityList - `, {fileName: 'the.manifest', memoryProvider}); + `); const storeInfo = manifest.findStoreByName('Store0') as StoreInfo; assert(storeInfo); const handle = await handleForStoreInfo(storeInfo, {...manifest, storageManager}); @@ -2070,16 +2060,15 @@ Error parsing JSON from 'EntityList' (Unexpected token h in JSON at position 1)' rawData: {someProp: 'someValue2'}, } ]); - DriverFactory.clearRegistrationsForTesting(); }); it('loads inline entities with primitive and reference fields', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` store X of [{n: Number, t: Text, u: URL, f: Boolean, b: Bytes, r: &{z: Text}}] with { {n: 0, t: '', u: '', f: false, b: ||, r: <'i1', 'reference-mode://{volatile://!1:test/backing@}{volatile://!2:test/container@}'>}, {n: 4.5, t: 'abc', u: 'site', f: true, b: |5a, 7, d|, r: <'i2', 'reference-mode://{volatile://!3:test/backing2@}{volatile://!4:test/container2@}'>}, } - `, {fileName: 'the.manifest', memoryProvider}); + `); const store = manifest.findStoreByName('X') as StoreInfo; const handle = await handleForStoreInfo(store, {...manifest, storageManager}); const entities = (await handle.toList()).map(Entity.serialize); @@ -2095,11 +2084,10 @@ Error parsing JSON from 'EntityList' (Unexpected token h in JSON at position 1)' n: 4.5, t: 'abc', u: 'site', f: true, b: new Uint8Array([0x5a, 0x07, 0x0d]), r: {id: 'i2', entityStorageKey: 'reference-mode://{volatile://!3:test/backing2@}{volatile://!4:test/container2@}', 
creationTimestamp: null, expirationTimestamp: null} }); - DriverFactory.clearRegistrationsForTesting(); }); it('loads inline entities with collection fields', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` store X of [{ n: [Number], t: [Text], u: [URL], f: [Boolean], b: [Bytes], r: [&{z: Text}] }] with { @@ -2113,7 +2101,7 @@ Error parsing JSON from 'EntityList' (Unexpected token h in JSON at position 1)' r: [<'i1', 'reference-mode://{volatile://!1:test/backing@}{volatile://!2:test/container@}'>], } } - `, {fileName: 'the.manifest', memoryProvider}); + `); const store = manifest.findStoreByName('X') as StoreInfo; const handle = await handleForStoreInfo(store, {...manifest, storageManager}); const entities = (await handle.toList()).map(Entity.serialize); @@ -2132,11 +2120,10 @@ Error parsing JSON from 'EntityList' (Unexpected token h in JSON at position 1)' b: [new Uint8Array([0x17, 0xb0]), new Uint8Array()], r: [{id: 'i1', entityStorageKey: 'reference-mode://{volatile://!1:test/backing@}{volatile://!2:test/container@}', creationTimestamp: null, expirationTimestamp: null}], }); - DriverFactory.clearRegistrationsForTesting(); }); it('loads inline entities with tuple fields', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` store X of [{ a: (Number, Number, Bytes, URL), b: (Boolean, Text) @@ -2150,7 +2137,7 @@ Error parsing JSON from 'EntityList' (Unexpected token h in JSON at position 1)' b: (true, 'xyz'), }, } - `, {fileName: 'the.manifest', memoryProvider}); + `); const store = manifest.findStoreByName('X') as StoreInfo; const handle = await handleForStoreInfo(store, {...manifest, storageManager}); const entities = (await handle.toList()).map(Entity.serialize); @@ -2166,18 +2153,17 @@ Error parsing JSON from 'EntityList' (Unexpected token h in JSON at position 1)' a: [6.5, -2, new Uint8Array([0x5e, 0x06]), 'link'], b: [true, 'xyz'], }); - 
DriverFactory.clearRegistrationsForTesting(); }); it('loads inline entities with union fields', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` store X of [{u: (Text or Number or Boolean or Bytes)}] with { {u: 'str'}, {u: 52}, {u: true}, {u: |1e, e7|}, } - `, {fileName: 'the.manifest', memoryProvider}); + `); const store = manifest.findStoreByName('X') as StoreInfo; const handle = await handleForStoreInfo(store, {...manifest, storageManager}); const entities = (await handle.toList()).map(e => Entity.serialize(e).rawData); @@ -2188,12 +2174,11 @@ Error parsing JSON from 'EntityList' (Unexpected token h in JSON at position 1)' {u: true}, {u: new Uint8Array([0x1e, 0xe7])}, ]); - DriverFactory.clearRegistrationsForTesting(); }); it('throws an error when inline entities do not match the store schema', async () => { const check = async (manifestStr, msg) => { - const manifest = await parseManifest(manifestStr, {fileName: 'the.manifest', memoryProvider}); + const manifest = await runtime.parse(manifestStr); const store = manifest.findStoreByName('X') as StoreInfo; const handle = await handleForStoreInfo(store, {...manifest, storageManager}); await assertThrowsAsync(async () => handle.toList(), msg); @@ -2216,14 +2201,12 @@ Error parsing JSON from 'EntityList' (Unexpected token h in JSON at position 1)' // Incorrect field name await check(`store X of [{a: Text}] with { {b: 'abc'} }`, `Can't set field b; not in schema`); - - DriverFactory.clearRegistrationsForTesting(); }); it('resolves store names to ids', async () => { const manifestSource = ` schema Thing - store Store0 of [Thing] in 'entities.json' + store Store0 of [Thing] in './entities.json' recipe myStore: map Store0`; const entitySource = JSON.stringify({root: {}, locations: {}}); @@ -2231,27 +2214,25 @@ Error parsing JSON from 'EntityList' (Unexpected token h in JSON at position 1)' './the.manifest': manifestSource, './entities.json': entitySource, }); - const 
manifest = await Manifest.load('./the.manifest', loader, {memoryProvider}); + const manifest = await runtime.parseFile('./the.manifest', {loader}); const recipe = manifest.recipes[0]; assert.deepEqual(recipe.toString(), `recipe\n myStore: map '!manifest:./the.manifest:store0:${await digest(entitySource)}'`); }); it('has prettyish syntax errors', async () => { try { - await parseManifest('recipe ?', {fileName: 'bad-file', memoryProvider}); + await runtime.parse('recipe ?', {fileName: 'bad-file'}); assert(false); } catch (e) { - assert.deepEqual(e.message, `Parse error in 'bad-file' line 1. -Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found. - recipe ? - ^`); + const expected = 'Parse error in \'bad-file\' line 1.\nExpected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found.\n recipe ?\n ^'; + assert.deepEqual(e.message, expected); } }); it('errors when the manifest connects a particle incorrectly', async () => { const manifest = ` schema Thing - particle TestParticle in 'tp.js' + particle TestParticle in './tp.js' iny: reads Thing outy: writes Thing inouty: reads writes Thing @@ -2262,7 +2243,7 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found outy: writes x inouty: writes x`; try { - await parseManifest(manifest); + await runtime.parse(manifest); assert.fail(); } catch (e) { assert.match(e.message, /'writes' not compatible with 'reads' param of 'TestParticle'/); @@ -2272,7 +2253,7 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found it('errors when the manifest references a missing particle param', async () => { const manifest = ` schema Thing - particle TestParticle in 'tp.js' + particle TestParticle in './tp.js' a: reads Thing recipe x: create @@ -2280,7 +2261,7 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" 
found a: reads x b: reads x`; try { - await parseManifest(manifest); + await runtime.parse(manifest); assert.fail(); } catch (e) { assert.match(e.message, /param 'b' is not defined by 'TestParticle'/); @@ -2289,13 +2270,13 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found it('errors when the manifest references a missing consumed slot', async () => { const manifest = ` - particle TestParticle in 'tp.js' + particle TestParticle in './tp.js' root: consumes recipe TestParticle other: consumes`; try { - await parseManifest(manifest); + await runtime.parse(manifest); assert.fail(); } catch (e) { assert.match(e.message, /Consumed slot 'other' is not defined by 'TestParticle'/); @@ -2303,13 +2284,13 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found }); it('SLANDLES errors when the manifest references a missing consumed slot', async () => { const manifest = ` - particle TestParticle in 'tp.js' + particle TestParticle in './tp.js' root: \`consumes Slot recipe TestParticle other: \`consumes`; try { - await parseManifest(manifest); + await runtime.parse(manifest); assert.fail(); } catch (e) { assert.match(e.message, /param 'other' is not defined by 'TestParticle'/); @@ -2318,7 +2299,7 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found it('errors when the manifest references a missing provided slot', async () => { const manifest = ` - particle TestParticle in 'tp.js' + particle TestParticle in './tp.js' root: consumes Slot action: provides Slot recipe @@ -2326,7 +2307,7 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found root: consumes noAction: provides`; try { - await parseManifest(manifest); + await runtime.parse(manifest); assert.fail('did not throw'); } catch (e) { assert.match(e.message, /Provided slot 'noAction' is not defined by 'TestParticle'/); @@ -2334,7 +2315,7 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. 
Foo) but "?" found }); it('SLANDLES errors when the manifest references a missing provided slot', async () => { const manifest = ` - particle TestParticle in 'tp.js' + particle TestParticle in './tp.js' root: \`consumes Slot action: \`provides Slot recipe @@ -2342,7 +2323,7 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found root: \`consumes noAction: \`provides`; try { - await parseManifest(manifest); + await runtime.parse(manifest); assert.fail(); } catch (e) { assert.match(e.message, /param 'noAction' is not defined by 'TestParticle'/); @@ -2355,7 +2336,7 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found recipe NoParticle.paramA: writes OtherParticle.paramB`; try { - await parseManifest(manifestFrom); + await runtime.parse(manifestFrom); assert.fail(); } catch (e) { assert.match(e.message, /could not find particle 'NoParticle'/); @@ -2367,7 +2348,7 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found recipe ParticleA.paramA: writes OtherParticle.paramB`; try { - await parseManifest(manifestTo); + await runtime.parse(manifestTo); assert.fail(); } catch (e) { assert.match(e.message, /could not find particle 'OtherParticle'/); @@ -2379,7 +2360,7 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found recipe ParticleA.paramA: writes ParticleB.paramB`; try { - await parseManifest(manifestFromParam); + await runtime.parse(manifestFromParam); assert.fail(); } catch (e) { assert.match(e.message, /'paramA' is not defined by 'ParticleA'/); @@ -2393,7 +2374,7 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found recipe ParticleA.paramA: writes ParticleB.paramB`; try { - await parseManifest(manifestToParam); + await runtime.parse(manifestToParam); assert.fail(); } catch (e) { assert.match(e.message, /'paramB' is not defined by 'ParticleB'/); @@ -2405,7 +2386,7 @@ Expected a verb (e.g. 
&Verb) or an uppercase identifier (e.g. Foo) but "?" found const manifestSource = ` recipe search \`Hello dear world\``; - let recipe = (await parseManifest(manifestSource)).recipes[0]; + let recipe = (await runtime.parse(manifestSource)).recipes[0]; assert.isNotNull(recipe.search); let search = checkNotNull(recipe.search); assert.strictEqual('Hello dear world', search.phrase); @@ -2421,7 +2402,7 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found search \`Hello dear world\` tokens \`dear\` \`hello\` \`world\` // unresolved search tokens`); - recipe = (await parseManifest(manifestSource)).recipes[0]; + recipe = (await runtime.parse(manifestSource)).recipes[0]; // resolve some tokens. search = checkNotNull(recipe.search); search.resolveToken('hello'); @@ -2449,9 +2430,9 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found }); it('merge recipes with search strings', async () => { - const recipe1 = (await parseManifest(`recipe + const recipe1 = (await runtime.parse(`recipe search \`Hello world\``)).recipes[0]; - const recipe2 = (await parseManifest(`recipe + const recipe2 = (await runtime.parse(`recipe search \`good morning\` tokens \`morning\` // \`good\``)).recipes[0]; @@ -2465,10 +2446,10 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" found const loader = new Loader(null, { '*': '{"root": {}, "locations": {}}' }); - const parseOptions = {loader, memoryProvider}; - const manifest = await parseManifest(` + const parseOptions = {loader}; + const manifest = await runtime.parse(` schema Product - store ClairesWishlist of [Product] #wishlist in 'wishlist.json' + store ClairesWishlist of [Product] #wishlist in './wishlist.json' description \`Claire's wishlist\``, parseOptions); const verify = (manifest: Manifest) => { assert.lengthOf(manifest.stores, 1); @@ -2476,11 +2457,11 @@ Expected a verb (e.g. &Verb) or an uppercase identifier (e.g. Foo) but "?" 
found }; verify(manifest); assert.strictEqual(manifest.stores[0].toManifestString(), - (await parseManifest(manifest.stores[0].toManifestString(), parseOptions)).toString()); - verify(await parseManifest(manifest.toString(), parseOptions)); + (await runtime.parse(manifest.stores[0].toManifestString(), parseOptions)).toString()); + verify(await runtime.parse(manifest.toString(), parseOptions)); }); it('can parse a manifest containing resources', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` resource SomeName start {'foo': 'bar'} @@ -2489,7 +2470,7 @@ resource SomeName assert.deepEqual(manifest.resources['SomeName'], `{'foo': 'bar'}\nhello\n`); }); it('can parse a manifest containing incomplete interfaces', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo interface FullInterface foo: reads Foo @@ -2518,7 +2499,7 @@ resource SomeName assert(manifest.findInterfaceByName('FullInterface')); }); it('can parse a manifest containing interfaces', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo interface Bar foo: reads Foo @@ -2532,7 +2513,7 @@ resource SomeName assert(manifest.recipes[0].normalize()); }); it('can parse a manifest containing a warning', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo value: Text particle Particle @@ -2541,7 +2522,7 @@ resource SomeName assert.lengthOf(manifest.allParticles, 1); }); it('can parse interfaces using new-style body syntax', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo interface Bar foo: reads Foo @@ -2556,9 +2537,9 @@ resource SomeName assert(manifest.recipes[0].normalize()); }); it('can resolve optional handles', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Something - particle Thing in 
'thing.js' + particle Thing in './thing.js' inThing: reads [Something] maybeOutThings: writes? [Something] recipe @@ -2574,10 +2555,10 @@ resource SomeName assert.isTrue(recipe.isResolved()); }; verify(manifest); - verify(await parseManifest(manifest.toString())); + verify(await runtime.parse(manifest.toString())); }); it('can resolve an immediate handle specified by a particle target', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema S interface HostedInterface foo: reads S @@ -2597,7 +2578,7 @@ resource SomeName assert(recipe.isResolved()); }); it('can resolve a particle with an inline schema', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle P foo: reads * {value: Text} recipe @@ -2610,7 +2591,7 @@ resource SomeName assert(recipe.isResolved()); }); it('can resolve a particle with a schema reference', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo far: Text particle P @@ -2634,7 +2615,7 @@ resource SomeName modality dom`); }); it('can resolve a particle with an inline schema reference', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo particle P bar: reads Bar {foo: &Foo {far: Text}} @@ -2657,7 +2638,7 @@ resource SomeName modality dom`); }); it('can resolve a particle with a collection of schema references', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo far: Text particle P @@ -2681,7 +2662,7 @@ resource SomeName modality dom`); }); it('can resolve a particle with a collection of inline schema references', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle P bar: reads Bar {foo: [&Foo {far: Text}]} recipe @@ -2703,7 +2684,7 @@ resource SomeName modality dom`); }); it('can resolve inline schemas against out of line schemas', 
async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema T value: Text particle P @@ -2723,7 +2704,7 @@ resource SomeName assert(validRecipe.isResolved()); }); it('can resolve handle types from inline schemas', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle P foo: reads * {value: Text} particle P2 @@ -2771,7 +2752,7 @@ resource SomeName }); it('can infer field types of inline schemas from external schemas', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Thing value: Text particle P @@ -2792,7 +2773,7 @@ resource SomeName }); it('supports inline schemas with multiple names', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Thing1 value1: Text schema Thing2 @@ -2821,7 +2802,7 @@ resource SomeName FirebaseStorageDriverProvider.register( new Runtime().getCacheService(), mockFirebaseStorageKeyOptions); - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Bar value: Text @@ -2839,12 +2820,11 @@ resource SomeName assert.isTrue(validRecipe.normalize()); assert.isTrue(validRecipe.isResolved()); assert.strictEqual(manifest.stores[0].toManifestString(), - (await parseManifest(manifest.stores[0].toManifestString())).toString()); - DriverFactory.clearRegistrationsForTesting(); + (await runtime.parse(manifest.stores[0].toManifestString())).toString()); }); it('can process a schema alias', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` alias schema This That as SchemaAlias alias schema * extends SchemaAlias as Extended `); @@ -2853,12 +2833,12 @@ resource SomeName }); it('expands schema aliases', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` alias schema Name1 as Thing1 field1: Text alias schema Name2 as Thing2 field2: Text - particle P 
in 'p.js' + particle P in './p.js' param: reads Thing1 Thing2 Name3 {field1: Text, field3: Text} `); const paramSchema = checkNotNull(manifest.findParticleByName('P').inputs[0].type.getEntitySchema()); @@ -2868,12 +2848,12 @@ resource SomeName it('fails when expanding conflicting schema aliases', async () => { try { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` alias schema Name1 as Thing1 field1: Text alias schema Name2 as Thing2 field1: Number - particle P in 'p.js' + particle P in './p.js' param: reads Thing1 Thing2 {} `); assert.fail(); @@ -2884,10 +2864,10 @@ resource SomeName it('fails when inline schema specifies a field type that does not match alias expansion', async () => { try { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` alias schema Name1 as Thing1 field1: Text - particle P in 'p.js' + particle P in './p.js' param: reads Thing1 {field1: Number} `); assert.fail(); @@ -2898,7 +2878,7 @@ resource SomeName it('can relate inline schemas to generic connections', async () => { const data = '{"root": {}, "locations": {}}'; - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Thing value: Text num: Number @@ -2926,7 +2906,7 @@ resource SomeName }); it('can parse a recipe with slot constraints on verbs', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` recipe &verb consumeSlot: consumes @@ -2944,7 +2924,7 @@ resource SomeName }); it('SLANDLES can parse a recipe with slot constraints on verbs', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` recipe provideSlot: \`slot &verb @@ -2965,11 +2945,11 @@ resource SomeName }); it('can parse particle arguments with tags', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Dog schema Sled schema DogSled - particle DogSledMaker in 'thing.js' + particle DogSledMaker in 
'./thing.js' leader: reads Dog #leader team: reads [Dog] sled: reads Sled #dogsled @@ -2994,7 +2974,7 @@ resource SomeName }); it('can parse a particle with tuples', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle P foo: reads [( &Bar {photo: URL}, @@ -3013,32 +2993,32 @@ resource SomeName }); it('can round-trip particles with tags', async () => { - const manifestString = `particle TestParticle in 'a.js' + const manifestString = `particle TestParticle in './a.js' input: reads [Product {}] output: writes [Product {}] modality dom thing: consumes Slot #main #tagname otherThing: provides Slot #testtag`; - const manifest = await parseManifest(manifestString); + const manifest = await runtime.parse(manifestString); assert.lengthOf(manifest.particles, 1); assert.strictEqual(manifestString, manifest.particles[0].toString()); }); it('SLANDLES can round-trip particles with tags', async () => { - const manifestString = `particle TestParticle in 'a.js' + const manifestString = `particle TestParticle in './a.js' input: reads [Product {}] output: writes [Product {}] thing: \`consumes Slot {formFactor:big} #main #tagname otherThing: \`provides Slot {handle:thingy} #testtag modality dom`; - const manifest = await parseManifest(manifestString); + const manifest = await runtime.parse(manifestString); assert.lengthOf(manifest.particles, 1); assert.strictEqual(manifestString, manifest.particles[0].toString()); }); it('SLANDLES can round-trip particles with fields', async () => { - const manifestString = `particle TestParticle in 'a.js' + const manifestString = `particle TestParticle in './a.js' input: reads [Product {}] output: writes [Product {}] thingy: reads ~a @@ -3046,13 +3026,13 @@ resource SomeName otherThing: \`provides Slot {handle:thingy} #testtag modality dom`; - const manifest = await parseManifest(manifestString); + const manifest = await runtime.parse(manifestString); assert.lengthOf(manifest.particles, 1); 
assert.strictEqual(manifestString, manifest.particles[0].toString()); }); it('can parse recipes with an implicit create handle', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A a: writes S {} particle B @@ -3070,7 +3050,7 @@ resource SomeName }); it('can parse recipes with a require section', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle P1 a: writes S {} root: consumes @@ -3097,7 +3077,7 @@ resource SomeName }); it('recipe resolution checks the require sections', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input: reads S {} @@ -3116,7 +3096,7 @@ resource SomeName describe('trust claims and checks', () => { it('supports multiple claim statements', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A output1: writes T {} output2: writes T {} @@ -3138,7 +3118,7 @@ resource SomeName }); it('supports field-level claim statements', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A output1: writes T {aaa: Text, bbb: Number} output2: writes T {ccc: Text, ddd: &Foo {eee: Number}} @@ -3177,7 +3157,7 @@ resource SomeName }); it('supports field-level claim statements involving collections', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A output1: writes [T {aaa: Text, bbb: [Number]}] output2: writes [T {ccc: [&Foo {ddd: [Number]}]}] @@ -3211,19 +3191,19 @@ resource SomeName }); it('rejects invalid fields in field-level claims', async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A output: writes T {foo: Text} claim output.bar is something `), `Schema 'T {foo: Text}' does not contain field 'bar'.`); - await assertThrowsAsync(async () => parseManifest(` + 
await assertThrowsAsync(async () => runtime.parse(` particle A output: writes T {foo: &Bar {bar: Number}} claim output.foo.baz is something `), `Schema 'Bar {bar: Number}' does not contain field 'baz'.`); - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A output: writes [T {foo: [&Bar {bar: Number}]}] claim output.foo.bar.baz is something @@ -3231,7 +3211,7 @@ resource SomeName }); it('supports claim statement with multiple tags', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A output1: writes T {} claim output1 is property1 and is property2 @@ -3247,7 +3227,7 @@ resource SomeName }); it('supports "is not" tag claims', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A output1: writes T {} output2: writes T {} @@ -3265,7 +3245,7 @@ resource SomeName }); it('supports "derives from" claims with multiple parents', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input1: reads T {} input2: reads T {} @@ -3283,7 +3263,7 @@ resource SomeName }); it('supports field-level "derives from" claims', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input: reads T {foo: Text} output: writes T {bar: Text} @@ -3304,7 +3284,7 @@ resource SomeName }); it('rejects invalid fields in field-level "derives from" claims', async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A input: writes T {foo: Text} output: writes T {foo: Text} @@ -3313,7 +3293,7 @@ resource SomeName }); it('supports mixed claims with multiple tags, not tags, and "derives from"', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input1: reads T {} input2: reads T {} @@ -3339,7 +3319,7 @@ 
resource SomeName }); it('supports multiple check statements', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input1: reads T {} input2: reads T {} @@ -3365,7 +3345,7 @@ resource SomeName }); it('supports checks and claims with labels starting with "not"', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input1: reads T {} input2: reads T {} @@ -3398,7 +3378,7 @@ resource SomeName }); it('supports field-level check statements', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input1: reads T {aaa: Text, bbb: Number} input2: reads T {ccc: Text, ddd: &Foo {eee: Number}} @@ -3441,19 +3421,19 @@ resource SomeName }); it('rejects invalid fields in field-level checks', async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A input: reads T {foo: Text} check input.bar is something `), `Schema 'T {foo: Text}' does not contain field 'bar'.`); - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A input: reads T {foo: &Bar {bar: Number}} check input.foo.baz is something `), `Schema 'Bar {bar: Number}' does not contain field 'baz'.`); - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A input: reads [T {foo: [&Bar {bar: Number}]}] check input.foo.bar.baz is something @@ -3461,7 +3441,7 @@ resource SomeName }); it(`supports 'is from store' checks`, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input1: reads T {} input2: reads T {} @@ -3485,7 +3465,7 @@ resource SomeName }); it(`supports 'is from output' checks`, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input1: reads T {} 
input2: reads T {} @@ -3510,7 +3490,7 @@ resource SomeName }); it('supports checks on provided slots', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A root: consumes mySlot: provides @@ -3530,7 +3510,7 @@ resource SomeName }); it(`supports checks with the 'or' operation`, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input: reads T {} check input is property1 or is not property2 or is property3 @@ -3551,7 +3531,7 @@ resource SomeName }); it(`supports checks with the 'and' operation`, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input: reads T {} check input is property1 and is not property2 and is property3 @@ -3572,7 +3552,7 @@ resource SomeName }); it(`supports arbitrary nesting of 'and' and 'or' operations and conditions`, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input: reads T {} check input (is property1 and ((is not property2))) or ((is property2) or is not property3) @@ -3600,7 +3580,7 @@ resource SomeName }); it(`supports checks with the implication operator`, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input: reads T {} check input (is property1 => is property2) @@ -3622,7 +3602,7 @@ resource SomeName }); it(`supports checks with complex nesting of implications`, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input: reads T {} check input ((is property1 and is not property2) => (is property3 => (is not property4 or is property5))) or is property6 @@ -3657,7 +3637,7 @@ resource SomeName }); it('supports field-level checks and claims with type variables', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A input: reads [~a with {foo: 
Number}] output: writes [~a] @@ -3679,7 +3659,7 @@ resource SomeName }); it('parses max type variables into the appropriate data structure', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Foo data: reads ~a with {age: Number, *} @@ -3709,7 +3689,7 @@ resource SomeName }); it('supports field-level checks and claims with resolved type variables', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle OrderIngestion in '.OrderIngestion' data: writes [Product {sku: Text, name: Text, price: Number}] @@ -3759,19 +3739,19 @@ resource SomeName }); it('rejects unknown fields in type variables', async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A input: reads ~a check input.foo is trusted `), `Type variable ~a does not contain field 'foo'`); - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A output: writes ~a claim output.foo is trusted `), `Type variable ~a does not contain field 'foo'`); - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A input: reads ~a output: writes Result {foo: Text} @@ -3780,7 +3760,7 @@ resource SomeName }); it('fails to parse concrete types with inline schema field of `*`', async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle Foo data: reads {*} `), `\ @@ -3788,7 +3768,7 @@ Post-parse processing error caused by line 3. Only type variables may have '*' fields. data: reads {*} ^^^`); - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle Foo data: reads {name: Text, *} `), `\ @@ -3799,7 +3779,7 @@ Only type variables may have '*' fields. 
}); it('warns about using multiple `*` in a single variable constraint', async () => { - const cc = await ConCap.capture(() => parseManifest(` + const cc = await ConCap.capture(() => runtime.parse(` particle Foo data: reads ~a with {*, *} `)); @@ -3810,7 +3790,7 @@ Only type variables may have '*' fields. }); it('supports field-level checks and claims with max type variables', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle SkuRedactor in '.SkuRedactor' input: reads [~a with {sku: Text, *}] output: writes [~a] @@ -3831,7 +3811,7 @@ Only type variables may have '*' fields. it('data stores can make claims', async () => { const data = '{"root": {}, "locations": {}}'; - const manifest = await parseManifest(` + const manifest = await runtime.parse(` store NobId of NobIdStore {nobId: Text} in NobIdJson claim is property1 and is property2 resource NobIdJson @@ -3852,7 +3832,7 @@ Only type variables may have '*' fields. it('data stores can make field-level claims', async () => { const data = '{"root": {}, "locations": {}}'; - const manifest = await parseManifest(` + const manifest = await runtime.parse(` store NobId of NobIdStore {nobId: Text, someRef: [&Foo {foo: [Text]}]} in NobIdJson claim field nobId is property1 and is property2 claim field someRef.foo is property3 @@ -3880,7 +3860,7 @@ Only type variables may have '*' fields. it('rejects invalid fields in field-level claims on stores', async () => { const data = '{"root": {}, "locations": {}}'; - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` store NobId of NobIdStore {nobId: Text} in NobIdJson claim field foo is property1 and is property2 resource NobIdJson @@ -3888,7 +3868,7 @@ Only type variables may have '*' fields. 
${data} `), `Schema 'NobIdStore {nobId: Text}' does not contain field 'foo'.`); - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` store NobId of NobIdStore {nobId: Text, someRef: [&Foo {foo: [Text]}]} in NobIdJson claim field someRef.bar is property1 and is property2 resource NobIdJson @@ -3898,7 +3878,7 @@ Only type variables may have '*' fields. }); it(`doesn't allow mixing 'and' and 'or' operations without nesting`, async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A input: reads T {} check input is property1 or is property2 and is property3 @@ -3906,7 +3886,7 @@ Only type variables may have '*' fields. }); it('SLANDLES can round-trip particles with checks and claims', async () => { - const manifestString = `particle TestParticle in 'a.js' + const manifestString = `particle TestParticle in './a.js' input1: reads T {} input2: reads T {} input3: reads T {} @@ -3934,11 +3914,11 @@ Only type variables may have '*' fields. check childSlot is not somewhatTrusted modality dom`; - const manifest = await parseManifest(manifestString); + const manifest = await runtime.parse(manifestString); assert.strictEqual(manifest.toString(), manifestString); }); it('can round-trip particles with checks and claims', async () => { - const manifestString = `particle TestParticle in 'a.js' + const manifestString = `particle TestParticle in './a.js' input1: reads T {} input2: reads T {} input3: reads T {} @@ -3966,18 +3946,18 @@ Only type variables may have '*' fields. 
parentSlot: consumes Slot childSlot: provides Slot`; - const manifest = await parseManifest(manifestString); + const manifest = await runtime.parse(manifestString); assert.strictEqual(manifest.toString(), manifestString); }); it('fails for unknown handle names', async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A output: writes T {} claim oops is trusted `), `Can't make a claim on unknown handle oops`); - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A input: reads T {} check oops is trusted @@ -3985,7 +3965,7 @@ Only type variables may have '*' fields. }); it(`doesn't allow claims on inputs`, async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A foo: reads T {} claim foo is trusted @@ -3993,7 +3973,7 @@ Only type variables may have '*' fields. }); it(`doesn't allow checks on outputs`, async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A foo: writes T {} check foo is trusted @@ -4001,7 +3981,7 @@ Only type variables may have '*' fields. }); it(`doesn't allow multiple different claims for the same handle`, async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A foo: writes T {} claim foo is trusted @@ -4010,7 +3990,7 @@ Only type variables may have '*' fields. }); it(`doesn't allow multiple different claims for the same field`, async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A foo: writes T {bar: Text} claim foo.bar is trusted @@ -4019,7 +3999,7 @@ Only type variables may have '*' fields. 
}); it(`doesn't allow multiple different checks for the same handle`, async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A foo: reads T {} check foo is trusted @@ -4028,7 +4008,7 @@ Only type variables may have '*' fields. }); it(`doesn't allow multiple different checks for the same field`, async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A foo: reads T {bar: Text} check foo.bar is trusted @@ -4037,7 +4017,7 @@ Only type variables may have '*' fields. }); it(`doesn't allow checks on consumed slots`, async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A someOtherSlot: consumes mySlot: provides @@ -4046,7 +4026,7 @@ Only type variables may have '*' fields. }); it(`doesn't allow checks on unknown slots`, async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A someOtherSlot: consumes mySlot: provides @@ -4055,7 +4035,7 @@ Only type variables may have '*' fields. }); it(`doesn't allow multiple provided slots with the same name`, async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A firstSlot: consumes mySlot: provides @@ -4065,7 +4045,7 @@ Only type variables may have '*' fields. }); it(`doesn't allow checks on fields in slots`, async () => { - await assertThrowsAsync(async () => parseManifest(` + await assertThrowsAsync(async () => runtime.parse(` particle A someOtherSlot: consumes mySlot: provides @@ -4077,12 +4057,12 @@ Only type variables may have '*' fields. 
describe('all schemas', () => { describe('handles manifests with no schemas', () => { it('handles an empty manifest', async () => { - const emptyManifest = await parseManifest(``); + const emptyManifest = await runtime.parse(``); const emptyResult = emptyManifest.allSchemas; assert.isEmpty(emptyResult); }); it('handles a non-empty manifest', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle A recipe Foo A @@ -4093,7 +4073,7 @@ Only type variables may have '*' fields. }); describe('handles manifests with simple schemas', () => { it('handles a schema with no fields', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo particle Bar recipe Food @@ -4107,7 +4087,7 @@ Only type variables may have '*' fields. }); it('handles a schema with fields', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo a: Text b: Number @@ -4125,7 +4105,7 @@ Only type variables may have '*' fields. assert.deepEqual(result[0].names, ['Foo']); }); it('handles schemas with no fields', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo schema Boo schema Roo @@ -4140,7 +4120,7 @@ Only type variables may have '*' fields. assert.isEmpty(result[2].fields); }); it('handles multiple schemas with fields', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Foo a: Text schema Boo @@ -4181,7 +4161,7 @@ Only type variables may have '*' fields. assert.isDefined(result[3].fields.f); }); it('handles out of order schemas declarations', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Baz t: Text schema Foo @@ -4226,7 +4206,7 @@ Only type variables may have '*' fields. 
'./a': manifestStr, './b': jsonStr, }); - const manifest = await Manifest.load('./a', loader, {memoryProvider}); + const manifest = await runtime.parseFile('./a', {loader}); const result = manifest.allSchemas; assert.lengthOf(result, 1); assert.lengthOf(result[0].names, 1); @@ -4300,7 +4280,7 @@ Only type variables may have '*' fields. './a': manifestStr, './b': jsonStr, }); - const manifest = await Manifest.load('./a', loader, {memoryProvider}); + const manifest = await runtime.parseFile('./a', {loader}); const result = manifest.allSchemas; assert.equal(result.length, 2); @@ -4320,7 +4300,7 @@ Only type variables may have '*' fields. }); }); it('handles out of order schemas declarations', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Baz t: Text schema Foo @@ -4339,7 +4319,7 @@ Only type variables may have '*' fields. }); it('catches unsupported recursive schemas declarations', Flags.withFlags({recursiveSchemasAllowed: false}, async () => { assertThrowsAsync(async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Baz t: Text bar: &Bar @@ -4360,7 +4340,7 @@ Only type variables may have '*' fields. }); })); it('handles recursive schemas declarations', Flags.withFlags({recursiveSchemasAllowed: true}, async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Baz t: Text bar: &Bar @@ -4381,7 +4361,7 @@ Only type variables may have '*' fields. 
})); }); it('warns about using external schemas', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` schema Thing value: Text @@ -4397,7 +4377,7 @@ particle A input: reads [Product {}] modality dom`; - const manifest = await parseManifest(manifestString); + const manifest = await runtime.parse(manifestString); assert.lengthOf(manifest.particles, 1); const particle = manifest.particles[0]; assert.isTrue(particle.external); @@ -4405,14 +4385,14 @@ particle A assert.strictEqual(manifestString, particle.toString()); }); it('parses JVM class path', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` particle Particle in 'com.wow.Particle' `); assert.equal(manifest.particles[0].implFile, 'com.wow.Particle'); }); it('derives JVM class path from namespace', async () => { - const manifest = await parseManifest(` + const manifest = await runtime.parse(` meta namespace: com.wow particle Particle in '.Particle' @@ -4425,7 +4405,7 @@ particle A ]); }); it('derives JVM implFile from namespace across imports', async () => { - const manifest = await Manifest.load('/c.arcs', new Loader(null, { + const staticFiles = { '/a.arcs': ` meta namespace: com.wow @@ -4441,11 +4421,10 @@ particle A namespace: com.abc import './a.arcs' import './b.arcs' - particle Boom in '.Boom' - `, - })); - + ` + }; + const manifest = await runtime.parseFile('/c.arcs', {loader: new Loader(null, staticFiles)}); assert.sameMembers(manifest.allParticles.map(p => p.implFile), [ 'com.abc.Boom', 'com.wow.Wow', @@ -4455,11 +4434,9 @@ particle A }); describe('Manifest storage migration', () => { - let memoryProvider; - beforeEach(() => { memoryProvider = new TestVolatileMemoryProvider(); }); it('inflates into RamDisk', async () => { - const manifest = await Manifest.parse(` + const manifest = await new Runtime().parse(` store NobId of NobIdStore {nobId: Text} in NobIdJson resource NobIdJson start @@ -4472,7 +4449,7 @@ resource 
NobIdJson }, "locations": {} } -`, {memoryProvider}); +`); assert.lengthOf(manifest.stores, 1); const store = manifest.stores[0]; @@ -4484,17 +4461,15 @@ resource NobIdJson }); }); describe('annotations', async () => { - let memoryProvider; const loader = new Loader(null, { '*': '{"root": {}, "locations": {}}' }); + const runtime = new Runtime(); beforeEach(() => { - DriverFactory.clearRegistrationsForTesting(); - memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); + Runtime.resetDrivers(); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('parses annotations', async () => { @@ -4527,7 +4502,8 @@ recipe One recipe Two @goodForAll recipe Three`; - const manifest = await Manifest.parse(manifestStr); + const runtime = new Runtime({loader}); + const manifest = await runtime.parse(manifestStr); assert.equal(Object.keys(manifest.annotations).length, 4); assert.sameMembers(Object.keys(manifest.annotations), ['noParam', 'oneParam', 'multiParam', 'goodForAll']); const noParam = manifest.annotations['noParam']; @@ -4569,7 +4545,7 @@ recipe Three`; `), `annotation not found: 'nonexistent'`); }); it('throws when wrong annotation target', async () => { - await assertThrowsAsync(async () => Manifest.parse(` + await assertThrowsAsync(async () => runtime.parse(` annotation noParam retention: Source targets: [Particle] @@ -4584,24 +4560,24 @@ annotation oneParam(foo: Text) retention: Source doc: 'doc'`; it('throws when wrong annotation param', async () => { - await assertThrowsAsync(async () => Manifest.parse(` + await assertThrowsAsync(async () => runtime.parse(` ${oneParamAnnotation} @oneParam(wrong: 'hello') recipe `), `unexpected annotation param: 'wrong'`); - await assertThrowsAsync(async () => Manifest.parse(` + await assertThrowsAsync(async () => runtime.parse(` ${oneParamAnnotation} @oneParam(foo: 'hello', wrong: 'world') recipe `), `unexpected annotation param: 
'wrong'`); }); it('throws when annotation param value of incorrect type', async () => { - await assertThrowsAsync(async () => Manifest.parse(` + await assertThrowsAsync(async () => runtime.parse(` ${oneParamAnnotation} @oneParam(foo: 5) recipe `), `expected 'Text' for param 'foo', instead got 5`); - await assertThrowsAsync(async () => Manifest.parse(` + await assertThrowsAsync(async () => runtime.parse(` ${oneParamAnnotation} @oneParam(foo: false) recipe @@ -4626,7 +4602,7 @@ recipe ${oneParamAnnotation} @oneParam(foo: 'hello') recipe`; - const manifest = await Manifest.parse(manifestStr); + const manifest = await runtime.parse(manifestStr); const recipe = manifest.recipes[0]; assert.lengthOf(recipe.annotations, 1); assert.equal(recipe.annotations[0].name, 'oneParam'); @@ -4640,7 +4616,7 @@ recipe`; ${oneParamAnnotation} @oneParam recipe`; - const manifest = await Manifest.parse(manifestStr); + const manifest = await runtime.parse(manifestStr); const recipe = manifest.recipes[0]; assert.lengthOf(recipe.annotations, 1); assert.equal(recipe.annotations[0].name, 'oneParam'); @@ -4664,7 +4640,7 @@ particle Fooer foos4: writes [Foo {value: Text}] modality dom `; - const manifest = await Manifest.parse(manifestStr); + const manifest = await runtime.parse(manifestStr); const particle = manifest.particles[0]; assert.lengthOf(particle.handleConnections, 4); assert.lengthOf(particle.getConnectionByName('foos1').annotations, 1); @@ -4678,7 +4654,7 @@ particle Fooer assert.equal(manifest.toString(), manifestStr.trim()); }); it('fails schema annotations with wrong target', async () => { - await assertThrowsAsync(async () => Manifest.parse(` + await assertThrowsAsync(async () => runtime.parse(` annotation foo(bar: Text) targets: [Handle, HandleConnection] retention: Source @@ -4698,7 +4674,7 @@ annotation foo(bar: Text, baz: Number) schema Foo value: Text `; - const manifest = await Manifest.parse(manifestStr); + const manifest = await runtime.parse(manifestStr); const schema = 
manifest.schemas['Foo']; assert.lengthOf(schema.annotations, 1); assert.isUndefined(schema.annotations[0].params['bar']); @@ -4717,9 +4693,9 @@ annotation baz(qux: Number) doc: 'a' @foo(bar: 'hello') @baz(qux: 123) -store Store0 of [Thing {blah: Text}] 'my-things' in 'Things.json' +store Store0 of [Thing {blah: Text}] 'my-things' in './Things.json' `; - const manifest = await Manifest.parse(manifestStr, {loader, memoryProvider}); + const manifest = await runtime.parse(manifestStr, {loader}); const annotations = manifest.stores[0].annotations; assert.lengthOf(annotations, 2); assert.equal(annotations.find(a => a.name === 'foo').params['bar'], 'hello'); @@ -4727,7 +4703,7 @@ store Store0 of [Thing {blah: Text}] 'my-things' in 'Things.json' assert.equal(manifest.toString(), manifestStr.trim()); }); it('parses recipe handle annotations', async () => { - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` annotation hello(world: Text) targets: [Handle, Store] retention: Source @@ -4742,7 +4718,7 @@ recipe assert.lengthOf(quzHandle.annotations, 3); }); it('parses annotation with single param and simple value', async () => { - const connection = (await Manifest.parse(` + const connection = (await runtime.parse(` annotation hello(txt: Text) targets: [Handle, HandleConnection] retention: Source @@ -4758,7 +4734,7 @@ recipe assert.equal(connection.getAnnotation('world').params['txt'], 'bye'); }); it('fails parsing annotation simple value when multiple params', async () => { - await assertThrowsAsync(async () => Manifest.parse(` + await assertThrowsAsync(async () => runtime.parse(` annotation hello(txt1: Text, txt2: Text) targets: [Handle, HandleConnection] retention: Source @@ -4767,7 +4743,7 @@ recipe foo: reads [* {bar: Text}] @hello('hi')`), `annotation 'hello' has unexpected unnamed param 'hi'`); }); it('fails parsing annotation simple value when wrong type', async () => { - await assertThrowsAsync(async () => Manifest.parse(` + await 
assertThrowsAsync(async () => runtime.parse(` annotation hello(txt: Text) targets: [Handle, HandleConnection] retention: Source @@ -4776,21 +4752,21 @@ recipe foo: reads [* {bar: Text}] @hello(5)`), `expected 'Text' for param 'txt', instead got 5`); }); it('fails parsing invalid canonical annotation ttl', async () => { - await assertThrowsAsync(async () => Manifest.parse(` + await assertThrowsAsync(async () => runtime.parse(` recipe foo: create @persistent @ttl('300') `), `Invalid ttl: 300`); - await assertThrowsAsync(async () => Manifest.parse(` + await assertThrowsAsync(async () => runtime.parse(` recipe foo: create @persistent @ttl(300) `), `expected 'Text' for param 'value', instead got 300`); - await assertThrowsAsync(async () => Manifest.parse(` + await assertThrowsAsync(async () => runtime.parse(` recipe foo: create @persistent @ttl('day') `), `Invalid ttl: day`); }); it('parses canonical annotations', async () => { - const manifest = (await Manifest.parse(` + const manifest = (await runtime.parse(` @arcId('myFavoriteArc') recipe foo: create @persistent @ttl('3d') @@ -4834,7 +4810,7 @@ recipe }); it('parses @policy annotation', async () => { - const manifest = (await Manifest.parse(` + const manifest = (await runtime.parse(` policy MyPolicy {} @policy('MyPolicy') @@ -4853,7 +4829,7 @@ recipe }); it('fails when the @policy annotation mentions an unknown policy name', async () => { - const manifest = await ConCap.silence(() => Manifest.parse(` + const manifest = await ConCap.silence(() => runtime.parse(` @policy('ThisPolicyDoesNotExist') recipe foo: create @@ -4867,7 +4843,7 @@ recipe }); it('fails when the @policy annotation is missing its argument', async () => { - assertThrowsAsync(async () => Manifest.parse(` + assertThrowsAsync(async () => runtime.parse(` @policy recipe foo: create @@ -4875,7 +4851,7 @@ recipe }); it('supports importing policies from another file', async () => { - const loader = new Loader(null, { + const staticFiles = { '/policy.arcs': ` 
policy MyPolicy {} `, @@ -4886,15 +4862,16 @@ recipe recipe foo: create `, - }); - const manifest = await Manifest.load('/recipe.arcs', loader); + }; + const runtime = new Runtime({loader: new Loader(null, staticFiles)}); + const manifest = await runtime.parseFile('/recipe.arcs'); const recipe = manifest.recipes[0]; assert.strictEqual(recipe.policy.name, 'MyPolicy'); }); describe('isolated, ingress and egress particles', () => { it('particles are ingress and egress by default', async () => { - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` particle P `); assert.deepEqual(manifest.particles[0].dataflowType, ParticleDataflowType.INGRESS_AND_EGRESS); @@ -4902,7 +4879,7 @@ recipe }); it('egress annotation works', async () => { - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` @egress particle P `); @@ -4911,7 +4888,7 @@ recipe }); it('combination of egress and ingress annotations works', async () => { - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` @egress @ingress particle P @@ -4921,7 +4898,7 @@ recipe }); it('egress annotation with type works', async () => { - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` @egress('MyEgressType') particle P `); @@ -4930,7 +4907,7 @@ recipe }); it('isolated annotation works', async () => { - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` @isolated particle P `); @@ -4939,7 +4916,7 @@ recipe }); it('throws if both isolated and egress annotations are applied', async () => { - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` @egress @isolated particle P @@ -4950,7 +4927,7 @@ recipe }); it('throws if both isolated and ingress annotations are applied', async () => { - const manifest = await Manifest.parse(` + const manifest = await runtime.parse(` @ingress @isolated particle P @@ -5160,7 +5137,7 @@ describe('validateUniqueDefinitions', () 
=> { import './b.arcs' `, }); - return Manifest.load('/c.arcs', loader, {memoryProvider: new TestVolatileMemoryProvider()}); + return new Runtime().parseFile('/c.arcs', {loader}); } it('rejects duplicate particle names', async () => { @@ -5256,7 +5233,7 @@ describe('validateUniqueDefinitions', () => { describe('expressions', () => { it('does not allow mixing implementation and result expressions', async () => { try { - const manifest = await Manifest.parse(` + const manifest = await new Runtime().parse(` particle Converter in 'converter.js' foo: reads Foo {x: Number} bar: writes Bar {y: Number} = @@ -5270,7 +5247,7 @@ A particle with implementation cannot use result expressions. } }); it('saves result expressions on handle connection specs', async () => { - const manifest = await Manifest.parse(` + const manifest = await new Runtime().parse(` particle Converter foo: reads Foo {x: Number} bar: writes Bar {y: Number} = diff --git a/src/runtime/tests/runtime-test.ts b/src/runtime/tests/runtime-test.ts index 9ca3028587b..57a446f36c0 100644 --- a/src/runtime/tests/runtime-test.ts +++ b/src/runtime/tests/runtime-test.ts @@ -20,7 +20,6 @@ import {RamDiskStorageDriverProvider} from '../storage/drivers/ramdisk.js'; import {TestVolatileMemoryProvider} from '../testing/test-volatile-memory-provider.js'; import {ramDiskStorageKeyPrefixForTest, volatileStorageKeyPrefixForTest} from '../testing/handle-for-test.js'; import {Flags} from '../flags.js'; -import {DriverFactory} from '../storage/drivers/driver-factory.js'; import {DirectStorageEndpointManager} from '../storage/direct-storage-endpoint-manager.js'; // tslint:disable-next-line: no-any @@ -47,7 +46,7 @@ function assertManifestsEqual(actual: Manifest, expected: Manifest) { describe('Runtime', () => { afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('gets an arc description for an arc', async () => { @@ -64,29 +63,29 @@ describe('Runtime', () => { const actual = await 
Runtime.getArcDescription(arc); assert.strictEqual(expected, actual); }); - it('parses a Manifest', async () => { - const content = ` - schema Greeting - value: Text + // it('parses a Manifest', async () => { + // const content = ` + // schema Greeting + // value: Text - particle Hello in 'hello.js' - text: writes Greeting {value} + // particle Hello in 'hello.js' + // text: writes Greeting {value} - recipe - handleA: create * - Hello - text: writes handleA`; - const expected = await Manifest.parse(content); - const actual = await Runtime.parseManifest(content); - assertManifestsEqual(actual, expected); - }); - it('loads a Manifest', async () => { - const registry = {}; - const loader = new Loader(); - const expected = await Manifest.load('./src/runtime/tests/artifacts/test.manifest', loader, registry); - const actual = await Runtime.loadManifest('./src/runtime/tests/artifacts/test.manifest', loader, registry); - assertManifestsEqual(actual, expected); - }); + // recipe + // handleA: create * + // Hello + // text: writes handleA`; + // const expected = await Manifest.parse(content); + // const actual = await Runtime.parseManifest(content); + // assertManifestsEqual(actual, expected); + // }); + // it('loads a Manifest', async () => { + // const registry = {}; + // const loader = new Loader(); + // const expected = await Manifest.load('./src/runtime/tests/artifacts/test.manifest', loader, registry); + // const actual = await Runtime.loadManifest('./src/runtime/tests/artifacts/test.manifest', loader, registry); + // assertManifestsEqual(actual, expected); + // }); it('runs arcs', async () => { const runtime = new Runtime(); assert.equal(runtime.arcById.size, 0); @@ -99,9 +98,6 @@ describe('Runtime', () => { assert.hasAllKeys(runtime.arcById, ['test-arc', 'other-test-arc']); }); it('registers and unregisters stores', Flags.withDefaultReferenceMode(async () => { - const memoryProvider = new TestVolatileMemoryProvider(); - 
RamDiskStorageDriverProvider.register(memoryProvider); - const context = await Manifest.parse(``, {memoryProvider}); const loader = new Loader(null, { manifest: ` schema Thing @@ -126,8 +122,8 @@ describe('Runtime', () => { `, '*': 'defineParticle(({Particle}) => class extends Particle {});', }); - const runtime = new Runtime({loader, context, memoryProvider}); - const manifest = await Manifest.load('manifest', loader, {memoryProvider}); + const runtime = new Runtime({loader}); + const manifest = await runtime.parseFile('manifest'); manifest.recipes[0].normalize(); const volatileArc = runtime.runArc('test-arc-1', volatileStorageKeyPrefixForTest()); const ramdiskArc = runtime.runArc('test-arc-2', ramDiskStorageKeyPrefixForTest()); diff --git a/src/runtime/tests/test-environment-test.ts b/src/runtime/tests/test-environment-test.ts index 1cc1dc80981..86ef8a3fb23 100644 --- a/src/runtime/tests/test-environment-test.ts +++ b/src/runtime/tests/test-environment-test.ts @@ -8,7 +8,7 @@ * http://polymer.github.io/PATENTS.txt */ import {registerSystemExceptionHandler, removeSystemExceptionHandler, defaultSystemExceptionHandler} from '../arc-exceptions.js'; -import {DriverFactory} from '../storage/drivers/driver-factory.js'; +import {Runtime} from '../runtime.js'; let exceptions: Error[] = []; @@ -24,5 +24,5 @@ afterEach(function() { // Error function not yet included in mocha typescript declarations... 
this.test['error'](exception); } - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); diff --git a/src/tests/arc-integration-test.ts b/src/tests/arc-integration-test.ts index 6223c8c65cb..6c161b2160c 100644 --- a/src/tests/arc-integration-test.ts +++ b/src/tests/arc-integration-test.ts @@ -15,25 +15,24 @@ import {RamDiskStorageDriverProvider} from '../runtime/storage/drivers/ramdisk.j import {Loader} from '../platform/loader.js'; import {TestVolatileMemoryProvider} from '../runtime/testing/test-volatile-memory-provider.js'; import {storageKeyPrefixForTest} from '../runtime/testing/handle-for-test.js'; -import {DriverFactory} from '../runtime/storage/drivers/driver-factory.js'; describe('Arc integration', () => { afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('copies store tags', async () => { const loader = new Loader(null, { - 'p.js': `defineParticle(({Particle}) => class P extends Particle { + './p.js': `defineParticle(({Particle}) => class P extends Particle { async setHandles(handles) { } });` }); - const memoryProvider = new TestVolatileMemoryProvider(); - const manifest = await Manifest.parse(` + const runtime = new Runtime({loader}); + const manifest = await runtime.parse(` schema Thing name: Text - particle P in 'p.js' + particle P in './p.js' thing: reads writes Thing recipe thingHandle: copy 'mything' @@ -45,9 +44,8 @@ describe('Arc integration', () => { {"name": "mything"} ] store ThingStore of Thing 'mything' #best in ThingResource - `, {memoryProvider}); - const runtime = new Runtime({loader, context: manifest, memoryProvider}); - RamDiskStorageDriverProvider.register(memoryProvider); + `); + runtime.context = manifest; const arc = runtime.newArc('demo', storageKeyPrefixForTest()); assert.lengthOf(arc.stores, 0); diff --git a/src/tests/particles/common-test.ts b/src/tests/particles/common-test.ts index 2fc41cc56db..39ed6931342 100644 --- a/src/tests/particles/common-test.ts 
+++ b/src/tests/particles/common-test.ts @@ -17,11 +17,10 @@ import {StrategyTestHelper} from '../../planning/testing/strategy-test-helper.js import {RamDiskStorageDriverProvider} from '../../runtime/storage/drivers/ramdisk.js'; import {storageKeyPrefixForTest} from '../../runtime/testing/handle-for-test.js'; import {ActiveCollectionEntityStore, handleForActiveStore} from '../../runtime/storage/storage.js'; -import {DriverFactory} from '../../runtime/storage/drivers/driver-factory.js'; describe('common particles test', () => { afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); it('resolves after cloning', async () => { const memoryProvider = new TestVolatileMemoryProvider(); @@ -76,11 +75,8 @@ describe('common particles test', () => { it('copy handle test', async () => { - const loader = new Loader(); - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - const context = await Manifest.load('./src/tests/particles/artifacts/copy-collection-test.recipes', loader, {memoryProvider}); - const runtime = new Runtime({loader, context, memoryProvider}); + const runtime = new Runtime(); + runtime.context = await runtime.parseFile('./src/tests/particles/artifacts/copy-collection-test.recipes'); const arc = runtime.newArc('demo', storageKeyPrefixForTest()); const suggestions = await StrategyTestHelper.planForArc(runtime, arc); diff --git a/src/tests/particles/dataflow-test.ts b/src/tests/particles/dataflow-test.ts index 9b20bb3e753..676bd3a7dd2 100644 --- a/src/tests/particles/dataflow-test.ts +++ b/src/tests/particles/dataflow-test.ts @@ -8,25 +8,18 @@ * http://polymer.github.io/PATENTS.txt */ import glob from 'glob'; -import {Loader} from '../../platform/loader.js'; -import {Manifest} from '../../runtime/manifest.js'; -import {analyseDataflow} from '../../dataflow/analysis/analysis.js'; import {assert} from '../../platform/chai-web.js'; -import 
{RamDiskStorageDriverProvider} from '../../runtime/storage/drivers/ramdisk.js'; -import {TestVolatileMemoryProvider} from '../../runtime/testing/test-volatile-memory-provider.js'; -import {DriverFactory} from '../../runtime/storage/drivers/driver-factory.js'; +import {Runtime} from '../../runtime/runtime.js'; +import {analyseDataflow} from '../../dataflow/analysis/analysis.js'; // Checks that all of the Dataflow example recipes successfully pass dataflow // analysis. describe('Dataflow example recipes', () => { - const loader = new Loader(); + const runtime = new Runtime(); const filenames = glob.sync('particles/Dataflow/*.arcs'); - for (const filename of filenames) { it(`passes dataflow analysis: ${filename}`, async () => { - const memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - const manifest = await Manifest.load(filename, loader, {memoryProvider}); + const manifest = await runtime.parseFile(filename); for (const recipe of manifest.recipes) { recipe.normalize(); const [_graph, result] = analyseDataflow(recipe, manifest); @@ -34,5 +27,5 @@ describe('Dataflow example recipes', () => { } }); } - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); diff --git a/src/tests/particles/particles-test.ts b/src/tests/particles/particles-test.ts index 137966ffd2f..3c443e673d4 100644 --- a/src/tests/particles/particles-test.ts +++ b/src/tests/particles/particles-test.ts @@ -12,24 +12,19 @@ import {Manifest} from '../../runtime/manifest.js'; import glob from 'glob'; import {Loader} from '../../platform/loader.js'; import {assert} from '../../platform/chai-web.js'; -import {RamDiskStorageDriverProvider} from '../../runtime/storage/drivers/ramdisk.js'; -import {TestVolatileMemoryProvider} from '../../runtime/testing/test-volatile-memory-provider.js'; -import {DriverFactory} from '../../runtime/storage/drivers/driver-factory.js'; +import {Runtime} from '../../runtime/runtime.js'; /** Tests that 
all .schema, .recipe(s) and .manifest files in the particles folder compile successfully. */ describe('Particle definitions', () => { - const loader = new Loader(); + const runtime = new Runtime(); const filenames = glob.sync('particles/**/*.arcs'); - let memoryProvider; beforeEach(() => { - DriverFactory.clearRegistrationsForTesting(); - memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); + Runtime.resetDrivers(); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); filenames @@ -39,7 +34,7 @@ describe('Particle definitions', () => { return; } it(`parses successfully: ${filename}`, async () => { - const manifest = await Manifest.load(filename, loader, {memoryProvider}); + const manifest = await runtime.parseFile(filename); for (const particle of manifest.particles) { if (particle.implFile == null) { // It's ok for some particles to not have implementation files (e.g. diff --git a/src/tests/recipe-descriptions-test.ts b/src/tests/recipe-descriptions-test.ts index 608b3a5a27a..d152d616fd5 100644 --- a/src/tests/recipe-descriptions-test.ts +++ b/src/tests/recipe-descriptions-test.ts @@ -25,7 +25,6 @@ describe('recipe descriptions test', () => { // Avoid initialising non-POD variables globally, since they would be constructed even when // these tests are not going to be executed (i.e. another test file uses 'only'). 
let loader; - let memoryProvider; beforeEach(() => { loader = new Loader(null, { 'test.js': `defineParticle(({Particle}) => { @@ -34,12 +33,10 @@ describe('recipe descriptions test', () => { } });` }); - memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); }); afterEach(() => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }); @@ -122,10 +119,11 @@ store BoxesStore of [Box] 'allboxes' in AllBoxes` : ''} } async function generateRecipeDescription(options) { - const context = await Manifest.parse( - options.manifestString || createManifestString(options), - {loader, memoryProvider, fileName: 'foo.js'}); - const runtime = new Runtime({loader, context, memoryProvider}); + const runtime = new Runtime({loader}); + runtime.context = await runtime.parse( + options.manifestString || createManifestString(options), + {fileName: 'foo.js'} + ); const key = (id: ArcId) => new VolatileStorageKey(id, ''); const arc = runtime.newArc('demo', key); @@ -182,7 +180,8 @@ store BoxesStore of [Box] 'allboxes' in AllBoxes` : ''} }); it('fails generating recipe description with duplicate particles', async () => { - const context = await Manifest.parse(` + const runtime = new Runtime({loader}); + runtime.context = await runtime.parse(` schema Foo particle ShowFoo in 'test.js' foo: writes Foo @@ -193,8 +192,7 @@ store BoxesStore of [Box] 'allboxes' in AllBoxes` : ''} ShowFoo foo: writes fooHandle description \`cannot show duplicate \${ShowFoo.foo}\` - `, {loader, fileName: '', memoryProvider}); - const runtime = new Runtime({loader, context, memoryProvider}); + `, {fileName: ''}); const arc = runtime.newArc('demo', storageKeyPrefixForTest()); await StrategyTestHelper.planForArc(runtime, arc).then(() => assert('expected exception for duplicate particles')) @@ -224,7 +222,8 @@ store BoxesStore of [Box] 'allboxes' in AllBoxes` : ''} }); it('generates recipe description with duplicate particles', async () => { - 
const context = await Manifest.parse(` + const runtime = new Runtime({loader}); + runtime.context = await runtime.parse(` schema Foo particle ShowFoo in 'test.js' foo: writes Foo @@ -242,8 +241,7 @@ store BoxesStore of [Box] 'allboxes' in AllBoxes` : ''} foo: writes fooHandle Dummy description \`show \${ShowFoo.foo} with dummy\` - `, {loader, fileName: '', memoryProvider}); - const runtime = new Runtime({loader, context, memoryProvider}); + `, {fileName: ''}); const key = (id: ArcId) => new VolatileStorageKey(id, ''); const arc = runtime.newArc('demo', key); // Plan for arc @@ -262,7 +260,8 @@ store BoxesStore of [Box] 'allboxes' in AllBoxes` : ''} }); it('joins recipe descriptions', async () => { - const context = await Manifest.parse(` + const runtime = new Runtime({loader}); + runtime.context = await runtime.parse(` particle A in 'test.js' particle B in 'test.js' particle C in 'test.js' @@ -276,8 +275,7 @@ store BoxesStore of [Box] 'allboxes' in AllBoxes` : ''} recipe C description \`do C\` - `, {loader, fileName: '', memoryProvider}); - const runtime = new Runtime({loader, context, memoryProvider}); + `, {fileName: ''}); const key = (id: ArcId) => new VolatileStorageKey(id, ''); const arc = runtime.newArc('demo', key); diff --git a/src/tools/allocator-recipe-resolver.ts b/src/tools/allocator-recipe-resolver.ts index f5f3a2e5c9c..38098a6a2a9 100644 --- a/src/tools/allocator-recipe-resolver.ts +++ b/src/tools/allocator-recipe-resolver.ts @@ -20,7 +20,6 @@ import {DatabaseStorageKey} from '../runtime/storage/database-storage-key.js'; import {Handle} from '../runtime/recipe/lib-recipe.js'; import {digest} from '../platform/digest-web.js'; import {VolatileStorageKey} from '../runtime/storage/drivers/volatile.js'; -import {CRDTTypeRecord} from '../crdt/lib-crdt.js'; import {StoreInfo} from '../runtime/storage/store-info.js'; export class AllocatorRecipeResolverError extends Error { diff --git a/src/tools/tests/allocator-recipe-resolver-test.ts 
b/src/tools/tests/allocator-recipe-resolver-test.ts index 471138f4b72..a1276fa7995 100644 --- a/src/tools/tests/allocator-recipe-resolver-test.ts +++ b/src/tools/tests/allocator-recipe-resolver-test.ts @@ -18,7 +18,7 @@ import { } from '../allocator-recipe-resolver.js'; import {assertThrowsAsync} from '../../testing/test-util.js'; import {Flags} from '../../runtime/flags.js'; -import {DriverFactory} from '../../runtime/storage/drivers/driver-factory.js'; +import {Runtime} from '../../runtime/runtime.js'; import {VolatileStorageKey} from '../../runtime/storage/drivers/volatile.js'; import {PersistentDatabaseStorageKey} from '../../runtime/storage/database-storage-key.js'; import {CreatableStorageKey} from '../../runtime/storage/creatable-storage-key.js'; @@ -27,7 +27,7 @@ import {TestVolatileMemoryProvider} from '../../runtime/testing/test-volatile-me const randomSalt = 'random_salt'; describe('allocator recipe resolver', () => { - afterEach(() => DriverFactory.clearRegistrationsForTesting()); + afterEach(() => Runtime.resetDrivers()); it('detects long running arc', async () => { const manifest = (await Manifest.parse(` recipe Zero @@ -370,7 +370,6 @@ describe('allocator recipe resolver', () => { ); }); it('fails to resolve when user maps to a volatile create handle', Flags.withDefaultReferenceMode(async () => { - VolatileStorageKey.register(); const manifest = await Manifest.parse(`\ particle Reader data: reads Thing {name: Text} @@ -645,7 +644,7 @@ describe('allocator recipe resolver', () => { }); }); describe('allocator recipe resolver - ingress restricting', () => { - afterEach(() => DriverFactory.clearRegistrationsForTesting()); + afterEach(() => Runtime.resetDrivers()); const particleSpec = ` particle Writer thing: writes Thing {a: Text, b: Text, c: Text, d: Text, e: Text} @@ -741,7 +740,7 @@ particle ReaderB const recipes = await resolver.resolve(); const writingRecipe = recipes.find(recipe => recipe.name === 'WritingRecipe'); 
assert.equal(writingRecipe.handles[0].type.resolvedType().toString(), expectedSchema); - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); }; it('restricts writer fields by one writer-reader recipe', async () => { diff --git a/src/tools/tests/codegen-unit-test-base.ts b/src/tools/tests/codegen-unit-test-base.ts index 38164c35bfa..7fe53675fc0 100644 --- a/src/tools/tests/codegen-unit-test-base.ts +++ b/src/tools/tests/codegen-unit-test-base.ts @@ -11,7 +11,7 @@ import fs from 'fs'; import {Manifest} from '../../runtime/manifest.js'; import {Flags} from '../../runtime/flags.js'; -import {DriverFactory} from '../../runtime/storage/drivers/driver-factory.js'; +import {Runtime} from '../../runtime/runtime.js'; type Test = { name: string; @@ -95,7 +95,7 @@ export abstract class ManifestCodegenUnitTest extends CodegenUnitTest { */ export async function runCompute(testCase: CodegenUnitTest, test: Test): Promise { Flags.reset(); - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); const result = await testCase.compute(test.input, test.options, test); return Array.isArray(result) ? result : [result]; } diff --git a/src/tools/tests/manifest2proto-test.ts b/src/tools/tests/manifest2proto-test.ts index acae3e23d79..fdb1923f5b3 100644 --- a/src/tools/tests/manifest2proto-test.ts +++ b/src/tools/tests/manifest2proto-test.ts @@ -1518,6 +1518,7 @@ describe('manifest2proto', () => { // This ensures that at least all the constructs used in the .arcs file can be serialized in TS // and deserialized in Kotlin to the extent that they are present in the .textproto file. 
it('encodes the Manifest2ProtoTest manifest', async () => { + const runtime = new Runtime(); assert.deepStrictEqual( await encodeManifestToProto(new Runtime(), 'java/arcs/core/data/testdata/Manifest2ProtoTest.arcs'), fs.readFileSync('java/arcs/core/data/testdata/Manifest2ProtoTest.binarypb'), diff --git a/src/tools/tests/recipe2plan-test.ts b/src/tools/tests/recipe2plan-test.ts index 2842c0a0c38..7db0df1db90 100644 --- a/src/tools/tests/recipe2plan-test.ts +++ b/src/tools/tests/recipe2plan-test.ts @@ -14,12 +14,13 @@ import {Flags} from '../../runtime/flags.js'; import {ManifestProto} from '../manifest-proto.js'; import {Runtime} from '../../runtime/runtime.js'; import {Manifest} from '../../runtime/manifest.js'; -import {DriverFactory} from '../../runtime/storage/drivers/driver-factory.js'; import {assertThrowsAsync} from '../../testing/test-util.js'; const inputManifestPath = 'java/arcs/core/data/testdata/WriterReaderExample.arcs'; const policiesManifestPath = 'java/arcs/core/data/testdata/WriterReaderPoliciesExample.arcs'; -const readManifest = async (manifestPath) => Runtime.parseFile(manifestPath); + +const runtime = Runtime.init('../..'); +const readManifest = async (manifestPath) => runtime.parseFile(manifestPath); describe('recipe2plan', () => { it('generates Kotlin plans from recipes in a manifest', Flags.withDefaultReferenceMode(async () => { @@ -460,7 +461,7 @@ policy PolicyBarBr2Br3 { const assertSuccess = async (recipeStr) => verifyRecipeIngress(recipeStr, true); const assertFailure = async (recipeStr) => verifyRecipeIngress(recipeStr, false); const verifyRecipeIngress = async (recipeStr: string, expectedSuccess: boolean) => { - DriverFactory.clearRegistrationsForTesting(); + Runtime.resetDrivers(); const recipesManifest = await Manifest.parse(` ${manifestMetaAndParticleSpecs} ${recipeStr} From 16a0961c8b1eca7efbc675f942771d03684f5391 Mon Sep 17 00:00:00 2001 From: "Scott J. 
Miles" Date: Thu, 19 Nov 2020 19:42:17 -0800 Subject: [PATCH 09/31] cleanups --- .../pipes-shell/node/deploy/source/paths.js | 2 +- shells/pipes-shell/node/paths.js | 2 +- shells/pipes-shell/source/pipe.js | 11 ++++--- .../surface/deploy/source/paths.js | 2 +- shells/pipes-shell/web/deploy/source/paths.js | 2 +- shells/pipes-shell/web/paths.js | 4 +-- shells/tools/broker-shell/index.js | 2 +- shells/tools/diagnostic/user-context/index.js | 4 +-- shells/tools/planner-shell/planner-shell.js | 4 +-- shells/tools/single-shell/index.html | 2 +- shells/tools/smoke-shell/smoke.html | 2 +- shells/tools/smoke-shell/smoke.js | 2 +- shells/web-shell/elements/web-shell.js | 4 +-- src/dataflow/cli/flowcheck.ts | 2 +- src/planning/planner.ts | 30 +++++++++++-------- src/platform/pec-industry-web.ts | 2 +- src/runtime/env.ts | 2 -- src/runtime/runtime.ts | 30 ++++++++++++------- src/tools/paths.oss.ts | 2 +- src/tools/recipe2plan-cli.ts | 2 +- src/tools/schema2base.ts | 2 +- src/tools/tests/recipe2plan-test.ts | 2 +- 22 files changed, 64 insertions(+), 53 deletions(-) diff --git a/shells/pipes-shell/node/deploy/source/paths.js b/shells/pipes-shell/node/deploy/source/paths.js index a234394d3c0..79b2c90383e 100644 --- a/shells/pipes-shell/node/deploy/source/paths.js +++ b/shells/pipes-shell/node/deploy/source/paths.js @@ -10,7 +10,7 @@ export const paths = { root: '.', map: { - 'https://$build/': `./` + 'https://$worker/': `./` } }; diff --git a/shells/pipes-shell/node/paths.js b/shells/pipes-shell/node/paths.js index 7b431ffefa3..89dc8c45512 100644 --- a/shells/pipes-shell/node/paths.js +++ b/shells/pipes-shell/node/paths.js @@ -12,7 +12,7 @@ export const paths = { map: { 'https://$arcs/': `../../../`, 'https://$particles/': `../../../particles/`, - 'https://$build/': `../../lib/worker/dist/` + 'https://$worker/': `../../lib/worker/dist/` } }; diff --git a/shells/pipes-shell/source/pipe.js b/shells/pipes-shell/source/pipe.js index 4767ef365f6..dc3267f20f6 100644 --- 
a/shells/pipes-shell/source/pipe.js +++ b/shells/pipes-shell/source/pipe.js @@ -44,14 +44,13 @@ export const busReady = async (bus, {manifest}) => { const configureRuntime = async ({rootPath, urlMap, storage, manifest}, bus) => { // configure arcs runtime environment - runtime = Runtime.init(rootPath, urlMap); - // marshal and bind context - const context = await requireContext(manifest); - runtime.bindContext(context); + runtime = new Runtime({rootPath, urlMap}); + // marshal context + runtime.context = await requireContext(manifest); // attach verb-handlers to dispatcher - populateDispatcher(dispatcher, storage, context); + populateDispatcher(dispatcher, storage, runtime.context); // send pipe identifiers to client - contextReady(bus, context); + contextReady(bus, runtime.context); }; const requireContext = async manifest => { diff --git a/shells/pipes-shell/surface/deploy/source/paths.js b/shells/pipes-shell/surface/deploy/source/paths.js index fd11411b6ae..1ddd19ea4b7 100644 --- a/shells/pipes-shell/surface/deploy/source/paths.js +++ b/shells/pipes-shell/surface/deploy/source/paths.js @@ -10,6 +10,6 @@ export const paths = { root: '.', map: { - 'https://$build/': `./` + 'https://$worker/': `./` } }; diff --git a/shells/pipes-shell/web/deploy/source/paths.js b/shells/pipes-shell/web/deploy/source/paths.js index fd11411b6ae..1ddd19ea4b7 100644 --- a/shells/pipes-shell/web/deploy/source/paths.js +++ b/shells/pipes-shell/web/deploy/source/paths.js @@ -10,6 +10,6 @@ export const paths = { root: '.', map: { - 'https://$build/': `./` + 'https://$worker/': `./` } }; diff --git a/shells/pipes-shell/web/paths.js b/shells/pipes-shell/web/paths.js index 4a6dfeff588..1fd296e6e4c 100644 --- a/shells/pipes-shell/web/paths.js +++ b/shells/pipes-shell/web/paths.js @@ -8,11 +8,11 @@ * http://polymer.github.io/PATENTS.txt */ export const paths = { - root: '.', + //root: '.', map: { 'https://$arcs/': `../../../`, 'https://$shells/': `../../`, - 'https://$build/': 
`../../lib/worker/dist/`, + 'https://$worker/': `../../lib/worker/dist/`, 'https://$particles/': { root: `../../../`, path: `/particles/`, diff --git a/shells/tools/broker-shell/index.js b/shells/tools/broker-shell/index.js index c27b1ea8875..d2cfd4ebe2e 100644 --- a/shells/tools/broker-shell/index.js +++ b/shells/tools/broker-shell/index.js @@ -15,7 +15,7 @@ const config = { urlMap: { 'https://$arcs/': `../../../`, 'https://$particles/': `../../../particles/`, - 'https://$build/': `../../lib/build/` + 'https://$worker/': `../../lib/build/` }, storage: 'volatile://', manifest: ` diff --git a/shells/tools/diagnostic/user-context/index.js b/shells/tools/diagnostic/user-context/index.js index 29ab1921ba2..6eb4a64d05d 100644 --- a/shells/tools/diagnostic/user-context/index.js +++ b/shells/tools/diagnostic/user-context/index.js @@ -19,7 +19,7 @@ import '../../../modalities/dom/components/arc-tools/store-explorer.js'; const storage = `firebase://arcs-storage.firebaseio.com/AIzaSyBme42moeI-2k8WgXh-6YK_wYyjEXo4Oz8/0_7_0/sjmiles`; // configure arcs environment -Runtime.init('../../../'); +const runtime = new Runtime({rootPath: '../../..'}); let context; let UserObserverImpl; @@ -27,7 +27,7 @@ let UserObserverImpl; const observe = async () => { // prepare context if (!context) { - context = await Runtime.parse(''); + context = await runtime.parse(''); // const ArcHandleListenerImpl = ArcHandleDisplayMixin(ArcHandleListener); // diff --git a/shells/tools/planner-shell/planner-shell.js b/shells/tools/planner-shell/planner-shell.js index acfd4de32fc..6d739f85ef9 100644 --- a/shells/tools/planner-shell/planner-shell.js +++ b/shells/tools/planner-shell/planner-shell.js @@ -46,11 +46,11 @@ export class PlannerShellInterface { // connect to DevTools if running with --explore await maybeConnectToDevTools(); // create an arcs environment - Runtime.init(assetsPath); + const runtime = new Runtime({rootPath: assetsPath}); // observe user's arc list const userArcs = new UserArcs(storage, 
userid); // base context (particles & recipes) from static manifest - const context = await Runtime.parse(contextManifest); + const context = await runtime.parse(contextManifest); // userContext continually updates context based on user's arcs const userContext = new UserContext(); // wait for context to spin up diff --git a/shells/tools/single-shell/index.html b/shells/tools/single-shell/index.html index 36d31df145c..b6b5a6e8d70 100644 --- a/shells/tools/single-shell/index.html +++ b/shells/tools/single-shell/index.html @@ -42,7 +42,7 @@ const getUrlParam = name => { return new URL(document.location.href).searchParams.get(name); } - const runtime = Runtime.init('../..'); + const runtime = new Runtime({rootPath: '../..'}); const composer = new DomSlotComposer({ rootContainer: document.body diff --git a/shells/tools/smoke-shell/smoke.html b/shells/tools/smoke-shell/smoke.html index 7e9c94a424d..93ed0f706a1 100644 --- a/shells/tools/smoke-shell/smoke.html +++ b/shells/tools/smoke-shell/smoke.html @@ -24,7 +24,7 @@ // read intent const manifest = getUrlParam('manifest') || getUrlParam('m') || `Arcs/Login.recipe`; // configure arcs environment - Runtime.init('../..'); + const runtime = new Runtime({rootPath: '../..'}); // construct renderer const composer = new SlotComposer(); // establish surface diff --git a/shells/tools/smoke-shell/smoke.js b/shells/tools/smoke-shell/smoke.js index 187767c7efc..3f7cc127702 100644 --- a/shells/tools/smoke-shell/smoke.js +++ b/shells/tools/smoke-shell/smoke.js @@ -18,7 +18,7 @@ console.log('\n--- Arcs Shell ---\n'); (async () => { try { // configure arcs environment - Runtime.init('../..'); + const runtime = new Runtime({rootPath: '../..'}); // create a composer const composer = new SlotComposer(); await App(composer, `Arcs/Login.recipe`); diff --git a/shells/web-shell/elements/web-shell.js b/shells/web-shell/elements/web-shell.js index f9f0aa5147c..9e8de1442e9 100644 --- a/shells/web-shell/elements/web-shell.js +++ 
b/shells/web-shell/elements/web-shell.js @@ -171,11 +171,11 @@ export class WebShell extends Xen.Debug(Xen.Async, log) { }; return [props, state, renderModel]; } - async configureEnv(root) { + async configureEnv(rootPath) { // capture anchor-clicks for SPA behavior linkJack(document, anchor => this.routeLink(anchor)); // configure arcs environment - return Runtime.init(root); + const runtime = new Runtime({rootPath}); } routeLink(anchor) { const url = new URL(anchor.href, document.location); diff --git a/src/dataflow/cli/flowcheck.ts b/src/dataflow/cli/flowcheck.ts index eb6c2a4563b..36cb12ebe6f 100644 --- a/src/dataflow/cli/flowcheck.ts +++ b/src/dataflow/cli/flowcheck.ts @@ -15,7 +15,7 @@ import {Runtime} from '../../runtime/runtime.js'; // TODO make this a function and test it; it's big enough now (async () => { - const runtime = Runtime.init('../../..'); + const runtime = new Runtime({rootPath: '../../..'}); const filenames = process.argv.slice(2); if (filenames.length === 0) { console.error('Usage: flowcheck '); diff --git a/src/planning/planner.ts b/src/planning/planner.ts index a54191af784..94c58a56ddc 100644 --- a/src/planning/planner.ts +++ b/src/planning/planner.ts @@ -61,8 +61,7 @@ export interface Generation { // TODO: suggestionByHash is runtime dependent, but is used in static methods, forcing the global. // Instead this function and its static dependents should be methods. -let staticRuntime = new Runtime(); -const suggestionByHash = () => staticRuntime.getCacheService().getOrCreateCache('suggestionByHash'); +let lastRuntime: Runtime; export interface PlannerInitOptions { strategies?: StrategyDerived[]; @@ -92,9 +91,9 @@ export class Planner implements InspectablePlanner { this.speculator = speculator; this.inspector = inspectorFactory ? 
inspectorFactory.create(this) : null; this.noSpecEx = noSpecEx; - // TODO(sjmiles): remove static methods that depend on runtime + // TODO(sjmiles): remove static method `clearCache` that actually depends on runtime if (runtime) { - staticRuntime = runtime; + lastRuntime = runtime; } } @@ -104,8 +103,19 @@ export class Planner implements InspectablePlanner { return new Strategizer(strategyImpls, [], ruleset); } - suggestionByHash() { - return this.runtime.getCacheService().getOrCreateCache('suggestionByHash'); + // TODO(sjmiles): problematic as caches are now per-runtime. Stopgap: use the last runtime any Planner has seen. + static clearCache() { + if (lastRuntime) { + Planner.getRuntimeCache(lastRuntime).clear(); + } + } + + getCache() { + return Planner.getRuntimeCache(this.runtime); + } + + static getRuntimeCache(runtime: Runtime) { + return runtime.getCacheService().getOrCreateCache('suggestionByHash'); } // Specify a timeout value less than zero to disable timeouts. @@ -265,12 +275,8 @@ export class Planner implements InspectablePlanner { console.log(JSON.stringify(dump, null, ' ')); } - static clearCache() { - suggestionByHash().clear(); - } - private async retrieveOrCreateSuggestion(hash: string, plan: Recipe, arc: Arc) : Promise { - const cachedSuggestion = suggestionByHash().get(hash); + const cachedSuggestion = this.getCache().get(hash); if (cachedSuggestion && cachedSuggestion.isUpToDate(arc, plan)) { return cachedSuggestion; } @@ -294,7 +300,7 @@ export class Planner implements InspectablePlanner { } const suggestion = Suggestion.create(plan, hash, relevance); suggestion.setDescription(description, this.arc.modality); - suggestionByHash().set(hash, suggestion); + this.getCache().set(hash, suggestion); return suggestion; } diff --git a/src/platform/pec-industry-web.ts b/src/platform/pec-industry-web.ts index 8cfc38569c2..27889d5f34b 100644 --- a/src/platform/pec-industry-web.ts +++ b/src/platform/pec-industry-web.ts @@ -12,7 +12,7 @@ import {PecFactory} 
from '../runtime/particle-execution-context.js'; import {Id, IdGenerator} from '../runtime/id.js'; import {workerPool} from '../runtime/worker-pool.js'; -const WORKER_PATH = `https://$build/worker.js`; +const WORKER_PATH = `https://$worker/worker.js`; export const pecIndustry = (loader): PecFactory => { // worker paths are relative to worker location, remap urls from there to here diff --git a/src/runtime/env.ts b/src/runtime/env.ts index 2e31ef97f0c..5f8a68d2c94 100644 --- a/src/runtime/env.ts +++ b/src/runtime/env.ts @@ -24,8 +24,6 @@ export class Env { return { // important: path to `worker.js` 'https://$worker/': `${root}/shells/lib/worker/dist/`, - // TODO(sjmiles): for backward compat - 'https://$build/': `${root}/shells/lib/worker/dist/`, // these are optional (?) 'https://$arcs/': `${root}/`, 'https://$shells': `${root}/shells`, diff --git a/src/runtime/runtime.ts b/src/runtime/runtime.ts index 2b9f8ff521f..bc87a7e2699 100644 --- a/src/runtime/runtime.ts +++ b/src/runtime/runtime.ts @@ -40,11 +40,13 @@ const {warn} = logsFactory('Runtime', 'orange'); export type RuntimeOptions = Readonly<{ loader?: Loader; - composerClass?: typeof SlotComposer; - context?: Manifest; pecFactory?: PecFactory; memoryProvider?: VolatileMemoryProvider; storageManager?: StorageEndpointManager, + composerClass?: typeof SlotComposer; + context?: Manifest; + rootPath?: string, + urlMap?: {} }>; export type RuntimeArcOptions = Readonly<{ @@ -70,6 +72,8 @@ const initDrivers = () => { initDrivers(); +const nob = Object.create(null); + @SystemTrace export class Runtime { public context: Manifest; @@ -119,8 +123,6 @@ export class Runtime { return { // important: path to `worker.js` 'https://$worker/': `${root}/shells/lib/worker/dist/`, - // TODO(sjmiles): for backward compat - 'https://$build/': `${root}/shells/lib/worker/dist/`, // these are optional (?) 
'https://$arcs/': `${root}/`, 'https://$shells': `${root}/shells`, @@ -133,14 +135,17 @@ export class Runtime { }; } - constructor({loader, composerClass, context, pecFactory, memoryProvider, storageManager}: RuntimeOptions = {}) { + constructor(opts: RuntimeOptions = {}) { + const rootMap = opts.rootPath && Runtime.mapFromRootPath(opts.rootPath) || nob; + const urlMap = opts.urlMap || nob; + const map = {...rootMap, ...urlMap}; + this.loader = opts.loader || new Loader(map); + this.pecFactory = opts.pecFactory || pecIndustry(this.loader); + this.composerClass = opts.composerClass || SlotComposer; this.cacheService = new RuntimeCacheService(); - this.loader = loader || new Loader(); - this.pecFactory = pecFactory || pecIndustry(loader); - this.composerClass = composerClass || SlotComposer; - this.context = context || new Manifest({id: 'manifest:default'}); - this.memoryProvider = memoryProvider || staticMemoryProvider; // || new SimpleVolatileMemoryProvider(); - this.storageManager = storageManager || new DirectStorageEndpointManager(); + this.memoryProvider = opts.memoryProvider || staticMemoryProvider; + this.storageManager = opts.storageManager || new DirectStorageEndpointManager(); + this.context = opts.context || new Manifest({id: 'manifest:default'}); // user information. One persona per runtime for now. } @@ -260,6 +265,9 @@ export class Runtime { return this.parse(content, {id: path, fileName: path, ...options}); } + // TODO(sjmiles): static methods represent boilerplate. + // There's no essential reason they are part of Runtime. 
+ static async resolveRecipe(arc: Arc, recipe: Recipe): Promise { if (this.normalize(recipe)) { if (recipe.isResolved()) { diff --git a/src/tools/paths.oss.ts b/src/tools/paths.oss.ts index 8b85c9ea191..18a9ed2c78e 100644 --- a/src/tools/paths.oss.ts +++ b/src/tools/paths.oss.ts @@ -7,7 +7,7 @@ * subject to an additional IP rights grant found at * http://polymer.github.io/PATENTS.txt */ -// Path mappings to add to `Runtime.init(.)` +// Path mappings to add to `new Runtime({urlMap})` // // This constant is overridden in Google internal repo to allow absolute imports // in manifest files. diff --git a/src/tools/recipe2plan-cli.ts b/src/tools/recipe2plan-cli.ts index f65a452139e..24a8d467a7d 100644 --- a/src/tools/recipe2plan-cli.ts +++ b/src/tools/recipe2plan-cli.ts @@ -70,7 +70,7 @@ const outFormat = (() => { void Flags.withDefaultReferenceMode(async () => { try { - const runtime = Runtime.init('../..', PATHS); + const runtime = new Runtime({rootPath: '../..', urlMap: PATHS}); fs.mkdirSync(opts.outdir, {recursive: true}); const manifest = await runtime.parseFile(opts._[0]); diff --git a/src/tools/schema2base.ts b/src/tools/schema2base.ts index 5b6334a97c2..221296ca016 100755 --- a/src/tools/schema2base.ts +++ b/src/tools/schema2base.ts @@ -16,7 +16,7 @@ import {SchemaGraph, SchemaNode} from './schema2graph.js'; import {ParticleSpec} from '../runtime/arcs-types/particle-spec.js'; import {PATHS} from './paths.oss.js'; -const runtime = Runtime.init('../..', PATHS); +const runtime = new Runtime({rootPath: '../..', urlMap: PATHS}); export interface EntityGenerator { generate(): string; diff --git a/src/tools/tests/recipe2plan-test.ts b/src/tools/tests/recipe2plan-test.ts index 7db0df1db90..75a8ab9846f 100644 --- a/src/tools/tests/recipe2plan-test.ts +++ b/src/tools/tests/recipe2plan-test.ts @@ -19,7 +19,7 @@ import {assertThrowsAsync} from '../../testing/test-util.js'; const inputManifestPath = 'java/arcs/core/data/testdata/WriterReaderExample.arcs'; const 
policiesManifestPath = 'java/arcs/core/data/testdata/WriterReaderPoliciesExample.arcs'; -const runtime = Runtime.init('../..'); +const runtime = new Runtime({rootPath: '../..'}); const readManifest = async (manifestPath) => runtime.parseFile(manifestPath); describe('recipe2plan', () => { From ab675d4711bbe27706a508fad0e9ee52866363c4 Mon Sep 17 00:00:00 2001 From: "Scott J. Miles" Date: Thu, 19 Nov 2020 22:05:56 -0800 Subject: [PATCH 10/31] cleanups --- shells/dev-shell/index.js | 9 +- .../ts/runtime/hotreload-integration-test.ts | 8 +- src/runtime/runtime.ts | 105 ++++++++++-------- 3 files changed, 63 insertions(+), 59 deletions(-) diff --git a/shells/dev-shell/index.js b/shells/dev-shell/index.js index 00fd8125480..ed6f97a38ca 100644 --- a/shells/dev-shell/index.js +++ b/shells/dev-shell/index.js @@ -99,7 +99,8 @@ async function wrappedExecute() { document.dispatchEvent(new Event('clear-arcs-explorer')); outputPane.reset(); // establish a runtime using custom parameters - const runtime = await createRuntime(); + const runtime = new Runtime({rootPath: root, staticMap: filePane.getFileMap()}); + runtime.loader.flushCaches(); // attempt to parse the context manifest try { runtime.context = await runtime.parse(filePane.getManifest(), {fileName: './manifest', throwImportErrors: true}); @@ -118,12 +119,6 @@ async function wrappedExecute() { } } -async function createRuntime(context) { - const runtime = Runtime.create({root, staticMap: filePane.getFileMap(), context}); - runtime.loader.flushCaches(); - return runtime; -} - async function createRecipeArc(recipe, runtime, index) { // ask runtime to assemble arc parameter boilerplate (argument is the arc name) const params = runtime.buildArcParams(`arc${index}`); diff --git a/shells/tests/arcs/ts/runtime/hotreload-integration-test.ts b/shells/tests/arcs/ts/runtime/hotreload-integration-test.ts index 1869c272be5..7a9ed4cf0ce 100644 --- a/shells/tests/arcs/ts/runtime/hotreload-integration-test.ts +++ 
b/shells/tests/arcs/ts/runtime/hotreload-integration-test.ts @@ -17,7 +17,7 @@ import '../../../../lib/arcs-ui/dist/install-ui-classes.js'; describe('Hot Code Reload for JS Particle', async () => { it('updates model and template', async () =>{ const context = await Manifest.parse(` - particle A in 'A.js' + particle A in './A.js' root: consumes Slot recipe @@ -25,7 +25,7 @@ describe('Hot Code Reload for JS Particle', async () => { A root: consumes slot0`); const loader = new Loader(null, { - 'A.js': `defineParticle(({UiParticle}) => { + './A.js': `defineParticle(({UiParticle}) => { return class extends UiParticle { get template() { return 'Hello {{name}}, old age: {{age}}'; } @@ -35,8 +35,8 @@ describe('Hot Code Reload for JS Particle', async () => { }; });` }); - const runtime = new Runtime({loader, context}); + const runtime = new Runtime({loader, context}); const arc = runtime.newArc('HotReload'); const [recipe] = arc.context.recipes; @@ -49,7 +49,7 @@ describe('Hot Code Reload for JS Particle', async () => { //assert.deepStrictEqual(slotConsumer.getRendering().model, {name: 'Jack', age: '10'}); //assert.deepStrictEqual(slotConsumer._content.template, `Hello {{name}}, old age: {{age}}`); - loader.staticMap['A.js'] = `defineParticle(({UiParticle}) => { + loader.staticMap['./A.js'] = `defineParticle(({UiParticle}) => { return class extends UiParticle { get template() { return 'Hello {{name}}, new age: {{age}}'; } diff --git a/src/runtime/runtime.ts b/src/runtime/runtime.ts index bc87a7e2699..e7ad4fe72cd 100644 --- a/src/runtime/runtime.ts +++ b/src/runtime/runtime.ts @@ -46,7 +46,8 @@ export type RuntimeOptions = Readonly<{ composerClass?: typeof SlotComposer; context?: Manifest; rootPath?: string, - urlMap?: {} + urlMap?: {}, + staticMap?: {} }>; export type RuntimeArcOptions = Readonly<{ @@ -61,9 +62,8 @@ export type RuntimeArcOptions = Readonly<{ modality?: Modality; }>; -let staticMemoryProvider; - // TODO(sjmiles): weird layering here due to dancing around 
global state +let staticMemoryProvider; const initDrivers = () => { VolatileStorageKey.register(); staticMemoryProvider = new SimpleVolatileMemoryProvider(); @@ -76,14 +76,9 @@ const nob = Object.create(null); @SystemTrace export class Runtime { - public context: Manifest; - public readonly pecFactory: PecFactory; - public readonly loader: Loader | null; - private cacheService: RuntimeCacheService; - private composerClass: typeof SlotComposer | null; - private memoryProvider: VolatileMemoryProvider; - readonly storageManager: StorageEndpointManager; - readonly arcById = new Map(); + // TODO(sjmiles): static methods represent boilerplate. + // There's no essential reason they are part of Runtime. + // Consider! static resetDrivers(noDefault?: true) { DriverFactory.providers = new Set(); @@ -135,11 +130,59 @@ export class Runtime { }; } + /** + * Given an arc, returns it's description as a string. + */ + static async getArcDescription(arc: Arc) : Promise { + // Verify that it's one of my arcs, and make this non-static, once I have + // Runtime objects in the calling code. 
+ return (await Description.create(arc)).getArcDescription(); + } + + static async resolveRecipe(arc: Arc, recipe: Recipe): Promise { + if (this.normalize(recipe)) { + if (recipe.isResolved()) { + return recipe; + } + const resolver = new RecipeResolver(arc); + const plan = await resolver.resolve(recipe); + if (plan && plan.isResolved()) { + return plan; + } + warn('failed to resolve:\n', (plan || recipe).toString({showUnresolved: true})); + } + return null; + } + + static normalize(recipe: Recipe): boolean { + if (Runtime.isNormalized(recipe)) { + return true; + } + const errors = new Map(); + if (recipe.normalize({errors})) { + return true; + } + warn('failed to normalize:\n', errors, recipe.toString()); + return false; + } + + static isNormalized(recipe: Recipe): boolean { + return Object.isFrozen(recipe); + } + + public context: Manifest; + public readonly pecFactory: PecFactory; + public readonly loader: Loader | null; + private cacheService: RuntimeCacheService; + private composerClass: typeof SlotComposer | null; + private memoryProvider: VolatileMemoryProvider; + readonly storageManager: StorageEndpointManager; + readonly arcById = new Map(); + constructor(opts: RuntimeOptions = {}) { + const customMap = opts.urlMap || nob; const rootMap = opts.rootPath && Runtime.mapFromRootPath(opts.rootPath) || nob; - const urlMap = opts.urlMap || nob; - const map = {...rootMap, ...urlMap}; - this.loader = opts.loader || new Loader(map); + this.loader = opts.loader || new Loader({...rootMap, ...customMap}, opts.staticMap); this.pecFactory = opts.pecFactory || pecIndustry(this.loader); this.composerClass = opts.composerClass || SlotComposer; this.cacheService = new RuntimeCacheService(); @@ -264,38 +307,4 @@ export class Runtime { const content = await this.loader.loadResource(path); return this.parse(content, {id: path, fileName: path, ...options}); } - - // TODO(sjmiles): static methods represent boilerplate. - // There's no essential reason they are part of Runtime. 
- - static async resolveRecipe(arc: Arc, recipe: Recipe): Promise { - if (this.normalize(recipe)) { - if (recipe.isResolved()) { - return recipe; - } - const resolver = new RecipeResolver(arc); - const plan = await resolver.resolve(recipe); - if (plan && plan.isResolved()) { - return plan; - } - warn('failed to resolve:\n', (plan || recipe).toString({showUnresolved: true})); - } - return null; - } - - static normalize(recipe: Recipe): boolean { - if (Runtime.isNormalized(recipe)) { - return true; - } - const errors = new Map(); - if (recipe.normalize({errors})) { - return true; - } - warn('failed to normalize:\n', errors, recipe.toString()); - return false; - } - - static isNormalized(recipe: Recipe): boolean { - return Object.isFrozen(recipe); - } } From 328ac92ae52265da52968c7bb86b5efea64bd6e5 Mon Sep 17 00:00:00 2001 From: "Scott J. Miles" Date: Wed, 25 Nov 2020 23:31:20 -0800 Subject: [PATCH 11/31] serious degloballing --- shells/tests/arcs/ts/runtime/arc-test.ts | 61 ++++---- .../runtime/multiplexer-integration-test.ts | 2 - .../arcs/ts/runtime/plan-consumer-test.ts | 8 -- shells/tests/arcs/ts/runtime/products-test.ts | 5 - .../arcs/ts/runtime/slot-composer-test.ts | 5 - .../ts/runtime/transformation-slots-test.ts | 4 - src/planning/plan/tests/plan-consumer-test.ts | 12 -- src/planning/plan/tests/plan-producer-test.ts | 7 - src/planning/plan/tests/planificator-test.ts | 25 ++-- .../plan/tests/planning-result-test.ts | 19 +-- .../plan/tests/test-environment-test.ts | 1 - src/planning/recipe-index.ts | 3 +- .../strategies/tests/coalesce-recipes-test.ts | 7 - .../tests/search-tokens-to-handles-test.ts | 4 - src/planning/tests/planner-test.ts | 7 - src/runtime/arc.ts | 58 +++++--- src/runtime/capabilities-resolver.ts | 58 ++++---- src/runtime/manifest.ts | 3 +- src/runtime/particle-execution-context.ts | 13 +- src/runtime/recipe/tests/recipe-test.ts | 4 - src/runtime/runtime.ts | 91 ++++++------ src/runtime/storage/database-storage-key.ts | 7 +- 
.../direct-storage-endpoint-manager.ts | 8 +- src/runtime/storage/direct-store-muxer.ts | 3 +- src/runtime/storage/drivers/driver-factory.ts | 39 ++--- src/runtime/storage/drivers/firebase.ts | 18 ++- src/runtime/storage/drivers/ramdisk.ts | 13 +- src/runtime/storage/drivers/volatile.ts | 20 +-- src/runtime/storage/storage-key-parser.ts | 37 +++-- src/runtime/storage/testing/mock-firebase.ts | 15 +- .../storage/tests/active-store-test.ts | 41 +++--- .../storage/tests/direct-store-muxer-test.ts | 4 +- .../tests/firebase-store-integration-test.ts | 17 +-- ...isk-direct-store-muxer-integration-test.ts | 4 - .../tests/ramdisk-store-integration-test.ts | 4 - .../reference-mode-store-integration-test.ts | 14 -- .../tests/reference-mode-store-test.ts | 32 ++--- src/runtime/storage/tests/storage-key-test.ts | 6 +- .../storage/tests/store-sequence-test.ts | 19 +-- src/runtime/tests/arc-test.ts | 136 +++++++++--------- .../tests/capabilities-resolver-test.ts | 34 ++--- src/runtime/tests/description-test.ts | 7 +- src/runtime/tests/manifest-test.ts | 21 +-- src/runtime/tests/particle-api-test.ts | 2 +- .../tests/particle-interface-loading-test.ts | 15 +- src/runtime/tests/runtime-test.ts | 10 +- src/runtime/tests/test-environment-test.ts | 1 - src/tests/arc-integration-test.ts | 4 - src/tests/particles/common-test.ts | 3 - src/tests/particles/dataflow-test.ts | 1 - src/tests/particles/particles-test.ts | 8 -- src/tests/recipe-descriptions-test.ts | 9 -- src/tools/allocator-recipe-resolver.ts | 8 +- .../tests/allocator-recipe-resolver-test.ts | 3 - src/tools/tests/codegen-unit-test-base.ts | 1 - src/tools/tests/recipe2plan-test.ts | 1 - src/wasm/tests/wasm-api-test.ts | 117 ++++++++------- 57 files changed, 475 insertions(+), 604 deletions(-) diff --git a/shells/tests/arcs/ts/runtime/arc-test.ts b/shells/tests/arcs/ts/runtime/arc-test.ts index 22693ffc721..32b4e00250c 100644 --- a/shells/tests/arcs/ts/runtime/arc-test.ts +++ b/shells/tests/arcs/ts/runtime/arc-test.ts @@ -24,13 
+24,9 @@ import {handleForStoreInfo, CollectionEntityType} from '../../../../../build/run import '../../../../lib/arcs-ui/dist/install-ui-classes.js'; describe('Arc', () => { - afterEach(() => { - Runtime.resetDrivers(); - }); - it('deserializing a serialized arc with a Transformation produces that arc', async () => { - const loader = new Loader(); - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + runtime.context = await runtime.parse(` import 'src/runtime/tests/artifacts/Common/Multiplexer.manifest' import 'src/runtime/tests/artifacts/test-particles.manifest' @@ -42,17 +38,15 @@ describe('Arc', () => { annotation: consumes slot0 list: reads handle0 - `, {loader, fileName: ''}); + `); - const recipe = manifest.recipes[0]; - const slotComposer = new SlotComposer(); - const id = Id.fromString('test2'); - const storageKey = new VolatileStorageKey(id, ''); - const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({id, storageKey, context: manifest, slotComposer, loader: new Loader(), storageManager}); + const params = runtime.buildArcParams('test2'); + const arc = new Arc(params); - const barType = manifest.findTypeByName('Bar') as EntityType; + const barType = runtime.context.findTypeByName('Bar') as EntityType; let store = await arc.createStore(barType.collectionOf(), undefined, 'test:1'); + + const recipe = runtime.context.recipes[0]; recipe.handles[0].mapToStorage(store); assert(recipe.normalize()); @@ -64,7 +58,8 @@ describe('Arc', () => { const serialization = await arc.serialize(); arc.dispose(); - const newArc = await Arc.deserialize({serialization, loader, slotComposer, fileName: './manifest.manifest', context: manifest, storageManager}); + const {loader, context, slotComposer, storageManager, driverFactory} = params; + const newArc = await Arc.deserialize({serialization, loader, slotComposer, fileName: './manifest.manifest', context, storageManager, driverFactory}); await newArc.idle; store = 
newArc.findStoreById(store.id) as StoreInfo; const handle = await handleForStoreInfo(store, newArc); @@ -123,7 +118,6 @@ describe('Arc', () => { current = next; } - const slotComposer = new SlotComposer(); const loader = new Loader(null, { ...sources, 'Z.js': `defineParticle(({UiParticle}) => { @@ -132,7 +126,8 @@ describe('Arc', () => { }; });`, }); - const context = await Manifest.parse(` + const runtime = new Runtime({loader}); + runtime.context = await runtime.parse(` particle A in 'A.js' root: consumes Slot @@ -141,9 +136,8 @@ describe('Arc', () => { A root: consumes root `); - const id = IdGenerator.newSession().newArcId('arcid'); - const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({id, loader, slotComposer, context, storageManager}); + const opts = runtime.buildArcParams('arcid'); + const arc = new Arc(opts); const [recipe] = arc.context.recipes; recipe.normalize(); @@ -151,10 +145,9 @@ describe('Arc', () => { }); it('handles serialization/deserialization of empty arcs handles', async () => { - const id = ArcId.newForTest('test'); - const loader = new Loader(); - - const manifest = await Manifest.parse(` + //const id = ArcId.newForTest('test'); + const runtime = new Runtime(); + runtime.context = await runtime.parse(` schema FavoriteFood food: Text @@ -167,20 +160,20 @@ describe('Arc', () => { foods: create #favoriteFoods FavoriteFoodPicker foods: foods - `, {loader, fileName: process.cwd() + '/input.manifest'}); + `); - const storageKey = new VolatileStorageKey(id, ''); - const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({id, storageKey, loader: new Loader(), context: manifest, storageManager}); + const opts = runtime.buildArcParams('test'); + //opts.id = id; + const arc = new Arc(opts); assert.isNotNull(arc); - const favoriteFoodClass = Entity.createEntityClass(manifest.findSchemaByName('FavoriteFood'), null); + const favoriteFoodClass = 
Entity.createEntityClass(runtime.context.findSchemaByName('FavoriteFood'), null); assert.isNotNull(favoriteFoodClass); - const recipe = manifest.recipes[0]; + const recipe = runtime.context.recipes[0]; assert.isNotNull(recipe); - const favoriteFoodType = manifest.findTypeByName('FavoriteFood'); + const favoriteFoodType = runtime.context.findTypeByName('FavoriteFood'); assert.isNotNull(favoriteFoodType, 'FavoriteFood type is found'); const options = {errors: new Map()}; @@ -190,10 +183,8 @@ describe('Arc', () => { await arc.instantiate(recipe); const serialization = await arc.serialize(); - - const slotComposer = new SlotComposer(); - - const newArc = await Arc.deserialize({serialization, loader, slotComposer, context: manifest, fileName: 'foo.manifest', storageManager}); + const {loader, slotComposer, context, storageManager, driverFactory} = opts; + const newArc = await Arc.deserialize({serialization, loader, slotComposer, context, fileName: 'foo.manifest', storageManager, driverFactory}); assert.strictEqual(newArc.stores.length, 1); assert.strictEqual(newArc.activeRecipe.toString(), `@active\n${arc.activeRecipe.toString()}`); assert.strictEqual(newArc.id.idTreeAsString(), 'test'); diff --git a/shells/tests/arcs/ts/runtime/multiplexer-integration-test.ts b/shells/tests/arcs/ts/runtime/multiplexer-integration-test.ts index 66b37b8405e..68d9db28371 100644 --- a/shells/tests/arcs/ts/runtime/multiplexer-integration-test.ts +++ b/shells/tests/arcs/ts/runtime/multiplexer-integration-test.ts @@ -114,8 +114,6 @@ describe('Multiplexer', () => { const entity = Entity.identify(entityClass, '4', null); await postsHandle2.add(entity); await arc.idle; - - Runtime.resetDrivers(); }); // TODO(sjmiles): probably should be in particles/tests/* because of Multiplexer.js diff --git a/shells/tests/arcs/ts/runtime/plan-consumer-test.ts b/shells/tests/arcs/ts/runtime/plan-consumer-test.ts index a6e4cb8355f..246a43c85fa 100644 --- a/shells/tests/arcs/ts/runtime/plan-consumer-test.ts +++ 
b/shells/tests/arcs/ts/runtime/plan-consumer-test.ts @@ -33,14 +33,6 @@ async function storeResults(consumer: PlanConsumer, suggestions: Suggestion[]) { } describe('planFOOB consumer', () => { - beforeEach(() => { - Runtime.resetDrivers(); - }); - - afterEach(() => { - Runtime.resetDrivers(); - }); - it('consumes', async () => { const manifestText = ` import './shells/tests/artifacts/Products/Products.recipes' diff --git a/shells/tests/arcs/ts/runtime/products-test.ts b/shells/tests/arcs/ts/runtime/products-test.ts index 66867b00353..921e91ef1dd 100644 --- a/shells/tests/arcs/ts/runtime/products-test.ts +++ b/shells/tests/arcs/ts/runtime/products-test.ts @@ -18,11 +18,6 @@ import {StoreInfo} from '../../../../../build/runtime/storage/store-info.js'; import '../../../../lib/arcs-ui/dist/install-ui-classes.js'; describe('products test', () => { - - afterEach(() => { - Runtime.resetDrivers(); - }); - const manifestFilename = './shells/tests/artifacts/ProductsTestNg.arcs'; const verifyFilteredBook = async (arc: Arc) => { diff --git a/shells/tests/arcs/ts/runtime/slot-composer-test.ts b/shells/tests/arcs/ts/runtime/slot-composer-test.ts index 675ea4c6267..9ded4517195 100644 --- a/shells/tests/arcs/ts/runtime/slot-composer-test.ts +++ b/shells/tests/arcs/ts/runtime/slot-composer-test.ts @@ -58,11 +58,6 @@ async function initSlotComposer(recipeStr) { } describe('slot composer', () => { - - afterEach(() => { - Runtime.resetDrivers(); - }); - it('initialize recipe and render slots', async () => { const manifestStr = ` particle A in 'a.js' diff --git a/shells/tests/arcs/ts/runtime/transformation-slots-test.ts b/shells/tests/arcs/ts/runtime/transformation-slots-test.ts index d003760fd44..8467b03e768 100644 --- a/shells/tests/arcs/ts/runtime/transformation-slots-test.ts +++ b/shells/tests/arcs/ts/runtime/transformation-slots-test.ts @@ -16,10 +16,6 @@ import {StrategyTestHelper} from '../../../../../build/planning/testing/strategy import 
'../../../../lib/arcs-ui/dist/install-ui-classes.js'; describe('transformation slots', () => { - afterEach(() => { - Runtime.resetDrivers(); - }); - it('combines hosted particles provided singleton slots into transformation provided set slot', async () => { const runtime = new Runtime(); runtime.context = await runtime.parseFile('./shells/tests/artifacts/provide-hosted-particle-slots.manifest'); diff --git a/src/planning/plan/tests/plan-consumer-test.ts b/src/planning/plan/tests/plan-consumer-test.ts index 85aab2d3a72..8bc7c3fa39d 100644 --- a/src/planning/plan/tests/plan-consumer-test.ts +++ b/src/planning/plan/tests/plan-consumer-test.ts @@ -35,15 +35,6 @@ async function storeResults(consumer: PlanConsumer, suggestions: Suggestion[]) { } describe('plan consumer', () => { - - beforeEach(() => { - Runtime.resetDrivers(); - }); - - afterEach(() => { - Runtime.resetDrivers(); - }); - it('filters suggestions by modality', async () => { const initConsumer = async (modality) => { const addRecipe = (particles) => { @@ -97,17 +88,14 @@ ${addRecipe(['ParticleTouch', 'ParticleBoth'])} assert.lengthOf(domSuggestions, 2); assert.deepEqual(domSuggestions.map(s => s.plan.particles.map(p => p.name)), [['ParticleDom'], ['ParticleDom', 'ParticleBoth']]); - Runtime.resetDrivers(); const consumerVr = await initConsumer(Modality.vr); assert.isEmpty(consumerVr.getCurrentSuggestions()); - Runtime.resetDrivers(); const consumerTouch = await initConsumer(Modality.domTouch); const touchSuggestions = consumerTouch.getCurrentSuggestions(); assert.lengthOf(touchSuggestions, 2); assert.deepEqual(touchSuggestions.map(s => s.plan.particles.map(p => p.name)), [['ParticleTouch'], ['ParticleTouch', 'ParticleBoth']]); - Runtime.resetDrivers(); }); }); diff --git a/src/planning/plan/tests/plan-producer-test.ts b/src/planning/plan/tests/plan-producer-test.ts index 3fc39bd95ac..bce5c47ac2a 100644 --- a/src/planning/plan/tests/plan-producer-test.ts +++ b/src/planning/plan/tests/plan-producer-test.ts @@ 
-79,13 +79,6 @@ class TestPlanProducer extends PlanProducer { // Run test suite for each storageKeyBase describe('plan producer', () => { - beforeEach(() => { - Runtime.resetDrivers(); - }); - - afterEach(() => { - Runtime.resetDrivers(); - }); async function createProducer() { const runtime = new Runtime(); diff --git a/src/planning/plan/tests/planificator-test.ts b/src/planning/plan/tests/planificator-test.ts index 055187b2d2b..bfffbde2831 100644 --- a/src/planning/plan/tests/planificator-test.ts +++ b/src/planning/plan/tests/planificator-test.ts @@ -18,7 +18,6 @@ import {TestVolatileMemoryProvider} from '../../../runtime/testing/test-volatile import {Planificator} from '../../plan/planificator.js'; import {PlanningResult} from '../../plan/planning-result.js'; import {floatingPromiseToAudit} from '../../../utils/lib-utils.js'; -import {DriverFactory} from '../../../runtime/storage/drivers/driver-factory.js'; import {storageKeyPrefixForTest, storageKeyForTest} from '../../../runtime/testing/handle-for-test.js'; import {MockFirebaseStorageKey} from '../../../runtime/storage/testing/mock-firebase.js'; import {DirectStorageEndpointManager} from '../../../runtime/storage/direct-storage-endpoint-manager.js'; @@ -48,29 +47,22 @@ describe('planificator', () => { describe.skip('remote planificator', () => { // TODO: support arc storage key be in PouchDB as well. let arcStorageKey; + let runtime; - let memoryProvider; + //let memoryProvider; beforeEach(() => { - Runtime.resetDrivers(); arcStorageKey = storageKeyPrefixForTest(); - memoryProvider = new TestVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(memoryProvider); - }); - - afterEach(() => { - Runtime.resetDrivers(); + runtime = new Runtime(); }); async function createArc(options, storageKey) { const {manifestString, manifestFilename} = options; - const loader = new Loader(); - const context = manifestString - ? 
await Manifest.parse(manifestString, {loader, fileName: '', memoryProvider}) - : await Manifest.load(manifestFilename, loader, {memoryProvider}); - const storageManager = new DirectStorageEndpointManager(); - const runtime = new Runtime({loader, context, memoryProvider, storageManager}); + runtime.context = manifestString + ? await runtime.parse(manifestString) + : await runtime.parseFile(manifestFilename); return runtime.newArc('demo', storageKey); } + async function createConsumePlanificator(manifestFilename) { const arc = await createArc({manifestFilename}, arcStorageKey); const storageKeyBase = storageKeyForTest(arc.id); @@ -105,7 +97,8 @@ describe.skip('remote planificator', () => { fileName: '', pecFactories: undefined, context: consumePlanificator.arc.context, - storageManager + storageManager, + driverFactory: runtime.driverFactory }); // producePlanificator = new Planificator( diff --git a/src/planning/plan/tests/planning-result-test.ts b/src/planning/plan/tests/planning-result-test.ts index 8079e777570..8a1cd3cb4fd 100644 --- a/src/planning/plan/tests/planning-result-test.ts +++ b/src/planning/plan/tests/planning-result-test.ts @@ -22,20 +22,12 @@ import {StrategyTestHelper} from '../../testing/strategy-test-helper.js'; import {VolatileStorageDriverProvider} from '../../../runtime/storage/drivers/volatile.js'; describe('planning result', () => { - let memoryProvider; - beforeEach(() => { - Runtime.resetDrivers(); - }); - afterEach(() => { - Runtime.resetDrivers(); - }); - it('serializes and deserializes Products recipes', async () => { const runtime = new Runtime(); runtime.context = await runtime.parseFile('./src/runtime/tests/artifacts/Products/Products.recipes'); const arc = runtime.newArc('demo', storageKeyPrefixForTest()); - VolatileStorageDriverProvider.register(arc); + VolatileStorageDriverProvider.register(runtime, arc); const suggestions = await StrategyTestHelper.planForArc(runtime, arc); assert.isNotEmpty(suggestions); @@ -94,15 +86,6 @@ 
describe('planning result', () => { }); describe('planning result merge', () => { - let memoryProvider; - beforeEach(() => { - Runtime.resetDrivers(); - }); - - afterEach(() => { - Runtime.resetDrivers(); - }); - const commonManifestStr = ` schema Thing foo: Text diff --git a/src/planning/plan/tests/test-environment-test.ts b/src/planning/plan/tests/test-environment-test.ts index 04efd83b5af..29291555e6e 100644 --- a/src/planning/plan/tests/test-environment-test.ts +++ b/src/planning/plan/tests/test-environment-test.ts @@ -24,5 +24,4 @@ afterEach(function() { // Error function not yet included in mocha typescript declarations... this.test['error'](exception); } - Runtime.resetDrivers(); }); diff --git a/src/planning/recipe-index.ts b/src/planning/recipe-index.ts index d0a0da86918..462c2a03bdd 100644 --- a/src/planning/recipe-index.ts +++ b/src/planning/recipe-index.ts @@ -99,7 +99,8 @@ export class RecipeIndex { loader: arc.loader, slotComposer: new SlotComposer({noRoot: true}), stub: true, - storageManager: arc.storageManager + storageManager: arc.storageManager, + driverFactory: arc.driverFactory }); const strategizer = new Strategizer( [ diff --git a/src/planning/strategies/tests/coalesce-recipes-test.ts b/src/planning/strategies/tests/coalesce-recipes-test.ts index fd5a726ace0..9aeacecc027 100644 --- a/src/planning/strategies/tests/coalesce-recipes-test.ts +++ b/src/planning/strategies/tests/coalesce-recipes-test.ts @@ -14,13 +14,6 @@ import {StrategyTestHelper} from '../../testing/strategy-test-helper.js'; import {Runtime} from '../../../runtime/runtime.js'; describe('CoalesceRecipes', () => { - let memoryProvider; - beforeEach(() => { - }); - afterEach(() => { - Runtime.resetDrivers(); - }); - async function tryCoalesceRecipes(manifestStr: string) { const runtime = new Runtime(); const manifest = await runtime.parse(manifestStr); diff --git a/src/planning/strategies/tests/search-tokens-to-handles-test.ts 
b/src/planning/strategies/tests/search-tokens-to-handles-test.ts index f108c13ac9a..6f4599a31dc 100644 --- a/src/planning/strategies/tests/search-tokens-to-handles-test.ts +++ b/src/planning/strategies/tests/search-tokens-to-handles-test.ts @@ -20,10 +20,6 @@ describe('SearchTokensToHandles', () => { beforeEach(() => { runtime = new Runtime(); }); - afterEach(() => { - Runtime.resetDrivers(); - }); - it('finds local handle by tags', async () => { const manifest = await runtime.parse(` schema Thing diff --git a/src/planning/tests/planner-test.ts b/src/planning/tests/planner-test.ts index 57833f5e74e..4916f844eff 100644 --- a/src/planning/tests/planner-test.ts +++ b/src/planning/tests/planner-test.ts @@ -38,7 +38,6 @@ async function planFromManifest(manifest, {arcFactory, testSteps}: {arcFactory?, planner.init(arc, options); const result = await testSteps(planner); - Runtime.resetDrivers(); return result; } @@ -565,12 +564,6 @@ ${recipeManifest} }); describe('Type variable resolution', () => { - beforeEach(() => { - }); - afterEach(() => { - Runtime.resetDrivers(); - }); - const loadAndPlan = async manifestStr => { const runtime = new Runtime({loader: new NullLoader()}); const manifest = await runtime.parse(manifestStr); diff --git a/src/runtime/arc.ts b/src/runtime/arc.ts index 7cc83a0e491..590b270e128 100644 --- a/src/runtime/arc.ts +++ b/src/runtime/arc.ts @@ -14,7 +14,8 @@ import {FakePecFactory} from './fake-pec-factory.js'; import {Id, IdGenerator} from './id.js'; import {Loader} from '../platform/loader.js'; import {Capabilities} from './capabilities.js'; -import {CapabilitiesResolver} from './capabilities-resolver.js'; +import {_CapabilitiesResolver} from './capabilities-resolver.js'; +import {StorageKeyParser} from './storage/storage-key-parser.js'; import {Dictionary, Runnable, compareComparables, Mutex} from '../utils/lib-utils.js'; import {Manifest} from './manifest.js'; import {MessagePort} from './message-channel.js'; @@ -26,20 +27,17 @@ import 
{SlotComposer} from './slot-composer.js'; import {CollectionType, EntityType, InterfaceInfo, InterfaceType, TupleType, ReferenceType, SingletonType, Type, TypeVariable} from '../types/lib-types.js'; import {PecFactory} from './particle-execution-context.js'; -import {VolatileMemory, VolatileStorageDriverProvider, VolatileStorageKey} from './storage/drivers/volatile.js'; -import {DriverFactory} from './storage/drivers/driver-factory.js'; -import {Exists} from './storage/drivers/driver.js'; import {StorageKey} from './storage/storage-key.js'; import {ArcSerializer, ArcInterface} from './arc-serializer.js'; import {ReferenceModeStorageKey} from './storage/reference-mode-storage-key.js'; import {SystemTrace} from '../tracelib/systrace.js'; -import {StorageKeyParser} from './storage/storage-key-parser.js'; import {SingletonInterfaceHandle, handleForStoreInfo, TypeToCRDTTypeRecord} from './storage/storage.js'; import {StoreInfo} from './storage/store-info.js'; -import {CRDTTypeRecord} from '../crdt/lib-crdt.js'; import {ActiveStore} from './storage/active-store.js'; -import {ProxyCallback, ProxyMessageType} from './storage/store-interface.js'; import {StorageEndpointManager} from './storage/storage-manager.js'; +import {Exists} from './storage/drivers/driver.js'; +import {DriverFactory} from './storage/drivers/driver-factory.js'; +import {VolatileMemory, VolatileStorageDriverProvider, VolatileStorageKey} from './storage/drivers/volatile.js'; export type ArcOptions = Readonly<{ id: Id; @@ -54,25 +52,28 @@ export type ArcOptions = Readonly<{ stub?: boolean; inspectorFactory?: ArcInspectorFactory; ports?: MessagePort[]; - capabilitiesResolver?: CapabilitiesResolver; + capabilitiesResolver?: _CapabilitiesResolver; modality?: Modality; + driverFactory: DriverFactory; }>; type DeserializeArcOptions = Readonly<{ serialization: string; + fileName: string; + context: Manifest; storageManager: StorageEndpointManager; pecFactories?: PecFactory[]; slotComposer?: SlotComposer; 
loader: Loader; - fileName: string; - context: Manifest; inspectorFactory?: ArcInspectorFactory; + driverFactory: DriverFactory; }>; @SystemTrace export class Arc implements ArcInterface { private readonly _context: Manifest; private readonly pecFactories: PecFactory[]; + //private readonly driverFactory: DriverFactory; public readonly isSpeculative: boolean; public readonly isInnerArc: boolean; public readonly isStub: boolean; @@ -85,7 +86,7 @@ export class Arc implements ArcInterface { // storage keys for referenced handles private storeInfoById: Dictionary> = {}; public readonly storageKey?: StorageKey; - private readonly capabilitiesResolver?: CapabilitiesResolver; + private readonly capabilitiesResolver?: _CapabilitiesResolver; // Map from each store ID to a set of tags. public for debug access public readonly storeTagsById: Dictionary> = {}; // Map from each store to its description (originating in the manifest). @@ -102,22 +103,22 @@ export class Arc implements ArcInterface { readonly peh: ParticleExecutionHost; public readonly storageManager: StorageEndpointManager; + public driverFactory: DriverFactory; // Volatile storage local to this Arc instance. readonly volatileMemory = new VolatileMemory(); private readonly volatileStorageDriverProvider: VolatileStorageDriverProvider; - constructor({id, context, storageManager, pecFactories, slotComposer, loader, storageKey, speculative, innerArc, stub, capabilitiesResolver, inspectorFactory, modality} : ArcOptions) { + constructor({id, context, storageManager, pecFactories, slotComposer, loader, storageKey, speculative, innerArc, stub, capabilitiesResolver, inspectorFactory, modality, driverFactory} : ArcOptions) { this._context = context; this.modality = modality; + this.driverFactory = driverFactory; // TODO: pecFactories should not be optional. update all callers and fix here. this.pecFactories = pecFactories && pecFactories.length > 0 ? 
pecFactories.slice() : [FakePecFactory(loader).bind(null)]; - // TODO(sjmiles): currently some UiBrokers need to recover arc from composer in order to forward events if (slotComposer && !slotComposer['arc']) { slotComposer['arc'] = this; } - this.id = id; this.isSpeculative = !!speculative; // undefined => false this.isInnerArc = !!innerArc; // undefined => false @@ -128,10 +129,10 @@ export class Arc implements ArcInterface { this.storageKey = storageKey; const ports = this.pecFactories.map(f => f(this.generateID(), this.idGenerator)); this.peh = new ParticleExecutionHost({slotComposer, arc: this, ports}); - this.volatileStorageDriverProvider = new VolatileStorageDriverProvider(this); - DriverFactory.register(this.volatileStorageDriverProvider); this.capabilitiesResolver = capabilitiesResolver; this.storageManager = storageManager; + this.volatileStorageDriverProvider = new VolatileStorageDriverProvider(this); + driverFactory.register(this.volatileStorageDriverProvider); } get loader(): Loader { @@ -171,7 +172,7 @@ export class Arc implements ArcInterface { this.peh.slotComposer.dispose(); } - DriverFactory.unregister(this.volatileStorageDriverProvider); + //this.driverFactory.unregister(this.volatileStorageDriverProvider); } // Returns a promise that spins sending a single `AwaitIdle` message until it @@ -232,7 +233,18 @@ export class Arc implements ArcInterface { createInnerArc(transformationParticle: Particle): Arc { const id = this.generateID('inner'); - const innerArc = new Arc({id, storageManager: this.storageManager, pecFactories: this.pecFactories, slotComposer: this.peh.slotComposer, loader: this._loader, context: this.context, innerArc: true, speculative: this.isSpeculative, inspectorFactory: this.inspectorFactory}); + const innerArc = new Arc({ + id, + storageManager: this.storageManager, + pecFactories: this.pecFactories, + slotComposer: this.peh.slotComposer, + loader: this._loader, + context: this.context, + innerArc: true, + speculative: 
this.isSpeculative, + inspectorFactory: this.inspectorFactory, + driverFactory: this.driverFactory + }); let particleInnerArcs = this.innerArcsByParticle.get(transformationParticle); if (!particleInnerArcs) { @@ -255,11 +267,11 @@ export class Arc implements ArcInterface { throw new Error('persistSerialization unimplemented, pending synthetic type support in new storage stack'); } - static async deserialize({serialization, pecFactories, slotComposer, loader, fileName, context, inspectorFactory, storageManager}: DeserializeArcOptions): Promise { + static async deserialize({serialization, pecFactories, slotComposer, loader, fileName, context, inspectorFactory, storageManager, driverFactory}: DeserializeArcOptions): Promise { const manifest = await Manifest.parse(serialization, {loader, fileName, context}); const id = Id.fromString(manifest.meta.name); - const storageKey = StorageKeyParser.parse(manifest.meta.storageKey); - const arc = new Arc({id, storageKey, slotComposer, pecFactories, loader, context, inspectorFactory, storageManager}); + const storageKey = StorageKeyParser['parse'](manifest.meta.storageKey); + const arc = new Arc({id, storageKey, slotComposer, pecFactories, loader, context, inspectorFactory, storageManager, driverFactory}); await Promise.all(manifest.stores.map(async storeStub => { const tags = [...manifest.storeTagsById[storeStub.id]]; @@ -366,7 +378,9 @@ export class Arc implements ArcInterface { speculative: true, innerArc: this.isInnerArc, inspectorFactory: this.inspectorFactory, - storageManager: this.storageManager}); + storageManager: this.storageManager, + driverFactory: this.driverFactory + }); const storeMap: Map, StoreInfo> = new Map(); for (const storeInfo of this.stores) { // TODO(alicej): Should we be able to clone a StoreMux as well? 
diff --git a/src/runtime/capabilities-resolver.ts b/src/runtime/capabilities-resolver.ts index 6b48e23e401..4407d9013cc 100644 --- a/src/runtime/capabilities-resolver.ts +++ b/src/runtime/capabilities-resolver.ts @@ -12,37 +12,39 @@ import {assert} from '../platform/assert-web.js'; import {Dictionary} from '../utils/lib-utils.js'; import {StorageKey} from './storage/storage-key.js'; import {Type} from '../types/lib-types.js'; +import {ArcId} from './id.js'; +import {Flags} from './flags.js'; import {Capabilities} from './capabilities.js'; import {ReferenceModeStorageKey} from './storage/reference-mode-storage-key.js'; -import {Flags} from './flags.js'; -import {StorageKeyFactory, FactorySelector, ContainerStorageKeyOptions, BackingStorageKeyOptions, +import {StorageKeyFactory, + FactorySelector, + ContainerStorageKeyOptions, + BackingStorageKeyOptions, SimpleCapabilitiesSelector} from './storage-key-factory.js'; -import {ArcId} from './id.js'; export type CapabilitiesResolverOptions = Readonly<{ arcId: ArcId; }>; -export class CapabilitiesResolver { - private static defaultStorageKeyFactories: Dictionary = {}; - private static readonly defaultSelector = new SimpleCapabilitiesSelector(); - +export class _CapabilitiesResolver { + private defaultStorageKeyFactories: Dictionary = {}; + private readonly defaultSelector = new SimpleCapabilitiesSelector(); private readonly factories: Dictionary = {}; - constructor(public readonly options: CapabilitiesResolverOptions & { + constructor(public readonly options?: CapabilitiesResolverOptions & { factories?: StorageKeyFactory[], selector? 
: FactorySelector}) { - for (const factory of (options.factories || [])) { + for (const factory of (options?.factories || [])) { assert(!this.factories[factory.protocol], `Duplicated factory for '${factory.protocol}'.`); this.factories[factory.protocol] = factory; } - for (const factory of Object.values(CapabilitiesResolver.defaultStorageKeyFactories)) { + for (const factory of Object.values(this.defaultStorageKeyFactories)) { if (!this.factories[factory.protocol]) { this.factories[factory.protocol] = factory; } } } - get selector() { return this.options.selector || CapabilitiesResolver.defaultSelector; } + get selector() { return this.options.selector || this.defaultSelector; } async createStorageKey(capabilities: Capabilities, type: Type, handleId: string): Promise { const factory = this.selectStorageKeyFactory(capabilities, handleId); @@ -54,39 +56,41 @@ export class CapabilitiesResolver { return factory.supports(capabilities); }); if (selectedFactories.length === 0) { - throw new Error(`Cannot create a suitable storage key for handle '${handleId}' with capabilities ${capabilities.toDebugString()}`); + throw new Error(`Cannot create a suitable storage key for handle '${ + handleId}' with capabilities ${capabilities.toDebugString()}`); } return this.selector.select(selectedFactories); } private async createStorageKeyWithFactory(factory: StorageKeyFactory, type: Type, handleId: string): Promise { - const schemaHash = await type.getEntitySchema().hash(); - const containerKey = factory.create(new ContainerStorageKeyOptions( - this.options.arcId, schemaHash, type.getEntitySchema().name)); + const schema = type.getEntitySchema(); + const schemaHash = await schema.hash(); + const options = new ContainerStorageKeyOptions(this.options.arcId, schemaHash, schema.name); + const containerKey = factory.create(options); const containerChildKey = containerKey.childKeyForHandle(handleId); if (!Flags.defaultReferenceMode) { return containerChildKey; } - if (type.isReference || - 
(type.getContainedType() && type.getContainedType().isReference)) { + const containedType = type.getContainedType(); + if (type.isReference || (containedType && containedType.isReference)) { return containerChildKey; } - const backingKey = factory.create(new BackingStorageKeyOptions( - this.options.arcId, schemaHash, type.getEntitySchema().name)); - + const backingKey = factory.create(new BackingStorageKeyOptions(this.options.arcId, schemaHash, schema.name)); // ReferenceModeStorageKeys in different drivers can cause problems with garbage collection. assert(backingKey.protocol === containerKey.protocol); - return new ReferenceModeStorageKey(backingKey, containerChildKey); } - static registerStorageKeyFactory(factory: StorageKeyFactory) { - assert(!CapabilitiesResolver.defaultStorageKeyFactories[factory.protocol], - `Storage key factory for '${factory.protocol}' already registered`); - CapabilitiesResolver.defaultStorageKeyFactories[factory.protocol] = factory; + registerStorageKeyFactory(factory: StorageKeyFactory) { + const defaultFactories = this.defaultStorageKeyFactories; + assert(!defaultFactories[factory.protocol], `Storage key factory for '${factory.protocol}' already registered`); + defaultFactories[factory.protocol] = factory; } - static reset() { - CapabilitiesResolver.defaultStorageKeyFactories = {}; + reset() { + this.defaultStorageKeyFactories = {}; } } + +//export const CapabilitiesResolver = new _CapabilitiesResolver(); + diff --git a/src/runtime/manifest.ts b/src/runtime/manifest.ts index ac837fb425c..a5e92b4912c 100644 --- a/src/runtime/manifest.ts +++ b/src/runtime/manifest.ts @@ -28,7 +28,6 @@ import {TypeChecker} from './type-checker.js'; import {ClaimIsTag} from './arcs-types/claim.js'; import {StorageKey} from './storage/storage-key.js'; import {Exists} from './storage/drivers/driver.js'; -import {StorageKeyParser} from './storage/storage-key-parser.js'; import {VolatileMemoryProvider, VolatileStorageKey} from './storage/drivers/volatile.js'; 
import {RamDiskStorageKey} from './storage/drivers/ramdisk.js'; import {ReferenceModeStorageKey} from './storage/reference-mode-storage-key.js'; @@ -39,6 +38,8 @@ import {canonicalManifest} from './canonical-manifest.js'; import {Policy} from './policy/policy.js'; import {resolveFieldPathType} from './field-path.js'; import {StoreInfo, StoreClaims} from './storage/store-info.js'; +import {StorageKeyParser} from './storage/storage-key-parser.js'; + export enum ErrorSeverity { Error = 'error', diff --git a/src/runtime/particle-execution-context.ts b/src/runtime/particle-execution-context.ts index 4afffcecc6b..455fc72d44d 100644 --- a/src/runtime/particle-execution-context.ts +++ b/src/runtime/particle-execution-context.ts @@ -18,7 +18,7 @@ import {ParticleSpec} from './arcs-types/particle-spec.js'; import {Particle, Capabilities} from './particle.js'; import {StorageProxy} from './storage/storage-proxy.js'; import {CRDTTypeRecord} from '../crdt/lib-crdt.js'; -import {ProxyCallback, ProxyMessage, StorageCommunicationEndpoint, StorageCommunicationEndpointProvider} from './storage/store-interface.js'; +import {StorageCommunicationEndpoint, StorageCommunicationEndpointProvider} from './storage/store-interface.js'; import {PropagatedException} from './arc-exceptions.js'; import {Type, MuxType} from '../types/lib-types.js'; import {MessagePort} from './message-channel.js'; @@ -26,18 +26,19 @@ import {WasmContainer, WasmParticle} from './wasm.js'; import {UserException} from './arc-exceptions.js'; import {SystemTrace} from '../tracelib/systrace.js'; import {delegateSystemTraceApis} from '../tracelib/systrace-helpers.js'; -import {ChannelConstructor} from './channel-constructor.js'; import {Ttl} from './capabilities.js'; import {Handle} from './storage/handle.js'; import {StorageProxyMuxer} from './storage/storage-proxy-muxer.js'; import {EntityHandleFactory} from './storage/entity-handle-factory.js'; -import {CRDTMuxEntity, TypeToCRDTTypeRecord, CRDTTypeRecordToType} from 
'./storage/storage.js'; -import {StorageEndpointImpl, StorageMuxerEndpointImpl, createStorageEndpoint} from './storage/storage-endpoint.js'; +import {CRDTMuxEntity, CRDTTypeRecordToType} from './storage/storage.js'; +import {createStorageEndpoint} from './storage/storage-endpoint.js'; import {StorageFrontend} from './storage/storage-frontend.js'; import {StoreInfo} from './storage/store-info.js'; import {VolatileStorageKey} from './storage/drivers/volatile.js'; import {StorageKeyParser} from './storage/storage-key-parser.js'; +//StorageKeyParser.addDefaultParser(VolatileStorageKey.protocol, VolatileStorageKey.fromString); + export type PecFactory = (pecId: Id, idGenerator: IdGenerator) => MessagePort; export type InnerArcHandle = { @@ -70,7 +71,7 @@ export class ParticleExecutionContext { private readonly pecId: Id; private readonly loader: Loader; private readonly pendingLoads = []>[]; - private readonly keyedProxies: Dictionary | Promise>> = {}; + //private readonly keyedProxies: Dictionary | Promise>> = {}; private readonly keyedProxyMuxers: Dictionary | Promise>> = {}; private readonly wasmContainers: Dictionary = {}; @@ -446,4 +447,4 @@ export class ParticleExecutionContext { } } -StorageKeyParser.addDefaultParser(VolatileStorageKey.protocol, VolatileStorageKey.fromString); +//StorageKeyParser.addDefaultParser(VolatileStorageKey.protocol, VolatileStorageKey.fromString); diff --git a/src/runtime/recipe/tests/recipe-test.ts b/src/runtime/recipe/tests/recipe-test.ts index cabdf04e027..746cfa8b902 100644 --- a/src/runtime/recipe/tests/recipe-test.ts +++ b/src/runtime/recipe/tests/recipe-test.ts @@ -25,10 +25,6 @@ describe('recipe', () => { runtime = new Runtime(); }); - afterEach(() => { - Runtime.resetDrivers(); - }); - it('normalize errors', async () => { const manifest = await runtime.parse(` schema S1 diff --git a/src/runtime/runtime.ts b/src/runtime/runtime.ts index e7ad4fe72cd..6f595cd7a5d 100644 --- a/src/runtime/runtime.ts +++ b/src/runtime/runtime.ts @@ 
-11,8 +11,7 @@ import {assert} from '../platform/assert-web.js'; import {Description} from './description.js'; import {Manifest} from './manifest.js'; -import {Arc} from './arc.js'; -import {CapabilitiesResolver} from './capabilities-resolver.js'; +import {ArcOptions, Arc} from './arc.js'; import {RuntimeCacheService} from './runtime-cache.js'; import {IdGenerator, ArcId, Id} from './id.js'; import {PecFactory} from './particle-execution-context.js'; @@ -28,10 +27,9 @@ import {workerPool} from './worker-pool.js'; import {Modality} from './arcs-types/modality.js'; import {StorageKey} from './storage/storage-key.js'; import {StorageKeyFactory} from './storage-key-factory.js'; -import {StorageKeyParser} from './storage/storage-key-parser.js'; import {DriverFactory} from './storage/drivers/driver-factory.js'; -import {RamDiskStorageDriverProvider} from './storage/drivers/ramdisk.js'; -import {SimpleVolatileMemoryProvider, VolatileMemoryProvider, VolatileStorageKey, VolatileStorageKeyFactory} from './storage/drivers/volatile.js'; +import {StorageKeyParser} from './storage/storage-key-parser.js'; +import {_CapabilitiesResolver} from './capabilities-resolver.js'; import {StorageEndpointManager} from './storage/storage-manager.js'; import {DirectStorageEndpointManager} from './storage/direct-storage-endpoint-manager.js'; import {Env} from './env.js'; @@ -62,31 +60,16 @@ export type RuntimeArcOptions = Readonly<{ modality?: Modality; }>; -// TODO(sjmiles): weird layering here due to dancing around global state -let staticMemoryProvider; -const initDrivers = () => { - VolatileStorageKey.register(); - staticMemoryProvider = new SimpleVolatileMemoryProvider(); - RamDiskStorageDriverProvider.register(staticMemoryProvider); -}; - -initDrivers(); +type StorageKeyPrefixer = (arcId: ArcId) => StorageKey; const nob = Object.create(null); @SystemTrace export class Runtime { - // TODO(sjmiles): static methods represent boilerplate. 
- // There's no essential reason they are part of Runtime. - // Consider! + // TODO(sjmiles): patching over layer problems due to static objects static resetDrivers(noDefault?: true) { - DriverFactory.providers = new Set(); - StorageKeyParser.reset(); - CapabilitiesResolver.reset(); - if (!noDefault) { - initDrivers(); - } + console.log('!FrOnK'); } /** @@ -175,9 +158,12 @@ export class Runtime { public readonly loader: Loader | null; private cacheService: RuntimeCacheService; private composerClass: typeof SlotComposer | null; - private memoryProvider: VolatileMemoryProvider; - readonly storageManager: StorageEndpointManager; - readonly arcById = new Map(); + public memoryProvider: VolatileMemoryProvider; + public readonly storageManager: StorageEndpointManager; + public readonly arcById = new Map(); + public driverFactory: DriverFactory; + public storageKeyParser: StorageKeyParser; + public capabilitiesResolver: _CapabilitiesResolver; constructor(opts: RuntimeOptions = {}) { const customMap = opts.urlMap || nob; @@ -186,18 +172,27 @@ export class Runtime { this.pecFactory = opts.pecFactory || pecIndustry(this.loader); this.composerClass = opts.composerClass || SlotComposer; this.cacheService = new RuntimeCacheService(); - this.memoryProvider = opts.memoryProvider || staticMemoryProvider; + this.memoryProvider = opts.memoryProvider || new SimpleVolatileMemoryProvider(); this.storageManager = opts.storageManager || new DirectStorageEndpointManager(); this.context = opts.context || new Manifest({id: 'manifest:default'}); + this.initDrivers(); // user information. One persona per runtime for now. 
} - getCacheService() { - return this.cacheService; + initDrivers() { + // storage drivers + this.driverFactory = new DriverFactory(); + this.storageKeyParser = new StorageKeyParser(); + this.capabilitiesResolver = new _CapabilitiesResolver(); + VolatileStorageKey.register(this); + // TODO(sjmiles): affects DriverFactory + RamDiskStorageDriverProvider.register(this); } - getMemoryProvider(): VolatileMemoryProvider { - return this.memoryProvider; + resetDrivers() { + this.driverFactory.providers = new Set(); + this.storageKeyParser.reset(); + this.capabilitiesResolver.reset(); } destroy() { @@ -212,12 +207,21 @@ export class Runtime { buildArcParams(name?: string) { const id = IdGenerator.newSession().newArcId(name); const {loader, context} = this; - const pecFactories = [this.pecFactory]; - const slotComposer = this.composerClass ? new this.composerClass() : null; const factories = [new VolatileStorageKeyFactory()]; - const storageManager = this.storageManager; - const capabilitiesResolver = new CapabilitiesResolver({arcId: id, factories}); - return {id, loader, pecFactories, slotComposer, storageManager, capabilitiesResolver, context}; + return { + id, + loader, + context, + pecFactories: [this.pecFactory], + slotComposer: this.composerClass ? new this.composerClass() : null, + storageManager: this.storageManager, + capabilitiesResolver: new _CapabilitiesResolver({arcId: id, factories}), + driverFactory: this.driverFactory, + storageKey: storageKeyPrefix ? storageKeyPrefix(id) : new VolatileStorageKey(id, '') + }; + //const volatileStorageDriverProvider = new VolatileStorageDriverProvider(this); + //DriverFactory.register(this.volatileStorageDriverProvider); + //return {id, loader, pecFactories, slotComposer, storageManager, capabilitiesResolver, context}; } // TODO(shans): Clean up once old storage is removed. @@ -229,9 +233,9 @@ export class Runtime { const slotComposer = this.composerClass ? new this.composerClass() : null; const storageKey = storageKeyPrefix ? 
storageKeyPrefix(id) : new VolatileStorageKey(id, ''); const factories = (options && options.storargeKeyFactories) || [new VolatileStorageKeyFactory()]; - const capabilitiesResolver = new CapabilitiesResolver({arcId: id, factories}); - const {loader, context, storageManager} = this; - return new Arc({id, storageKey, capabilitiesResolver, loader, slotComposer, context, storageManager, ...options}); + const capabilitiesResolver = new _CapabilitiesResolver({arcId: id, factories}); + const {loader, context, storageManager, driverFactory} = this; + return new Arc({id, storageKey, capabilitiesResolver, loader, slotComposer, context, storageManager, driverFactory, ...options}); } // Stuff the shell(s) need @@ -242,10 +246,15 @@ export class Runtime { * (2) a deserialized arc (TODO: needs implementation) * (3) a newly created arc */ - runArc(name: string, storageKeyPrefix: (arcId: ArcId) => StorageKey, options?: RuntimeArcOptions): Arc { + runArc(name: string, storageKeyPrefix: StorageKeyPrefixer, options?: RuntimeArcOptions): Arc { if (!this.arcById.has(name)) { // TODO: Support deserializing serialized arcs. 
- this.arcById.set(name, this.newArc(name, storageKeyPrefix, options)); + const params = { + ...this.buildArcParams(name, storageKeyPrefix), + ...options + }; + const arc = new Arc(params); + this.arcById.set(name, arc); } return this.arcById.get(name); } diff --git a/src/runtime/storage/database-storage-key.ts b/src/runtime/storage/database-storage-key.ts index d05144dc391..2d160254191 100644 --- a/src/runtime/storage/database-storage-key.ts +++ b/src/runtime/storage/database-storage-key.ts @@ -11,7 +11,6 @@ import {assert} from '../../platform/assert-web.js'; import {StorageKey} from './storage-key.js'; import {Capabilities, Persistence, Queryable, Ttl, Shareable, DeletePropagation} from '../capabilities.js'; -import {CapabilitiesResolver} from '../capabilities-resolver.js'; import {StorageKeyFactory, StorageKeyOptions} from '../storage-key-factory.js'; export abstract class DatabaseStorageKey extends StorageKey { @@ -42,9 +41,9 @@ export abstract class DatabaseStorageKey extends StorageKey { return match; } - static register() { - CapabilitiesResolver.registerStorageKeyFactory(new PersistentDatabaseStorageKeyFactory()); - CapabilitiesResolver.registerStorageKeyFactory(new MemoryDatabaseStorageKeyFactory()); + static register({capabilitiesResolver}) { + capabilitiesResolver.registerStorageKeyFactory(new PersistentDatabaseStorageKeyFactory()); + capabilitiesResolver.registerStorageKeyFactory(new MemoryDatabaseStorageKeyFactory()); } } diff --git a/src/runtime/storage/direct-storage-endpoint-manager.ts b/src/runtime/storage/direct-storage-endpoint-manager.ts index e964bf2edac..6a8b6e08c09 100644 --- a/src/runtime/storage/direct-storage-endpoint-manager.ts +++ b/src/runtime/storage/direct-storage-endpoint-manager.ts @@ -35,12 +35,16 @@ export class DirectStorageEndpointManager implements StorageEndpointManager, Sto if (ctor == null) { throw new Error(`No constructor registered for mode ${storeInfo.mode}`); } - this.activeStoresByKey.set(storeInfo.storageKey, await 
ctor.construct>({ + const construct = ctor.construct.bind(ctor); + //const instance = await ctor.construct>({ + debugger; + const instance = await construct({ storageKey: storeInfo.storageKey, exists: storeInfo.exists, type: storeInfo.type as unknown as CRDTTypeRecordToType>, storeInfo: storeInfo as unknown as StoreInfo>>, - })); + }); + this.activeStoresByKey.set(storeInfo.storageKey, instance); storeInfo.exists = Exists.ShouldExist; } return this.activeStoresByKey.get(storeInfo.storageKey) as ActiveStore>; diff --git a/src/runtime/storage/direct-store-muxer.ts b/src/runtime/storage/direct-store-muxer.ts index 3163dc09b94..89d9a60c34c 100644 --- a/src/runtime/storage/direct-store-muxer.ts +++ b/src/runtime/storage/direct-store-muxer.ts @@ -14,10 +14,9 @@ import {ActiveStore} from './active-store.js'; import {StorageKey} from './storage-key.js'; import {DirectStore} from './direct-store.js'; import {Dictionary, BiMap, noAwait} from '../../utils/lib-utils.js'; -import {StoreConstructorOptions, StorageCommunicationEndpointProvider, StorageMode, ProxyMessageType} from './store-interface.js'; +import {StoreConstructorOptions, StorageCommunicationEndpointProvider, ProxyMessageType} from './store-interface.js'; import {assert} from '../../platform/assert-web.js'; import {PropagatedException, reportSystemException} from '../arc-exceptions.js'; -import {ChannelConstructor} from '../channel-constructor.js'; /** * Registering a callback results in a callback ID. 
Storage Proxy Muxers/Reference Mode Stores register a diff --git a/src/runtime/storage/drivers/driver-factory.ts b/src/runtime/storage/drivers/driver-factory.ts index 99480173173..6f0bad2d45f 100644 --- a/src/runtime/storage/drivers/driver-factory.ts +++ b/src/runtime/storage/drivers/driver-factory.ts @@ -18,31 +18,38 @@ export interface StorageDriverProvider { driver(storageKey: StorageKey, exists: Exists): Promise>; } +let staticDriverFactory; + export class DriverFactory { - static providers: Set = new Set(); - static async driverInstance(storageKey: StorageKey, exists: Exists) { - for (const provider of this.providers) { - if (provider.willSupport(storageKey)) { - return provider.driver(storageKey, exists); - } - } - return null; + static register({driverFactory}, storageDriverProvider: StorageDriverProvider) { + driverFactory.register(storageDriverProvider); } - - static register(storageDriverProvider: StorageDriverProvider) { + providers: Set = new Set(); + constructor() { + staticDriverFactory = this; + } + register(storageDriverProvider: StorageDriverProvider) { this.providers.add(storageDriverProvider); } - - static unregister(storageDriverProvider: StorageDriverProvider) { + unregister(storageDriverProvider: StorageDriverProvider) { this.providers.delete(storageDriverProvider); } - - static willSupport(storageKey: StorageKey) { + willSupport(storageKey: StorageKey) { + return !!(this.supportingProvider(storageKey)); + } + async driverInstance(storageKey: StorageKey, exists: Exists) { + const provider = this.supportingProvider(storageKey); + return provider ? 
provider.driver(storageKey, exists) : null; + } + supportingProvider(storageKey): StorageDriverProvider { for (const provider of this.providers) { if (provider.willSupport(storageKey)) { - return true; + return provider; } } - return false; + return null; + } + static async driverInstance(storageKey: StorageKey, exists: Exists) { + return staticDriverFactory.driverInstance(storageKey, exists); } } diff --git a/src/runtime/storage/drivers/firebase.ts b/src/runtime/storage/drivers/firebase.ts index 41acba8b022..a433b3f33f0 100644 --- a/src/runtime/storage/drivers/firebase.ts +++ b/src/runtime/storage/drivers/firebase.ts @@ -8,15 +8,12 @@ * http://polymer.github.io/PATENTS.txt */ -import {StorageDriverProvider, DriverFactory} from './driver-factory.js'; +import {StorageDriverProvider} from './driver-factory.js'; import {Driver, ReceiveMethod, Exists} from './driver.js'; import {StorageKey} from '../storage-key.js'; -import {ArcId} from '../../id.js'; import {RuntimeCacheService} from '../../runtime-cache.js'; import {assert} from '../../../platform/assert-web.js'; import {firebase} from '../../../../concrete-storage/firebase.js'; -import {StorageKeyParser} from '../storage-key-parser.js'; -import {CapabilitiesResolver} from '../../capabilities-resolver.js'; import {Capabilities, Persistence, Shareable} from '../../capabilities.js'; import {StorageKeyOptions, StorageKeyFactory} from '../../storage-key-factory.js'; @@ -73,7 +70,7 @@ export class FirebaseAppCache { getApp(key: FirebaseStorageKey) { const keyAsString = key.toString(); if (!this.appCache.has(keyAsString)) { - this.appCache.set(keyAsString, firebase.initializeApp(key)); + this.appCache.set(keyAsString, firebase.initializeApp(key, `[salt${Math.random()}]`)); } return this.appCache.get(keyAsString); } @@ -227,15 +224,16 @@ export class FirebaseStorageDriverProvider implements StorageDriverProvider { } const driver = new FirebaseDriver(storageKey, exists); - await driver.init(new 
FirebaseAppCache(this.cacheService)); + const cache = new FirebaseAppCache(this.cacheService); + await driver.init(cache); return driver; } - static register(cacheService: RuntimeCacheService, options: FirebaseStorageKeyOptions) { - DriverFactory.register(new FirebaseStorageDriverProvider(cacheService)); - StorageKeyParser.addParser(FirebaseStorageKey.protocol, FirebaseStorageKey.fromString); + static register({driverFactory, storageKeyParser, capabilitiesResolver}, cacheService: RuntimeCacheService, options: FirebaseStorageKeyOptions) { + driverFactory.register(new FirebaseStorageDriverProvider(cacheService)); + storageKeyParser.addParser(FirebaseStorageKey.protocol, FirebaseStorageKey.fromString); const {projectId, domain, apiKey} = options; - CapabilitiesResolver.registerStorageKeyFactory(new FirebaseStorageKeyFactory(options)); + capabilitiesResolver.registerStorageKeyFactory(new FirebaseStorageKeyFactory(options)); } } diff --git a/src/runtime/storage/drivers/ramdisk.ts b/src/runtime/storage/drivers/ramdisk.ts index 4d425e55b0e..3776c30c201 100644 --- a/src/runtime/storage/drivers/ramdisk.ts +++ b/src/runtime/storage/drivers/ramdisk.ts @@ -11,11 +11,8 @@ import {StorageKey} from '../storage-key.js'; import {StorageDriverProvider, DriverFactory} from './driver-factory.js'; import {VolatileDriver, VolatileMemoryProvider} from './volatile.js'; -import {StorageKeyParser} from '../storage-key-parser.js'; import {Exists} from './driver.js'; -import {ArcId} from '../../id.js'; -import {Capabilities, Persistence, Encryption, Ttl, Queryable, Shareable} from '../../capabilities.js'; -import {CapabilitiesResolver} from '../../capabilities-resolver.js'; +import {Capabilities, Persistence, Shareable} from '../../capabilities.js'; import {StorageKeyFactory, StorageKeyOptions} from '../../storage-key-factory.js'; export class RamDiskStorageKey extends StorageKey { @@ -84,9 +81,9 @@ export class RamDiskStorageDriverProvider implements StorageDriverProvider { return new 
VolatileDriver(storageKey as RamDiskStorageKey, exists, memory); } - static register(memoryProvider: VolatileMemoryProvider) { - DriverFactory.register(new RamDiskStorageDriverProvider(memoryProvider)); - StorageKeyParser.addParser(RamDiskStorageKey.protocol, RamDiskStorageKey.fromString); - CapabilitiesResolver.registerStorageKeyFactory(new RamDiskStorageKeyFactory()); + static register({driverFactory, storageKeyParser, capabilitiesResolver, memoryProvider}) { + driverFactory.register(new RamDiskStorageDriverProvider(memoryProvider)); + storageKeyParser.addParser(RamDiskStorageKey.protocol, RamDiskStorageKey.fromString); + capabilitiesResolver.registerStorageKeyFactory(new RamDiskStorageKeyFactory()); } } diff --git a/src/runtime/storage/drivers/volatile.ts b/src/runtime/storage/drivers/volatile.ts index 6b718e85d81..31a1b30bad0 100644 --- a/src/runtime/storage/drivers/volatile.ts +++ b/src/runtime/storage/drivers/volatile.ts @@ -8,7 +8,7 @@ * http://polymer.github.io/PATENTS.txt */ -import {StorageDriverProvider, DriverFactory} from './driver-factory.js'; +import {StorageDriverProvider} from './driver-factory.js'; import {Driver, ReceiveMethod, Exists} from './driver.js'; import {StorageKey} from '../storage-key.js'; import {Arc} from '../../arc.js'; @@ -16,9 +16,7 @@ import {ArcId} from '../../id.js'; import {RamDiskStorageKey} from './ramdisk.js'; import {Dictionary} from '../../../utils/lib-utils.js'; import {assert} from '../../../platform/assert-web.js'; -import {StorageKeyParser} from '../storage-key-parser.js'; -import {Capabilities, Persistence, Encryption, Ttl, Queryable, Shareable} from '../../capabilities.js'; -import {CapabilitiesResolver} from '../../capabilities-resolver.js'; +import {Capabilities, Persistence, Shareable} from '../../capabilities.js'; import {StorageKeyFactory, StorageKeyOptions} from '../../storage-key-factory.js'; type VolatileEntry = {data: Data, version: number, drivers: VolatileDriver[]}; @@ -59,8 +57,12 @@ export class 
VolatileStorageKey extends StorageKey { return new VolatileStorageKey(ArcId.fromString(arcId), unique, path); } - static register() { - CapabilitiesResolver.registerStorageKeyFactory(new VolatileStorageKeyFactory()); + // static register() { + // CapabilitiesResolver.registerStorageKeyFactory(new VolatileStorageKeyFactory()); + // } + + static register({capabilitiesResolver}) { + capabilitiesResolver.registerStorageKeyFactory(new VolatileStorageKeyFactory()); } } @@ -267,9 +269,9 @@ export class VolatileStorageDriverProvider implements StorageDriverProvider { } // QUESTION: This method is never being called, is it needed? - static register(arc: Arc) { - DriverFactory.register(new VolatileStorageDriverProvider(arc)); + static register({driverFactory}, arc: Arc) { + driverFactory.register(new VolatileStorageDriverProvider(arc)); } } -StorageKeyParser.addDefaultParser(VolatileStorageKey.protocol, VolatileStorageKey.fromString); +//StorageKeyParser.addDefaultParser(VolatileStorageKey.protocol, VolatileStorageKey.fromString); diff --git a/src/runtime/storage/storage-key-parser.ts b/src/runtime/storage/storage-key-parser.ts index cd5aefed027..20ab40291c0 100644 --- a/src/runtime/storage/storage-key-parser.ts +++ b/src/runtime/storage/storage-key-parser.ts @@ -9,59 +9,74 @@ */ import {StorageKey, StorageKeyLiteral} from './storage-key.js'; import {ReferenceModeStorageKey} from './reference-mode-storage-key.js'; +import {VolatileStorageKey} from './drivers/volatile.js'; type ParserTopLevel = (key: string) => StorageKey; type Parser = (key: string, parse: ParserTopLevel) => StorageKey; +let staticParser; + /** * Parses storage key string representations back into real StorageKey * instances. * - * Singleton class with static methods. If you modify the default set of storage + * If you modify the default set of storage * keys in a test, remember to call StorageKeyParser.reset() in the tear-down * method. 
*/ export class StorageKeyParser { - private static defaultParsers: [string, Parser][] = [ + private parsers; + + constructor() { + this.parsers = this.getDefaultParsers(); + staticParser = this; + } + + private defaultParsers: [string, Parser][] = [ [ReferenceModeStorageKey.protocol, ReferenceModeStorageKey.fromString], + [VolatileStorageKey.protocol, VolatileStorageKey.fromString] ]; - private static getDefaultParsers(): Map { + private getDefaultParsers(): Map { return new Map(this.defaultParsers); } - private static parsers = StorageKeyParser.getDefaultParsers(); - - static parse(key: string): StorageKey { + parse(key: string): StorageKey { const match = key.match(/^((?:\w|-)+):\/\/(.*)$/); if (!match) { throw new Error('Failed to parse storage key: ' + key); } const protocol = match[1]; - const parser = StorageKeyParser.parsers.get(protocol); + const parser = this.parsers.get(protocol); if (!parser) { throw new Error(`Unknown storage key protocol ${protocol} in key ${key}.`); } - return parser(key, StorageKeyParser.parse); + return parser(key, this.parse.bind(this)); } - static reset() { + reset() { this.parsers = this.getDefaultParsers(); } - static addParser(protocol: string, parser: Parser) { + addParser(protocol: string, parser: Parser) { if (this.parsers.has(protocol)) { throw new Error(`Parser for storage key protocol ${protocol} already exists.`); } this.parsers.set(protocol, parser); } - static addDefaultParser(protocol: string, parser: Parser) { + addDefaultParser(protocol: string, parser: Parser) { this.defaultParsers.push([protocol, parser]); if (!this.parsers.has(protocol)) { this.parsers.set(protocol, parser); } } + + static parse(key): StorageKey { + return staticParser.parse(key); + } } +staticParser = new StorageKeyParser(); + StorageKey.fromLiteral = (literal: StorageKeyLiteral) => StorageKeyParser.parse(literal.key); diff --git a/src/runtime/storage/testing/mock-firebase.ts b/src/runtime/storage/testing/mock-firebase.ts index 
bb9a31b672e..6ffe7acd456 100644 --- a/src/runtime/storage/testing/mock-firebase.ts +++ b/src/runtime/storage/testing/mock-firebase.ts @@ -16,7 +16,6 @@ import {Exists} from '../drivers/driver.js'; import {assert} from '../../../platform/chai-web.js'; import {RuntimeCacheService} from '../../runtime-cache.js'; import {StorageKeyParser} from '../storage-key-parser.js'; -import {StorageKeyOptions} from '../../storage-key-factory.js'; /** * These classes are intended to mimic firebase behaviour, including asynchrony. @@ -333,11 +332,12 @@ class MockFirebaseAppCache extends FirebaseAppCache { } export class MockFirebaseStorageDriverProvider extends FirebaseStorageDriverProvider { + static appCache; + async driver(storageKey: StorageKey, exists: Exists) { if (!this.willSupport(storageKey)) { throw new Error(`This provider does not support storageKey ${storageKey.toString()}`); } - return MockFirebaseStorageDriverProvider.newDriverForTesting(this.cacheService, storageKey, exists); } @@ -348,10 +348,13 @@ export class MockFirebaseStorageDriverProvider extends FirebaseStorageDriverProv return driver; } - static register(cacheService: RuntimeCacheService) { - DriverFactory.register(new MockFirebaseStorageDriverProvider(cacheService)); - StorageKeyParser.addParser(FirebaseStorageKey.protocol, FirebaseStorageKey.fromString); - const {projectId, domain, apiKey} = mockFirebaseStorageKeyOptions; + static register({driverFactory, storageKeyParser, capabilitiesResolver}, cacheService: RuntimeCacheService, options?: FirebaseStorageKeyOptions) { + //static register(cacheService: RuntimeCacheService) { + //DriverFactory.register(new MockFirebaseStorageDriverProvider(cacheService)); + //StorageKeyParser.addParser(FirebaseStorageKey.protocol, FirebaseStorageKey.fromString); + //const {projectId, domain, apiKey} = mockFirebaseStorageKeyOptions; + driverFactory.register(new FirebaseStorageDriverProvider(cacheService)); + storageKeyParser.addParser(FirebaseStorageKey.protocol, 
FirebaseStorageKey.fromString); } static getValueForTesting(cacheService: RuntimeCacheService, storageKey: MockFirebaseStorageKey) { diff --git a/src/runtime/storage/tests/active-store-test.ts b/src/runtime/storage/tests/active-store-test.ts index 163fb413474..1f7f1073ba1 100644 --- a/src/runtime/storage/tests/active-store-test.ts +++ b/src/runtime/storage/tests/active-store-test.ts @@ -23,22 +23,17 @@ import {ActiveStore} from '../active-store.js'; import {DirectStorageEndpointManager} from '../direct-storage-endpoint-manager.js'; import {Runtime} from '../../runtime.js'; -let testKey: StorageKey; - -async function createStore(): Promise> { - return new DirectStorageEndpointManager().getActiveStore(new StoreInfo({ - storageKey: testKey, type: new CountType(), exists: Exists.ShouldCreate, id: 'an-id'})); -} - describe('Store', async () => { - + let runtime; + let testKey: StorageKey; + async function createStore(): Promise> { + const info = new StoreInfo({storageKey: testKey, type: new CountType(), exists: Exists.ShouldCreate, id: 'an-id'}); + const endpoints = new DirectStorageEndpointManager(); + return endpoints.getActiveStore(info); + } beforeEach(() => { testKey = new MockStorageKey(); - Runtime.resetDrivers(); - }); - - after(() => { - Runtime.resetDrivers(); + runtime = new Runtime(); }); it(`will throw an exception if an appropriate driver can't be found`, async () => { @@ -51,7 +46,7 @@ describe('Store', async () => { }); it('will construct Direct stores when required', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createStore(); @@ -59,7 +54,7 @@ describe('Store', async () => { }); it('will propagate model updates from proxies to drivers', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createStore(); @@ -76,7 +71,7 @@ 
describe('Store', async () => { }); it('will apply and propagate operation updates from proxies to drivers', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createStore(); @@ -95,7 +90,7 @@ describe('Store', async () => { }); it('will respond to a model request from a proxy with a model', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createStore(); @@ -130,7 +125,7 @@ describe('Store', async () => { }); it('will only send a model response to the requesting proxy', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createStore(); @@ -151,7 +146,7 @@ describe('Store', async () => { }); it('will propagate updates from drivers to proxies', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createStore(); @@ -177,7 +172,7 @@ describe('Store', async () => { }); it('can clone data from another store', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createStore(); // Write some data. 
@@ -192,7 +187,7 @@ describe('Store', async () => { }); it(`won't send an update to the driver after driver-originated messages`, async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createStore(); @@ -208,7 +203,7 @@ describe('Store', async () => { }); it('will resend failed driver updates after merging', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createStore(); @@ -239,7 +234,7 @@ describe('Store', async () => { }); it('resolves a combination of messages from the proxy and the driver', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createStore(); const driver = activeStore['driver'] as MockDriver; let lastModel = null; diff --git a/src/runtime/storage/tests/direct-store-muxer-test.ts b/src/runtime/storage/tests/direct-store-muxer-test.ts index 5d96a34b002..5ba08c298e2 100644 --- a/src/runtime/storage/tests/direct-store-muxer-test.ts +++ b/src/runtime/storage/tests/direct-store-muxer-test.ts @@ -33,8 +33,8 @@ let storageManager: StorageEndpointManager; describe('Direct Store Muxer', async () => { beforeEach(() => { - Runtime.resetDrivers(); - DriverFactory.register(new MockStorageDriverProvider()); + const runtime = new Runtime(); + DriverFactory.register(runtime, new MockStorageDriverProvider()); storageManager = new DirectStorageEndpointManager(); }); diff --git a/src/runtime/storage/tests/firebase-store-integration-test.ts b/src/runtime/storage/tests/firebase-store-integration-test.ts index 1daae0bbdbd..363ab7dd4c5 100644 --- a/src/runtime/storage/tests/firebase-store-integration-test.ts +++ b/src/runtime/storage/tests/firebase-store-integration-test.ts @@ -11,7 +11,6 @@ import {assert} from 
'../../../platform/chai-web.js'; import {ProxyMessageType} from '../store-interface.js'; import {CRDTCountTypeRecord, CRDTCount, CountOpTypes} from '../../../crdt/lib-crdt.js'; -import {DriverFactory} from '../drivers/driver-factory.js'; import {Exists} from '../drivers/driver.js'; import {Runtime} from '../../runtime.js'; import {MockFirebaseStorageDriverProvider, MockFirebaseStorageKey} from '../testing/mock-firebase.js'; @@ -22,23 +21,21 @@ import {ActiveStore} from '../active-store.js'; import {DirectStorageEndpointManager} from '../direct-storage-endpoint-manager.js'; async function createStore(storageKey: StorageKey, exists: Exists): Promise> { - return (await new DirectStorageEndpointManager().getActiveStore(new StoreInfo({ - storageKey, type: new CountType(), exists, id: 'an-id'}))) as ActiveStore; + const info = new StoreInfo({storageKey, type: new CountType(), exists, id: 'an-id'}); + const endpoints = new DirectStorageEndpointManager(); + const store = await endpoints.getActiveStore(info); + return store as ActiveStore; } describe('Firebase + Store Integration', async () => { let runtime; beforeEach(() => { - Runtime.resetDrivers(); runtime = new Runtime(); - MockFirebaseStorageDriverProvider.register(runtime.getCacheService()); + MockFirebaseStorageDriverProvider.register(runtime, runtime.getCacheService()); }); - after(() => { - Runtime.resetDrivers(); - }); - - it('will store a sequence of model and operation updates as models', async () => { + it('FLOOB will store a sequence of model and operation updates as models', async () => { + debugger; const storageKey = new MockFirebaseStorageKey('location'); const activeStore = await createStore(storageKey, Exists.ShouldCreate); diff --git a/src/runtime/storage/tests/ramdisk-direct-store-muxer-integration-test.ts b/src/runtime/storage/tests/ramdisk-direct-store-muxer-integration-test.ts index dae56181491..d9feed5ff50 100644 --- a/src/runtime/storage/tests/ramdisk-direct-store-muxer-integration-test.ts +++ 
b/src/runtime/storage/tests/ramdisk-direct-store-muxer-integration-test.ts @@ -29,10 +29,6 @@ function assertHasModel(message: ProxyMessage, model: CRDTEntity< } describe('RamDisk + Direct Store Muxer Integration', async () => { - afterEach(() => { - Runtime.resetDrivers(); - }); - it('will allow storage of a number of objects', async () => { const manifest = await Manifest.parse(` schema Simple diff --git a/src/runtime/storage/tests/ramdisk-store-integration-test.ts b/src/runtime/storage/tests/ramdisk-store-integration-test.ts index ef31c07ad1e..cc3af5148b1 100644 --- a/src/runtime/storage/tests/ramdisk-store-integration-test.ts +++ b/src/runtime/storage/tests/ramdisk-store-integration-test.ts @@ -27,10 +27,6 @@ async function createStore(storageKey: StorageKey, exists: Exists): Promise { - afterEach(() => { - Runtime.resetDrivers(); - }); - it('will store a sequence of model and operation updates as models', async () => { const runtime = new Runtime(); //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); diff --git a/src/runtime/storage/tests/reference-mode-store-integration-test.ts b/src/runtime/storage/tests/reference-mode-store-integration-test.ts index fd6ff96b5e2..84b2c91e566 100644 --- a/src/runtime/storage/tests/reference-mode-store-integration-test.ts +++ b/src/runtime/storage/tests/reference-mode-store-integration-test.ts @@ -23,13 +23,7 @@ import {StoreInfo} from '../store-info.js'; describe('ReferenceModeStore Integration', async () => { - afterEach(() => { - Runtime.resetDrivers(); - }); - it('will store and retrieve entities through referenceModeStores (separate stores)', async () => { - const runtime = new Runtime(); - //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new ReferenceModeStorageKey(new RamDiskStorageKey('backing'), new RamDiskStorageKey('container')); const type = new EntityType(new Schema(['AnEntity'], {foo: 'Text'})).collectionOf(); @@ -64,8 +58,6 @@ describe('ReferenceModeStore 
Integration', async () => { }); it('will store and retrieve entities through referenceModeStores (shared stores)', async () => { - const runtime = new Runtime(); - //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new ReferenceModeStorageKey(new RamDiskStorageKey('backing'), new RamDiskStorageKey('container')); const arc = new Runtime().newArc('testArc'); @@ -101,8 +93,6 @@ describe('ReferenceModeStore Integration', async () => { }); it('will store and retrieve entities through referenceModeStores (shared proxies)', async () => { - const runtime = new Runtime(); - //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new ReferenceModeStorageKey(new RamDiskStorageKey('backing'), new RamDiskStorageKey('container')); const arc = new Runtime().newArc('testArc'); @@ -139,8 +129,6 @@ describe('ReferenceModeStore Integration', async () => { }); it('will send an ordered list from one handle to another (separate store)', async () => { - const runtime = new Runtime(); - //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new ReferenceModeStorageKey(new RamDiskStorageKey('backing'), new RamDiskStorageKey('container')); const type = new EntityType(new Schema(['AnEntity'], { @@ -177,8 +165,6 @@ describe('ReferenceModeStore Integration', async () => { }); it('will send an ordered list from one handle to another (shared store)', async () => { - const runtime = new Runtime(); - //RamDiskStorageDriverProvider.register(runtime.getMemoryProvider()); const storageKey = new ReferenceModeStorageKey(new RamDiskStorageKey('backing'), new RamDiskStorageKey('container')); const arc = new Runtime().newArc('testArc'); diff --git a/src/runtime/storage/tests/reference-mode-store-test.ts b/src/runtime/storage/tests/reference-mode-store-test.ts index 51ea36393f0..df782b4d927 100644 --- a/src/runtime/storage/tests/reference-mode-store-test.ts +++ 
b/src/runtime/storage/tests/reference-mode-store-test.ts @@ -87,18 +87,16 @@ function myEntityToMyEntityModel(entity: MyEntity, actor: string): MyEntityModel describe('Reference Mode Store', async () => { + let runtime; + beforeEach(() => { + runtime = new Runtime(); testKey = new ReferenceModeStorageKey(new MockHierarchicalStorageKey(), new MockHierarchicalStorageKey()); storeInfo = new StoreInfo({ storageKey: testKey, type: collectionType, exists: Exists.ShouldCreate, id: 'base-store-id'}); - Runtime.resetDrivers(); storageManager = new DirectStorageEndpointManager(); }); - after(() => { - Runtime.resetDrivers(); - }); - it(`will throw an exception if an appropriate driver can't be found`, async () => { const type = new SingletonType(new CountType()); try { @@ -111,7 +109,7 @@ describe('Reference Mode Store', async () => { }); it('will construct ReferenceMode stores when required', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const type = new SingletonType(new CountType()); const activeStore = await storageManager.getActiveStore((new StoreInfo({ @@ -120,7 +118,7 @@ describe('Reference Mode Store', async () => { }); it('will propagate model updates from proxies to drivers', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createReferenceModeStore(); @@ -151,7 +149,7 @@ describe('Reference Mode Store', async () => { }); it('can clone data from another store', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createReferenceModeStore(); @@ -172,7 +170,7 @@ describe('Reference Mode Store', async () => { }); it('will apply and propagate operation updates from proxies to drivers', async () => { - DriverFactory.register(new 
MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createReferenceModeStore(); @@ -202,7 +200,7 @@ describe('Reference Mode Store', async () => { }); it('clear entity in the backing store when they are removed from a collection', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createReferenceModeStore(); @@ -241,7 +239,7 @@ describe('Reference Mode Store', async () => { }); it('will respond to a model request from a proxy with a model', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createReferenceModeStore(); @@ -280,7 +278,7 @@ describe('Reference Mode Store', async () => { }); it('will only send a model response to the requesting proxy', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createReferenceModeStore(); @@ -301,7 +299,7 @@ describe('Reference Mode Store', async () => { }); it('will propagate updates from drivers to proxies', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createReferenceModeStore(); @@ -339,7 +337,7 @@ describe('Reference Mode Store', async () => { // TODO: this test can be enabled when we output operations from collection model merges it.skip(`won't send an update to the driver after driver-originated messages`, async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createReferenceModeStore(); @@ -356,7 +354,7 @@ describe('Reference Mode Store', async () => { }); it('will resend failed 
driver updates after merging', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createReferenceModeStore(); @@ -399,7 +397,7 @@ describe('Reference Mode Store', async () => { }); it('resolves a combination of messages from the proxy and the driver', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createReferenceModeStore(); @@ -440,7 +438,7 @@ describe('Reference Mode Store', async () => { }); it('holds onto a container update until the relevant backing data arrives', async () => { - DriverFactory.register(new MockStorageDriverProvider()); + DriverFactory.register(runtime, new MockStorageDriverProvider()); const activeStore = await createReferenceModeStore(); diff --git a/src/runtime/storage/tests/storage-key-test.ts b/src/runtime/storage/tests/storage-key-test.ts index a2fdff9b8a4..bf7db3640fe 100644 --- a/src/runtime/storage/tests/storage-key-test.ts +++ b/src/runtime/storage/tests/storage-key-test.ts @@ -21,11 +21,7 @@ describe('StorageKey', () => { beforeEach(() => { const runtime = new Runtime(); - FirebaseStorageDriverProvider.register(runtime.getCacheService(), mockFirebaseStorageKeyOptions); - }); - - afterEach(() => { - Runtime.resetDrivers(); + FirebaseStorageDriverProvider.register(runtime, runtime.getCacheService(), mockFirebaseStorageKeyOptions); }); it('can round-trip VolatileStorageKey', () => { diff --git a/src/runtime/storage/tests/store-sequence-test.ts b/src/runtime/storage/tests/store-sequence-test.ts index cca6cf3f354..924fd9d43ef 100644 --- a/src/runtime/storage/tests/store-sequence-test.ts +++ b/src/runtime/storage/tests/store-sequence-test.ts @@ -62,9 +62,8 @@ describe('Store Sequence', async () => { it('services a model request and applies 2 models', async () => { const sequenceTest = new SequenceTest>(); 
sequenceTest.setTestConstructor(async () => { - Runtime.resetDrivers(); - DriverFactory.register(new MockStorageDriverProvider()); - + const runtime = new Runtime(); + DriverFactory.register(runtime, new MockStorageDriverProvider()); return createStore(testKey, Exists.ShouldCreate); }); @@ -127,9 +126,8 @@ describe('Store Sequence', async () => { const sequenceTest = new SequenceTest>(); sequenceTest.setTestConstructor(async () => { - Runtime.resetDrivers(); - DriverFactory.register(new MockStorageDriverProvider()); - + const runtime = new Runtime(); + DriverFactory.register(runtime, new MockStorageDriverProvider()); return createStore(testKey, Exists.ShouldCreate); }); @@ -178,8 +176,7 @@ describe('Store Sequence', async () => { sequenceTest.setTestConstructor(async () => { const runtime = new Runtime(); const arc = runtime.newArc('arc', null); - Runtime.resetDrivers(); - VolatileStorageDriverProvider.register(arc); + VolatileStorageDriverProvider.register(runtime, arc); const storageKey = new VolatileStorageKey(arc.id, 'unique'); const activeStore1 = await createStore(storageKey, Exists.ShouldCreate); const activeStore2 = await createStore(storageKey, Exists.ShouldExist); @@ -228,8 +225,7 @@ describe('Store Sequence', async () => { const sequenceTest = new SequenceTest(); sequenceTest.setTestConstructor(async () => { const runtime = new Runtime(); - Runtime.resetDrivers(); - MockFirebaseStorageDriverProvider.register(runtime.getCacheService()); + MockFirebaseStorageDriverProvider.register(runtime, runtime.getCacheService()); const storageKey = new FirebaseStorageKey('test', 'test.domain', 'testKey', 'foo'); const activeStore1 = await createStore(storageKey, Exists.ShouldCreate); const activeStore2 = await createStore(storageKey, Exists.ShouldExist); @@ -278,8 +274,7 @@ describe('Store Sequence', async () => { sequenceTest.setTestConstructor(async () => { const runtime = new Runtime(); const arc = runtime.newArc('arc', id => new VolatileStorageKey(id, '')); - 
Runtime.resetDrivers(); - VolatileStorageDriverProvider.register(arc); + VolatileStorageDriverProvider.register(runtime, arc); const storageKey = new VolatileStorageKey(arc.id, 'unique'); const activeStore1 = await createStore(storageKey, Exists.ShouldCreate); const activeStore2 = await createStore(storageKey, Exists.ShouldExist); diff --git a/src/runtime/tests/arc-test.ts b/src/runtime/tests/arc-test.ts index 14b240c9f20..a0911ba8f0f 100644 --- a/src/runtime/tests/arc-test.ts +++ b/src/runtime/tests/arc-test.ts @@ -64,12 +64,7 @@ async function setup(storageKeyPrefix: (arcId: ArcId) => StorageKey) { } describe('Arc new storage', () => { - afterEach(() => { - Runtime.resetDrivers(); - }); - it('preserves data when round-tripping through serialization', async () => { - Runtime.resetDrivers(); // TODO(shans): deserialization currently uses a RamDisk store to deserialize into because we don't differentiate // between parsing a manifest for public consumption (e.g. with RamDisk resources in it) and parsing a serialized // arc (with an @activeRecipe). 
We'll fix this by adding a 'private' keyword to store serializations which will @@ -98,18 +93,18 @@ describe('Arc new storage', () => { defineParticle(({Particle}) => class Noop extends Particle {}); ` }); + const runtime = new Runtime({loader}); - const manifest = await runtime.parseFile('./manifest'); - const dataClass = Entity.createEntityClass(manifest.findSchemaByName('Data'), null); - const id = ArcId.fromString('test'); - const storageKey = new VolatileStorageKey(id, 'unique'); - const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({id, storageKey, loader, context: manifest, storageManager}); + runtime.context = await runtime.parseFile('./manifest'); + + const opts = runtime.buildArcParams('test'); + const arc = new Arc(opts); + const dataClass = Entity.createEntityClass(runtime.context.findSchemaByName('Data'), null); const varStore = await arc.createStore(new SingletonType(dataClass.type), undefined, 'test:0'); - const colStore = await arc.createStore(dataClass.type.collectionOf(), undefined, 'test:1'); + const colStore = await arc.createStore(dataClass.type.collectionOf(), undefined, 'test:1'); - const refVarKey = new ReferenceModeStorageKey(new VolatileStorageKey(id, 'colVar'), new VolatileStorageKey(id, 'refVar')); + const refVarKey = new ReferenceModeStorageKey(new VolatileStorageKey(arc.id, 'colVar'), new VolatileStorageKey(arc.id, 'refVar')); const refVarStore = await arc.createStore(new SingletonType(dataClass.type), undefined, 'test:2', [], refVarKey); const varHandle = await handleForStoreInfo(varStore, arc); @@ -126,7 +121,7 @@ describe('Arc new storage', () => { await colHandle.add(d3); await refVarHandle.set(d4); - const recipe = manifest.recipes[0]; + const recipe = runtime.context.recipes[0]; recipe.handles[0].mapToStorage(varStore); recipe.handles[1].mapToStorage(colStore); recipe.handles[2].mapToStorage(refVarStore); @@ -135,13 +130,15 @@ describe('Arc new storage', () => { await arc.instantiate(recipe); const 
serialization = await arc.serialize(); + console.log(serialization); arc.dispose(); await varHandle.clear(); await colHandle.clear(); await refVarHandle.clear(); - const arc2 = await Arc.deserialize({serialization, loader, fileName: '', context: manifest, storageManager}); + const {context, storageManager, driverFactory} = opts; + const arc2 = await Arc.deserialize({fileName: '', serialization, loader, context, storageManager, driverFactory}); const varStore2 = arc2.findStoreById(varStore.id) as StoreInfo; const colStore2 = arc2.findStoreById(colStore.id) as StoreInfo; const refVarStore2 = arc2.findStoreById(refVarStore.id) as StoreInfo; @@ -198,10 +195,6 @@ describe('Arc new storage', () => { const doSetup = async () => setup(arcId => new VolatileStorageKey(arcId, '')); describe('Arc', () => { - afterEach(() => { - Runtime.resetDrivers(); - }); - it('idle can safely be called multiple times ', async () => { const runtime = new Runtime(); const arc = runtime.newArc('test'); @@ -242,8 +235,8 @@ describe('Arc', () => { }); it('optional provided handles do not resolve without parent', async () => { - const loader = new Loader(); - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` schema Thing value: Text @@ -262,12 +255,13 @@ describe('Arc', () => { TestParticle a: reads thingA b: writes thingB - `, {loader, fileName: process.cwd() + '/input.manifest'}); + `, {fileName: process.cwd() + '/input.manifest'}); const id = ArcId.newForTest('test'); const storageKey = new VolatileStorageKey(id, ''); const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager}); + const {loader, driverFactory} = runtime; + const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager, driverFactory}); const thingClass = 
Entity.createEntityClass(manifest.findSchemaByName('Thing'), null); const aStore = await arc.createStore(new SingletonType(thingClass.type), 'aStore', 'test:1'); @@ -294,7 +288,7 @@ describe('Arc', () => { assert.isNull(await dHandle.fetch()); }); - it('FOOBL instantiates recipes only if fate is correct', async () => { + it('instantiates recipes only if fate is correct', async () => { const loader = new Loader(null, { './a.js': ` defineParticle(({Particle}) => class Noop extends Particle {}); @@ -360,8 +354,8 @@ describe('Arc', () => { }); it('required provided handles do not resolve without parent', async () => { - const loader = new Loader(); - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` schema Thing value: Text @@ -380,12 +374,13 @@ describe('Arc', () => { TestParticle a: reads thingA b: writes thingB - `, {loader, fileName: process.cwd() + '/input.manifest'}); + `, {fileName: process.cwd() + '/input.manifest'}); const id = ArcId.newForTest('test'); const storageKey = new VolatileStorageKey(id, ''); const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager}); + const {loader, driverFactory} = runtime; + const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager, driverFactory}); const thingClass = Entity.createEntityClass(manifest.findSchemaByName('Thing'), null); const aStore = await arc.createStore(new SingletonType(thingClass.type), 'aStore', 'test:1'); @@ -414,8 +409,8 @@ describe('Arc', () => { it('optional provided handles cannot resolve without parent', async () => { await assertThrowsAsync(async () => { - const loader = new Loader(); - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` schema Thing value: Text @@ -435,11 +430,12 @@ describe('Arc', () => { 
a: reads thingA b: writes thingB d: writes maybeThingD - `, {loader, fileName: process.cwd() + '/input.manifest'}); + `, {fileName: process.cwd() + '/input.manifest'}); const id = ArcId.newForTest('test'); const storageKey = new VolatileStorageKey(id, ''); const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager}); + const {loader, driverFactory} = runtime; + const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager, driverFactory}); const thingClass = Entity.createEntityClass(manifest.findSchemaByName('Thing'), null); const aStore = await arc.createStore(new SingletonType(thingClass.type), 'aStore', 'test:1'); @@ -486,9 +482,9 @@ describe('Arc', () => { const id = ArcId.newForTest('test'); const storageKey = new VolatileStorageKey(id, ''); const storageManager = new DirectStorageEndpointManager(); - const loader = runtime.loader; const slotComposer = new SlotComposer(); - const arc = new Arc({loader, context, id, storageKey, storageManager, slotComposer}); + const {loader, driverFactory} = runtime; + const arc = new Arc({loader, context, id, storageKey, storageManager, slotComposer, driverFactory}); const thingClass = Entity.createEntityClass(context.findSchemaByName('Thing'), null); const aStore = await arc.createStore(new SingletonType(thingClass.type), 'aStore', 'test:1'); @@ -508,8 +504,8 @@ describe('Arc', () => { }); it('optional provided handles are not required to resolve with dependencies', async () => { - const loader = new Loader(); - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` schema Thing value: Text @@ -529,11 +525,12 @@ describe('Arc', () => { a: reads thingA b: writes thingB c: reads maybeThingC - `, {loader, fileName: process.cwd() + '/input.manifest'}); + `, {fileName: process.cwd() + '/input.manifest'}); 
const id = ArcId.newForTest('test'); const storageKey = new VolatileStorageKey(id, ''); const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager}); + const {loader, driverFactory} = runtime; + const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager, driverFactory}); const thingClass = Entity.createEntityClass(manifest.findSchemaByName('Thing'), null); const aStore = await arc.createStore(new SingletonType(thingClass.type), 'aStore', 'test:1'); @@ -564,8 +561,8 @@ describe('Arc', () => { it('required provided handles must resolve with dependencies', async () => { await assertThrowsAsync(async () => { - const loader = new Loader(); - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` schema Thing value: Text @@ -585,11 +582,12 @@ describe('Arc', () => { a: reads thingA b: writes thingB c: reads maybeThingC - `, {loader, fileName: process.cwd() + '/input.manifest'}); + `, {fileName: process.cwd() + '/input.manifest'}); const id = ArcId.newForTest('test'); const storageKey = new VolatileStorageKey(id, ''); const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager}); + const {loader, driverFactory} = runtime; + const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager, driverFactory}); const thingClass = Entity.createEntityClass(manifest.findSchemaByName('Thing'), null); const aStore = await arc.createStore(new SingletonType(thingClass.type), 'aStore', 'test:1'); @@ -609,8 +607,8 @@ describe('Arc', () => { }); it('optional provided handles can resolve with parent 1', async () => { - const loader = new Loader(); - const manifest = await Manifest.parse(` + const runtime = 
new Runtime(); + const manifest = await runtime.parse(` schema Thing value: Text @@ -631,11 +629,12 @@ describe('Arc', () => { b: writes thingB c: reads maybeThingC d: writes maybeThingD - `, {loader, fileName: process.cwd() + '/input.manifest'}); + `, {fileName: process.cwd() + '/input.manifest'}); const id = ArcId.newForTest('test'); const storageKey = new VolatileStorageKey(id, ''); const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager}); + const {loader, driverFactory} = runtime; + const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager, driverFactory}); const thingClass = Entity.createEntityClass(manifest.findSchemaByName('Thing'), null); const aStore = await arc.createStore(new SingletonType(thingClass.type), 'aStore', 'test:1'); @@ -663,8 +662,8 @@ describe('Arc', () => { }); it('required provided handles can resolve with parent 2', async () => { - const loader = new Loader(); - const manifest = await Manifest.parse(` + const runtime = new Runtime(); + const manifest = await runtime.parse(` schema Thing value: Text @@ -685,11 +684,12 @@ describe('Arc', () => { b: writes thingB c: reads maybeThingC d: writes maybeThingD - `, {loader, fileName: process.cwd() + '/input.manifest'}); + `, {fileName: process.cwd() + '/input.manifest'}); const id = ArcId.newForTest('test'); const storageKey = new VolatileStorageKey(id, ''); const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager}); + const {loader, driverFactory} = runtime; + const arc = new Arc({slotComposer: new SlotComposer(), loader, context: manifest, id, storageKey, storageManager, driverFactory}); const thingClass = Entity.createEntityClass(manifest.findSchemaByName('Thing'), null); const aStore = await 
arc.createStore(new SingletonType(thingClass.type), 'aStore', 'test:1'); @@ -717,19 +717,20 @@ describe('Arc', () => { }); it('deserializing a serialized empty arc produces an empty arc', async () => { + const runtime = new Runtime(); const slotComposer = new SlotComposer(); - const loader = new Loader(); const id = Id.fromString('test'); const storageKey = new VolatileStorageKey(id, ''); const context = new Manifest({id}); const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({slotComposer, loader, id, storageKey, context, storageManager}); + const {loader, driverFactory} = runtime; + const arc = new Arc({slotComposer, loader, context, id, storageKey, storageManager, driverFactory}); await arc.idle; const serialization = await arc.serialize(); arc.dispose(); - const newArc = await Arc.deserialize({serialization, loader, slotComposer, context, fileName: 'foo.manifest', storageManager}); + const newArc = await Arc.deserialize({serialization, loader, slotComposer, context, fileName: 'foo.manifest', storageManager, driverFactory}); await newArc.idle; assert.strictEqual(newArc.stores.length, 0); assert.strictEqual(newArc.activeRecipe.toString(), `@active\n${arc.activeRecipe.toString()}`); @@ -738,7 +739,7 @@ describe('Arc', () => { }); it('deserializing a simple serialized arc produces that arc', async () => { - const {arc, context, recipe, Foo, Bar, loader} = await doSetup(); + const {arc, context, recipe, Foo, Bar, loader} = await doSetup(); let fooStore = await arc.createStore(new SingletonType(Foo.type), undefined, 'test:1'); const fooHandle = await handleForStoreInfo(fooStore, arc); const fooStoreCallbacks = CallbackTracker.create(await arc.getActiveStore(fooStore), 1); @@ -758,8 +759,8 @@ describe('Arc', () => { const serialization = await arc.serialize(); arc.dispose(); - const storageManager = arc.storageManager; - const newArc = await Arc.deserialize({serialization, loader, fileName: '', slotComposer: new SlotComposer(), context, 
storageManager}); + const {driverFactory, storageManager} = arc; + const newArc = await Arc.deserialize({serialization, loader, fileName: '', slotComposer: new SlotComposer(), context, storageManager, driverFactory}); await newArc.idle; fooStore = newArc.findStoreById(fooStore.id) as StoreInfo; barStore = newArc.findStoreById(barStore.id) as StoreInfo; @@ -787,11 +788,13 @@ describe('Arc', () => { '*': 'defineParticle(({Particle}) => class extends Particle {});', }); - const manifest = await Manifest.load('./manifest', loader); + const runtime = new Runtime({loader}); + const manifest = await runtime.parseFile('./manifest'); const id = Id.fromString('test'); const storageKey = new VolatileStorageKey(id, ''); const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({id, storageKey, loader, context: manifest, storageManager}); + const {driverFactory} = runtime; + const arc = new Arc({id, storageKey, loader, context: manifest, storageManager, driverFactory}); const recipe = manifest.recipes[0]; assert(recipe.normalize()); assert(recipe.isResolved()); @@ -821,22 +824,23 @@ describe('Arc', () => { const storageKey1 = new VolatileStorageKey(id1, ''); const storageKey2 = new VolatileStorageKey(id2, ''); - Runtime.resetDrivers(); // runtime creates a default RamDisk with SimpleVolatileMemoryProvider - assert.equal(DriverFactory.providers.size, 1); + const runtime = new Runtime(); + const {driverFactory} = runtime; + assert.equal(driverFactory.providers.size, 1); const storageManager = new DirectStorageEndpointManager(); - const arc1 = new Arc({id: id1, storageKey: storageKey1, loader: new Loader(), context: new Manifest({id: id1}), storageManager}); - assert.strictEqual(DriverFactory.providers.size, 2); + const arc1 = new Arc({id: id1, storageKey: storageKey1, loader: new Loader(), context: new Manifest({id: id1}), storageManager, driverFactory}); + assert.strictEqual(driverFactory.providers.size, 2); - const arc2 = new Arc({id: id2, storageKey: 
storageKey2, loader: new Loader(), context: new Manifest({id: id2}), storageManager}); - assert.strictEqual(DriverFactory.providers.size, 3); + const arc2 = new Arc({id: id2, storageKey: storageKey2, loader: new Loader(), context: new Manifest({id: id2}), storageManager, driverFactory}); + assert.strictEqual(driverFactory.providers.size, 3); arc1.dispose(); - assert.strictEqual(DriverFactory.providers.size, 2); + assert.strictEqual(driverFactory.providers.size, 2); arc2.dispose(); - assert.equal(DriverFactory.providers.size, 1); + assert.equal(driverFactory.providers.size, 1); }); it('preserves create handle ids if specified', Flags.withDefaultReferenceMode(async () => { diff --git a/src/runtime/tests/capabilities-resolver-test.ts b/src/runtime/tests/capabilities-resolver-test.ts index 917e308bce0..f7ff7b7c5a1 100644 --- a/src/runtime/tests/capabilities-resolver-test.ts +++ b/src/runtime/tests/capabilities-resolver-test.ts @@ -10,22 +10,23 @@ import {assert} from '../../platform/chai-web.js'; import {Flags} from '../flags.js'; import {VolatileStorageKey} from '../storage/drivers/volatile.js'; -import {RamDiskStorageKey, RamDiskStorageDriverProvider} from '../storage/drivers/ramdisk.js'; +import {RamDiskStorageKey} from '../storage/drivers/ramdisk.js'; import {DatabaseStorageKey, MemoryDatabaseStorageKey, PersistentDatabaseStorageKey, MemoryDatabaseStorageKeyFactory} from '../storage/database-storage-key.js'; import {StorageKey} from '../storage/storage-key.js'; import {ReferenceModeStorageKey} from '../storage/reference-mode-storage-key.js'; import {EntityType, ReferenceType, Schema} from '../../types/lib-types.js'; -import {CapabilitiesResolver} from '../capabilities-resolver.js'; +import {_CapabilitiesResolver} from '../capabilities-resolver.js'; import {ArcId} from '../id.js'; import {Capabilities, Persistence, Ttl, Shareable, DeletePropagation} from '../capabilities.js'; import {assertThrowsAsync} from '../../testing/test-util.js'; import {Runtime} from 
'../runtime.js'; import {Manifest} from '../manifest.js'; -import {TestVolatileMemoryProvider} from '../testing/test-volatile-memory-provider.js'; describe('Capabilities Resolver New', () => { - afterEach(() => { - Runtime.resetDrivers(); + + let runtime; + beforeEach(() => { + runtime = new Runtime(); }); type StorageKeyType = typeof VolatileStorageKey|typeof RamDiskStorageKey|typeof DatabaseStorageKey; @@ -38,7 +39,7 @@ describe('Capabilities Resolver New', () => { `Expected ${refKey.storageKey.constructor.name} to be instance of ${expectedType.name}`); } const entityType = new EntityType(new Schema(['Thing'], {result: 'Text'})); - const referenceType = new ReferenceType(entityType); + //const referenceType = new ReferenceType(entityType); const handleId = 'h0'; const unspecified = Capabilities.fromAnnotations(); @@ -49,7 +50,7 @@ describe('Capabilities Resolver New', () => { const onDiskWithTtl = Capabilities.create([Persistence.onDisk(), Ttl.minutes(30)]); it('fails creating keys with no factories', Flags.withDefaultReferenceMode(async () => { - const resolver = new CapabilitiesResolver({arcId: ArcId.newForTest('test')}); + const resolver = new _CapabilitiesResolver({arcId: ArcId.newForTest('test')}); // Verify storage keys for none of the capabilities cannot be created. await assertThrowsAsync(async () => resolver.createStorageKey(unspecified, entityType, handleId)); await assertThrowsAsync(async () => resolver.createStorageKey(inMemory, entityType, handleId)); @@ -62,7 +63,7 @@ describe('Capabilities Resolver New', () => { // Register volatile storage key factory. // Verify only volatile (in-memory, no ttl) storage key can be created. 
//VolatileStorageKey.register(); - const resolver = new CapabilitiesResolver({arcId: ArcId.newForTest('test')}); + const resolver = new _CapabilitiesResolver({arcId: ArcId.newForTest('test')}); const createKey = resolver.createStorageKey.bind(resolver); verifyReferenceModeStorageKey(await createKey(unspecified, entityType, handleId), VolatileStorageKey); verifyReferenceModeStorageKey(await createKey(inMemory, entityType, handleId), VolatileStorageKey); @@ -73,9 +74,9 @@ describe('Capabilities Resolver New', () => { })); it('creates keys with db only factories', Flags.withDefaultReferenceMode(async () => { - Runtime.resetDrivers(true); - DatabaseStorageKey.register(); - const resolver = new CapabilitiesResolver({arcId: ArcId.newForTest('test')}); + runtime.resetDrivers(); + DatabaseStorageKey.register(runtime); + const resolver = new _CapabilitiesResolver({arcId: ArcId.newForTest('test')}); const createKey = resolver.createStorageKey.bind(resolver); verifyReferenceModeStorageKey(await createKey(unspecified, entityType, handleId), MemoryDatabaseStorageKey); verifyReferenceModeStorageKey(await createKey(inMemory, entityType, handleId), MemoryDatabaseStorageKey); @@ -87,9 +88,8 @@ describe('Capabilities Resolver New', () => { it('creates keys with volatile and db factories', Flags.withDefaultReferenceMode(async () => { // Register database storage key factories. Verify all storage keys created as expected. 
- Runtime.resetDrivers(); - DatabaseStorageKey.register(); - const resolver = new CapabilitiesResolver({arcId: ArcId.newForTest('test')}); + DatabaseStorageKey.register(runtime); + const resolver = new _CapabilitiesResolver({arcId: ArcId.newForTest('test')}); const verify = async (a, b, c, d) => verifyReferenceModeStorageKey(await resolver.createStorageKey(a, b, c), d); await verify(unspecified, entityType, handleId, VolatileStorageKey); await verify(Capabilities.create([new Shareable(false)]), entityType, handleId, VolatileStorageKey); @@ -101,7 +101,7 @@ describe('Capabilities Resolver New', () => { })); it('creates keys with custom factory', Flags.withDefaultReferenceMode(async () => { - const resolver = new CapabilitiesResolver({arcId: ArcId.newForTest('test'), factories: [new MemoryDatabaseStorageKeyFactory()]}); + const resolver = new _CapabilitiesResolver({arcId: ArcId.newForTest('test'), factories: [new MemoryDatabaseStorageKeyFactory()]}); verifyReferenceModeStorageKey(await resolver.createStorageKey(unspecified, entityType, handleId), VolatileStorageKey); verifyReferenceModeStorageKey(await resolver.createStorageKey(inMemory, entityType, handleId), VolatileStorageKey); verifyReferenceModeStorageKey(await resolver.createStorageKey(inMemoryWithTtls, entityType, handleId), MemoryDatabaseStorageKey); @@ -110,7 +110,7 @@ describe('Capabilities Resolver New', () => { })); it('creates keys for recipe with volatile and db factories', Flags.withDefaultReferenceMode(async () => { - DatabaseStorageKey.register(); + DatabaseStorageKey.register(runtime); const manifestStr = ` recipe h0: create @@ -120,7 +120,7 @@ describe('Capabilities Resolver New', () => { h4: create @queryable @ttl('30m') `; const recipe = (await Manifest.parse(manifestStr)).recipes[0]; - const resolver = new CapabilitiesResolver({arcId: ArcId.newForTest('test')}); + const resolver = new _CapabilitiesResolver({arcId: ArcId.newForTest('test')}); verifyReferenceModeStorageKey(await 
resolver.createStorageKey( recipe.handles[0].capabilities, entityType, handleId), VolatileStorageKey); diff --git a/src/runtime/tests/description-test.ts b/src/runtime/tests/description-test.ts index 71d924ae9c6..3453037ecc9 100644 --- a/src/runtime/tests/description-test.ts +++ b/src/runtime/tests/description-test.ts @@ -594,7 +594,8 @@ recipe }); it('capitalizes when some particles do not have descriptions', async () => { - const manifest = (await Manifest.parse(` + const runtime = new Runtime(); + const manifest = (await runtime.parse(` interface DummyInterface particle NoDescription particle NoDescMuxer @@ -618,8 +619,8 @@ recipe const recipe = manifest.recipes[0]; // Cannot use createTestArc here, because capabilities-resolver cannot be set to null, // and interface returns a null schema, and cannot generate hash. - const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({id: ArcId.newForTest('test'), context: manifest, loader: new Loader(), storageManager}); + const {storageManager, driverFactory} = runtime; + const arc = new Arc({id: ArcId.newForTest('test'), context: manifest, loader: new Loader(), storageManager, driverFactory}); arc['_activeRecipe'] = recipe; arc['_recipeDeltas'].push({particles: recipe.particles, handles: recipe.handles, slots: recipe.slots, patterns: recipe.patterns}); diff --git a/src/runtime/tests/manifest-test.ts b/src/runtime/tests/manifest-test.ts index 965d3a4134a..a3baa584bc7 100644 --- a/src/runtime/tests/manifest-test.ts +++ b/src/runtime/tests/manifest-test.ts @@ -12,7 +12,7 @@ import {parse} from '../../gen/runtime/manifest-parser.js'; import {assert} from '../../platform/chai-web.js'; import {fs} from '../../platform/fs-web.js'; import {path} from '../../platform/path-web.js'; -import {Manifest, ManifestParseOptions, ErrorSeverity} from '../manifest.js'; +import {Manifest, ErrorSeverity} from '../manifest.js'; import {checkDefined, checkNotNull} from '../testing/preconditions.js'; import {Loader} 
from '../../platform/loader.js'; import {Dictionary} from '../../utils/lib-utils.js'; @@ -22,17 +22,15 @@ import {ClaimType} from '../arcs-types/enums.js'; import {CheckHasTag, CheckBooleanExpression, CheckCondition, CheckIsFromStore, CheckImplication} from '../arcs-types/check.js'; import {ProvideSlotConnectionSpec, ParticleDataflowType} from '../arcs-types/particle-spec.js'; import {Entity} from '../entity.js'; -import {RamDiskStorageDriverProvider, RamDiskStorageKey} from '../storage/drivers/ramdisk.js'; +import {RamDiskStorageKey} from '../storage/drivers/ramdisk.js'; import {digest} from '../../platform/digest-web.js'; -import {DriverFactory} from '../storage/drivers/driver-factory.js'; -import {TestVolatileMemoryProvider} from '../testing/test-volatile-memory-provider.js'; import {FirebaseStorageDriverProvider} from '../storage/drivers/firebase.js'; import {Runtime} from '../runtime.js'; import {mockFirebaseStorageKeyOptions} from '../storage/testing/mock-firebase.js'; import {Flags} from '../flags.js'; import {TupleType, CollectionType, EntityType, TypeVariable, Schema, BinaryExpression, FieldNamePrimitive, NumberPrimitive, PrimitiveField} from '../../types/lib-types.js'; -import {ActiveCollectionEntityStore, handleForStoreInfo, CollectionEntityType} from '../storage/storage.js'; +import {handleForStoreInfo, CollectionEntityType} from '../storage/storage.js'; import {Ttl} from '../capabilities.js'; import {StoreInfo} from '../storage/store-info.js'; import {deleteFieldRecursively} from '../../utils/lib-utils.js'; @@ -50,13 +48,11 @@ describe('manifest', async () => { let runtime; let storageManager; beforeEach(() => { - Runtime.resetDrivers(); runtime = new Runtime(); storageManager = new DirectStorageEndpointManager(); }); afterEach(() => { - Runtime.resetDrivers(); }); it('can parse a manifest containing a recipe', async () => { @@ -2799,9 +2795,7 @@ resource SomeName }); it('can parse a manifest with storage key handle definitions', async () => { - 
FirebaseStorageDriverProvider.register( - new Runtime().getCacheService(), - mockFirebaseStorageKeyOptions); + FirebaseStorageDriverProvider.register(runtime, runtime.getCacheService(), mockFirebaseStorageKeyOptions); const manifest = await runtime.parse(` schema Bar value: Text @@ -4465,13 +4459,6 @@ describe('annotations', async () => { '*': '{"root": {}, "locations": {}}' }); const runtime = new Runtime(); - beforeEach(() => { - Runtime.resetDrivers(); - }); - afterEach(() => { - Runtime.resetDrivers(); - }); - it('parses annotations', async () => { const annotationsStr = ` annotation noParam diff --git a/src/runtime/tests/particle-api-test.ts b/src/runtime/tests/particle-api-test.ts index 0b47fe11be4..6d0d58919ea 100644 --- a/src/runtime/tests/particle-api-test.ts +++ b/src/runtime/tests/particle-api-test.ts @@ -350,7 +350,7 @@ describe('particle-api', () => { // TODO(cypher1): Disabling this for now. The resolution seems to depend on order. // It is likely that this usage was depending on behavior that may not be intended. 
it.skip('can load a recipe referencing a manifest store', async () => { - RamDiskStorageDriverProvider.register(new TestVolatileMemoryProvider()); + //RamDiskStorageDriverProvider.register(new TestVolatileMemoryProvider()); const arc = await loadFilesIntoNewArc({ manifest: ` diff --git a/src/runtime/tests/particle-interface-loading-test.ts b/src/runtime/tests/particle-interface-loading-test.ts index 6e6eb150aea..11d61d5cd98 100644 --- a/src/runtime/tests/particle-interface-loading-test.ts +++ b/src/runtime/tests/particle-interface-loading-test.ts @@ -68,11 +68,12 @@ describe('particle interface loading', () => { } } }; - });`}); - - const manifest = await Manifest.load('./src/runtime/tests/artifacts/test-particles.manifest', loader); - const storageManager = new DirectStorageEndpointManager(); - const arc = new Arc({id: ArcId.newForTest('test'), loader, context: manifest, storageManager}); + });` + }); + const runtime = new Runtime({loader}); + const manifest = await runtime.parseFile('./src/runtime/tests/artifacts/test-particles.manifest'); + const {driverFactory, storageManager} = runtime; + const arc = new Arc({id: ArcId.newForTest('test'), loader, context: manifest, storageManager, driverFactory}); const fooType = new EntityType(manifest.schemas.Foo); const barType = new EntityType(manifest.schemas.Bar); @@ -293,8 +294,8 @@ describe('particle interface loading', () => { const serialization = await arc.serialize(); arc.dispose(); - const storageManager = new DirectStorageEndpointManager(); - const arc2 = await Arc.deserialize({serialization, loader, fileName: '', context: manifest, storageManager}); + const {driverFactory, storageManager} = runtime; + const arc2 = await Arc.deserialize({serialization, loader, fileName: '', context: manifest, storageManager, driverFactory}); await arc2.idle; const fooHandle2 = await handleForStoreInfo(arc2.stores.find(StoreInfo.isSingletonEntityStore), arc2); diff --git a/src/runtime/tests/runtime-test.ts 
b/src/runtime/tests/runtime-test.ts index 57a446f36c0..156af9c9280 100644 --- a/src/runtime/tests/runtime-test.ts +++ b/src/runtime/tests/runtime-test.ts @@ -45,18 +45,16 @@ function assertManifestsEqual(actual: Manifest, expected: Manifest) { } describe('Runtime', () => { - afterEach(() => { - Runtime.resetDrivers(); - }); - it('gets an arc description for an arc', async () => { - const storageManager = new DirectStorageEndpointManager(); + const runtime = new Runtime(); + const {storageManager, driverFactory} = runtime; const arc = new Arc({ slotComposer: new SlotComposer(), id: ArcId.newForTest('test'), loader: new Loader(), context: new Manifest({id: ArcId.newForTest('test')}), - storageManager + storageManager, + driverFactory }); const description = await Description.create(arc); const expected = await description.getArcDescription(); diff --git a/src/runtime/tests/test-environment-test.ts b/src/runtime/tests/test-environment-test.ts index 86ef8a3fb23..a7469ad867e 100644 --- a/src/runtime/tests/test-environment-test.ts +++ b/src/runtime/tests/test-environment-test.ts @@ -24,5 +24,4 @@ afterEach(function() { // Error function not yet included in mocha typescript declarations... 
this.test['error'](exception); } - Runtime.resetDrivers(); }); diff --git a/src/tests/arc-integration-test.ts b/src/tests/arc-integration-test.ts index 6c161b2160c..980710fa700 100644 --- a/src/tests/arc-integration-test.ts +++ b/src/tests/arc-integration-test.ts @@ -17,10 +17,6 @@ import {TestVolatileMemoryProvider} from '../runtime/testing/test-volatile-memor import {storageKeyPrefixForTest} from '../runtime/testing/handle-for-test.js'; describe('Arc integration', () => { - afterEach(() => { - Runtime.resetDrivers(); - }); - it('copies store tags', async () => { const loader = new Loader(null, { './p.js': `defineParticle(({Particle}) => class P extends Particle { diff --git a/src/tests/particles/common-test.ts b/src/tests/particles/common-test.ts index 39ed6931342..d81b33235ec 100644 --- a/src/tests/particles/common-test.ts +++ b/src/tests/particles/common-test.ts @@ -19,9 +19,6 @@ import {storageKeyPrefixForTest} from '../../runtime/testing/handle-for-test.js' import {ActiveCollectionEntityStore, handleForActiveStore} from '../../runtime/storage/storage.js'; describe('common particles test', () => { - afterEach(() => { - Runtime.resetDrivers(); - }); it('resolves after cloning', async () => { const memoryProvider = new TestVolatileMemoryProvider(); const manifest = await Manifest.parse(` diff --git a/src/tests/particles/dataflow-test.ts b/src/tests/particles/dataflow-test.ts index 676bd3a7dd2..b0d0fdb3193 100644 --- a/src/tests/particles/dataflow-test.ts +++ b/src/tests/particles/dataflow-test.ts @@ -27,5 +27,4 @@ describe('Dataflow example recipes', () => { } }); } - Runtime.resetDrivers(); }); diff --git a/src/tests/particles/particles-test.ts b/src/tests/particles/particles-test.ts index 3c443e673d4..9b352735ab7 100644 --- a/src/tests/particles/particles-test.ts +++ b/src/tests/particles/particles-test.ts @@ -19,14 +19,6 @@ describe('Particle definitions', () => { const runtime = new Runtime(); const filenames = glob.sync('particles/**/*.arcs'); - 
beforeEach(() => { - Runtime.resetDrivers(); - }); - - afterEach(() => { - Runtime.resetDrivers(); - }); - filenames .forEach(filename => { // skip experimental Native partices for now as they need a heavyweight build step diff --git a/src/tests/recipe-descriptions-test.ts b/src/tests/recipe-descriptions-test.ts index d152d616fd5..f7bae1eab56 100644 --- a/src/tests/recipe-descriptions-test.ts +++ b/src/tests/recipe-descriptions-test.ts @@ -10,16 +10,12 @@ import {assert} from '../platform/chai-web.js'; import {Loader} from '../platform/loader.js'; -import {Manifest} from '../runtime/manifest.js'; import {Runtime} from '../runtime/runtime.js'; import {StrategyTestHelper} from '../planning/testing/strategy-test-helper.js'; -import {TestVolatileMemoryProvider} from '../runtime/testing/test-volatile-memory-provider.js'; -import {RamDiskStorageDriverProvider} from '../runtime/storage/drivers/ramdisk.js'; import {VolatileStorageKey} from '../runtime/storage/drivers/volatile.js'; import {ArcId} from '../runtime/id.js'; import {storageKeyPrefixForTest} from '../runtime/testing/handle-for-test.js'; import {newRecipe} from '../runtime/recipe/lib-recipe.js'; -import {DriverFactory} from '../runtime/storage/drivers/driver-factory.js'; describe('recipe descriptions test', () => { // Avoid initialising non-POD variables globally, since they would be constructed even when @@ -35,11 +31,6 @@ describe('recipe descriptions test', () => { }); }); - afterEach(() => { - Runtime.resetDrivers(); - }); - - function createManifestString(options) { options = options || {}; diff --git a/src/tools/allocator-recipe-resolver.ts b/src/tools/allocator-recipe-resolver.ts index 38098a6a2a9..a99b5179062 100644 --- a/src/tools/allocator-recipe-resolver.ts +++ b/src/tools/allocator-recipe-resolver.ts @@ -13,7 +13,7 @@ import {Runtime} from '../runtime/runtime.js'; import {Manifest} from '../runtime/manifest.js'; import {Type} from '../types/lib-types.js'; import {Recipe, RecipeComponent} from 
'../runtime/recipe/lib-recipe.js'; -import {CapabilitiesResolver} from '../runtime/capabilities-resolver.js'; +import {_CapabilitiesResolver} from '../runtime/capabilities-resolver.js'; import {IngressValidation} from '../runtime/policy/ingress-validation.js'; import {CreatableStorageKey} from '../runtime/storage/creatable-storage-key.js'; import {DatabaseStorageKey} from '../runtime/storage/database-storage-key.js'; @@ -43,7 +43,7 @@ export class AllocatorRecipeResolver { constructor(context: Manifest, private randomSalt: string, policiesManifest?: Manifest|null) { this.runtime = new Runtime({context}); - DatabaseStorageKey.register(); + DatabaseStorageKey.register(this.runtime); this.ingressValidation = policiesManifest ? new IngressValidation(policiesManifest.policies) : null; } @@ -77,7 +77,7 @@ export class AllocatorRecipeResolver { const handleById: {[index: string]: ({handles: Handle[], store?: StoreInfo})} = {}; // Find all `create` handles of long running recipes. for (const recipe of recipes.filter(r => isLongRunning(r))) { - const resolver = new CapabilitiesResolver({arcId: Id.fromString(findLongRunningArcId(recipe))}); + const resolver = new _CapabilitiesResolver({arcId: Id.fromString(findLongRunningArcId(recipe))}); for (const createHandle of recipe.handles.filter(h => h.fate === 'create' && h.id)) { if (handleById[createHandle.id]) { throw new AllocatorRecipeResolverError(` @@ -179,7 +179,7 @@ export class AllocatorRecipeResolver { if (isLongRunning(handle.recipe) && handle.id) { assert(!handle.storageKey); // store's storage key was set, but not the handle's const arcId = Id.fromString(findLongRunningArcId(handle.recipe)); - const resolver = new CapabilitiesResolver({arcId}); + const resolver = new _CapabilitiesResolver({arcId}); assert(handle.type.isResolved()); if (handle.type.getEntitySchema() === null) { throw new AllocatorRecipeResolverError(`Handle '${handle.id}' was not properly resolved.`); diff --git 
a/src/tools/tests/allocator-recipe-resolver-test.ts b/src/tools/tests/allocator-recipe-resolver-test.ts index a1276fa7995..fe874231145 100644 --- a/src/tools/tests/allocator-recipe-resolver-test.ts +++ b/src/tools/tests/allocator-recipe-resolver-test.ts @@ -27,7 +27,6 @@ import {TestVolatileMemoryProvider} from '../../runtime/testing/test-volatile-me const randomSalt = 'random_salt'; describe('allocator recipe resolver', () => { - afterEach(() => Runtime.resetDrivers()); it('detects long running arc', async () => { const manifest = (await Manifest.parse(` recipe Zero @@ -644,7 +643,6 @@ describe('allocator recipe resolver', () => { }); }); describe('allocator recipe resolver - ingress restricting', () => { - afterEach(() => Runtime.resetDrivers()); const particleSpec = ` particle Writer thing: writes Thing {a: Text, b: Text, c: Text, d: Text, e: Text} @@ -740,7 +738,6 @@ particle ReaderB const recipes = await resolver.resolve(); const writingRecipe = recipes.find(recipe => recipe.name === 'WritingRecipe'); assert.equal(writingRecipe.handles[0].type.resolvedType().toString(), expectedSchema); - Runtime.resetDrivers(); }; it('restricts writer fields by one writer-reader recipe', async () => { diff --git a/src/tools/tests/codegen-unit-test-base.ts b/src/tools/tests/codegen-unit-test-base.ts index 7fe53675fc0..a4c738b4199 100644 --- a/src/tools/tests/codegen-unit-test-base.ts +++ b/src/tools/tests/codegen-unit-test-base.ts @@ -95,7 +95,6 @@ export abstract class ManifestCodegenUnitTest extends CodegenUnitTest { */ export async function runCompute(testCase: CodegenUnitTest, test: Test): Promise { Flags.reset(); - Runtime.resetDrivers(); const result = await testCase.compute(test.input, test.options, test); return Array.isArray(result) ? 
result : [result]; } diff --git a/src/tools/tests/recipe2plan-test.ts b/src/tools/tests/recipe2plan-test.ts index 75a8ab9846f..244ed886fe8 100644 --- a/src/tools/tests/recipe2plan-test.ts +++ b/src/tools/tests/recipe2plan-test.ts @@ -461,7 +461,6 @@ policy PolicyBarBr2Br3 { const assertSuccess = async (recipeStr) => verifyRecipeIngress(recipeStr, true); const assertFailure = async (recipeStr) => verifyRecipeIngress(recipeStr, false); const verifyRecipeIngress = async (recipeStr: string, expectedSuccess: boolean) => { - Runtime.resetDrivers(); const recipesManifest = await Manifest.parse(` ${manifestMetaAndParticleSpecs} ${recipeStr} diff --git a/src/wasm/tests/wasm-api-test.ts b/src/wasm/tests/wasm-api-test.ts index b24895a6e46..6ba79e8d39a 100644 --- a/src/wasm/tests/wasm-api-test.ts +++ b/src/wasm/tests/wasm-api-test.ts @@ -34,7 +34,6 @@ class TestLoader extends Loader { constructor(readonly testDir: string) { super(); } - resolve(path: string) { // The manifest is in the same dir as this test file but the compiled wasm binaries // are in language-specific dirs, so we need to adjust the loading path accordingly. @@ -43,7 +42,6 @@ class TestLoader extends Loader { } return (path[0] === '$') ? 
`RESOLVED(${path})` : path; } - clone(): TestLoader { return this; } @@ -71,7 +69,6 @@ async function createBackingEntity(arc: Arc, referenceType: ReferenceType { describe(`wasm tests (${testLabel})`, function() { const isKotlin = testLabel === 'Kotlin'; @@ -112,7 +109,7 @@ Object.entries(testMap).forEach(([testLabel, testDir]) => { const slotObserver = new SlotTestObserver(); slotComposer.observeSlots(slotObserver); - return {arc, stores: info.stores, slotObserver}; + return {arc, stores: info.stores, slotObserver, runtime}; } it('onHandleSync / onHandleUpdate', async () => { @@ -534,7 +531,7 @@ Object.entries(testMap).forEach(([testLabel, testDir]) => { this.skip(); } - const {arc, stores} = await setup('OnFirstStartTest'); + const {arc, stores, runtime} = await setup('OnFirstStartTest'); const fooHandle = await handleForStoreInfo(stores.get('fooHandle') as StoreInfo, arc); assert.deepStrictEqual(await fooHandle.fetch() as {}, {txt: 'Created!'}); @@ -544,8 +541,8 @@ Object.entries(testMap).forEach(([testLabel, testDir]) => { const manifest = await manifestPromise; - const storageManager = new DirectStorageEndpointManager(); - const arc2 = await Arc.deserialize({serialization, loader, fileName: '', context: manifest, storageManager}); + const {driverFactory, storageManager} = runtime; + const arc2 = await Arc.deserialize({serialization, loader, fileName: '', context: manifest, storageManager, driverFactory}); await arc2.idle; const fooClass = Entity.createEntityClass(manifest.findSchemaByName('FooHandle'), null); @@ -555,58 +552,58 @@ Object.entries(testMap).forEach(([testLabel, testDir]) => { }); it('multiple handles onUpdate', async function() { - if (isCpp) { - this.skip(); - } - const {arc, stores} = await setup('CombineUpdatesTest'); - const handle1 = await handleForStoreInfo(stores.get('handle1') as StoreInfo, arc); - const handle2 = await handleForStoreInfo(stores.get('handle2') as StoreInfo, arc); - const handle3 = await 
handleForStoreInfo(stores.get('handle3') as StoreInfo, arc); - const handle4 = await handleForStoreInfo(stores.get('handle4') as StoreInfo, arc); - const handle5 = await handleForStoreInfo(stores.get('handle5') as StoreInfo, arc); - const handle6 = await handleForStoreInfo(stores.get('handle6') as StoreInfo, arc); - const handle7 = await handleForStoreInfo(stores.get('handle7') as StoreInfo, arc); - const handle8 = await handleForStoreInfo(stores.get('handle8') as StoreInfo, arc); - const handle9 = await handleForStoreInfo(stores.get('handle9') as StoreInfo, arc); - const handle10 = await handleForStoreInfo(stores.get('handle10') as StoreInfo, arc); - - await handle1.set(new handle1.entityClass({num: 1.0})); - await handle2.add(new handle2.entityClass({num: 1.0})); - await handle3.set(new handle3.entityClass({num3: 1.0})); - await handle4.set(new handle4.entityClass({num4: 1.0})); - await handle5.set(new handle5.entityClass({num5: 1.0})); - await handle6.set(new handle6.entityClass({num6: 1.0})); - await handle7.set(new handle7.entityClass({num7: 1.0})); - await handle8.set(new handle8.entityClass({num8: 1.0})); - await handle9.set(new handle9.entityClass({num9: 1.0})); - await handle10.set(new handle10.entityClass({num10: 1.0})); - - const errHandle = await handleForStoreInfo(stores.get('errors') as StoreInfo, arc); - - const sendEvent = async handler => { - await arc.idle; - arc.peh.sendEvent(arc.activeRecipe.particles[0], 'root', {handler}); - await arc.idle; - }; - - await sendEvent('checkEvents'); - - const errors = (await errHandle.toList()).map(e => e.msg); - - const expectedErrors = [ - `Single Handle OnUpdate called 1 times.`, - `Calling combineUpdates with 2 Handles called 2 times.`, - `Calling combineUpdates with 2 Handles called 2 times.`, - `Calling combineUpdates with 3 Handles called 3 times.`, - `Calling combineUpdates with 4 Handles called 4 times.`, - `Calling combineUpdates with 5 Handles called 5 times.`, - `Calling combineUpdates with 6 Handles 
called 6 times.`, - `Calling combineUpdates with 7 Handles called 7 times.`, - `Calling combineUpdates with 8 Handles called 8 times.`, - `Calling combineUpdates with 9 Handles called 9 times.`, - `Calling combineUpdates with 10 Handles called 10 times.`, - ]; - assert.deepStrictEqual(errors, expectedErrors); - }); + if (isCpp) { + this.skip(); + } + const {arc, stores} = await setup('CombineUpdatesTest'); + const handle1 = await handleForStoreInfo(stores.get('handle1') as StoreInfo, arc); + const handle2 = await handleForStoreInfo(stores.get('handle2') as StoreInfo, arc); + const handle3 = await handleForStoreInfo(stores.get('handle3') as StoreInfo, arc); + const handle4 = await handleForStoreInfo(stores.get('handle4') as StoreInfo, arc); + const handle5 = await handleForStoreInfo(stores.get('handle5') as StoreInfo, arc); + const handle6 = await handleForStoreInfo(stores.get('handle6') as StoreInfo, arc); + const handle7 = await handleForStoreInfo(stores.get('handle7') as StoreInfo, arc); + const handle8 = await handleForStoreInfo(stores.get('handle8') as StoreInfo, arc); + const handle9 = await handleForStoreInfo(stores.get('handle9') as StoreInfo, arc); + const handle10 = await handleForStoreInfo(stores.get('handle10') as StoreInfo, arc); + + await handle1.set(new handle1.entityClass({num: 1.0})); + await handle2.add(new handle2.entityClass({num: 1.0})); + await handle3.set(new handle3.entityClass({num3: 1.0})); + await handle4.set(new handle4.entityClass({num4: 1.0})); + await handle5.set(new handle5.entityClass({num5: 1.0})); + await handle6.set(new handle6.entityClass({num6: 1.0})); + await handle7.set(new handle7.entityClass({num7: 1.0})); + await handle8.set(new handle8.entityClass({num8: 1.0})); + await handle9.set(new handle9.entityClass({num9: 1.0})); + await handle10.set(new handle10.entityClass({num10: 1.0})); + + const errHandle = await handleForStoreInfo(stores.get('errors') as StoreInfo, arc); + + const sendEvent = async handler => { + await 
arc.idle; + arc.peh.sendEvent(arc.activeRecipe.particles[0], 'root', {handler}); + await arc.idle; + }; + + await sendEvent('checkEvents'); + + const errors = (await errHandle.toList()).map(e => e.msg); + + const expectedErrors = [ + `Single Handle OnUpdate called 1 times.`, + `Calling combineUpdates with 2 Handles called 2 times.`, + `Calling combineUpdates with 2 Handles called 2 times.`, + `Calling combineUpdates with 3 Handles called 3 times.`, + `Calling combineUpdates with 4 Handles called 4 times.`, + `Calling combineUpdates with 5 Handles called 5 times.`, + `Calling combineUpdates with 6 Handles called 6 times.`, + `Calling combineUpdates with 7 Handles called 7 times.`, + `Calling combineUpdates with 8 Handles called 8 times.`, + `Calling combineUpdates with 9 Handles called 9 times.`, + `Calling combineUpdates with 10 Handles called 10 times.`, + ]; + assert.deepStrictEqual(errors, expectedErrors); + }); }); }); From 4978ea0f60345b25900e985b73f7ef4900754d8e Mon Sep 17 00:00:00 2001 From: "Scott J. 
Miles" Date: Mon, 7 Dec 2020 18:27:40 -0800 Subject: [PATCH 12/31] repairs after unpleasant rebase --- src/planning/planner.ts | 2 +- src/runtime/runtime.ts | 85 ++++++++------------------------- src/tools/manifest2proto-cli.ts | 2 +- 3 files changed, 21 insertions(+), 68 deletions(-) diff --git a/src/planning/planner.ts b/src/planning/planner.ts index 94c58a56ddc..c4da9e5be78 100644 --- a/src/planning/planner.ts +++ b/src/planning/planner.ts @@ -109,7 +109,7 @@ export class Planner implements InspectablePlanner { Planner.getRuntimeCache(lastRuntime).clear(); } } - + getCache() { return Planner.getRuntimeCache(this.runtime); } diff --git a/src/runtime/runtime.ts b/src/runtime/runtime.ts index 6f595cd7a5d..d9677f00394 100644 --- a/src/runtime/runtime.ts +++ b/src/runtime/runtime.ts @@ -32,7 +32,9 @@ import {StorageKeyParser} from './storage/storage-key-parser.js'; import {_CapabilitiesResolver} from './capabilities-resolver.js'; import {StorageEndpointManager} from './storage/storage-manager.js'; import {DirectStorageEndpointManager} from './storage/direct-storage-endpoint-manager.js'; -import {Env} from './env.js'; +import {RamDiskStorageDriverProvider} from './storage/drivers/ramdisk.js'; +import {SimpleVolatileMemoryProvider, VolatileMemoryProvider, VolatileStorageKey, VolatileStorageKeyFactory, VolatileStorageDriverProvider} from './storage/drivers/volatile.js'; +//import {Env} from './env.js'; const {warn} = logsFactory('Runtime', 'orange'); @@ -72,28 +74,9 @@ export class Runtime { console.log('!FrOnK'); } - /** - * Call `init` to establish a default Runtime environment (capturing the return value is optional). - * Systems can use `Runtime.getRuntime()` to access this environment instead of plumbing `runtime` - * arguments through numerous functions. - * Some static methods on this class automatically use the default environment. 
- */ - static init(root?: string, urlMap?: {}, staticMap?: {}, context?: {}): Runtime { - const map = {...Runtime.mapFromRootPath(root), ...urlMap}; - const loader = new Loader(map, staticMap); - const pecFactory = pecIndustry(loader); - const runtime = new Runtime({ - loader, - composerClass: SlotComposer, - pecFactory, - memoryProvider: staticMemoryProvider - }); - return runtime; - } - - static create({root, urlMap, staticMap, context}): Runtime { - return this.init(root, urlMap, staticMap, context); - } + // TODO(sjmiles): static methods represent boilerplate. + // There's no essential reason they are part of Runtime. + // Consider. static mapFromRootPath(root: string) { // TODO(sjmiles): this is a commonly-used map, but it's not generic enough to live here. @@ -199,12 +182,15 @@ export class Runtime { workerPool.clear(); } - // Allow dynamic context binding to this runtime. - setContext(context: Manifest) { - this.context = context; + getCacheService() { + return this.cacheService; + } + + getMemoryProvider(): VolatileMemoryProvider { + return this.memoryProvider; } - buildArcParams(name?: string) { + buildArcParams(name?: string, storageKeyPrefix?: StorageKeyPrefixer): ArcOptions { const id = IdGenerator.newSession().newArcId(name); const {loader, context} = this; const factories = [new VolatileStorageKeyFactory()]; @@ -238,8 +224,6 @@ export class Runtime { return new Arc({id, storageKey, capabilitiesResolver, loader, slotComposer, context, storageManager, driverFactory, ...options}); } - // Stuff the shell(s) need - /** * Given an arc name, return either: * (1) the already running arc @@ -269,51 +253,20 @@ export class Runtime { return [...this.arcById.values()].find(arc => !!arc.activeRecipe.findParticle(particleId)); } - /** - * Given an arc, returns it's description as a string. - */ - static async getArcDescription(arc: Arc) : Promise { - // Verify that it's one of my arcs, and make this non-static, once I have - // Runtime objects in the calling code. 
- return (await Description.create(arc)).getArcDescription(); - } - - /** - * Parse a textual manifest and return a Manifest object. See the Manifest - * class for the options accepted. - */ - static async parseManifest(content: string, options?): Promise { - const runtime = this.getRuntime(); - const loader = runtime && runtime.loader; - return Manifest.parse(content, {loader, ...options}); - } - - /** - * Load and parse a manifest from a resource (not strictly a file) and return - * a Manifest object. The loader determines the semantics of the fileName. See - * the Manifest class for details. - */ - // static async loadManifest(fileName, loader, options) : Promise { - // return Manifest.load(fileName, loader, options); - // } - - // TODO(sjmiles): These methods represent boilerplate factored out of - // various shells.These needs could be filled other ways or represented - // by other modules. Suggestions welcome. - async parse(content: string, options?): Promise { + const {loader, memoryProvider} = this; // TODO(sjmiles): this method of generating a manifest id is ad-hoc, // maybe should be using one of the id generators, or even better - // we could eliminate it if the Manifest object takes care of this. - // TODO(sjmiles): this is a virtual manifest, the fileName is invented + // we could evacipate it if the Manifest object takes responsibility. 
const id = `in-memory-${Math.floor((Math.random()+1)*1e6)}.manifest`; - const {loader, memoryProvider} = this; + // TODO(sjmiles): this is a virtual manifest, the fileName is invented const opts = {id, fileName: `./${id}`, loader, memoryProvider, ...options}; return Manifest.parse(content, opts); } async parseFile(path: string, options?): Promise { - const content = await this.loader.loadResource(path); - return this.parse(content, {id: path, fileName: path, ...options}); + const {memoryProvider} = this; + const opts = {id: path, memoryProvider, ...options}; + return Manifest.load(path, opts.loader || this.loader, opts); } } diff --git a/src/tools/manifest2proto-cli.ts b/src/tools/manifest2proto-cli.ts index 444b4c0f848..ba40711a4dd 100644 --- a/src/tools/manifest2proto-cli.ts +++ b/src/tools/manifest2proto-cli.ts @@ -58,7 +58,7 @@ async function main() { try { fs.mkdirSync(opts.outdir, {recursive: true}); - const runtime = Runtime.init('../..', PATHS); + const runtime = new Runtime({rootPath: '../..', urlMap: PATHS}); const buffer = await encodeManifestToProto(runtime, opts._[0]); const outPath = path.join(opts.outdir, opts.outfile); From 1900fe2cb5ecd35dc136f0867d3fad777422b270 Mon Sep 17 00:00:00 2001 From: "Scott J. 
Miles" Date: Mon, 7 Dec 2020 21:31:09 -0800 Subject: [PATCH 13/31] remove debugger statement --- src/runtime/storage/direct-storage-endpoint-manager.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/runtime/storage/direct-storage-endpoint-manager.ts b/src/runtime/storage/direct-storage-endpoint-manager.ts index 6a8b6e08c09..7b8c09f7fd8 100644 --- a/src/runtime/storage/direct-storage-endpoint-manager.ts +++ b/src/runtime/storage/direct-storage-endpoint-manager.ts @@ -37,7 +37,6 @@ export class DirectStorageEndpointManager implements StorageEndpointManager, Sto } const construct = ctor.construct.bind(ctor); //const instance = await ctor.construct>({ - debugger; const instance = await construct({ storageKey: storeInfo.storageKey, exists: storeInfo.exists, From 00505f153206f20f245b2b351a4319aa7ac53df5 Mon Sep 17 00:00:00 2001 From: "Scott J. Miles" Date: Tue, 8 Dec 2020 14:02:10 -0800 Subject: [PATCH 14/31] sleuthing test failures --- .../tests/arcs/ts/runtime/plan-consumer-test.ts | 2 +- src/runtime/storage/drivers/driver-factory.ts | 11 ++++++++--- src/runtime/storage/drivers/firebase.ts | 1 - src/runtime/storage/testing/mock-firebase.ts | 4 ++-- .../tests/firebase-store-integration-test.ts | 5 ++--- .../reference-mode-store-integration-test.ts | 16 ++++++++-------- src/runtime/tests/arc-test.ts | 6 ++++-- tools/sigh.ts | 1 + 8 files changed, 26 insertions(+), 20 deletions(-) diff --git a/shells/tests/arcs/ts/runtime/plan-consumer-test.ts b/shells/tests/arcs/ts/runtime/plan-consumer-test.ts index 246a43c85fa..385f2bfecc5 100644 --- a/shells/tests/arcs/ts/runtime/plan-consumer-test.ts +++ b/shells/tests/arcs/ts/runtime/plan-consumer-test.ts @@ -32,7 +32,7 @@ async function storeResults(consumer: PlanConsumer, suggestions: Suggestion[]) { await new Promise(resolve => setTimeout(resolve, 100)); } -describe('planFOOB consumer', () => { +describe('plan consumer', () => { it('consumes', async () => { const manifestText = ` import 
'./shells/tests/artifacts/Products/Products.recipes' diff --git a/src/runtime/storage/drivers/driver-factory.ts b/src/runtime/storage/drivers/driver-factory.ts index 6f0bad2d45f..49856d798e9 100644 --- a/src/runtime/storage/drivers/driver-factory.ts +++ b/src/runtime/storage/drivers/driver-factory.ts @@ -21,14 +21,15 @@ export interface StorageDriverProvider { let staticDriverFactory; export class DriverFactory { - static register({driverFactory}, storageDriverProvider: StorageDriverProvider) { - driverFactory.register(storageDriverProvider); - } + //name; providers: Set = new Set(); constructor() { staticDriverFactory = this; + //this.name = Math.floor(Math.random()*90) + 10; + //console.warn('DriverFactory constructed: ', this.name); } register(storageDriverProvider: StorageDriverProvider) { + //console.warn(`DriverFactory(${this.name}).register`); //, storageDriverProvider); this.providers.add(storageDriverProvider); } unregister(storageDriverProvider: StorageDriverProvider) { @@ -49,6 +50,10 @@ export class DriverFactory { } return null; } + // statics + static register({driverFactory}, storageDriverProvider: StorageDriverProvider) { + driverFactory.register(storageDriverProvider); + } static async driverInstance(storageKey: StorageKey, exists: Exists) { return staticDriverFactory.driverInstance(storageKey, exists); } diff --git a/src/runtime/storage/drivers/firebase.ts b/src/runtime/storage/drivers/firebase.ts index a433b3f33f0..9128863695d 100644 --- a/src/runtime/storage/drivers/firebase.ts +++ b/src/runtime/storage/drivers/firebase.ts @@ -206,7 +206,6 @@ export class FirebaseDriver extends Driver { } } - export class FirebaseStorageDriverProvider implements StorageDriverProvider { protected readonly cacheService: RuntimeCacheService; diff --git a/src/runtime/storage/testing/mock-firebase.ts b/src/runtime/storage/testing/mock-firebase.ts index 6ffe7acd456..2554cc07e7a 100644 --- a/src/runtime/storage/testing/mock-firebase.ts +++ 
b/src/runtime/storage/testing/mock-firebase.ts @@ -353,8 +353,8 @@ export class MockFirebaseStorageDriverProvider extends FirebaseStorageDriverProv //DriverFactory.register(new MockFirebaseStorageDriverProvider(cacheService)); //StorageKeyParser.addParser(FirebaseStorageKey.protocol, FirebaseStorageKey.fromString); //const {projectId, domain, apiKey} = mockFirebaseStorageKeyOptions; - driverFactory.register(new FirebaseStorageDriverProvider(cacheService)); - storageKeyParser.addParser(FirebaseStorageKey.protocol, FirebaseStorageKey.fromString); + driverFactory.register(new MockFirebaseStorageDriverProvider(cacheService)); + storageKeyParser.addParser(MockFirebaseStorageKey.protocol, MockFirebaseStorageKey.fromString); } static getValueForTesting(cacheService: RuntimeCacheService, storageKey: MockFirebaseStorageKey) { diff --git a/src/runtime/storage/tests/firebase-store-integration-test.ts b/src/runtime/storage/tests/firebase-store-integration-test.ts index 363ab7dd4c5..f231681ce05 100644 --- a/src/runtime/storage/tests/firebase-store-integration-test.ts +++ b/src/runtime/storage/tests/firebase-store-integration-test.ts @@ -27,15 +27,14 @@ async function createStore(storageKey: StorageKey, exists: Exists): Promise; } -describe('Firebase + Store Integration', async () => { +describe('Firebase + Store Integration', async function() { let runtime; beforeEach(() => { runtime = new Runtime(); MockFirebaseStorageDriverProvider.register(runtime, runtime.getCacheService()); }); - it('FLOOB will store a sequence of model and operation updates as models', async () => { - debugger; + it('will store a sequence of model and operation updates as models', async () => { const storageKey = new MockFirebaseStorageKey('location'); const activeStore = await createStore(storageKey, Exists.ShouldCreate); diff --git a/src/runtime/storage/tests/reference-mode-store-integration-test.ts b/src/runtime/storage/tests/reference-mode-store-integration-test.ts index 84b2c91e566..4574704a224 
100644 --- a/src/runtime/storage/tests/reference-mode-store-integration-test.ts +++ b/src/runtime/storage/tests/reference-mode-store-integration-test.ts @@ -25,22 +25,21 @@ describe('ReferenceModeStore Integration', async () => { it('will store and retrieve entities through referenceModeStores (separate stores)', async () => { const storageKey = new ReferenceModeStorageKey(new RamDiskStorageKey('backing'), new RamDiskStorageKey('container')); - const type = new EntityType(new Schema(['AnEntity'], {foo: 'Text'})).collectionOf(); // Use newHandle here rather than setting up a store inside the arc, as this ensures writeHandle and readHandle // are on top of different storage stacks. - const writeHandle = await newHandle(new StoreInfo({storageKey, type, id: 'write-handle'}), - new Runtime().newArc('testWritesArc')); - const readHandle = await newHandle(new StoreInfo({storageKey, type, id: 'read-handle'}), - new Runtime().newArc('testReadArc')); + const writeInfo = new StoreInfo({storageKey, type, id: 'write-handle'}); + const writeHandle = await newHandle(writeInfo, new Runtime().newArc('testWritesArc')); + + const readInfo = new StoreInfo({storageKey, type, id: 'read-handle'}); + const readHandle = await newHandle(readInfo, new Runtime().newArc('testReadArc')); readHandle.particle = new Particle(); const returnPromise = new Promise((resolve, reject) => { - let state = 0; - readHandle.particle['onHandleSync'] = async (handle, model) => { + console.warn('onHandleSync', model); if (state === 0) { assert.deepEqual(model, []); state = 1; @@ -50,9 +49,10 @@ describe('ReferenceModeStore Integration', async () => { resolve(); } }; - }); + console.warn('writeHandle.addFromData'); + //debugger; await writeHandle.addFromData({foo: 'This is text in foo'}); return returnPromise; }); diff --git a/src/runtime/tests/arc-test.ts b/src/runtime/tests/arc-test.ts index a0911ba8f0f..31d0f3f32af 100644 --- a/src/runtime/tests/arc-test.ts +++ b/src/runtime/tests/arc-test.ts @@ -64,7 +64,7 
@@ async function setup(storageKeyPrefix: (arcId: ArcId) => StorageKey) { } describe('Arc new storage', () => { - it('preserves data when round-tripping through serialization', async () => { + it('FLOOB preserves data when round-tripping through serialization', async () => { // TODO(shans): deserialization currently uses a RamDisk store to deserialize into because we don't differentiate // between parsing a manifest for public consumption (e.g. with RamDisk resources in it) and parsing a serialized // arc (with an @activeRecipe). We'll fix this by adding a 'private' keyword to store serializations which will @@ -130,7 +130,7 @@ describe('Arc new storage', () => { await arc.instantiate(recipe); const serialization = await arc.serialize(); - console.log(serialization); + console.warn(serialization); arc.dispose(); await varHandle.clear(); @@ -138,7 +138,9 @@ describe('Arc new storage', () => { await refVarHandle.clear(); const {context, storageManager, driverFactory} = opts; + debugger; const arc2 = await Arc.deserialize({fileName: '', serialization, loader, context, storageManager, driverFactory}); + debugger; const varStore2 = arc2.findStoreById(varStore.id) as StoreInfo; const colStore2 = arc2.findStoreById(colStore.id) as StoreInfo; const refVarStore2 = arc2.findStoreById(refVarStore.id) as StoreInfo; diff --git a/tools/sigh.ts b/tools/sigh.ts index a1c18c6b4c6..31da87ffca2 100644 --- a/tools/sigh.ts +++ b/tools/sigh.ts @@ -68,6 +68,7 @@ const buildShells = () => globalOptions.bazel ? 
true : buildPkg('shells'); const steps: {[index: string]: ((args?: string[]) => boolean|Promise)[]} = { peg: [peg, railroad], test: [peg, build, buildShells, runTestsOrHealthOnCron], + justTest: [runTestsOrHealthOnCron], testShells: [peg, build, buildShells, webpack, webpackStorage, devServerAsync, testWdioShells], testWdioShells: [testWdioShells], webpack: [peg, build, buildShells, webpack], From 6a9f14d52f9442b990466faf2b0b091eb6bca429 Mon Sep 17 00:00:00 2001 From: Jason LeBrun Date: Mon, 7 Dec 2020 17:44:14 -0800 Subject: [PATCH 15/31] Don't call idle immediately after closing StorageEndpoint Unbinding from an Android service is only an indication that the client no longer wants connected/disconnected events from the storage service, and that from the client's point of view, it is OK for the service to shut down. But it *doesn't* make any acquired bindings stop work. This means that we called close, and then called idle, which succeeds, since the binding still works. The `idle` call coming in after `onUnbind` was causing a store to get re-created (and then never cleaned up, since the binding in question would never be bound/unbound again). PiperOrigin-RevId: 346218079 --- java/arcs/core/storage/StorageProxyImpl.kt | 1 - .../storage/AndroidStorageServiceEndpointManager.kt | 12 ++++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/java/arcs/core/storage/StorageProxyImpl.kt b/java/arcs/core/storage/StorageProxyImpl.kt index 069e936df10..06fe5dc940c 100644 --- a/java/arcs/core/storage/StorageProxyImpl.kt +++ b/java/arcs/core/storage/StorageProxyImpl.kt @@ -242,7 +242,6 @@ class StorageProxyImpl private con // Again, if it takes too long, cancel the job. 
val storeCloseJob = scheduler.scope.launch { store.close() - store.idle() } try { diff --git a/java/arcs/sdk/android/storage/AndroidStorageServiceEndpointManager.kt b/java/arcs/sdk/android/storage/AndroidStorageServiceEndpointManager.kt index 3e99505202d..6bebd28ef0e 100644 --- a/java/arcs/sdk/android/storage/AndroidStorageServiceEndpointManager.kt +++ b/java/arcs/sdk/android/storage/AndroidStorageServiceEndpointManager.kt @@ -21,6 +21,7 @@ import arcs.sdk.android.storage.service.BindHelper import arcs.sdk.android.storage.service.StorageService import arcs.sdk.android.storage.service.StorageServiceIntentHelpers import arcs.sdk.android.storage.service.bindForIntent +import kotlinx.atomicfu.atomic import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.CoroutineStart import kotlinx.coroutines.ExperimentalCoroutinesApi @@ -78,7 +79,13 @@ class AndroidStorageEndpoint const private val log = TaggedLog { "AndroidStorageEndpoint" } + private val closed = atomic(false) + override suspend fun idle() { + if (closed.value) { + // TODO(b/175070424) Crash here rather than just logging. + log.warning { "idle called after close" } + } log.debug { "Waiting for service store to be idle" } outgoingMessagesCount.flow.first { it == 0 } suspendForResultCallback { resultCallback -> @@ -88,6 +95,10 @@ class AndroidStorageEndpoint const } override suspend fun onProxyMessage(message: ProxyMessage) { + if (closed.value) { + // TODO(b/175070424) Crash here rather than just logging. 
+ log.warning { "onProxyMessage called after close" } + } outgoingMessagesCount.increment() try { suspendForResultCallback { resultCallback -> @@ -105,6 +116,7 @@ class AndroidStorageEndpoint const } override suspend fun close() { + closed.value = true suspendForResultCallback { resultCallback -> service.unregisterCallback(channelId, resultCallback) } From 9482eef419ec5bcb27b88cc67cc99f8edf87fb27 Mon Sep 17 00:00:00 2001 From: Cameron Silvestrini Date: Mon, 7 Dec 2020 18:19:00 -0800 Subject: [PATCH 16/31] Refactors several classes to use injected StorageKeyManager instances instead of accessing the StorageMeyManager.GLOBAL_INSTANCE directly. I've only updated some really simple usages to use the DI pattern. There shouldn't be any change in behaviour; everything still points to the global singleton instance for now. PiperOrigin-RevId: 346223334 --- java/arcs/android/common/resurrection/DbHelper.kt | 3 ++- .../common/resurrection/ResurrectorService.kt | 14 ++++++++++---- .../database/AndroidSqliteDatabaseManager.kt | 6 +++++- java/arcs/android/storage/database/DatabaseImpl.kt | 9 +++++---- java/arcs/core/host/AbstractArcHost.kt | 11 ++++++++++- java/arcs/core/host/ArcHostContextParticle.kt | 3 ++- java/arcs/sdk/android/labs/host/ArcHostHelper.kt | 3 ++- java/arcs/sdk/android/labs/host/ArcHostService.kt | 6 +++++- .../android/common/resurrection/DbHelperTest.kt | 6 +++++- javatests/arcs/android/host/ArcHostHelperTest.kt | 5 ++--- .../android/host/TestExternalArcHostService.kt | 6 +++++- .../storage/database/DatabaseDowngradeTest.kt | 2 ++ .../android/storage/database/DatabaseImplTest.kt | 8 +++++++- 13 files changed, 62 insertions(+), 20 deletions(-) diff --git a/java/arcs/android/common/resurrection/DbHelper.kt b/java/arcs/android/common/resurrection/DbHelper.kt index d49e5a406c0..9ceb042a9c9 100644 --- a/java/arcs/android/common/resurrection/DbHelper.kt +++ b/java/arcs/android/common/resurrection/DbHelper.kt @@ -32,6 +32,7 @@ import 
arcs.core.storage.api.DriverAndKeyConfigurator @VisibleForTesting(otherwise = VisibleForTesting.PACKAGE_PRIVATE) class DbHelper( context: Context, + private val storageKeyManager: StorageKeyManager, dbName: String = RESURRECTION_DB_NAME ) : SQLiteOpenHelper( context, @@ -157,7 +158,7 @@ class DbHelper( val notifiers = notifiersByComponentName[requestedNotifier] ?: mutableListOf() - notifiers.add(StorageKeyManager.GLOBAL_INSTANCE.parse(key)) + notifiers.add(storageKeyManager.parse(key)) notifiersByComponentName[requestedNotifier] = notifiers } diff --git a/java/arcs/android/common/resurrection/ResurrectorService.kt b/java/arcs/android/common/resurrection/ResurrectorService.kt index 24bd7b75210..cd9826f2fc8 100644 --- a/java/arcs/android/common/resurrection/ResurrectorService.kt +++ b/java/arcs/android/common/resurrection/ResurrectorService.kt @@ -17,6 +17,7 @@ import android.os.Bundle import androidx.annotation.VisibleForTesting import arcs.android.common.resurrection.ResurrectionRequest.UnregisterRequest import arcs.core.storage.StorageKey +import arcs.core.storage.StorageKeyManager import arcs.core.util.guardedBy import java.io.PrintWriter import kotlinx.coroutines.CoroutineName @@ -42,13 +43,18 @@ abstract class ResurrectorService : Service() { protected open val job = Job() + Dispatchers.IO + CoroutineName("ResurrectorService") - private val dbHelper: DbHelper by lazy { DbHelper(this, resurrectionDatabaseName) } + // TODO(b/174432505): Don't use the GLOBAL_INSTANCE, accept as a constructor param instead. + protected val storageKeyManager = StorageKeyManager.GLOBAL_INSTANCE + + private val dbHelper: DbHelper by lazy { + DbHelper(this, storageKeyManager, resurrectionDatabaseName) + } private val mutex = Mutex() private var registeredRequests: Set - by guardedBy(mutex, setOf()) + by guardedBy(mutex, setOf()) private var registeredRequestsByNotifiers: Map> - by guardedBy(mutex, mapOf()) + by guardedBy(mutex, mapOf()) @VisibleForTesting var loadJob: Job? 
= null @@ -118,7 +124,7 @@ abstract class ResurrectorService : Service() { """ Resurrection Requests --------------------- - """.trimIndent() + """.trimIndent() ) val requests = StringBuilder().apply { diff --git a/java/arcs/android/storage/database/AndroidSqliteDatabaseManager.kt b/java/arcs/android/storage/database/AndroidSqliteDatabaseManager.kt index 32a22d50019..8db21a77646 100644 --- a/java/arcs/android/storage/database/AndroidSqliteDatabaseManager.kt +++ b/java/arcs/android/storage/database/AndroidSqliteDatabaseManager.kt @@ -14,6 +14,7 @@ package arcs.android.storage.database import android.content.Context import androidx.lifecycle.LifecycleObserver import arcs.core.storage.StorageKey +import arcs.core.storage.StorageKeyManager import arcs.core.storage.database.Database import arcs.core.storage.database.DatabaseIdentifier import arcs.core.storage.database.DatabaseManager @@ -39,6 +40,9 @@ class AndroidSqliteDatabaseManager( private val dbCache by guardedBy(mutex, mutableMapOf()) override val registry = AndroidSqliteDatabaseRegistry(context) + // TODO(b/174432505): Don't use the GLOBAL_INSTANCE, accept as a constructor param instead. 
+ private val storageKeyManager = StorageKeyManager.GLOBAL_INSTANCE + suspend fun close() { mutex.withLock { dbCache.values.forEach { it.close() } @@ -52,7 +56,7 @@ class AndroidSqliteDatabaseManager( val entry = registry.register(name, persistent) return mutex.withLock { dbCache[entry.name to entry.isPersistent] - ?: DatabaseImpl(context, name, persistent) { + ?: DatabaseImpl(context, storageKeyManager, name, persistent) { mutex.withLock { dbCache.remove(entry.name to entry.isPersistent) } diff --git a/java/arcs/android/storage/database/DatabaseImpl.kt b/java/arcs/android/storage/database/DatabaseImpl.kt index 36dffcec505..f75a8b0f18b 100644 --- a/java/arcs/android/storage/database/DatabaseImpl.kt +++ b/java/arcs/android/storage/database/DatabaseImpl.kt @@ -108,6 +108,7 @@ typealias ReferenceId = Long // Our helper extension methods close Cursors correctly. class DatabaseImpl( context: Context, + private val storageKeyManager: StorageKeyManager, databaseName: String, persistent: Boolean = true, val onDatabaseClose: suspend () -> Unit = {} @@ -435,7 +436,7 @@ class DatabaseImpl( } else { Reference( id = it.getString(6), - storageKey = StorageKeyManager.GLOBAL_INSTANCE.parse(it.getString(7)), + storageKey = storageKeyManager.parse(it.getString(7)), version = it.getVersionMap(8), _creationTimestamp = it.getLong(9), _expirationTimestamp = it.getLong(10), @@ -1052,7 +1053,7 @@ class DatabaseImpl( arrayOf() ).forEach { val storageKeyId = it.getLong(0) - val storageKey = StorageKeyManager.GLOBAL_INSTANCE.parse(it.getString(1)) + val storageKey = storageKeyManager.parse(it.getString(1)) val orphan = it.getNullableBoolean(2) ?: false val noRef = it.getBoolean(3) if (orphan && noRef) { @@ -1379,7 +1380,7 @@ class DatabaseImpl( ) (storageKeys union updatedContainersStorageKeys).map { storageKey -> - notifyClients(StorageKeyManager.GLOBAL_INSTANCE.parse(storageKey)) { + notifyClients(storageKeyManager.parse(storageKey)) { it.onDatabaseDelete(null) } } @@ -1720,7 +1721,7 @@ 
class DatabaseImpl( ReferenceWithVersion( Reference( id = it.getString(0), - storageKey = StorageKeyManager.GLOBAL_INSTANCE.parse(it.getString(3)), + storageKey = storageKeyManager.parse(it.getString(3)), version = it.getVersionMap(4), _creationTimestamp = it.getLong(1), _expirationTimestamp = it.getLong(2) diff --git a/java/arcs/core/host/AbstractArcHost.kt b/java/arcs/core/host/AbstractArcHost.kt index c756bfc0313..5c67e1c465f 100644 --- a/java/arcs/core/host/AbstractArcHost.kt +++ b/java/arcs/core/host/AbstractArcHost.kt @@ -26,6 +26,7 @@ import arcs.core.host.api.HandleHolder import arcs.core.host.api.Particle import arcs.core.storage.StorageEndpointManager import arcs.core.storage.StorageKey +import arcs.core.storage.StorageKeyManager import arcs.core.util.LruCacheMap import arcs.core.util.Scheduler import arcs.core.util.TaggedLog @@ -87,6 +88,9 @@ abstract class AbstractArcHost( private val particleConstructors: MutableMap = mutableMapOf() + // TODO(b/174432505): Don't use the GLOBAL_INSTANCE, accept as a constructor param instead. + private val storageKeyManager = StorageKeyManager.GLOBAL_INSTANCE + private val cacheMutex = Mutex() /** In memory cache of [ArcHostContext] state. 
*/ @@ -353,7 +357,12 @@ abstract class AbstractArcHost( ): ArcHostContextParticle { val handleManager = entityHandleManager("$hostId-${arcHostContext.arcId}") - return ArcHostContextParticle(hostId, handleManager, this::instantiateParticle).apply { + return ArcHostContextParticle( + hostId, + handleManager, + storageKeyManager, + this::instantiateParticle + ).apply { val partition = createArcHostContextPersistencePlan( arcHostContextCapability, arcHostContext.arcId diff --git a/java/arcs/core/host/ArcHostContextParticle.kt b/java/arcs/core/host/ArcHostContextParticle.kt index 5ba5601cbe5..e5338a4cac8 100755 --- a/java/arcs/core/host/ArcHostContextParticle.kt +++ b/java/arcs/core/host/ArcHostContextParticle.kt @@ -47,6 +47,7 @@ typealias ArcHostContextParticle_PlanHandle = AbstractArcHostContextParticle.Pla class ArcHostContextParticle( private val hostId: String, private val handleManager: HandleManager, + private val storageKeyManager: StorageKeyManager, private val instantiateParticle: suspend (ParticleIdentifier, Plan.Particle?) -> Particle, private val instantiatedParticles: MutableMap = mutableMapOf() ) : AbstractArcHostContextParticle() { @@ -198,7 +199,7 @@ class ArcHostContextParticle( } handle.connectionName to Plan.HandleConnection( Plan.Handle( - StorageKeyManager.GLOBAL_INSTANCE.parse(planHandle.storageKey), + storageKeyManager.parse(planHandle.storageKey), // TODO(b/161818462): Properly serialize serialize Handle Type's schema. 
fromTag(arcId, particle, planHandle.type, handle.connectionName), emptyList() diff --git a/java/arcs/sdk/android/labs/host/ArcHostHelper.kt b/java/arcs/sdk/android/labs/host/ArcHostHelper.kt index 0e8500ee92e..d9a42cb4e8b 100644 --- a/java/arcs/sdk/android/labs/host/ArcHostHelper.kt +++ b/java/arcs/sdk/android/labs/host/ArcHostHelper.kt @@ -70,6 +70,7 @@ import kotlinx.coroutines.launch */ class ArcHostHelper( private val service: Service, + private val storageKeyManager: StorageKeyManager, vararg arcHosts: ArcHost ) { private val job = SupervisorJob() + Dispatchers.Unconfined + CoroutineName("ArcHostHelper") @@ -107,7 +108,7 @@ class ArcHostHelper( arcHost.onResurrected( targetId, - notifiers.map(StorageKeyManager.GLOBAL_INSTANCE::parse) + notifiers.map(storageKeyManager::parse) ) } diff --git a/java/arcs/sdk/android/labs/host/ArcHostService.kt b/java/arcs/sdk/android/labs/host/ArcHostService.kt index 4b5f9507181..5a545866fdb 100644 --- a/java/arcs/sdk/android/labs/host/ArcHostService.kt +++ b/java/arcs/sdk/android/labs/host/ArcHostService.kt @@ -13,6 +13,7 @@ package arcs.sdk.android.labs.host import android.content.Intent import androidx.lifecycle.LifecycleService import arcs.core.host.ArcHost +import arcs.core.storage.StorageKeyManager import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.MainScope import kotlinx.coroutines.cancel @@ -23,6 +24,9 @@ import kotlinx.coroutines.cancel abstract class ArcHostService : LifecycleService() { protected val scope: CoroutineScope = MainScope() + // TODO(b/174432505): Don't use the GLOBAL_INSTANCE, accept as a constructor param instead. 
+ private val storageKeyManager = StorageKeyManager.GLOBAL_INSTANCE + // TODO: remove after G3 fixed abstract val arcHost: ArcHost @@ -32,7 +36,7 @@ abstract class ArcHostService : LifecycleService() { open val arcHosts: List by lazy { listOf(arcHost) } val arcHostHelper: ArcHostHelper by lazy { - ArcHostHelper(this, *arcHosts.toTypedArray()) + ArcHostHelper(this, storageKeyManager, *arcHosts.toTypedArray()) } override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int { diff --git a/javatests/arcs/android/common/resurrection/DbHelperTest.kt b/javatests/arcs/android/common/resurrection/DbHelperTest.kt index aadbb3654a8..76d36f159fe 100644 --- a/javatests/arcs/android/common/resurrection/DbHelperTest.kt +++ b/javatests/arcs/android/common/resurrection/DbHelperTest.kt @@ -15,6 +15,7 @@ import android.content.ComponentName import android.os.PersistableBundle import androidx.test.core.app.ApplicationProvider import androidx.test.ext.junit.runners.AndroidJUnit4 +import arcs.core.storage.StorageKeyManager import arcs.core.storage.keys.RamDiskStorageKey import com.google.common.truth.Truth.assertThat import org.junit.After @@ -57,7 +58,10 @@ class DbHelperTest { @Before fun setUp() { - dbHelper = DbHelper(ApplicationProvider.getApplicationContext()) + dbHelper = DbHelper( + ApplicationProvider.getApplicationContext(), + StorageKeyManager.GLOBAL_INSTANCE + ) } @After diff --git a/javatests/arcs/android/host/ArcHostHelperTest.kt b/javatests/arcs/android/host/ArcHostHelperTest.kt index 23bbaf552c9..f8081c57bf4 100644 --- a/javatests/arcs/android/host/ArcHostHelperTest.kt +++ b/javatests/arcs/android/host/ArcHostHelperTest.kt @@ -34,9 +34,9 @@ import arcs.core.host.ArcStateChangeCallback import arcs.core.host.ArcStateChangeRegistration import arcs.core.host.ParticleIdentifier import arcs.core.storage.StorageKey -import arcs.core.storage.StorageKeyManager import arcs.core.storage.keys.VolatileStorageKey import arcs.core.storage.testutil.DummyStorageKey +import 
arcs.core.storage.testutil.DummyStorageKeyManager import arcs.core.util.guardedBy import arcs.sdk.android.labs.host.ArcHostHelper import arcs.sdk.android.labs.host.ResurrectableHost @@ -170,8 +170,7 @@ class ArcHostHelperTest { context = InstrumentationRegistry.getInstrumentation().targetContext service = Robolectric.setupService(TestAndroidArcHostService::class.java) arcHost = TestArcHost(context) - helper = ArcHostHelper(service, arcHost) - StorageKeyManager.GLOBAL_INSTANCE.addParser(DummyStorageKey) + helper = ArcHostHelper(service, DummyStorageKeyManager(), arcHost) } @Test diff --git a/javatests/arcs/android/host/TestExternalArcHostService.kt b/javatests/arcs/android/host/TestExternalArcHostService.kt index 7da0408f969..35d99a4531e 100644 --- a/javatests/arcs/android/host/TestExternalArcHostService.kt +++ b/javatests/arcs/android/host/TestExternalArcHostService.kt @@ -10,6 +10,7 @@ import arcs.core.host.ParticleRegistration import arcs.core.host.SchedulerProvider import arcs.core.host.SimpleSchedulerProvider import arcs.core.host.TestingHost +import arcs.core.storage.StorageKeyManager import arcs.sdk.android.labs.host.ArcHostHelper import arcs.sdk.android.labs.host.ResurrectableHost import arcs.sdk.android.storage.AndroidStorageServiceEndpointManager @@ -30,8 +31,11 @@ abstract class TestExternalArcHostService : Service() { val schedulerProvider = SimpleSchedulerProvider(Dispatchers.Default) + // TODO(b/174432505): Don't use the GLOBAL_INSTANCE, use a test-specific instance. 
+ private val storageKeyManager = StorageKeyManager.GLOBAL_INSTANCE + private val arcHostHelper: ArcHostHelper by lazy { - ArcHostHelper(this, arcHost) + ArcHostHelper(this, storageKeyManager, arcHost) } override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int { diff --git a/javatests/arcs/android/storage/database/DatabaseDowngradeTest.kt b/javatests/arcs/android/storage/database/DatabaseDowngradeTest.kt index 381871df7f3..ea6a4d5a6e1 100644 --- a/javatests/arcs/android/storage/database/DatabaseDowngradeTest.kt +++ b/javatests/arcs/android/storage/database/DatabaseDowngradeTest.kt @@ -18,6 +18,7 @@ import androidx.test.core.app.ApplicationProvider import androidx.test.ext.junit.runners.AndroidJUnit4 import arcs.android.common.map import arcs.android.common.transaction +import arcs.core.storage.testutil.DummyStorageKeyManager import com.google.common.truth.Truth.assertThat import org.junit.Test import org.junit.runner.RunWith @@ -44,6 +45,7 @@ class DatabaseDowngradeTest { val databaseImpl = DatabaseImpl( ApplicationProvider.getApplicationContext(), + DummyStorageKeyManager(), "arcs", true ) diff --git a/javatests/arcs/android/storage/database/DatabaseImplTest.kt b/javatests/arcs/android/storage/database/DatabaseImplTest.kt index 609fca9103b..d38a7f54f2c 100644 --- a/javatests/arcs/android/storage/database/DatabaseImplTest.kt +++ b/javatests/arcs/android/storage/database/DatabaseImplTest.kt @@ -37,6 +37,7 @@ import arcs.core.storage.database.DatabaseClient import arcs.core.storage.database.DatabaseData import arcs.core.storage.database.ReferenceWithVersion import arcs.core.storage.testutil.DummyStorageKey +import arcs.core.storage.testutil.DummyStorageKeyManager import arcs.core.testutil.assertSuspendingThrows import arcs.core.util.ArcsDuration import arcs.core.util.ArcsInstant @@ -64,7 +65,11 @@ class DatabaseImplTest { @Before fun setUp() { - database = DatabaseImpl(ApplicationProvider.getApplicationContext(), "test.sqlite3") + database = 
DatabaseImpl( + ApplicationProvider.getApplicationContext(), + DummyStorageKeyManager(), + "test.sqlite3" + ) db = database.writableDatabase StorageKeyManager.GLOBAL_INSTANCE.addParser(DummyStorageKey) } @@ -3784,6 +3789,7 @@ class DatabaseImplTest { // Makes sure in memory database can also return valid size. val inMemoryDatabase = DatabaseImpl( ApplicationProvider.getApplicationContext(), + DummyStorageKeyManager(), "test.sqlite3", persistent = false ) From 7f2ea17989b80745d316621dcaae386e243afd46 Mon Sep 17 00:00:00 2001 From: Filippo Galgani Date: Mon, 7 Dec 2020 20:19:31 -0800 Subject: [PATCH 17/31] rename EntitySpecTest.kt to javatests/arcs/sdk/GeneratedEntityTest.kt add a comment saying it is a test for the generated entity classes use fixture entities test more field types PiperOrigin-RevId: 346237958 --- .../core/entity/testutil/FixtureEntities.kt | 8 +- javatests/arcs/sdk/BUILD | 8 +- javatests/arcs/sdk/GeneratedEntityTest.kt | 471 ++++++++++++++++++ javatests/arcs/sdk/spec/EntitySpecTest.kt | 354 ------------- javatests/arcs/sdk/spec/entity.arcs | 37 -- 5 files changed, 482 insertions(+), 396 deletions(-) create mode 100755 javatests/arcs/sdk/GeneratedEntityTest.kt delete mode 100755 javatests/arcs/sdk/spec/EntitySpecTest.kt delete mode 100644 javatests/arcs/sdk/spec/entity.arcs diff --git a/java/arcs/core/entity/testutil/FixtureEntities.kt b/java/arcs/core/entity/testutil/FixtureEntities.kt index 7a18e2fb074..afe78f771b0 100644 --- a/java/arcs/core/entity/testutil/FixtureEntities.kt +++ b/java/arcs/core/entity/testutil/FixtureEntities.kt @@ -53,10 +53,10 @@ class FixtureEntities { boolListField = listOf(true, false, true), instantsField = setOf(ArcsInstant.ofEpochMilli(1), ArcsInstant.ofEpochMilli(2)), bigintsField = setOf(BigInt.ONE, BigInt.TEN), - inlineEntityField = getInnerEntity(), - inlineListField = listOf(getInnerEntity(), getInnerEntity()), + inlineEntityField = generateInnerEntity(), + inlineListField = listOf(generateInnerEntity(), 
generateInnerEntity()), // TODO(b/174426876): add more than one entity. Currently does not work due to b/174426876. - inlinesField = setOf(getInnerEntity()), + inlinesField = setOf(generateInnerEntity()), referenceField = createInnerEntityReference("ref-$entityCounter"), hardReferenceField = createInnerEntityReference("hardref-$entityCounter"), referencesField = setOf( @@ -72,7 +72,7 @@ class FixtureEntities { fun generateEmpty() = FixtureEntity() - private fun getInnerEntity(): InnerEntity { + fun generateInnerEntity(): InnerEntity { innerEntityCounter++ return InnerEntity( textField = "inline text $innerEntityCounter", diff --git a/javatests/arcs/sdk/BUILD b/javatests/arcs/sdk/BUILD index ecc8323ed02..89be50348b6 100644 --- a/javatests/arcs/sdk/BUILD +++ b/javatests/arcs/sdk/BUILD @@ -11,13 +11,17 @@ licenses(["notice"]) package(default_visibility = ["//java/arcs:allowed-packages"]) arcs_kt_jvm_test_suite( - name = "host", + name = "sdk", srcs = glob(["*Test.kt"]), package = "arcs.sdk", deps = [ ":schemas", + "//java/arcs/core/common", "//java/arcs/core/data", + "//java/arcs/core/data:schema_fields", + "//java/arcs/core/data/util:data-util", "//java/arcs/core/entity", + "//java/arcs/core/entity/testutil", "//java/arcs/core/host", "//java/arcs/core/storage", "//java/arcs/core/storage/api", @@ -25,8 +29,10 @@ arcs_kt_jvm_test_suite( "//java/arcs/core/storage/keys", "//java/arcs/core/storage/referencemode", "//java/arcs/core/storage/testutil", + "//java/arcs/core/testutil", "//java/arcs/core/testutil/handles", "//java/arcs/core/util", + "//java/arcs/core/util:utils-platform-dependencies", "//java/arcs/core/util/testutil", "//java/arcs/jvm/util/testutil", "//java/arcs/sdk:sdk-kt", diff --git a/javatests/arcs/sdk/GeneratedEntityTest.kt b/javatests/arcs/sdk/GeneratedEntityTest.kt new file mode 100755 index 00000000000..2d96461b86e --- /dev/null +++ b/javatests/arcs/sdk/GeneratedEntityTest.kt @@ -0,0 +1,471 @@ +package arcs.sdk + +import arcs.core.common.Id +import 
arcs.core.data.Capability.Ttl +import arcs.core.data.FieldType +import arcs.core.data.RawEntity +import arcs.core.data.RawEntity.Companion.NO_REFERENCE_ID +import arcs.core.data.SchemaRegistry +import arcs.core.data.util.ReferencableList +import arcs.core.data.util.toReferencable +import arcs.core.entity.testutil.FixtureEntities +import arcs.core.entity.testutil.FixtureEntity +import arcs.core.entity.testutil.InnerEntity +import arcs.core.testutil.runTest +import arcs.core.util.testutil.LogRule +import arcs.jvm.util.testutil.FakeTime +import com.google.common.truth.Truth.assertThat +import kotlinx.coroutines.ExperimentalCoroutinesApi +import org.junit.Before +import org.junit.Rule +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +/** Tests for code-generated entity classes. */ +@OptIn(ExperimentalCoroutinesApi::class) +@RunWith(JUnit4::class) +class GeneratedEntityTest { + + private lateinit var idGenerator: Id.Generator + private var currentTime: Long = 500L + private val fixtureEntities = FixtureEntities() + + @get:Rule + val log = LogRule() + + @Before + fun setUp() { + idGenerator = Id.Generator.newForTest("session") + } + + @Test + fun createEmptyInstance() { + val entity = FixtureEntity() + + assertThat(entity.boolField).isFalse() + assertThat(entity.numField).isEqualTo(0.0) + assertThat(entity.textField).isEqualTo("") + assertThat(entity.referenceField).isNull() + assertThat(entity.byteField).isEqualTo(0) + assertThat(entity.shortField).isEqualTo(0) + assertThat(entity.intField).isEqualTo(0) + assertThat(entity.longField).isEqualTo(0L) + assertThat(entity.charField).isEqualTo('\u0000') + assertThat(entity.floatField).isEqualTo(0.0f) + assertThat(entity.doubleField).isEqualTo(0.0) + assertThat(entity.instantField).isEqualTo(ArcsInstant.ofEpochMilli(-1L)) + assertThat(entity.bigintField).isEqualTo(BigInt.ZERO) + assertThat(entity.boolsField).isEmpty() + assertThat(entity.numsField).isEmpty() + 
assertThat(entity.textsField).isEmpty() + assertThat(entity.referencesField).isEmpty() + assertThat(entity.bytesField).isEmpty() + assertThat(entity.shortsField).isEmpty() + assertThat(entity.intsField).isEmpty() + assertThat(entity.longsField).isEmpty() + assertThat(entity.charsField).isEmpty() + assertThat(entity.floatsField).isEmpty() + assertThat(entity.doublesField).isEmpty() + assertThat(entity.instantsField).isEmpty() + assertThat(entity.bigintsField).isEmpty() + assertThat(entity.textListField).isEmpty() + assertThat(entity.numListField).isEmpty() + assertThat(entity.boolListField).isEmpty() + assertThat(entity.inlineEntityField).isEqualTo(InnerEntity()) + assertThat(entity.inlineListField).isEmpty() + assertThat(entity.inlinesField).isEmpty() + assertThat(entity.referenceListField).isEmpty() + } + + @Test + fun createWithFieldValues() = runTest { + val ref1 = fixtureEntities.createInnerEntityReference("bar1") + val ref2 = fixtureEntities.createInnerEntityReference("bar2") + val ref3 = fixtureEntities.createInnerEntityReference("bar3") + val inline1 = fixtureEntities.generateInnerEntity() + val inline2 = fixtureEntities.generateInnerEntity() + val inline3 = fixtureEntities.generateInnerEntity() + val entity = FixtureEntity( + boolField = true, + numField = 123.0, + textField = "abc", + referenceField = ref1, + byteField = 47, + shortField = 30000, + intField = 1000000000, + longField = 15000000000L, + charField = 'A', + floatField = 43.23f, + doubleField = 77.66E200, + instantField = ArcsInstant.ofEpochMilli(10.toLong()), + bigintField = BigInt.TEN, + boolsField = setOf(false), + numsField = setOf(456.0, 789.0), + textsField = setOf("def", "ghi"), + referencesField = setOf(ref2, ref3), + bytesField = setOf(23, 34), + shortsField = setOf(234, 345), + intsField = setOf(234567, 345678), + longsField = setOf(1L, 1234567890123L), + charsField = setOf('A', 'R', 'C', 'S'), + floatsField = setOf(2.3f, 3.4f), + doublesField = setOf(2.3E200, 3.4E100), + textListField 
= listOf("text 1", "text 2"), + numListField = listOf(123.0, 456.0), + boolListField = listOf(true, false, true), + instantsField = setOf(ArcsInstant.ofEpochMilli(1), ArcsInstant.ofEpochMilli(2)), + bigintsField = setOf(BigInt.ONE, BigInt.TEN), + inlineEntityField = inline1, + inlineListField = listOf(inline2, inline3), + inlinesField = setOf(inline2, inline1), + referenceListField = listOf(ref1, ref2) + ) + + assertThat(entity.boolField).isEqualTo(true) + assertThat(entity.numField).isEqualTo(123.0) + assertThat(entity.textField).isEqualTo("abc") + assertThat(entity.referenceField).isEqualTo(ref1) + assertThat(entity.byteField).isEqualTo(47) + assertThat(entity.shortField).isEqualTo(30000) + assertThat(entity.intField).isEqualTo(1000000000) + assertThat(entity.longField).isEqualTo(15000000000L) + assertThat(entity.charField).isEqualTo('A') + assertThat(entity.floatField).isEqualTo(43.23f) + assertThat(entity.doubleField).isEqualTo(77.66E200) + assertThat(entity.instantField).isEqualTo(ArcsInstant.ofEpochMilli(10.toLong())) + assertThat(entity.bigintField).isEqualTo(BigInt.TEN) + assertThat(entity.boolsField).containsExactly(false) + assertThat(entity.numsField).containsExactly(456.0, 789.0) + assertThat(entity.textsField).containsExactly("def", "ghi") + assertThat(entity.referencesField).containsExactly(ref2, ref3) + assertThat(entity.bytesField).containsExactly(23.toByte(), 34.toByte()) + assertThat(entity.shortsField).containsExactly(234.toShort(), 345.toShort()) + assertThat(entity.intsField).containsExactly(234567, 345678) + assertThat(entity.longsField).containsExactly(1L, 1234567890123L) + assertThat(entity.charsField).containsExactly('A', 'R', 'C', 'S') + assertThat(entity.floatsField).containsExactly(2.3f, 3.4f) + assertThat(entity.doublesField).containsExactly(2.3E200, 3.4E100) + assertThat(entity.textListField).containsExactly("text 1", "text 2") + assertThat(entity.numListField).containsExactly(123.0, 456.0) + 
assertThat(entity.boolListField).containsExactly(true, false, true) + assertThat(entity.instantsField).containsExactly( + ArcsInstant.ofEpochMilli(1), + ArcsInstant.ofEpochMilli(2) + ) + assertThat(entity.bigintsField).containsExactly(BigInt.ONE, BigInt.TEN) + assertThat(entity.inlineEntityField).isEqualTo(inline1) + assertThat(entity.inlineListField).containsExactly(inline2, inline3) + assertThat(entity.inlinesField).containsExactly(inline2, inline1) + assertThat(entity.referenceListField).containsExactly(ref1, ref2) + } + + @Test + fun ensureEntityFields() { + val entity = FixtureEntity() + assertThat(entity.entityId).isNull() + + entity.ensureEntityFields(idGenerator, "handle", FakeTime(currentTime)) + val entityId = entity.entityId + + // Check that the entity ID has been set to *something*. + assertThat(entityId).isNotNull() + assertThat(entityId).isNotEmpty() + assertThat(entityId).isNotEqualTo(NO_REFERENCE_ID) + assertThat(entityId).contains("handle") + + val creationTimestamp = entity.serialize().creationTimestamp + + assertThat(creationTimestamp).isEqualTo(currentTime) + + // Calling it again doesn't overwrite id and timestamp. + entity.ensureEntityFields( + idGenerator, + "something-else", + FakeTime(currentTime + 10) + ) + assertThat(entity.entityId).isEqualTo(entityId) + assertThat(entity.serialize().creationTimestamp).isEqualTo(creationTimestamp) + } + + @Test + fun expiryTimestamp() { + val entity = FixtureEntity() + entity.ensureEntityFields( + idGenerator, + "handle", + FakeTime(currentTime), + Ttl.Minutes(1) + ) + val expirationTimestamp = entity.serialize().expirationTimestamp + + assertThat(expirationTimestamp).isEqualTo(currentTime + 60000) // 1 minute = 60,000 ms. 
+ } + + @Test + fun copy() = runTest { + val ref1 = fixtureEntities.createInnerEntityReference("bar1") + val ref2 = fixtureEntities.createInnerEntityReference("bar2") + val ref3 = fixtureEntities.createInnerEntityReference("bar3") + val inline1 = fixtureEntities.generateInnerEntity() + val inline2 = fixtureEntities.generateInnerEntity() + val inline3 = fixtureEntities.generateInnerEntity() + val entity = FixtureEntity( + boolField = true, + numField = 123.0, + textField = "abc", + referenceField = ref1, + byteField = 47, + shortField = 30000, + intField = 1000000000, + longField = 15000000000L, + charField = 'A', + floatField = 43.23f, + doubleField = 77.66E200, + instantField = ArcsInstant.ofEpochMilli(10.toLong()), + bigintField = BigInt.TEN, + boolsField = setOf(false), + numsField = setOf(456.0, 789.0), + textsField = setOf("def", "ghi"), + referencesField = setOf(ref2, ref3), + bytesField = setOf(23, 34), + shortsField = setOf(234, 345), + intsField = setOf(234567, 345678), + longsField = setOf(1L, 1234567890123L), + charsField = setOf('A', 'R', 'C', 'S'), + floatsField = setOf(2.3f, 3.4f), + doublesField = setOf(2.3E200, 3.4E100), + textListField = listOf("text 1", "text 2"), + numListField = listOf(123.0, 456.0), + boolListField = listOf(true, false, true), + instantsField = setOf(ArcsInstant.ofEpochMilli(1), ArcsInstant.ofEpochMilli(2)), + bigintsField = setOf(BigInt.ONE, BigInt.TEN), + inlineEntityField = inline1, + inlineListField = listOf(inline2, inline3), + inlinesField = setOf(inline2, inline1), + referenceListField = listOf(ref1, ref2) + ) + + // Copying an unidentified entity should give an exact copy of the entity. + assertThat(entity.copy()).isEqualTo(entity) + + // Copying an identified entity should reset the ID. + entity.identify() + val copy1 = entity.copy() + + assertThat(copy1.entityId).isNull() + assertThat(copy1).isNotEqualTo(entity) + + // Copying an entity with replacement fields should overwrite those fields in the copy. 
+ val copy2 = entity.copy( + boolField = false, + numField = 456.0, + textField = "xyz", + referenceField = ref2, + byteField = 25, + shortField = -20000, + intField = -900000000, + longField = -16000000000L, + charField = 'a', + floatField = 23.43f, + doubleField = 66.77E100, + instantField = ArcsInstant.ofEpochMilli(20.toLong()), + bigintField = BigInt.ONE, + boolsField = setOf(true), + numsField = setOf(111.0, 222.0), + textsField = setOf("aaa", "bbb"), + referencesField = setOf(ref1, ref3), + bytesField = setOf(45, 56), + shortsField = setOf(456, 567), + intsField = setOf(456789, 567890), + longsField = setOf(1L, 2345678901234L), + charsField = setOf('R', 'O', 'C', 'K', 'S'), + floatsField = setOf(4.5f, 5.6f), + doublesField = setOf(4.5E50, 5.6E60), + textListField = listOf("text 3", "text 4"), + numListField = listOf(789.0, 111.0), + boolListField = listOf(false, false, false), + instantsField = setOf(ArcsInstant.ofEpochMilli(6), ArcsInstant.ofEpochMilli(7)), + bigintsField = setOf(BigInt.ZERO, BigInt.TEN), + inlineEntityField = inline2, + inlineListField = listOf(inline3, inline1), + inlinesField = setOf(inline3, inline2), + referenceListField = listOf(ref3, ref2) + ) + + assertThat(copy2.entityId).isNull() + assertThat(copy2.boolField).isFalse() + assertThat(copy2.numField).isEqualTo(456.0) + assertThat(copy2.textField).isEqualTo("xyz") + assertThat(copy2.referenceField).isEqualTo(ref2) + assertThat(copy2.byteField).isEqualTo(25) + assertThat(copy2.shortField).isEqualTo(-20000) + assertThat(copy2.intField).isEqualTo(-900000000) + assertThat(copy2.longField).isEqualTo(-16000000000L) + assertThat(copy2.charField).isEqualTo('a') + assertThat(copy2.floatField).isEqualTo(23.43f) + assertThat(copy2.doubleField).isEqualTo(66.77E100) + assertThat(copy2.instantField).isEqualTo(ArcsInstant.ofEpochMilli(20.toLong())) + assertThat(copy2.bigintField).isEqualTo(BigInt.ONE) + assertThat(copy2.boolsField).containsExactly(true) + 
assertThat(copy2.numsField).containsExactly(111.0, 222.0) + assertThat(copy2.textsField).containsExactly("aaa", "bbb") + assertThat(copy2.referencesField).containsExactly(ref1, ref3) + assertThat(copy2.bytesField).containsExactly(45.toByte(), 56.toByte()) + assertThat(copy2.shortsField).containsExactly(456.toShort(), 567.toShort()) + assertThat(copy2.intsField).containsExactly(456789, 567890) + assertThat(copy2.longsField).containsExactly(1L, 2345678901234L) + assertThat(copy2.charsField).containsExactly('R', 'O', 'C', 'K', 'S') + assertThat(copy2.floatsField).containsExactly(4.5f, 5.6f) + assertThat(copy2.doublesField).containsExactly(4.5E50, 5.6E60) + assertThat(copy2.textListField).containsExactly("text 3", "text 4") + assertThat(copy2.numListField).containsExactly(789.0, 111.0) + assertThat(copy2.boolListField).containsExactly(false, false, false) + assertThat(copy2.instantsField).containsExactly( + ArcsInstant.ofEpochMilli(6), + ArcsInstant.ofEpochMilli(7) + ) + assertThat(copy2.bigintsField).containsExactly(BigInt.ZERO, BigInt.TEN) + assertThat(copy2.inlineEntityField).isEqualTo(inline2) + assertThat(copy2.inlineListField).containsExactly(inline3, inline1) + assertThat(copy2.inlinesField).containsExactly(inline3, inline2) + assertThat(copy2.referenceListField).containsExactly(ref3, ref2) + } + + @Test + fun serialize_roundTrip() = runTest { + val ref1 = fixtureEntities.createInnerEntityReference("bar1") + val ref2 = fixtureEntities.createInnerEntityReference("bar2") + val ref3 = fixtureEntities.createInnerEntityReference("bar3") + val inline1 = fixtureEntities.generateInnerEntity() + val inline2 = fixtureEntities.generateInnerEntity() + val inline3 = fixtureEntities.generateInnerEntity() + val entity = FixtureEntity( + boolField = true, + numField = 123.0, + textField = "abc", + referenceField = ref1, + byteField = 47, + shortField = 30000, + intField = 1000000000, + longField = 15000000000L, + charField = 'A', + floatField = 43.23f, + doubleField = 
77.66E200, + instantField = ArcsInstant.ofEpochMilli(10.toLong()), + bigintField = BigInt.TEN, + boolsField = setOf(false), + numsField = setOf(456.0, 789.0), + textsField = setOf("def", "ghi"), + referencesField = setOf(ref2, ref3), + bytesField = setOf(23, 34), + shortsField = setOf(234, 345), + intsField = setOf(234567, 345678), + longsField = setOf(1L, 1234567890123L), + charsField = setOf('A', 'R', 'C', 'S'), + floatsField = setOf(2.3f, 3.4f), + doublesField = setOf(2.3E200, 3.4E100), + textListField = listOf("text 1", "text 2"), + numListField = listOf(123.0, 456.0), + boolListField = listOf(true, false, true), + instantsField = setOf(ArcsInstant.ofEpochMilli(1), ArcsInstant.ofEpochMilli(2)), + bigintsField = setOf(BigInt.ONE, BigInt.TEN), + inlineEntityField = inline1, + inlineListField = listOf(inline2, inline3), + inlinesField = setOf(inline2, inline1), + referenceListField = listOf(ref1, ref3) + ) + val entityId = entity.identify() + val rawEntity = entity.serialize() + val expected = + RawEntity( + entityId, + singletons = mapOf( + "textField" to "abc".toReferencable(), + "numField" to 123.0.toReferencable(), + "boolField" to true.toReferencable(), + "byteField" to 47.toByte().toReferencable(), + "shortField" to 30000.toShort().toReferencable(), + "intField" to 1000000000.toReferencable(), + "longField" to 15000000000L.toReferencable(), + "charField" to 'A'.toReferencable(), + "floatField" to 43.23f.toReferencable(), + "doubleField" to 77.66E200.toReferencable(), + "instantField" to ArcsInstant.ofEpochMilli(10.toLong()).toReferencable(), + "bigintField" to BigInt.TEN.toReferencable(), + "textListField" to listOf( + "text 1".toReferencable(), + "text 2".toReferencable() + ).toReferencable(FieldType.ListOf(FieldType.Text)), + "numListField" to listOf(123.0.toReferencable(), 456.0.toReferencable()).toReferencable( + FieldType.ListOf(FieldType.Number) + ), + "boolListField" to listOf( + true.toReferencable(), + false.toReferencable(), + true.toReferencable() 
+ ).toReferencable(FieldType.ListOf(FieldType.Boolean)), + "inlineEntityField" to inline1.serialize(), + "inlineListField" to listOf(inline2.serialize(), inline3.serialize()).toReferencable( + FieldType.ListOf(FieldType.InlineEntity(InnerEntity.SCHEMA.hash)) + ), + "referenceField" to ref1.toReferencable(), + "referenceListField" to ReferencableList( + listOf( + ref1.toReferencable(), + ref3.toReferencable() + ), + FieldType.ListOf(FieldType.EntityRef(InnerEntity.SCHEMA.hash)) + ), + "hardReferenceField" to null + ), + collections = mapOf( + "textsField" to setOf("def".toReferencable(), "ghi".toReferencable()), + "numsField" to setOf(456.0.toReferencable(), 789.0.toReferencable()), + "boolsField" to setOf(false.toReferencable()), + "bytesField" to setOf(23.toByte().toReferencable(), 34.toByte().toReferencable()), + "shortsField" to setOf( + 234.toShort().toReferencable(), + 345.toShort().toReferencable() + ), + "intsField" to setOf(234567.toReferencable(), 345678.toReferencable()), + "longsField" to setOf(1L.toReferencable(), 1234567890123L.toReferencable()), + "charsField" to setOf( + 'A'.toReferencable(), + 'R'.toReferencable(), + 'C'.toReferencable(), + 'S'.toReferencable() + ), + "floatsField" to setOf(2.3f.toReferencable(), 3.4f.toReferencable()), + "doublesField" to setOf(2.3E200.toReferencable(), 3.4E100.toReferencable()), + "instantsField" to setOf( + ArcsInstant.ofEpochMilli(1).toReferencable(), + ArcsInstant.ofEpochMilli(2).toReferencable() + ), + "bigintsField" to setOf(BigInt.ONE.toReferencable(), BigInt.TEN.toReferencable()), + "inlinesField" to setOf(inline2.serialize(), inline1.serialize()), + "referencesField" to setOf(ref2.toReferencable(), ref3.toReferencable()) + ), + creationTimestamp = 500L + ) + + assertThat(rawEntity).isEqualTo(expected) + assertThat(FixtureEntity.deserialize(rawEntity)).isEqualTo(entity) + } + + @Test + fun schemaRegistry() { + // The entity class should have registered itself statically. 
+ val hash = FixtureEntity.SCHEMA.hash + + assertThat(SchemaRegistry.getSchema(hash)).isEqualTo(FixtureEntity.SCHEMA) + } + + /** Generates and returns an ID for the entity. */ + private fun FixtureEntity.identify(): String { + assertThat(entityId).isNull() + ensureEntityFields(idGenerator, "handleName", FakeTime(currentTime)) + assertThat(entityId).isNotNull() + return entityId!! + } +} diff --git a/javatests/arcs/sdk/spec/EntitySpecTest.kt b/javatests/arcs/sdk/spec/EntitySpecTest.kt deleted file mode 100755 index 54d4730d759..00000000000 --- a/javatests/arcs/sdk/spec/EntitySpecTest.kt +++ /dev/null @@ -1,354 +0,0 @@ -package arcs.sdk.spec - -import arcs.core.common.Id -import arcs.core.data.Capability.Ttl -import arcs.core.data.RawEntity -import arcs.core.data.RawEntity.Companion.NO_REFERENCE_ID -import arcs.core.data.SchemaRegistry -import arcs.core.data.util.toReferencable -import arcs.core.testutil.handles.dispatchCreateReference -import arcs.core.testutil.handles.dispatchStore -import arcs.core.testutil.runTest -import arcs.core.util.testutil.LogRule -import arcs.jvm.util.testutil.FakeTime -import arcs.sdk.Reference -import com.google.common.truth.Truth.assertThat -import kotlinx.coroutines.ExperimentalCoroutinesApi -import org.junit.Before -import org.junit.Rule -import org.junit.Test -import org.junit.runner.RunWith -import org.junit.runners.JUnit4 - -private typealias Foo = EntitySpecParticle_Foo -private typealias Bar = EntitySpecParticle_Bars - -/** Specification tests for entities. 
*/ -@OptIn(ExperimentalCoroutinesApi::class) -@RunWith(JUnit4::class) -class EntitySpecTest { - - class EntitySpecParticle : AbstractEntitySpecParticle() - - private lateinit var idGenerator: Id.Generator - private var currentTime: Long = 500L - - @get:Rule - val log = LogRule() - - @get:Rule - val harness = EntitySpecParticleTestHarness { EntitySpecParticle() } - - @Before - fun setUp() { - idGenerator = Id.Generator.newForTest("session") - } - - @Test - fun createEmptyInstance() { - val entity = Foo() - assertThat(entity.bool).isFalse() - assertThat(entity.num).isEqualTo(0.0) - assertThat(entity.text).isEqualTo("") - assertThat(entity.ref).isNull() - assertThat(entity.bt).isEqualTo(0) - assertThat(entity.shrt).isEqualTo(0) - assertThat(entity.nt).isEqualTo(0) - assertThat(entity.lng).isEqualTo(0L) - assertThat(entity.chr).isEqualTo('\u0000') - assertThat(entity.flt).isEqualTo(0.0f) - assertThat(entity.dbl).isEqualTo(0.0) - assertThat(entity.bools).isEmpty() - assertThat(entity.nums).isEmpty() - assertThat(entity.texts).isEmpty() - assertThat(entity.refs).isEmpty() - assertThat(entity.bts).isEmpty() - assertThat(entity.shrts).isEmpty() - assertThat(entity.nts).isEmpty() - assertThat(entity.lngs).isEmpty() - assertThat(entity.chrs).isEmpty() - assertThat(entity.flts).isEmpty() - assertThat(entity.dbls).isEmpty() - } - - @Test - fun createWithFieldValues() = runTest { - harness.start() - - val ref1 = createBarReference(Bar(value = "bar1")) - val ref2 = createBarReference(Bar(value = "bar2")) - val ref3 = createBarReference(Bar(value = "bar3")) - val entity = Foo( - bool = true, - num = 123.0, - text = "abc", - ref = ref1, - bt = 47, - shrt = 30000, - nt = 1000000000, - lng = 15000000000L, - chr = 'A', - flt = 43.23f, - dbl = 77.66E200, - bools = setOf(false), - nums = setOf(456.0, 789.0), - texts = setOf("def", "ghi"), - refs = setOf(ref2, ref3), - bts = setOf(23, 34), - shrts = setOf(234, 345), - nts = setOf(234567, 345678), - lngs = setOf(1L, 1234567890123L), - 
chrs = setOf('A', 'R', 'C', 'S'), - flts = setOf(2.3f, 3.4f), - dbls = setOf(2.3E200, 3.4E100) - ) - assertThat(entity.bool).isEqualTo(true) - assertThat(entity.num).isEqualTo(123.0) - assertThat(entity.text).isEqualTo("abc") - assertThat(entity.ref).isEqualTo(ref1) - assertThat(entity.bt).isEqualTo(47) - assertThat(entity.shrt).isEqualTo(30000) - assertThat(entity.nt).isEqualTo(1000000000) - assertThat(entity.lng).isEqualTo(15000000000L) - assertThat(entity.chr).isEqualTo('A') - assertThat(entity.flt).isEqualTo(43.23f) - assertThat(entity.dbl).isEqualTo(77.66E200) - assertThat(entity.bools).containsExactly(false) - assertThat(entity.nums).containsExactly(456.0, 789.0) - assertThat(entity.texts).containsExactly("def", "ghi") - assertThat(entity.refs).containsExactly(ref2, ref3) - assertThat(entity.bts).containsExactly(23.toByte(), 34.toByte()) - assertThat(entity.shrts).containsExactly(234.toShort(), 345.toShort()) - assertThat(entity.nts).containsExactly(234567, 345678) - assertThat(entity.lngs).containsExactly(1L, 1234567890123L) - assertThat(entity.chrs).containsExactly('A', 'R', 'C', 'S') - assertThat(entity.flts).containsExactly(2.3f, 3.4f) - assertThat(entity.dbls).containsExactly(2.3E200, 3.4E100) - } - - @Test - fun ensureEntityFields() { - val entity = Foo() - assertThat(entity.entityId).isNull() - - entity.ensureEntityFields(idGenerator, "handle", FakeTime(currentTime)) - val entityId = entity.entityId - - // Check that the entity ID has been set to *something*. - assertThat(entityId).isNotNull() - assertThat(entityId).isNotEmpty() - assertThat(entityId).isNotEqualTo(NO_REFERENCE_ID) - assertThat(entityId).contains("handle") - - val creationTimestamp = entity.serialize().creationTimestamp - assertThat(creationTimestamp).isEqualTo(currentTime) - - // Calling it again doesn't overwrite id and timestamp. 
- entity.ensureEntityFields(idGenerator, "something-else", FakeTime(currentTime + 10)) - assertThat(entity.entityId).isEqualTo(entityId) - assertThat(entity.serialize().creationTimestamp).isEqualTo(creationTimestamp) - } - - @Test - fun expiryTimestamp() { - val entity = Foo() - - entity.ensureEntityFields(idGenerator, "handle", FakeTime(currentTime), Ttl.Minutes(1)) - - val expirationTimestamp = entity.serialize().expirationTimestamp - assertThat(expirationTimestamp).isEqualTo(currentTime + 60000) // 1 minute = 60,000 ms. - } - - @Test - fun copy() = runTest { - harness.start() - - val ref1 = createBarReference(Bar(value = "bar1")) - val ref2 = createBarReference(Bar(value = "bar2")) - val ref3 = createBarReference(Bar(value = "bar3")) - val entity = Foo( - bool = true, - num = 123.0, - text = "abc", - ref = ref1, - bt = 47, - shrt = 30000, - nt = 1000000000, - lng = 15000000000L, - chr = 'A', - flt = 43.23f, - dbl = 77.66E200, - bools = setOf(false), - nums = setOf(456.0, 789.0), - texts = setOf("def", "ghi"), - refs = setOf(ref2, ref3), - bts = setOf(23, 34), - shrts = setOf(234, 345), - nts = setOf(234567, 345678), - lngs = setOf(1L, 1234567890123L), - chrs = setOf('A', 'R', 'C', 'S'), - flts = setOf(2.3f, 3.4f), - dbls = setOf(2.3E200, 3.4E100) - ) - - // Copying an unidentified entity should give an exact copy of the entity. - assertThat(entity.copy()).isEqualTo(entity) - - // Copying an identified entity should reset the ID. - entity.identify() - val copy1 = entity.copy() - assertThat(copy1.entityId).isNull() - assertThat(copy1).isNotEqualTo(entity) - - // Copying an entity with replacement fields should overwrite those fields in the copy. 
- val copy2 = entity.copy( - bool = false, - num = 456.0, - text = "xyz", - ref = ref2, - bt = 25, - shrt = -20000, - nt = -900000000, - lng = -16000000000L, - chr = 'a', - flt = 23.43f, - dbl = 66.77E100, - bools = setOf(true), - nums = setOf(111.0, 222.0), - texts = setOf("aaa", "bbb"), - refs = setOf(ref1, ref3), - bts = setOf(45, 56), - shrts = setOf(456, 567), - nts = setOf(456789, 567890), - lngs = setOf(1L, 2345678901234L), - chrs = setOf('R', 'O', 'C', 'K', 'S'), - flts = setOf(4.5f, 5.6f), - dbls = setOf(4.5E50, 5.6E60) - ) - assertThat(copy2.entityId).isNull() - assertThat(copy2.bool).isFalse() - assertThat(copy2.num).isEqualTo(456.0) - assertThat(copy2.text).isEqualTo("xyz") - assertThat(copy2.ref).isEqualTo(ref2) - assertThat(copy2.bt).isEqualTo(25) - assertThat(copy2.shrt).isEqualTo(-20000) - assertThat(copy2.nt).isEqualTo(-900000000) - assertThat(copy2.lng).isEqualTo(-16000000000L) - assertThat(copy2.chr).isEqualTo('a') - assertThat(copy2.flt).isEqualTo(23.43f) - assertThat(copy2.dbl).isEqualTo(66.77E100) - assertThat(copy2.bools).containsExactly(true) - assertThat(copy2.nums).containsExactly(111.0, 222.0) - assertThat(copy2.texts).containsExactly("aaa", "bbb") - assertThat(copy2.refs).containsExactly(ref1, ref3) - assertThat(copy2.bts).containsExactly(45.toByte(), 56.toByte()) - assertThat(copy2.shrts).containsExactly(456.toShort(), 567.toShort()) - assertThat(copy2.nts).containsExactly(456789, 567890) - assertThat(copy2.lngs).containsExactly(1L, 2345678901234L) - assertThat(copy2.chrs).containsExactly('R', 'O', 'C', 'K', 'S') - assertThat(copy2.flts).containsExactly(4.5f, 5.6f) - assertThat(copy2.dbls).containsExactly(4.5E50, 5.6E60) - } - - @Test - fun serialize_roundTrip() = runTest { - harness.start() - - val ref1 = createBarReference(Bar(value = "bar1")) - val ref2 = createBarReference(Bar(value = "bar2")) - val ref3 = createBarReference(Bar(value = "bar3")) - val entity = Foo( - bool = true, - num = 123.0, - text = "abc", - ref = ref1, - bt = 
47, - shrt = 30000, - nt = 1000000000, - lng = 15000000000L, - chr = 'A', - flt = 43.23f, - dbl = 77.66E200, - bools = setOf(false), - nums = setOf(456.0, 789.0), - texts = setOf("def", "ghi"), - refs = setOf(ref2, ref3), - bts = setOf(23, 34), - shrts = setOf(234, 345), - nts = setOf(234567, 345678), - lngs = setOf(1L, 1234567890123L), - chrs = setOf('A', 'R', 'C', 'S'), - flts = setOf(2.3f, 3.4f), - dbls = setOf(2.3E200, 3.4E100) - ) - val entityId = entity.identify() - - val rawEntity = entity.serialize() - - assertThat(rawEntity).isEqualTo( - RawEntity( - entityId, - singletons = mapOf( - "bool" to true.toReferencable(), - "num" to 123.0.toReferencable(), - "text" to "abc".toReferencable(), - "ref" to ref1.toReferencable(), - "bt" to 47.toByte().toReferencable(), - "shrt" to 30000.toShort().toReferencable(), - "nt" to 1000000000.toReferencable(), - "lng" to 15000000000L.toReferencable(), - "chr" to 'A'.toReferencable(), - "flt" to 43.23f.toReferencable(), - "dbl" to 77.66E200.toReferencable() - ), - collections = mapOf( - "bools" to setOf(false.toReferencable()), - "nums" to setOf(456.0.toReferencable(), 789.0.toReferencable()), - "texts" to setOf("def".toReferencable(), "ghi".toReferencable()), - "refs" to setOf(ref2.toReferencable(), ref3.toReferencable()), - "bts" to setOf(23.toByte().toReferencable(), 34.toByte().toReferencable()), - "shrts" to setOf( - 234.toShort().toReferencable(), - 345.toShort().toReferencable() - ), - "nts" to setOf(234567.toReferencable(), 345678.toReferencable()), - "lngs" to setOf(1L.toReferencable(), 1234567890123L.toReferencable()), - "chrs" to setOf( - 'A'.toReferencable(), - 'R'.toReferencable(), - 'C'.toReferencable(), - 'S'.toReferencable() - ), - "flts" to setOf(2.3f.toReferencable(), 3.4f.toReferencable()), - "dbls" to setOf(2.3E200.toReferencable(), 3.4E100.toReferencable()) - ), - creationTimestamp = 500L - ) - ) - assertThat(Foo.deserialize(rawEntity)).isEqualTo(entity) - } - - @Test - fun schemaRegistry() { - // The 
entity class should have registered itself statically. - val hash = Foo.SCHEMA.hash - assertThat(SchemaRegistry.getSchema(hash)).isEqualTo(Foo.SCHEMA) - } - - /** - * Stores the given [Bar] entity in a collection, and then creates and returns a reference to - * it. - */ - private suspend fun createBarReference(bar: Bar): Reference { - harness.bars.dispatchStore(bar) - return harness.bars.dispatchCreateReference(bar) - } - - /** Generates and returns an ID for the entity. */ - private fun (Foo).identify(): String { - assertThat(entityId).isNull() - ensureEntityFields(idGenerator, "handleName", FakeTime(currentTime)) - assertThat(entityId).isNotNull() - return entityId!! - } -} diff --git a/javatests/arcs/sdk/spec/entity.arcs b/javatests/arcs/sdk/spec/entity.arcs deleted file mode 100644 index d79b7d5b382..00000000000 --- a/javatests/arcs/sdk/spec/entity.arcs +++ /dev/null @@ -1,37 +0,0 @@ -meta - namespace: arcs.sdk.spec - -schema Bar - value: Text - -schema Foo - text: Text - num: Number - bool: Boolean - ref: &Bar - - bt: Byte - shrt: Short - nt: Int - lng: Long - chr: Char - flt: Float - dbl: Double - - texts: [Text] - nums: [Number] - bools: [Boolean] - refs: [&Bar] - bts: [Byte] - shrts: [Short] - nts: [Int] - lngs: [Long] - chrs: [Char] - flts: [Float] - dbls: [Double] - -particle EntitySpecParticle - foo: writes Foo {text, num, bool, ref, bt, shrt, nt, lng, chr, flt, dbl, texts, nums, bools, refs, bts, shrts, nts, lngs, chrs, flts, dbls} - - // Used to obtain references to Bar entities. 
- bars: reads writes [Bar {value}] From 7f5e9f6dc92113d7e26ca54df978cf4ae7b0c25d Mon Sep 17 00:00:00 2001 From: Cameron Silvestrini Date: Mon, 7 Dec 2020 22:48:15 -0800 Subject: [PATCH 18/31] Fix typo PiperOrigin-RevId: 346255649 --- java/arcs/sdk/storage/BUILD | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/java/arcs/sdk/storage/BUILD b/java/arcs/sdk/storage/BUILD index 8b81927ecb0..a73dd1de8be 100644 --- a/java/arcs/sdk/storage/BUILD +++ b/java/arcs/sdk/storage/BUILD @@ -7,7 +7,7 @@ licenses(["notice"]) package(default_visibility = ["//java/arcs:allowed-packages"]) -# arcs_kt_library instead of arcs_kt_library because we have no srcs and arcs_kt_library doesn't +# arcs_java_library instead of arcs_kt_library because we have no srcs and arcs_kt_library doesn't # like that. arcs_java_library( name = "storage", From 2aff69007d9d04db45162a88a06d038449ad15cb Mon Sep 17 00:00:00 2001 From: Maria Mandlis Date: Tue, 8 Dec 2020 11:16:10 -0800 Subject: [PATCH 19/31] Tests for BaseHandle.kt Coverage: 91.49% (inline protected method not covered, but tested indirectly) PiperOrigin-RevId: 346367830 --- java/arcs/core/entity/BaseHandle.kt | 3 +- java/arcs/core/entity/testutil/BUILD | 2 + .../testutil/StorableReferencableEntity.kt | 46 +++ javatests/arcs/core/entity/BaseHandleTest.kt | 271 ++++++++++++++++++ .../arcs/core/entity/CollectionHandleTest.kt | 50 ++-- .../arcs/core/entity/SingletonHandleTest.kt | 101 +++---- 6 files changed, 375 insertions(+), 98 deletions(-) create mode 100644 java/arcs/core/entity/testutil/StorableReferencableEntity.kt create mode 100644 javatests/arcs/core/entity/BaseHandleTest.kt diff --git a/java/arcs/core/entity/BaseHandle.kt b/java/arcs/core/entity/BaseHandle.kt index 0048c0bf65b..b9dd58bf265 100644 --- a/java/arcs/core/entity/BaseHandle.kt +++ b/java/arcs/core/entity/BaseHandle.kt @@ -73,7 +73,8 @@ abstract class BaseHandle(config: BaseHandleConfig) : Handle { * entity is actually stored in the handle before calling this 
internal method. */ @Suppress("UNCHECKED_CAST") - protected fun createReferenceInternal(entity: E): Reference { + // VisibleForTesting + fun createReferenceInternal(entity: E): Reference { val storageKey = requireNotNull(storageProxy.storageKey as? ReferenceModeStorageKey) { "ReferenceModeStorageKey required in order to create references." } diff --git a/java/arcs/core/entity/testutil/BUILD b/java/arcs/core/entity/testutil/BUILD index 831e9d915a8..1e8150093f6 100644 --- a/java/arcs/core/entity/testutil/BUILD +++ b/java/arcs/core/entity/testutil/BUILD @@ -11,6 +11,7 @@ arcs_kt_jvm_library( exports = [":fixture_arcs_gen"], deps = [ ":fixture_arcs_gen", + "//java/arcs/core/common", "//java/arcs/core/data:annotations", "//java/arcs/core/data:data-kt", "//java/arcs/core/data:rawentity", @@ -19,6 +20,7 @@ arcs_kt_jvm_library( "//java/arcs/core/storage:reference", "//java/arcs/core/storage:storage_key", "//java/arcs/core/storage/testutil", + "//java/arcs/core/util", "//java/arcs/core/util:utils-platform-dependencies", ], ) diff --git a/java/arcs/core/entity/testutil/StorableReferencableEntity.kt b/java/arcs/core/entity/testutil/StorableReferencableEntity.kt new file mode 100644 index 00000000000..a7df797f69d --- /dev/null +++ b/java/arcs/core/entity/testutil/StorableReferencableEntity.kt @@ -0,0 +1,46 @@ +package arcs.core.entity.testutil + +import arcs.core.common.Id +import arcs.core.common.Referencable +import arcs.core.common.ReferenceId +import arcs.core.data.Capability.Ttl +import arcs.core.data.RawEntity +import arcs.core.data.Schema +import arcs.core.data.SchemaFields +import arcs.core.data.SchemaName +import arcs.core.entity.Entity +import arcs.core.entity.EntitySpec +import arcs.core.entity.Storable +import arcs.core.util.Time + +/** + * A fake entity class, that implements both - [Storable] and [Referencable] interfaces and used for + * Handles classes tests. 
+ */ +class StorableReferencableEntity( + override val id: ReferenceId, + override val entityId: String? = null, + override val creationTimestamp: Long = RawEntity.UNINITIALIZED_TIMESTAMP, + override val expirationTimestamp: Long = RawEntity.UNINITIALIZED_TIMESTAMP +) : Entity, Storable, Referencable { + override fun ensureEntityFields( + idGenerator: Id.Generator, + handleName: String, + time: Time, + ttl: Ttl + ) {} + + final override fun reset() {} + + override fun serialize(storeSchema: Schema?) = RawEntity() + + companion object : EntitySpec { + override val SCHEMA = Schema( + setOf(SchemaName("StorableReferencableEntity")), + SchemaFields(emptyMap(), emptyMap()), + "abc123" + ) + + override fun deserialize(data: RawEntity) = StorableReferencableEntity("fake") + } +} diff --git a/javatests/arcs/core/entity/BaseHandleTest.kt b/javatests/arcs/core/entity/BaseHandleTest.kt new file mode 100644 index 00000000000..ddcb0b54ee4 --- /dev/null +++ b/javatests/arcs/core/entity/BaseHandleTest.kt @@ -0,0 +1,271 @@ +package arcs.core.entity + +import arcs.core.crdt.VersionMap +import arcs.core.data.EntityType +import arcs.core.data.HandleMode +import arcs.core.data.RawEntity +import arcs.core.data.SingletonType +import arcs.core.entity.testutil.StorableReferencableEntity +import arcs.core.storage.Dereferencer +import arcs.core.storage.Reference as StorageReference +import arcs.core.storage.StorageProxy.CallbackIdentifier +import arcs.core.storage.keys.RamDiskStorageKey +import arcs.core.storage.referencemode.ReferenceModeStorageKey +import arcs.core.type.Type +import com.google.common.truth.Truth.assertThat +import com.nhaarman.mockitokotlin2.any +import com.nhaarman.mockitokotlin2.eq +import com.nhaarman.mockitokotlin2.mock +import com.nhaarman.mockitokotlin2.never +import com.nhaarman.mockitokotlin2.verify +import com.nhaarman.mockitokotlin2.whenever +import kotlin.test.assertFailsWith +import kotlinx.coroutines.CompletableDeferred +import 
kotlinx.coroutines.CoroutineDispatcher +import kotlinx.coroutines.ExperimentalCoroutinesApi +import kotlinx.coroutines.test.runBlockingTest +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +private typealias TestStorageAdapter = + StorageAdapter + +private class TestBaseHandle( + config: BaseHandleConfig +) : BaseHandle(config) { + fun callCheckPreconditions() = checkPreconditions {} +} + +@OptIn(ExperimentalCoroutinesApi::class) +@RunWith(JUnit4::class) +class BaseHandleTest { + // TODO(b/175070564): move the mocking methods into testutil. + private fun mockStorageProxy(): SingletonProxy { + val proxyVersionMap = VersionMap() + return mock { + on { getVersionMap() }.then { proxyVersionMap } + on { applyOp(any()) }.then { CompletableDeferred(true) } + on { applyOps(any()) }.then { CompletableDeferred(true) } + on { prepareForSync() }.then { Unit } + on { addOnUpdate(any(), any()) }.then { Unit } + on { addOnResync(any(), any()) }.then { Unit } + on { addOnDesync(any(), any()) }.then { Unit } + } + } + + private fun mockStorageAdapter(): TestStorageAdapter { + return mock { + on { referencableToStorable(any()) }.then { it.arguments[0] as StorableReferencableEntity } + on { storableToReferencable(any()) }.then { it.arguments[0] as StorableReferencableEntity } + } + } + + private fun createHandle( + handleName: String = "defaultHandle", + particleName: String = "defaultParticle", + type: Type = SingletonType(EntityType(StorableReferencableEntity.SCHEMA)), + handleMode: HandleMode = HandleMode.ReadWriteQuery, + proxy: SingletonProxy = mockStorageProxy(), + storageAdapter: TestStorageAdapter = mockStorageAdapter(), + dereferencerFactory: EntityDereferencerFactory = mock() + ): TestBaseHandle { + val config = SingletonHandle.Config( + handleName, + HandleSpec( + "handle", + handleMode, + type, + setOf(EntityBaseSpec(StorableReferencableEntity.SCHEMA)) + ), + proxy, + storageAdapter, + dereferencerFactory, + particleName + ) + 
return TestBaseHandle(config) + } + + private fun EntityDereferencerFactory.mockDereferencer(entity: RawEntity?) { + whenever(injectDereferencers(any(), any())).then { invocation -> + assertThat(invocation.arguments[1]).isInstanceOf(StorageReference::class.java) + if (invocation.arguments[1] is StorageReference) { + (invocation.arguments[1] as StorageReference).dereferencer = + object : Dereferencer { + override suspend fun dereference(reference: StorageReference) = entity + } + } + } + } + + @Test + fun init_readSpec_storageProxyPrepareForSync() { + val proxy = mockStorageProxy() + createHandle(handleMode = HandleMode.Read, proxy = proxy) + verify(proxy).prepareForSync() + } + + @Test + fun init_readWriteSpec_storageProxyPrepareForSync() { + val proxy = mockStorageProxy() + createHandle(handleMode = HandleMode.ReadWrite, proxy = proxy) + verify(proxy).prepareForSync() + } + + @Test + fun init_writeOnlySpec_storageProxyDoesNotPrepareForSync() { + val proxy = mockStorageProxy() + createHandle(handleMode = HandleMode.Write, proxy = proxy) + verify(proxy, never()).prepareForSync() + } + + @Test + fun registerForStorageEvents_callStorageProxyRegisterForStorageEvents() { + val proxy = mockStorageProxy() + val handle = createHandle(handleName = HANDLE_NAME, particleName = PARTICLE_NAME, proxy = proxy) + + handle.registerForStorageEvents({}) + + verify(proxy) + .registerForStorageEvents(eq(CallbackIdentifier(HANDLE_NAME, PARTICLE_NAME)), any()) + } + + @Test + fun maybeInitiateSync_callStorageProxyMaybeInitiateSync() { + val proxy = mockStorageProxy() + val handle = createHandle(proxy = proxy) + + handle.maybeInitiateSync() + + verify(proxy).maybeInitiateSync() + } + + @Test + fun getProxy() { + val proxy = mockStorageProxy() + val handle = createHandle(proxy = proxy) + + assertThat(handle.getProxy()).isEqualTo(proxy) + } + + @Test + fun dispatcher_returnsStorageProxyDispatcher() { + val proxy = mockStorageProxy() + val handle = createHandle(proxy = proxy) + val 
dispatcher: CoroutineDispatcher = mock {} + whenever(proxy.dispatcher).thenReturn(dispatcher) + + assertThat(handle.dispatcher).isEqualTo(dispatcher) + } + + @Test + fun onReady() { + val proxy = mockStorageProxy() + val handle = createHandle(handleName = HANDLE_NAME, particleName = PARTICLE_NAME, proxy = proxy) + + handle.onReady({}) + + verify(proxy).addOnReady(eq(CallbackIdentifier(HANDLE_NAME, PARTICLE_NAME)), any()) + } + + @Test + fun checkPreconditions_closed() { + val handle = createHandle(handleName = HANDLE_NAME) + handle.close() + + val e = assertFailsWith { handle.callCheckPreconditions() } + + assertThat(e).hasMessageThat().isEqualTo("Handle $HANDLE_NAME is closed") + } + + @Test + fun checkPreconditions_notClosed() { + val handle = createHandle() + + handle.callCheckPreconditions() + } + + @Test + fun unregisterForStorageEvents() { + val proxy = mockStorageProxy() + val handle = createHandle(handleName = HANDLE_NAME, particleName = PARTICLE_NAME, proxy = proxy) + + handle.unregisterForStorageEvents() + + verify(proxy).removeCallbacksForName(eq(CallbackIdentifier(HANDLE_NAME, PARTICLE_NAME))) + } + + @Test + fun close() { + val proxy = mockStorageProxy() + val handle = createHandle(handleName = HANDLE_NAME, particleName = PARTICLE_NAME, proxy = proxy) + + handle.close() + + val e = assertFailsWith { handle.callCheckPreconditions() } + assertThat(e).hasMessageThat().isEqualTo("Handle $HANDLE_NAME is closed") + + // ensure callbacks are unregistered. + verify(proxy).removeCallbacksForName(eq(CallbackIdentifier(HANDLE_NAME, PARTICLE_NAME))) + } + + @Test + fun createReferenceInternal_notRefereceModeKey_fails() { + val handle = createHandle() + + val e = assertFailsWith { + handle.createReferenceInternal(StorableReferencableEntity("1", "fake-id")) + } + + assertThat(e).hasMessageThat().isEqualTo( + "ReferenceModeStorageKey required in order to create references." 
+ ) + } + + @Test + fun createReferenceInternal_succeess() { + val proxy = mockStorageProxy() + val handle = createHandle(proxy = proxy) + whenever(proxy.storageKey).thenReturn( + ReferenceModeStorageKey(RamDiskStorageKey("x"), RamDiskStorageKey("y")) + ) + val entity = StorableReferencableEntity("1", "fake-id") + + val reference = handle.createReferenceInternal(entity) + assertThat(reference.entityId).isEqualTo(entity.entityId) + } + + @Test + fun createForeignReference_noDereferencer_throws() = runBlockingTest { + val handle = createHandle() + val e = assertFailsWith { + handle.createForeignReference(StorableReferencableEntity, "1") + } + assertThat(e).hasMessageThat().isEqualTo("No dereferencer installed on Reference object") + } + + @Test + fun createForeignReference_nullDereference_returnsNull() = runBlockingTest { + val dereferencerFactory = mock() + dereferencerFactory.mockDereferencer(null) + val handle = createHandle(dereferencerFactory = dereferencerFactory) + + assertThat(handle.createForeignReference(StorableReferencableEntity, "1")).isNull() + } + + @Test + fun createForeignReference_dereferenceEntity() = runBlockingTest { + val dereferencerFactory = mock() + dereferencerFactory.mockDereferencer(RawEntity("entity1")) + val handle = createHandle(dereferencerFactory = dereferencerFactory) + + val reference = handle.createForeignReference(StorableReferencableEntity, "the-entity-id")!! 
+ + assertThat(reference.entityId).isEqualTo("the-entity-id") + } + + companion object { + private const val HANDLE_NAME = "myHandle" + private const val PARTICLE_NAME = "myParticle" + } +} diff --git a/javatests/arcs/core/entity/CollectionHandleTest.kt b/javatests/arcs/core/entity/CollectionHandleTest.kt index 1f99846923b..f81150fc50d 100644 --- a/javatests/arcs/core/entity/CollectionHandleTest.kt +++ b/javatests/arcs/core/entity/CollectionHandleTest.kt @@ -1,15 +1,11 @@ package arcs.core.entity -import arcs.core.common.Referencable -import arcs.core.common.ReferenceId import arcs.core.crdt.CrdtSet.Operation import arcs.core.crdt.VersionMap import arcs.core.data.CollectionType import arcs.core.data.EntityType import arcs.core.data.HandleMode -import arcs.core.data.Schema -import arcs.core.data.SchemaFields -import arcs.core.data.SchemaName +import arcs.core.entity.testutil.StorableReferencableEntity import com.nhaarman.mockitokotlin2.any import com.nhaarman.mockitokotlin2.eq import com.nhaarman.mockitokotlin2.mock @@ -28,9 +24,11 @@ import org.junit.runners.JUnit4 class CollectionHandleTest { private lateinit var proxyVersionMap: VersionMap private lateinit var dereferencerFactory: EntityDereferencerFactory - private lateinit var proxy: CollectionProxy - private lateinit var storageAdapter: StorageAdapter - private lateinit var handle: CollectionHandle + private lateinit var proxy: CollectionProxy + private lateinit var storageAdapter: + StorageAdapter + private lateinit var handle: + CollectionHandle @Before fun setUp() { @@ -42,8 +40,8 @@ class CollectionHandleTest { on { prepareForSync() }.then { Unit } } storageAdapter = mock { - on { referencableToStorable(any()) }.then { it.arguments[0] as FakeEntity } - on { storableToReferencable(any()) }.then { it.arguments[0] as FakeEntity } + on { referencableToStorable(any()) }.then { it.arguments[0] as StorableReferencableEntity } + on { storableToReferencable(any()) }.then { it.arguments[0] as 
StorableReferencableEntity } } dereferencerFactory = mock { // Maybe add mock endpoints here, if needed. @@ -54,7 +52,7 @@ class CollectionHandleTest { HandleSpec( "handle", HandleMode.ReadWriteQuery, - CollectionType(EntityType(FakeEntity.SCHEMA)), + CollectionType(EntityType(StorableReferencableEntity.SCHEMA)), emptySet() ), proxy, @@ -67,12 +65,12 @@ class CollectionHandleTest { @Test fun storeAll() = runBlockingTest { - val items = listOf( - FakeEntity("1"), - FakeEntity("2"), - FakeEntity("3"), - FakeEntity("4") - ) + val entity1 = StorableReferencableEntity("1") + val entity2 = StorableReferencableEntity("2") + val entity3 = StorableReferencableEntity("3") + val entity4 = StorableReferencableEntity("4") + + val items = listOf(entity1, entity2, entity3, entity4) val result = handle.storeAll(items) result.join() @@ -83,38 +81,28 @@ class CollectionHandleTest { Operation.Add( HANDLE_NAME, VersionMap().also { it[HANDLE_NAME] = 1 }, - FakeEntity("1") + entity1 ), Operation.Add( HANDLE_NAME, VersionMap().also { it[HANDLE_NAME] = 2 }, - FakeEntity("2") + entity2 ), Operation.Add( HANDLE_NAME, VersionMap().also { it[HANDLE_NAME] = 3 }, - FakeEntity("3") + entity3 ), Operation.Add( HANDLE_NAME, VersionMap().also { it[HANDLE_NAME] = 4 }, - FakeEntity("4") + entity4 ) ) ) ) } - private data class FakeEntity(override val id: ReferenceId) : Storable, Referencable { - companion object { - val SCHEMA = Schema( - setOf(SchemaName("FakeEntity")), - SchemaFields(emptyMap(), emptyMap()), - "abc123" - ) - } - } - companion object { private const val HANDLE_NAME = "myHandle" } diff --git a/javatests/arcs/core/entity/SingletonHandleTest.kt b/javatests/arcs/core/entity/SingletonHandleTest.kt index d57c3f0d418..64fff8b1226 100644 --- a/javatests/arcs/core/entity/SingletonHandleTest.kt +++ b/javatests/arcs/core/entity/SingletonHandleTest.kt @@ -1,24 +1,16 @@ package arcs.core.entity -import arcs.core.common.Id -import arcs.core.common.Referencable -import arcs.core.common.ReferenceId 
import arcs.core.crdt.CrdtSingleton.Operation import arcs.core.crdt.VersionMap -import arcs.core.data.Capability.Ttl import arcs.core.data.CollectionType import arcs.core.data.EntityType import arcs.core.data.HandleMode -import arcs.core.data.RawEntity -import arcs.core.data.Schema -import arcs.core.data.SchemaFields -import arcs.core.data.SchemaName import arcs.core.data.SingletonType +import arcs.core.entity.testutil.StorableReferencableEntity import arcs.core.storage.StorageProxy.CallbackIdentifier import arcs.core.storage.keys.RamDiskStorageKey import arcs.core.storage.referencemode.ReferenceModeStorageKey import arcs.core.type.Type -import arcs.core.util.Time import com.google.common.truth.Truth.assertThat import com.nhaarman.mockitokotlin2.any import com.nhaarman.mockitokotlin2.argumentCaptor @@ -38,12 +30,13 @@ import org.junit.runners.JUnit4 @Suppress("DeferredResultUnused") @RunWith(JUnit4::class) class SingletonHandleTest { - private lateinit var proxy: SingletonProxy - private lateinit var storageAdapter: StorageAdapter + private lateinit var proxy: SingletonProxy + private lateinit var storageAdapter: + StorageAdapter private fun createHandle( - type: Type = SingletonType(EntityType(FakeEntity.SCHEMA)) - ): SingletonHandle { + type: Type = SingletonType(EntityType(StorableReferencableEntity.SCHEMA)) + ): SingletonHandle { val proxyVersionMap = VersionMap() proxy = mock { on { getVersionMap() }.then { proxyVersionMap } @@ -55,8 +48,8 @@ class SingletonHandleTest { on { addOnDesync(any(), any()) }.then { Unit } } storageAdapter = mock { - on { referencableToStorable(any()) }.then { it.arguments[0] as FakeEntity } - on { storableToReferencable(any()) }.then { it.arguments[0] as FakeEntity } + on { referencableToStorable(any()) }.then { it.arguments[0] as StorableReferencableEntity } + on { storableToReferencable(any()) }.then { it.arguments[0] as StorableReferencableEntity } } val dereferencerFactory: EntityDereferencerFactory = mock { // Maybe add mock 
endpoints here, if needed. @@ -68,7 +61,7 @@ class SingletonHandleTest { "handle", HandleMode.ReadWriteQuery, type, - setOf(EntityBaseSpec(FakeEntity.SCHEMA)) + setOf(EntityBaseSpec(StorableReferencableEntity.SCHEMA)) ), proxy, storageAdapter, @@ -82,7 +75,7 @@ class SingletonHandleTest { @Test fun init_wrongContainerType_throwsException() { assertFailsWith { - createHandle(type = CollectionType(EntityType(FakeEntity.SCHEMA))) + createHandle(type = CollectionType(EntityType(StorableReferencableEntity.SCHEMA))) } } @@ -97,14 +90,15 @@ class SingletonHandleTest { @Test fun onUpdate_callbackInput_singletonDelta() { val handle = createHandle() - val oldEntity = FakeEntity("1", "old") - val newEntity = FakeEntity("2", "new") - val captor = argumentCaptor<(FakeEntity?, FakeEntity?) -> Unit>() + val oldEntity = StorableReferencableEntity("1", "old") + val newEntity = StorableReferencableEntity("2", "new") + val captor = + argumentCaptor<(StorableReferencableEntity?, StorableReferencableEntity?) -> Unit>() whenever(proxy.addOnUpdate(any(), captor.capture())).then { captor.firstValue(oldEntity, newEntity) } - var singletonDelta: SingletonDelta? = null + var singletonDelta: SingletonDelta? = null handle.onUpdate({ delta -> singletonDelta = delta }) assertThat(singletonDelta!!.old!!.entityId).isEqualTo(oldEntity.entityId) @@ -114,9 +108,10 @@ class SingletonHandleTest { @Test fun onUpdate_valuesAreAdapted() { val handle = createHandle() - val oldEntity = FakeEntity("1", "old") - val newEntity = FakeEntity("2", "new") - val captor = argumentCaptor<(FakeEntity?, FakeEntity?) -> Unit>() + val oldEntity = StorableReferencableEntity("1", "old") + val newEntity = StorableReferencableEntity("2", "new") + val captor = + argumentCaptor<(StorableReferencableEntity?, StorableReferencableEntity?) 
-> Unit>() whenever(proxy.addOnUpdate(any(), captor.capture())).then { captor.firstValue(oldEntity, newEntity) } @@ -149,7 +144,7 @@ class SingletonHandleTest { @Test fun createReference_noEntityId_throws() = runBlockingTest { - val entity = FakeEntity("1") + val entity = StorableReferencableEntity("1") val handle = createHandle() val e = assertFailsWith { handle.createReference(entity) } @@ -159,7 +154,7 @@ class SingletonHandleTest { @Test fun createReference_notStored_throws() = runBlockingTest { - val entity = FakeEntity("1", "fake-id") + val entity = StorableReferencableEntity("1", "fake-id") val handle = createHandle() val e = assertFailsWith { handle.createReference(entity) } @@ -170,10 +165,10 @@ class SingletonHandleTest { @Test fun createReference_wrongId_throws() = runBlockingTest { val handle = createHandle() - whenever(proxy.getParticleViewUnsafe()).thenReturn(FakeEntity("1", "other-id")) + whenever(proxy.getParticleViewUnsafe()).thenReturn(StorableReferencableEntity("1", "other-id")) val e = assertFailsWith { - handle.createReference(FakeEntity("1", "fake-id")) + handle.createReference(StorableReferencableEntity("1", "fake-id")) } assertThat(e).hasMessageThat().isEqualTo("Cannot createReference for unmatching entity id.") } @@ -181,10 +176,10 @@ class SingletonHandleTest { @Test fun createReference_notReferenceModeStorageProxy_throws() = runBlockingTest { val handle = createHandle() - whenever(proxy.getParticleViewUnsafe()).thenReturn(FakeEntity("1", "fake-id")) + whenever(proxy.getParticleViewUnsafe()).thenReturn(StorableReferencableEntity("1", "fake-id")) val e = assertFailsWith { - handle.createReference(FakeEntity("2", "fake-id")) + handle.createReference(StorableReferencableEntity("2", "fake-id")) } assertThat(e).hasMessageThat().isEqualTo( "ReferenceModeStorageKey required in order to create references." 
@@ -194,12 +189,12 @@ class SingletonHandleTest { @Test fun createReference_success() = runBlockingTest { val handle = createHandle() - whenever(proxy.getParticleViewUnsafe()).thenReturn(FakeEntity("1", "fake-id")) + whenever(proxy.getParticleViewUnsafe()).thenReturn(StorableReferencableEntity("1", "fake-id")) whenever(proxy.storageKey).thenReturn( ReferenceModeStorageKey(RamDiskStorageKey("x"), RamDiskStorageKey("y")) ) - val entity = FakeEntity("2", "fake-id") + val entity = StorableReferencableEntity("2", "fake-id") val reference = handle.createReference(entity) assertThat(reference.entityId).isEqualTo(entity.entityId) @@ -214,7 +209,7 @@ class SingletonHandleTest { @Test fun fetch_initValues_success() { - val entity = FakeEntity("1", "id") + val entity = StorableReferencableEntity("1", "id") val handle = createHandle() whenever(proxy.getParticleViewUnsafe()).thenReturn(entity) @@ -224,7 +219,7 @@ class SingletonHandleTest { @Test fun fetch_valueViaStorageAdapter_adapted() { - val entity = FakeEntity("1", "id") + val entity = StorableReferencableEntity("1", "id") val handle = createHandle() whenever(proxy.getParticleViewUnsafe()).thenReturn(entity) @@ -244,7 +239,7 @@ class SingletonHandleTest { @Test fun fetch_expiredEntities_filteredOut() { val handle = createHandle() - whenever(proxy.getParticleViewUnsafe()).thenReturn(FakeEntity("1", "id")) + whenever(proxy.getParticleViewUnsafe()).thenReturn(StorableReferencableEntity("1", "id")) whenever(storageAdapter.isExpired(any())).thenReturn(true) assertThat(handle.fetch()).isNull() @@ -254,7 +249,7 @@ class SingletonHandleTest { @Test fun store_validEntity_success() = runBlockingTest { - val entity = FakeEntity("1") + val entity = StorableReferencableEntity("1") val handle = createHandle() handle.store(entity).join() @@ -269,19 +264,19 @@ class SingletonHandleTest { val handle = createHandle() handle.close() - assertFailsWith { handle.store(FakeEntity("1")) } + assertFailsWith { 
handle.store(StorableReferencableEntity("1")) } } @Test fun store_incrementVersionMap() = runBlockingTest { - val entity1 = FakeEntity("1") + val entity1 = StorableReferencableEntity("1") val handle = createHandle() handle.store(entity1).join() verify(proxy).applyOp( eq(Operation.Update(HANDLE_NAME, VersionMap(HANDLE_NAME to 1), entity1)) ) - val entity2 = FakeEntity("2") + val entity2 = StorableReferencableEntity("2") handle.store(entity2).join() verify(proxy).applyOp( @@ -291,7 +286,7 @@ class SingletonHandleTest { @Test fun clear_handleWithValue_success() { - val entity = FakeEntity("1") + val entity = StorableReferencableEntity("1") val handle = createHandle() handle.store(entity) val versionMap = VersionMap(HANDLE_NAME to 1) @@ -319,32 +314,6 @@ class SingletonHandleTest { assertFailsWith { handle.clear() } } - private class FakeEntity( - override val id: ReferenceId, - override val entityId: String? = null, - override val creationTimestamp: Long = RawEntity.UNINITIALIZED_TIMESTAMP, - override val expirationTimestamp: Long = RawEntity.UNINITIALIZED_TIMESTAMP - ) : Entity, Storable, Referencable { - override fun ensureEntityFields( - idGenerator: Id.Generator, - handleName: String, - time: Time, - ttl: Ttl - ) {} - - final override fun reset() {} - - override fun serialize(storeSchema: Schema?) = RawEntity() - - companion object { - val SCHEMA = Schema( - setOf(SchemaName("FakeEntity")), - SchemaFields(emptyMap(), emptyMap()), - "abc123" - ) - } - } - companion object { private const val HANDLE_NAME = "myHandle" private const val PARTICLE_NAME = "myParticle" From bdfdce551e3fb9543b619351546b193970d7ec8c Mon Sep 17 00:00:00 2001 From: Arcs C3PO Date: Tue, 8 Dec 2020 12:37:59 -0800 Subject: [PATCH 20/31] Add missing strict deps for type aliases. These deps will be required soon due to a change in strict deps handling related to type aliases. Please do not remove these dependencies before b/169293220 is fixed, or risk breaking your build. 
Tested: TAP --sample ran all affected tests and none failed http://test/OCL:346364948:BASE:346351027:1607454378105:b69dba0a PiperOrigin-RevId: 346386927 --- java/arcs/core/storage/driver/volatiles/BUILD | 1 + 1 file changed, 1 insertion(+) diff --git a/java/arcs/core/storage/driver/volatiles/BUILD b/java/arcs/core/storage/driver/volatiles/BUILD index a7efc76ea8e..93b620b79ce 100644 --- a/java/arcs/core/storage/driver/volatiles/BUILD +++ b/java/arcs/core/storage/driver/volatiles/BUILD @@ -36,6 +36,7 @@ arcs_kt_library( ], deps = [ ":volatiles", + "//java/arcs/core/storage:driver", "//java/arcs/core/storage:storage_key", "//java/arcs/core/storage/keys", "//java/arcs/core/util", From b4bb177421646b252fa42d3acb4d7aa660f63dda Mon Sep 17 00:00:00 2001 From: Filippo Galgani Date: Tue, 8 Dec 2020 13:59:18 -0800 Subject: [PATCH 21/31] rename ReferenceSpecTest.kt to javatests/arcs/sdk/GeneratedReferenceTest.kt with a comment saying it is a test for the generated references PiperOrigin-RevId: 346404540 --- javatests/arcs/sdk/BUILD | 6 ++-- ...eSpecTest.kt => GeneratedReferenceTest.kt} | 7 ++-- javatests/arcs/sdk/{spec => }/reference.arcs | 2 +- javatests/arcs/sdk/spec/BUILD | 35 ------------------- 4 files changed, 7 insertions(+), 43 deletions(-) rename javatests/arcs/sdk/{spec/ReferenceSpecTest.kt => GeneratedReferenceTest.kt} (97%) rename javatests/arcs/sdk/{spec => }/reference.arcs (94%) delete mode 100644 javatests/arcs/sdk/spec/BUILD diff --git a/javatests/arcs/sdk/BUILD b/javatests/arcs/sdk/BUILD index 89be50348b6..30b558b1a14 100644 --- a/javatests/arcs/sdk/BUILD +++ b/javatests/arcs/sdk/BUILD @@ -16,6 +16,7 @@ arcs_kt_jvm_test_suite( package = "arcs.sdk", deps = [ ":schemas", + ":schemas_test_harness", "//java/arcs/core/common", "//java/arcs/core/data", "//java/arcs/core/data:schema_fields", @@ -44,9 +45,8 @@ arcs_kt_jvm_test_suite( arcs_kt_schema( name = "schemas", - srcs = [ - "person.arcs", - ], + srcs = glob(["*.arcs"]), + test_harness = True, ) arcs_kt_plan( 
diff --git a/javatests/arcs/sdk/spec/ReferenceSpecTest.kt b/javatests/arcs/sdk/GeneratedReferenceTest.kt similarity index 97% rename from javatests/arcs/sdk/spec/ReferenceSpecTest.kt rename to javatests/arcs/sdk/GeneratedReferenceTest.kt index 9a5466d3e2b..bebb430f798 100644 --- a/javatests/arcs/sdk/spec/ReferenceSpecTest.kt +++ b/javatests/arcs/sdk/GeneratedReferenceTest.kt @@ -1,11 +1,10 @@ -package arcs.sdk.spec +package arcs.sdk import arcs.core.testutil.handles.dispatchCreateReference import arcs.core.testutil.handles.dispatchFetch import arcs.core.testutil.handles.dispatchFetchAll import arcs.core.testutil.handles.dispatchStore import arcs.core.util.testutil.LogRule -import arcs.sdk.Reference import com.google.common.truth.Truth.assertThat import kotlinx.coroutines.ExperimentalCoroutinesApi import kotlinx.coroutines.runBlocking @@ -18,10 +17,10 @@ import org.junit.runners.JUnit4 private typealias Child = ReferenceSpecParticle_SingletonChild private typealias Parent = ReferenceSpecParticle_Parents -/** Specification tests for [Reference]. */ +/** Tests the [Reference] generated classes, both as handle types as well as field types. 
*/ @OptIn(ExperimentalCoroutinesApi::class) @RunWith(JUnit4::class) -class ReferenceSpecTest { +class GeneratedReferenceTest { class ReferenceSpecParticle : AbstractReferenceSpecParticle() @get:Rule diff --git a/javatests/arcs/sdk/spec/reference.arcs b/javatests/arcs/sdk/reference.arcs similarity index 94% rename from javatests/arcs/sdk/spec/reference.arcs rename to javatests/arcs/sdk/reference.arcs index a868e0c7b0a..923c55f5059 100644 --- a/javatests/arcs/sdk/spec/reference.arcs +++ b/javatests/arcs/sdk/reference.arcs @@ -1,5 +1,5 @@ meta - namespace: arcs.sdk.spec + namespace: arcs.sdk schema Child age: Number diff --git a/javatests/arcs/sdk/spec/BUILD b/javatests/arcs/sdk/spec/BUILD deleted file mode 100644 index 888e897fcdf..00000000000 --- a/javatests/arcs/sdk/spec/BUILD +++ /dev/null @@ -1,35 +0,0 @@ -load( - "//third_party/java/arcs/build_defs:build_defs.bzl", - "arcs_kt_jvm_test_suite", - "arcs_kt_schema", -) - -licenses(["notice"]) - -TEST_SRCS = glob(["*Test.kt"]) - -arcs_kt_jvm_test_suite( - name = "spec", - srcs = TEST_SRCS, - package = "arcs.sdk.spec", - deps = [ - ":schemas", - ":schemas_test_harness", - "//java/arcs/core/common", - "//java/arcs/core/testutil", - "//java/arcs/core/testutil/handles", - "//java/arcs/core/util/testutil", - "//java/arcs/jvm/util/testutil", - "//java/arcs/sdk", - "//third_party/java/junit:junit-android", - "//third_party/java/truth:truth-android", - "//third_party/kotlin/kotlinx_coroutines", - "//third_party/kotlin/kotlinx_coroutines:kotlinx_coroutines_test", - ], -) - -arcs_kt_schema( - name = "schemas", - srcs = glob(["*.arcs"]), - test_harness = True, -) From 7ebf1f09d114d028d806bcf17a654fac2ad2d484 Mon Sep 17 00:00:00 2001 From: Maria Mandlis Date: Tue, 8 Dec 2020 14:28:49 -0800 Subject: [PATCH 22/31] add support for inline schema field inside ordered list in fromLiteral (#6734) Closes #6734 COPYBARA_INTEGRATE_REVIEW=https://github.com/polymerlabs/arcs/pull/6734 from mmandlis:schemafields-inline 
a00ce683b04681a4c4ae7cb05dd0583ddb623e33 PiperOrigin-RevId: 346411248 --- src/types/internal/schema-from-literal.ts | 1 + src/types/tests/schema-field-test.ts | 94 +++++++++++++++++++++++ 2 files changed, 95 insertions(+) create mode 100644 src/types/tests/schema-field-test.ts diff --git a/src/types/internal/schema-from-literal.ts b/src/types/internal/schema-from-literal.ts index b058d51623e..f42bfffb40d 100644 --- a/src/types/internal/schema-from-literal.ts +++ b/src/types/internal/schema-from-literal.ts @@ -35,6 +35,7 @@ function fieldFromLiteral(field): FieldType { case SchemaFieldKind.Reference: return FieldType.create({...field, kind, schema: {kind: field.schema.kind, model: Type.fromLiteral(field.schema.model)}}); case SchemaFieldKind.Collection: + case SchemaFieldKind.OrderedList: return FieldType.create({...field, kind, schema: fieldFromLiteral(field.schema)}); case SchemaFieldKind.Inline: return FieldType.create({...field, kind, model: EntityType.fromLiteral(field.model)}); diff --git a/src/types/tests/schema-field-test.ts b/src/types/tests/schema-field-test.ts new file mode 100644 index 00000000000..f69b2811b4e --- /dev/null +++ b/src/types/tests/schema-field-test.ts @@ -0,0 +1,94 @@ +/** + * @license + * Copyright (c) 2020 Google Inc. All rights reserved. 
+ * This code may only be used under the BSD style license found at + * http://polymer.github.io/LICENSE.txt + * Code distributed by Google as part of this project is also + * subject to an additional IP rights grant found at + * http://polymer.github.io/PATENTS.txt + */ + +import {assert} from '../../platform/chai-web.js'; +import {Manifest} from '../../runtime/manifest.js'; +import {Type} from '../lib-types.js'; + +describe('schema field', () => { + it('serializes type with inline ordered list fields', async () => { + const manifest = await Manifest.parse(` + schema MoreInline + textsField: [Text] + schema InnerEntity + moreInlinesField: [inline MoreInline] + schema Entity + referenceListField: List<&InnerEntity> + particle Writer in '.Writer' + output: writes Entity + recipe ReadWriteRecipe + data: create + Writer + output: data + `); + const type = manifest.recipes[0].particles[0].spec.connections[0].type; + assert.deepEqual(type.toLiteral(), Type.fromLiteral(type.toLiteral()).toLiteral()); + }); + + it('serializes type with many fields', async () => { + const manifest = await Manifest.parse(` + schema MoreInline + textsField: [Text] + + schema InnerEntity + textField: Text + longField: Long + numberField: Number + moreInlineField: inline MoreInline + moreInlinesField: [inline MoreInline] + + schema Entity + textField: Text + numField: Number + boolField: Boolean + byteField: Byte + shortField: Short + intField: Int + longField: Long + charField: Char + floatField: Float + doubleField: Double + instantField: Instant + bigintField: BigInt + textsField: [Text] + numsField: [Number] + boolsField: [Boolean] + bytesField: [Byte] + shortsField: [Short] + intsField: [Int] + longsField: [Long] + charsField: [Char] + floatsField: [Float] + doublesField: [Double] + instantsField: [Instant] + bigintsField: [BigInt] + textListField: List + numListField: List + boolListField: List + + inlineEntityField: inline InnerEntity + inlinesField: [inline InnerEntity] + 
inlineListField: List + + referenceField: &InnerEntity + referencesField: [&InnerEntity] + referenceListField: List<&InnerEntity> + + particle Writer in '.Writer' + output: writes Entity + recipe ReadWriteRecipe + data: create + Writer + output: data + `); + const type = manifest.recipes[0].particles[0].spec.connections[0].type; + assert.deepEqual(type.toLiteral(), Type.fromLiteral(type.toLiteral()).toLiteral()); + }); +}); From b81fbd238eef68883bc7bdd00d3521214a961cae Mon Sep 17 00:00:00 2001 From: Yuan Gu Date: Tue, 8 Dec 2020 15:01:08 -0800 Subject: [PATCH 23/31] Rename EntityHandleManager to HandleManagerImpl to reflect the actual relationship between them. #codehealth PiperOrigin-RevId: 346417894 --- java/arcs/android/demo/DemoActivity.kt | 4 +- java/arcs/core/allocator/Allocator.kt | 8 +- .../allocator/CollectionHandlePartitionMap.kt | 13 +- java/arcs/core/host/AbstractArcHost.kt | 6 +- ...yHandleManager.kt => HandleManagerImpl.kt} | 10 +- java/arcs/sdk/testing/BaseTestHarness.kt | 4 +- .../e2e/testapp/StorageAccessService.kt | 4 +- .../arcs/android/e2e/testapp/TestActivity.kt | 10 +- .../entity/DifferentHandleManagerTest.kt | 6 +- .../arcs/android/entity/HardReferenceTest.kt | 8 +- .../android/entity/SameHandleManagerTest.kt | 6 +- .../arcs/android/entity/TtlHandleTest.kt | 10 +- ...est.kt => AndroidHandleManagerImplTest.kt} | 40 ++-- ...tabaseGarbageCollectionPeriodicTaskTest.kt | 4 +- .../service/BulkDeletesIntegrationTest.kt | 4 +- .../service/StorageServiceManagerTest.kt | 6 +- .../storage/ttl/PeriodicCleanupTaskTest.kt | 4 +- .../systemhealth/testapp/StorageCore.kt | 16 +- .../systemhealth/testapp/TestActivity.kt | 10 +- .../arcs/core/allocator/AllocatorTestBase.kt | 6 +- ...fferentHandleManagerDifferentStoresTest.kt | 6 +- .../core/entity/DifferentHandleManagerTest.kt | 6 +- .../core/entity/HandleManagerCloseTest.kt | 8 +- .../arcs/core/entity/HandleManagerTestBase.kt | 190 ++++++++++-------- javatests/arcs/core/entity/ReferenceTest.kt | 10 +- 
.../arcs/core/entity/SameHandleManagerTest.kt | 6 +- javatests/arcs/core/host/HandleAdapterTest.kt | 30 +-- javatests/arcs/core/host/LifecycleTest.kt | 8 +- .../ReflectiveParticleConstructionTest.kt | 2 +- javatests/arcs/sdk/HandleUtilsTest.kt | 12 +- 30 files changed, 234 insertions(+), 223 deletions(-) rename java/arcs/core/host/{EntityHandleManager.kt => HandleManagerImpl.kt} (96%) rename javatests/arcs/android/host/{AndroidEntityHandleManagerTest.kt => AndroidHandleManagerImplTest.kt} (94%) diff --git a/java/arcs/android/demo/DemoActivity.kt b/java/arcs/android/demo/DemoActivity.kt index c1522b9120e..1a1e980e0ea 100644 --- a/java/arcs/android/demo/DemoActivity.kt +++ b/java/arcs/android/demo/DemoActivity.kt @@ -19,7 +19,7 @@ import androidx.appcompat.app.AppCompatActivity import arcs.android.labs.host.AndroidManifestHostRegistry import arcs.core.allocator.Allocator import arcs.core.entity.ForeignReferenceCheckerImpl -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.HostRegistry import arcs.core.host.SimpleSchedulerProvider import arcs.jvm.util.JvmTime @@ -62,7 +62,7 @@ class DemoActivity : AppCompatActivity() { allocator = Allocator.create( hostRegistry, - EntityHandleManager( + HandleManagerImpl( time = JvmTime, scheduler = schedulerProvider("personArc"), storageEndpointManager = storageEndpointManager, diff --git a/java/arcs/core/allocator/Allocator.kt b/java/arcs/core/allocator/Allocator.kt index 3d68f96767d..70d9761f4af 100644 --- a/java/arcs/core/allocator/Allocator.kt +++ b/java/arcs/core/allocator/Allocator.kt @@ -19,7 +19,7 @@ import arcs.core.entity.HandleSpec import arcs.core.host.ArcHost import arcs.core.host.ArcHostException import arcs.core.host.ArcHostNotFoundException -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.HostRegistry import arcs.core.host.ParticleNotFoundException import arcs.core.storage.CapabilitiesResolver @@ -179,16 
+179,16 @@ class Allocator( companion object { /** * Creates an [Allocator] which serializes Arc/Particle state to the storage system backing - * the provided [handleManager]. + * the provided [handleManagerImpl]. */ fun create( hostRegistry: HostRegistry, - handleManager: EntityHandleManager, + handleManagerImpl: HandleManagerImpl, scope: CoroutineScope ): Allocator { return Allocator( hostRegistry, - CollectionHandlePartitionMap(handleManager), + CollectionHandlePartitionMap(handleManagerImpl), scope ) } diff --git a/java/arcs/core/allocator/CollectionHandlePartitionMap.kt b/java/arcs/core/allocator/CollectionHandlePartitionMap.kt index 84dddb6e2c5..60b0b60bc8c 100644 --- a/java/arcs/core/allocator/CollectionHandlePartitionMap.kt +++ b/java/arcs/core/allocator/CollectionHandlePartitionMap.kt @@ -16,7 +16,7 @@ import arcs.core.entity.EntityBaseSpec import arcs.core.entity.HandleSpec import arcs.core.entity.ReadWriteCollectionHandle import arcs.core.entity.awaitReady -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.storage.keys.RamDiskStorageKey import arcs.core.storage.referencemode.ReferenceModeStorageKey import arcs.core.util.TaggedLog @@ -26,12 +26,12 @@ import kotlinx.coroutines.withContext /** * An implementation of [Allocator.PartitionSerialization] that stores partition information in an Arcs - * collection handle, created by the [EntityHandleManager] provided at construction. The handle + * collection handle, created by the [HandleManagerImpl] provided at construction. The handle * will be created the first time any of the publicly exposed methods is called. 
*/ @OptIn(ExperimentalCoroutinesApi::class) class CollectionHandlePartitionMap( - private val handleManager: EntityHandleManager + private val handleManagerImpl: HandleManagerImpl ) : Allocator.PartitionSerialization { private val log = TaggedLog { "CollectionHandlePartitionMap" } @@ -39,7 +39,7 @@ class CollectionHandlePartitionMap( @Suppress("UNCHECKED_CAST") private val collection = SuspendableLazy { val entitySpec = EntityBaseSpec(SCHEMA) - (handleManager.createHandle( + val handle = handleManagerImpl.createHandle( HandleSpec( "partitions", HandleMode.ReadWrite, @@ -47,9 +47,8 @@ class CollectionHandlePartitionMap( entitySpec ), STORAGE_KEY - ) as ReadWriteCollectionHandle).also { - it.awaitReady() - } + ) as ReadWriteCollectionHandle + handle.awaitReady() } /** Persists [ArcId] and associated [Plan.Partition]s */ diff --git a/java/arcs/core/host/AbstractArcHost.kt b/java/arcs/core/host/AbstractArcHost.kt index 5c67e1c465f..c3c0c151569 100644 --- a/java/arcs/core/host/AbstractArcHost.kt +++ b/java/arcs/core/host/AbstractArcHost.kt @@ -387,7 +387,7 @@ abstract class AbstractArcHost( * * Subclasses may override this to retrieve the [ArcHostContext] using a different implementation. * - * @property arcHostContext a prototype for the final arcHost containing [EntityHandleManager] + * @property arcHostContext a prototype for the final arcHost containing [HandleManagerImpl] */ protected open suspend fun readContextFromStorage( arcHostContext: ArcHostContext @@ -708,9 +708,9 @@ abstract class AbstractArcHost( registeredParticles().contains(ParticleIdentifier.from(particle.location)) /** - * Return an instance of [EntityHandleManager] to be used to create [Handle]s. + * Return an instance of [HandleManagerImpl] to be used to create [Handle]s. 
*/ - open fun entityHandleManager(arcId: String): HandleManager = EntityHandleManager( + open fun entityHandleManager(arcId: String): HandleManager = HandleManagerImpl( arcId = arcId, hostId = hostId, time = platformTime, diff --git a/java/arcs/core/host/EntityHandleManager.kt b/java/arcs/core/host/HandleManagerImpl.kt similarity index 96% rename from java/arcs/core/host/EntityHandleManager.kt rename to java/arcs/core/host/HandleManagerImpl.kt index bfaf092a2ab..7ce8246222f 100644 --- a/java/arcs/core/host/EntityHandleManager.kt +++ b/java/arcs/core/host/HandleManagerImpl.kt @@ -64,14 +64,14 @@ import kotlinx.coroutines.sync.withLock * `arcs_kt_schema` on a manifest file to generate a `{ParticleName}Handles' class, and * invoke its default constructor, or obtain it from the [BaseParticle.handles] field. * - * The [scheduler] provided to the [EntityHandleManager] at construction-time will be shared across - * all handles and storage-proxies created by the [EntityHandleManager]. + * The [scheduler] provided to the [HandleManagerImpl] at construction-time will be shared across + * all handles and storage-proxies created by the [HandleManagerImpl]. * * Call [close] on an instance that will no longer be used to ensure that all [StorageProxy] - * instances created by this [EntityHandleManager] will also be closed. + * instances created by this [HandleManagerImpl] will also be closed. */ @OptIn(ExperimentalCoroutinesApi::class) -class EntityHandleManager( +class HandleManagerImpl( private val arcId: String = Id.Generator.newSession().newArcId("arc").toString(), private val hostId: String = "nohost", private val time: Time, @@ -168,7 +168,7 @@ class EntityHandleManager( HandleContainerType.Collection -> createCollectionHandle(config) } - /** Close all [StorageProxy] instances in this [EntityHandleManager]. */ + /** Close all [StorageProxy] instances in this [HandleManagerImpl]. 
*/ override suspend fun close() { proxyMutex.withLock { // Needed to avoid receiving ModelUpdate after Proxy closed error diff --git a/java/arcs/sdk/testing/BaseTestHarness.kt b/java/arcs/sdk/testing/BaseTestHarness.kt index 7051854c589..fca7845faf5 100644 --- a/java/arcs/sdk/testing/BaseTestHarness.kt +++ b/java/arcs/sdk/testing/BaseTestHarness.kt @@ -5,7 +5,7 @@ import arcs.core.data.Plan import arcs.core.entity.ForeignReferenceCheckerImpl import arcs.core.entity.HandleDataType import arcs.core.entity.HandleSpec -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.ParticleContext import arcs.core.host.SimpleSchedulerProvider import arcs.core.storage.api.DriverAndKeyConfigurator @@ -123,7 +123,7 @@ open class BaseTestHarness

( val schedulerProvider = SimpleSchedulerProvider(Dispatchers.Default) scheduler = schedulerProvider("testArc_${this.javaClass.simpleName}") - val handleManager = EntityHandleManager( + val handleManager = HandleManagerImpl( arcId = "testHarness", hostId = "testHarnessHost", time = JvmTime, diff --git a/javatests/arcs/android/e2e/testapp/StorageAccessService.kt b/javatests/arcs/android/e2e/testapp/StorageAccessService.kt index 219da3aba87..2c6cf1ad840 100644 --- a/javatests/arcs/android/e2e/testapp/StorageAccessService.kt +++ b/javatests/arcs/android/e2e/testapp/StorageAccessService.kt @@ -10,7 +10,7 @@ import arcs.core.data.HandleMode import arcs.core.data.SingletonType import arcs.core.entity.ForeignReferenceCheckerImpl import arcs.core.entity.HandleSpec -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.util.Scheduler import arcs.jvm.util.JvmTime import arcs.sdk.WriteSingletonHandle @@ -42,7 +42,7 @@ class StorageAccessService : LifecycleService() { storageModeOrdinal?.run { TestEntity.StorageMode.values()[storageModeOrdinal] } scope.launch { - val handleManager = EntityHandleManager( + val handleManager = HandleManagerImpl( time = JvmTime, scheduler = Scheduler(coroutineContext), storageEndpointManager = storageEndpointManager, diff --git a/javatests/arcs/android/e2e/testapp/TestActivity.kt b/javatests/arcs/android/e2e/testapp/TestActivity.kt index ccd9f47ed1e..7e344eae8f9 100644 --- a/javatests/arcs/android/e2e/testapp/TestActivity.kt +++ b/javatests/arcs/android/e2e/testapp/TestActivity.kt @@ -31,7 +31,7 @@ import arcs.core.data.SingletonType import arcs.core.entity.HandleSpec import arcs.core.entity.awaitReady import arcs.core.entity.ForeignReferenceCheckerImpl -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.SimpleSchedulerProvider import arcs.jvm.util.JvmTime import arcs.sdk.ReadWriteCollectionHandle @@ -155,7 +155,7 @@ class TestActivity : 
AppCompatActivity() { appendResultText(getString(R.string.waiting_for_result)) allocator = Allocator.create( AndroidManifestHostRegistry.create(this@TestActivity), - EntityHandleManager( + HandleManagerImpl( time = JvmTime, scheduler = schedulerProvider("readWriteArc"), storageEndpointManager = storageEndpointManager, @@ -171,7 +171,7 @@ class TestActivity : AppCompatActivity() { appendResultText(getString(R.string.waiting_for_result)) allocator = Allocator.create( AndroidManifestHostRegistry.create(this@TestActivity), - EntityHandleManager( + HandleManagerImpl( time = JvmTime, scheduler = schedulerProvider("resurrectionArc"), storageEndpointManager = storageEndpointManager, @@ -207,7 +207,7 @@ class TestActivity : AppCompatActivity() { val allocator = Allocator.create( AndroidManifestHostRegistry.create(this@TestActivity), - EntityHandleManager( + HandleManagerImpl( time = JvmTime, scheduler = schedulerProvider("allocator"), storageEndpointManager = storageEndpointManager, @@ -242,7 +242,7 @@ class TestActivity : AppCompatActivity() { appendResultText(getString(R.string.waiting_for_result)) - val handleManager = EntityHandleManager( + val handleManager = HandleManagerImpl( time = JvmTime, scheduler = schedulerProvider("handle"), storageEndpointManager = storageEndpointManager, diff --git a/javatests/arcs/android/entity/DifferentHandleManagerTest.kt b/javatests/arcs/android/entity/DifferentHandleManagerTest.kt index 345a68526fb..93819cfc291 100644 --- a/javatests/arcs/android/entity/DifferentHandleManagerTest.kt +++ b/javatests/arcs/android/entity/DifferentHandleManagerTest.kt @@ -6,7 +6,7 @@ import androidx.test.ext.junit.runners.AndroidJUnit4 import androidx.work.testing.WorkManagerTestInitHelper import arcs.android.storage.database.AndroidSqliteDatabaseManager import arcs.core.entity.HandleManagerTestBase -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.storage.StorageEndpointManager import 
arcs.core.storage.driver.DatabaseDriverProvider import arcs.sdk.android.storage.AndroidStorageServiceEndpointManager @@ -42,7 +42,7 @@ class DifferentHandleManagerTest : HandleManagerTestBase() { bindHelper = TestBindHelper(app) ) monitorStorageEndpointManager = storageEndpointManager - readHandleManager = EntityHandleManager( + readHandleManagerImpl = HandleManagerImpl( arcId = "arcId", hostId = "hostId", time = fakeTime, @@ -50,7 +50,7 @@ class DifferentHandleManagerTest : HandleManagerTestBase() { storageEndpointManager = storageEndpointManager, foreignReferenceChecker = foreignReferenceChecker ) - writeHandleManager = EntityHandleManager( + writeHandleManagerImpl = HandleManagerImpl( arcId = "arcId", hostId = "hostId", time = fakeTime, diff --git a/javatests/arcs/android/entity/HardReferenceTest.kt b/javatests/arcs/android/entity/HardReferenceTest.kt index 3b4e39bc68e..eefa6eb4bd0 100644 --- a/javatests/arcs/android/entity/HardReferenceTest.kt +++ b/javatests/arcs/android/entity/HardReferenceTest.kt @@ -16,7 +16,7 @@ import arcs.core.entity.ForeignReferenceCheckerImpl import arcs.core.entity.HandleSpec import arcs.core.entity.ReadWriteCollectionHandle import arcs.core.entity.awaitReady -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.SimpleSchedulerProvider import arcs.core.storage.StorageEndpointManager import arcs.core.storage.StorageKey @@ -85,8 +85,8 @@ class HardReferenceTest { } ) ) - private val handleManager: EntityHandleManager - get() = EntityHandleManager( + private val handleManagerImpl: HandleManagerImpl + get() = HandleManagerImpl( arcId = "arcId", hostId = "hostId", time = FakeTime(), @@ -335,7 +335,7 @@ class HardReferenceTest { key: StorageKey, entitySpec: EntitySpec ): ReadWriteCollectionHandle { - return handleManager.createHandle( + return handleManagerImpl.createHandle( HandleSpec( "name", HandleMode.ReadWrite, diff --git a/javatests/arcs/android/entity/SameHandleManagerTest.kt 
b/javatests/arcs/android/entity/SameHandleManagerTest.kt index fb0b13a327e..a6b500fd830 100644 --- a/javatests/arcs/android/entity/SameHandleManagerTest.kt +++ b/javatests/arcs/android/entity/SameHandleManagerTest.kt @@ -6,7 +6,7 @@ import androidx.test.ext.junit.runners.AndroidJUnit4 import androidx.work.testing.WorkManagerTestInitHelper import arcs.android.storage.database.AndroidSqliteDatabaseManager import arcs.core.entity.HandleManagerTestBase -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.storage.StorageEndpointManager import arcs.core.storage.driver.DatabaseDriverProvider import arcs.sdk.android.storage.AndroidStorageServiceEndpointManager @@ -43,7 +43,7 @@ class SameHandleManagerTest : HandleManagerTestBase() { ) monitorStorageEndpointManager = storageEndpointManager - readHandleManager = EntityHandleManager( + readHandleManagerImpl = HandleManagerImpl( arcId = "arcId", hostId = "hostId", time = fakeTime, @@ -51,7 +51,7 @@ class SameHandleManagerTest : HandleManagerTestBase() { storageEndpointManager = storageEndpointManager, foreignReferenceChecker = foreignReferenceChecker ) - writeHandleManager = readHandleManager + writeHandleManagerImpl = readHandleManagerImpl } @After diff --git a/javatests/arcs/android/entity/TtlHandleTest.kt b/javatests/arcs/android/entity/TtlHandleTest.kt index 3bd0dd8f090..74f25fc654e 100644 --- a/javatests/arcs/android/entity/TtlHandleTest.kt +++ b/javatests/arcs/android/entity/TtlHandleTest.kt @@ -18,7 +18,7 @@ import arcs.core.entity.ReadWriteSingletonHandle import arcs.core.entity.awaitReady import arcs.core.entity.testutil.DummyEntity import arcs.core.entity.testutil.InlineDummyEntity -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.SimpleSchedulerProvider import arcs.core.storage.StorageKey import arcs.core.storage.api.DriverAndKeyConfigurator @@ -77,9 +77,9 @@ class TtlHandleTest { TestBindHelper(app) ) - private 
val handleManager: EntityHandleManager + private val handleManagerImpl: HandleManagerImpl // Create a new handle manager on each call, to check different storage proxies. - get() = EntityHandleManager( + get() = HandleManagerImpl( time = fakeTime, scheduler = scheduler, storageEndpointManager = storageEndpointManager(), @@ -430,7 +430,7 @@ class TtlHandleTest { private suspend fun createCollectionHandle( ttl: Ttl = Ttl.Hours(1), key: StorageKey = collectionKey - ) = handleManager.createHandle( + ) = handleManagerImpl.createHandle( HandleSpec( "name", HandleMode.ReadWrite, @@ -443,7 +443,7 @@ class TtlHandleTest { @Suppress("UNCHECKED_CAST") private suspend fun createSingletonHandle() = - handleManager.createHandle( + handleManagerImpl.createHandle( HandleSpec( "name", HandleMode.ReadWrite, diff --git a/javatests/arcs/android/host/AndroidEntityHandleManagerTest.kt b/javatests/arcs/android/host/AndroidHandleManagerImplTest.kt similarity index 94% rename from javatests/arcs/android/host/AndroidEntityHandleManagerTest.kt rename to javatests/arcs/android/host/AndroidHandleManagerImplTest.kt index 7aecc1339a1..318a90d770b 100644 --- a/javatests/arcs/android/host/AndroidEntityHandleManagerTest.kt +++ b/javatests/arcs/android/host/AndroidHandleManagerImplTest.kt @@ -21,7 +21,7 @@ import arcs.core.entity.SingletonDelta import arcs.core.entity.WriteCollectionHandle import arcs.core.entity.WriteSingletonHandle import arcs.core.entity.awaitReady -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.SimpleSchedulerProvider import arcs.core.storage.api.DriverAndKeyConfigurator import arcs.core.storage.driver.RamDisk @@ -61,7 +61,7 @@ fun Person.withQuery(): PersonWithQuery { @Suppress("EXPERIMENTAL_API_USAGE", "UNCHECKED_CAST") @RunWith(AndroidJUnit4::class) -class AndroidEntityHandleManagerTest { +class AndroidHandleManagerImplTest { @get:Rule val log = LogRule() @@ -70,7 +70,7 @@ class AndroidEntityHandleManagerTest { val 
entity1 = Person("Jason", 21.0, false) val entity2 = Person("Jason", 22.0, true) private lateinit var handleHolder: AbstractTestParticle.Handles - private lateinit var handleManager: EntityHandleManager + private lateinit var handleManagerImpl: HandleManagerImpl private val singletonKey = ReferenceModeStorageKey( backingKey = RamDiskStorageKey("single-back"), storageKey = RamDiskStorageKey("single-ent") @@ -99,7 +99,7 @@ class AndroidEntityHandleManagerTest { TestBindHelper(app) ) - handleManager = EntityHandleManager( + handleManagerImpl = HandleManagerImpl( arcId = "testArc", hostId = "testHost", time = FakeTime(), @@ -162,7 +162,7 @@ class AndroidEntityHandleManagerTest { @Test fun singletonHandle_writeInOnSyncNoDesync() = runBlocking { val writeHandle = createSingletonHandle( - handleManager, + handleManagerImpl, "writeHandle", HandleMode.Write ) @@ -181,7 +181,7 @@ class AndroidEntityHandleManagerTest { @Test fun singletonHandle_writeFollowedByReadWithOnUpdate() = runBlocking { val writeHandle = createSingletonHandle( - handleManager, + handleManagerImpl, "writeHandle", HandleMode.Write ) @@ -190,7 +190,7 @@ class AndroidEntityHandleManagerTest { handleHolder.writeHandle.dispatchStore(entity1) val readHandle = createSingletonHandle( - handleManager, + handleManagerImpl, "readHandle", HandleMode.Read ) @@ -200,7 +200,7 @@ class AndroidEntityHandleManagerTest { assertThat(handleHolder.readHandle.dispatchFetch()).isEqualTo(entity1) val readWriteHandle = createSingletonHandle( - handleManager, + handleManagerImpl, "readWriteHandle", HandleMode.ReadWrite ) @@ -224,7 +224,7 @@ class AndroidEntityHandleManagerTest { @Test fun collectionHandle_writeFollowedByReadWithOnUpdate() = runBlocking { val writeCollectionHandle = createCollectionHandle( - handleManager, + handleManagerImpl, "writeCollectionHandle", HandleMode.Write ) @@ -235,7 +235,7 @@ class AndroidEntityHandleManagerTest { handleHolder.writeCollectionHandle.dispatchStore(entity2) val readCollectionHandle = 
createCollectionHandle( - handleManager, + handleManagerImpl, "readCollectionHandle", HandleMode.Read ) @@ -246,7 +246,7 @@ class AndroidEntityHandleManagerTest { .containsExactly(entity1, entity2) val readWriteCollectionHandle = createCollectionHandle( - handleManager, + handleManagerImpl, "readWriteCollectionHandle", HandleMode.ReadWrite ) @@ -273,7 +273,7 @@ class AndroidEntityHandleManagerTest { @Test fun collectionHandle_writeFollowedByQuery() = runBlocking { val readWriteQueryCollectionHandle = createCollectionHandle( - handleManager, + handleManagerImpl, "readWriteQueryCollectionHandle", HandleMode.ReadWriteQuery ) as ReadWriteQueryCollectionHandle @@ -304,13 +304,13 @@ class AndroidEntityHandleManagerTest { @Test fun handle_nameIsGloballyUnique() = runBlocking { val shandle1 = createSingletonHandle( - handleManager, + handleManagerImpl, "writeHandle", HandleMode.Write ) val chandle1 = createCollectionHandle( - handleManager, + handleManagerImpl, "writeCollectionHandle", HandleMode.Write ) @@ -318,13 +318,13 @@ class AndroidEntityHandleManagerTest { handleHolder.reset() val shandle2 = createSingletonHandle( - handleManager, + handleManagerImpl, "writeHandle", HandleMode.Write ) val chandle2 = createCollectionHandle( - handleManager, + handleManagerImpl, "writeCollectionHandle", HandleMode.Write ) @@ -334,12 +334,12 @@ class AndroidEntityHandleManagerTest { } private suspend fun createSingletonHandle( - handleManager: EntityHandleManager, + handleManagerImpl: HandleManagerImpl, handleName: String, handleMode: HandleMode ): Handle { val entitySpec = handleHolder.getEntitySpecs(handleName).single() - return handleManager.createHandle( + return handleManagerImpl.createHandle( HandleSpec( handleName, handleMode, @@ -353,12 +353,12 @@ class AndroidEntityHandleManagerTest { } private suspend fun createCollectionHandle( - handleManager: EntityHandleManager, + handleManagerImpl: HandleManagerImpl, handleName: String, handleMode: HandleMode ): Handle { val entitySpec 
= handleHolder.getEntitySpecs(handleName).single() - return handleManager.createHandle( + return handleManagerImpl.createHandle( HandleSpec( handleName, handleMode, diff --git a/javatests/arcs/android/storage/database/DatabaseGarbageCollectionPeriodicTaskTest.kt b/javatests/arcs/android/storage/database/DatabaseGarbageCollectionPeriodicTaskTest.kt index 61f9f53d7fa..57c1105ef0a 100644 --- a/javatests/arcs/android/storage/database/DatabaseGarbageCollectionPeriodicTaskTest.kt +++ b/javatests/arcs/android/storage/database/DatabaseGarbageCollectionPeriodicTaskTest.kt @@ -14,7 +14,7 @@ import arcs.core.entity.ReadWriteCollectionHandle import arcs.core.entity.awaitReady import arcs.core.entity.testutil.DummyEntity import arcs.core.entity.testutil.InlineDummyEntity -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.SimpleSchedulerProvider import arcs.core.storage.api.DriverAndKeyConfigurator import arcs.core.storage.keys.DatabaseStorageKey @@ -103,7 +103,7 @@ class DatabaseGarbageCollectionPeriodicTaskTest { @Suppress("UNCHECKED_CAST") private suspend fun createCollectionHandle() = - EntityHandleManager( + HandleManagerImpl( time = fakeTime, scheduler = schedulerProvider("test"), storageEndpointManager = storageEndpointManager, diff --git a/javatests/arcs/android/storage/service/BulkDeletesIntegrationTest.kt b/javatests/arcs/android/storage/service/BulkDeletesIntegrationTest.kt index 1f6d6e08d6f..f3dc4400aaa 100644 --- a/javatests/arcs/android/storage/service/BulkDeletesIntegrationTest.kt +++ b/javatests/arcs/android/storage/service/BulkDeletesIntegrationTest.kt @@ -25,7 +25,7 @@ import arcs.core.entity.awaitReady import arcs.core.entity.testutil.FixtureEntities import arcs.core.entity.testutil.FixtureEntity import arcs.core.entity.testutil.InnerEntity -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.SimpleSchedulerProvider import 
arcs.core.storage.api.DriverAndKeyConfigurator import arcs.core.storage.keys.DatabaseStorageKey @@ -185,7 +185,7 @@ class BulkDeletesIntegrationTest { private suspend fun createCollectionHandle( expiry: Ttl = Ttl.Infinite() - ) = EntityHandleManager( + ) = HandleManagerImpl( time = time, scheduler = scheduler, storageEndpointManager = testDatabaseStorageEndpointManager(), diff --git a/javatests/arcs/android/storage/service/StorageServiceManagerTest.kt b/javatests/arcs/android/storage/service/StorageServiceManagerTest.kt index b2a1acd3c8a..2400e3451fa 100644 --- a/javatests/arcs/android/storage/service/StorageServiceManagerTest.kt +++ b/javatests/arcs/android/storage/service/StorageServiceManagerTest.kt @@ -27,7 +27,7 @@ import arcs.core.entity.ReadWriteSingletonHandle import arcs.core.entity.awaitReady import arcs.core.entity.testutil.DummyEntity import arcs.core.entity.testutil.InlineDummyEntity -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.SimpleSchedulerProvider import arcs.core.storage.StorageKey import arcs.core.storage.api.DriverAndKeyConfigurator @@ -234,7 +234,7 @@ class StorageServiceManagerTest { private suspend fun createSingletonHandle(storageKey: StorageKey) = // Creates a new handle manager each time, to simulate arcs stop/start behavior. 
- EntityHandleManager( + HandleManagerImpl( time = time, scheduler = scheduler, storageEndpointManager = testDatabaseStorageEndpointManager(), @@ -250,7 +250,7 @@ class StorageServiceManagerTest { ).awaitReady() as ReadWriteSingletonHandle private suspend fun createCollectionHandle(storageKey: StorageKey) = - EntityHandleManager( + HandleManagerImpl( time = time, scheduler = scheduler, storageEndpointManager = testDatabaseStorageEndpointManager(), diff --git a/javatests/arcs/android/storage/ttl/PeriodicCleanupTaskTest.kt b/javatests/arcs/android/storage/ttl/PeriodicCleanupTaskTest.kt index 3b171a3140b..27876fa1b1b 100644 --- a/javatests/arcs/android/storage/ttl/PeriodicCleanupTaskTest.kt +++ b/javatests/arcs/android/storage/ttl/PeriodicCleanupTaskTest.kt @@ -17,7 +17,7 @@ import arcs.core.entity.ReadWriteCollectionHandle import arcs.core.entity.awaitReady import arcs.core.entity.testutil.DummyEntity import arcs.core.entity.testutil.InlineDummyEntity -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.SimpleSchedulerProvider import arcs.core.storage.api.DriverAndKeyConfigurator import arcs.core.storage.keys.DatabaseStorageKey @@ -124,7 +124,7 @@ class PeriodicCleanupTaskTest { @Suppress("UNCHECKED_CAST") private suspend fun createCollectionHandle() = - EntityHandleManager( + HandleManagerImpl( time = fakeTime, scheduler = SimpleSchedulerProvider(Dispatchers.Default)("test"), storageEndpointManager = testDatabaseStorageEndpointManager(), diff --git a/javatests/arcs/android/systemhealth/testapp/StorageCore.kt b/javatests/arcs/android/systemhealth/testapp/StorageCore.kt index c699e24ebc5..d671540a43d 100644 --- a/javatests/arcs/android/systemhealth/testapp/StorageCore.kt +++ b/javatests/arcs/android/systemhealth/testapp/StorageCore.kt @@ -24,7 +24,7 @@ import arcs.core.entity.ForeignReferenceCheckerImpl import arcs.core.entity.Handle import arcs.core.entity.HandleSpec import arcs.core.entity.awaitReady -import 
arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.storage.Reference import arcs.core.storage.StorageEndpointManager import arcs.core.storage.keys.DatabaseStorageKey @@ -186,7 +186,7 @@ class StorageCore(val context: Context) { closeHandle(it.handle, it.coroutineContext) it.handle = null launchIfContext(it.coroutineContext) { - it.handleManager.close() + it.handleManagerImpl.close() } } catch (e: Exception) { log.error { "#$id: failed to close handle, reason: $e" } @@ -292,7 +292,7 @@ class StorageCore(val context: Context) { ) TaskHandle( - EntityHandleManager( + HandleManagerImpl( time = JvmTime, // Per-task single-threaded Scheduler being cascaded with Watchdog capabilities scheduler = TestSchedulerProvider(taskCoroutineContext)("sysHealthStorageCore"), @@ -378,7 +378,7 @@ class StorageCore(val context: Context) { @Suppress("UNCHECKED_CAST") private suspend fun setUpCleanerHandle(taskHandle: TaskHandle, settings: Settings) { - val handle = taskHandle.handleManager.createHandle( + val handle = taskHandle.handleManagerImpl.createHandle( HandleSpec( "CleanerHandle", HandleMode.Write, @@ -411,7 +411,7 @@ class StorageCore(val context: Context) { settings: Settings ) = when (settings.handleType) { HandleType.SINGLETON -> { - val handle = taskHandle.handleManager.createHandle( + val handle = taskHandle.handleManagerImpl.createHandle( HandleSpec( "singletonHandle$taskId", HandleMode.ReadWrite, @@ -464,7 +464,7 @@ class StorageCore(val context: Context) { taskHandle.handle = handle } HandleType.COLLECTION -> { - val handle = taskHandle.handleManager.createHandle( + val handle = taskHandle.handleManagerImpl.createHandle( HandleSpec( "collectionHandle$taskId", HandleMode.ReadWrite, @@ -743,7 +743,7 @@ class StorageCore(val context: Context) { // TODO: remove this when terminated() works to clean up? 
handles.forEach { runBlocking { - it.handleManager.close() + it.handleManagerImpl.close() it.coroutineContext.cancel() } } @@ -1178,7 +1178,7 @@ class StorageCore(val context: Context) { ) private data class TaskHandle( - val handleManager: EntityHandleManager, + val handleManagerImpl: HandleManagerImpl, val storageEndpointManager: StorageEndpointManager, val coroutineContext: CoroutineContext, var handle: Any? = null diff --git a/javatests/arcs/android/systemhealth/testapp/TestActivity.kt b/javatests/arcs/android/systemhealth/testapp/TestActivity.kt index 37e6682e5c3..00164e1a89a 100644 --- a/javatests/arcs/android/systemhealth/testapp/TestActivity.kt +++ b/javatests/arcs/android/systemhealth/testapp/TestActivity.kt @@ -37,7 +37,7 @@ import arcs.core.entity.ForeignReferenceCheckerImpl import arcs.core.entity.HandleSpec import arcs.core.entity.ReadSingletonHandle import arcs.core.entity.awaitReady -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.SimpleSchedulerProvider import arcs.jvm.util.JvmTime import arcs.sdk.ReadCollectionHandle @@ -67,7 +67,7 @@ class TestActivity : AppCompatActivity() { Executors.newSingleThreadExecutor().asCoroutineDispatcher() private val scope: CoroutineScope = CoroutineScope(coroutineContext) private val schedulerProvider = SimpleSchedulerProvider(Dispatchers.Default) - private lateinit var handleManager: EntityHandleManager + private lateinit var handleManagerImpl: HandleManagerImpl private var handleType = SystemHealthEnums.HandleType.SINGLETON private var storageMode = TestEntity.StorageMode.IN_MEMORY @@ -118,7 +118,7 @@ class TestActivity : AppCompatActivity() { setContentView(R.layout.test_activity) - handleManager = EntityHandleManager( + handleManagerImpl = HandleManagerImpl( time = JvmTime, scheduler = schedulerProvider("sysHealthTestActivity"), storageEndpointManager = storageEndpointManager, @@ -428,7 +428,7 @@ class TestActivity : AppCompatActivity() { when (T::class) { 
ReadWriteSingletonHandle::class -> { if (singletonHandle == null) { - val handle = handleManager.createHandle( + val handle = handleManagerImpl.createHandle( HandleSpec( "singletonHandle", HandleMode.ReadWrite, @@ -466,7 +466,7 @@ class TestActivity : AppCompatActivity() { } ReadWriteCollectionHandle::class -> { if (collectionHandle == null) { - val handle = handleManager.createHandle( + val handle = handleManagerImpl.createHandle( HandleSpec( "collectionHandle", HandleMode.ReadWrite, diff --git a/javatests/arcs/core/allocator/AllocatorTestBase.kt b/javatests/arcs/core/allocator/AllocatorTestBase.kt index fcdd619a747..c003d935e95 100644 --- a/javatests/arcs/core/allocator/AllocatorTestBase.kt +++ b/javatests/arcs/core/allocator/AllocatorTestBase.kt @@ -11,7 +11,7 @@ import arcs.core.entity.ForeignReferenceCheckerImpl import arcs.core.host.ArcHostContext import arcs.core.host.ArcState import arcs.core.host.DeserializedException -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.HelloHelloPlan import arcs.core.host.HostRegistry import arcs.core.host.MultiplePersonPlan @@ -129,7 +129,7 @@ open class AllocatorTestBase { scope = CoroutineScope(Dispatchers.Default) allocator = Allocator.create( hostRegistry, - EntityHandleManager( + HandleManagerImpl( time = FakeTime(), scheduler = schedulerProvider("allocator"), storageEndpointManager = testStorageEndpointManager(), @@ -631,7 +631,7 @@ open class AllocatorTestBase { val allocator2 = Allocator.create( hostRegistry, - EntityHandleManager( + HandleManagerImpl( time = FakeTime(), scheduler = schedulerProvider("allocator2"), storageEndpointManager = testStorageEndpointManager(), diff --git a/javatests/arcs/core/entity/DifferentHandleManagerDifferentStoresTest.kt b/javatests/arcs/core/entity/DifferentHandleManagerDifferentStoresTest.kt index 898c300dc14..1fd02fd9155 100644 --- a/javatests/arcs/core/entity/DifferentHandleManagerDifferentStoresTest.kt +++ 
b/javatests/arcs/core/entity/DifferentHandleManagerDifferentStoresTest.kt @@ -1,6 +1,6 @@ package arcs.core.entity -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.storage.testutil.testStorageEndpointManager import org.junit.Before import org.junit.runner.RunWith @@ -17,7 +17,7 @@ class DifferentHandleManagerDifferentStoresTest : HandleManagerTestBase() { i++ val readStores = testStorageEndpointManager() monitorStorageEndpointManager = readStores - readHandleManager = EntityHandleManager( + readHandleManagerImpl = HandleManagerImpl( arcId = "testArcId", hostId = "testHostId", time = fakeTime, @@ -26,7 +26,7 @@ class DifferentHandleManagerDifferentStoresTest : HandleManagerTestBase() { foreignReferenceChecker = foreignReferenceChecker ) val writeStores = testStorageEndpointManager() - writeHandleManager = EntityHandleManager( + writeHandleManagerImpl = HandleManagerImpl( arcId = "testArcId", hostId = "testHostId", time = fakeTime, diff --git a/javatests/arcs/core/entity/DifferentHandleManagerTest.kt b/javatests/arcs/core/entity/DifferentHandleManagerTest.kt index 634cbdf75ba..2dd042ebcb5 100644 --- a/javatests/arcs/core/entity/DifferentHandleManagerTest.kt +++ b/javatests/arcs/core/entity/DifferentHandleManagerTest.kt @@ -1,6 +1,6 @@ package arcs.core.entity -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.storage.testutil.testStorageEndpointManager import org.junit.After import org.junit.Before @@ -18,7 +18,7 @@ class DifferentHandleManagerTest : HandleManagerTestBase() { val storageEndpointManager = testStorageEndpointManager() i++ monitorStorageEndpointManager = storageEndpointManager - readHandleManager = EntityHandleManager( + readHandleManagerImpl = HandleManagerImpl( arcId = "testArc", hostId = "testHost", time = fakeTime, @@ -26,7 +26,7 @@ class DifferentHandleManagerTest : HandleManagerTestBase() { storageEndpointManager = storageEndpointManager, 
foreignReferenceChecker = foreignReferenceChecker ) - writeHandleManager = EntityHandleManager( + writeHandleManagerImpl = HandleManagerImpl( arcId = "testArc", hostId = "testHost", time = fakeTime, diff --git a/javatests/arcs/core/entity/HandleManagerCloseTest.kt b/javatests/arcs/core/entity/HandleManagerCloseTest.kt index ee83e303b88..b53836226d8 100644 --- a/javatests/arcs/core/entity/HandleManagerCloseTest.kt +++ b/javatests/arcs/core/entity/HandleManagerCloseTest.kt @@ -7,7 +7,7 @@ import arcs.core.data.HandleMode import arcs.core.data.SingletonType import arcs.core.entity.testutil.FixtureEntities import arcs.core.entity.testutil.FixtureEntity -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.SimpleSchedulerProvider import arcs.core.storage.StorageKey import arcs.core.storage.api.DriverAndKeyConfigurator @@ -66,7 +66,7 @@ class HandleManagerCloseTest { schedulerProvider.cancelAll() } - private fun createHandleManager() = EntityHandleManager( + private fun createHandleManager() = HandleManagerImpl( arcId = "testArc", hostId = "", time = FakeTime(), @@ -161,7 +161,7 @@ class HandleManagerCloseTest { } @Suppress("UNCHECKED_CAST") - private suspend fun EntityHandleManager.createSingletonHandle( + private suspend fun HandleManagerImpl.createSingletonHandle( storageKey: StorageKey = singletonKey, name: String = "singletonHandle", ttl: Ttl = Ttl.Infinite() @@ -179,7 +179,7 @@ class HandleManagerCloseTest { ).awaitReady() @Suppress("UNCHECKED_CAST") - private suspend fun EntityHandleManager.createCollectionHandle( + private suspend fun HandleManagerImpl.createCollectionHandle( storageKey: StorageKey = collectionKey, name: String = "collectionKey", ttl: Ttl = Ttl.Infinite() diff --git a/javatests/arcs/core/entity/HandleManagerTestBase.kt b/javatests/arcs/core/entity/HandleManagerTestBase.kt index dd9c99367bb..8f97ec8bbc3 100644 --- a/javatests/arcs/core/entity/HandleManagerTestBase.kt +++ 
b/javatests/arcs/core/entity/HandleManagerTestBase.kt @@ -12,7 +12,7 @@ import arcs.core.entity.AbstractTestParticle.CoolnessIndex import arcs.core.entity.AbstractTestParticle.Friend import arcs.core.entity.AbstractTestParticle.Hat import arcs.core.entity.AbstractTestParticle.Person -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.SchedulerProvider import arcs.core.host.SimpleSchedulerProvider import arcs.core.storage.StorageEndpointManager @@ -109,15 +109,15 @@ open class HandleManagerTestBase { { name: String -> name == validPackageName } val foreignReferenceChecker: ForeignReferenceChecker = ForeignReferenceCheckerImpl(mapOf(AbstractTestParticle.Package.SCHEMA to packageChecker)) - lateinit var readHandleManager: EntityHandleManager - lateinit var writeHandleManager: EntityHandleManager - private lateinit var monitorHandleManager: EntityHandleManager + lateinit var readHandleManagerImpl: HandleManagerImpl + lateinit var writeHandleManagerImpl: HandleManagerImpl + private lateinit var monitorHandleManagerImpl: HandleManagerImpl var testTimeout: Long = 10000 lateinit var monitorStorageEndpointManager: StorageEndpointManager open var testRunner = { block: suspend CoroutineScope.() -> Unit -> - monitorHandleManager = EntityHandleManager( + monitorHandleManagerImpl = HandleManagerImpl( arcId = "testArc", hostId = "monitorHost", time = fakeTime, @@ -127,9 +127,9 @@ open class HandleManagerTestBase { ) runBlocking { withTimeout(testTimeout) { block() } - monitorHandleManager.close() - readHandleManager.close() - writeHandleManager.close() + monitorHandleManagerImpl.close() + readHandleManagerImpl.close() + writeHandleManagerImpl.close() } } @@ -153,14 +153,14 @@ open class HandleManagerTestBase { // TODO(b/151366899): this is less than ideal - we should investigate how to make the entire // test process cancellable/stoppable, even when we cross scopes into a BindingContext or // over to other RamDisk listeners. 
- readHandleManager.close() - writeHandleManager.close() + readHandleManagerImpl.close() + writeHandleManagerImpl.close() schedulerProvider.cancelAll() } @Test fun singleton_initialStateAndSingleHandleOperations() = testRunner { - val handle = writeHandleManager.createSingletonHandle() + val handle = writeHandleManagerImpl.createSingletonHandle() // Don't use the dispatchX helpers so we can test the immediate effect of the handle ops. withContext(handle.dispatcher) { @@ -186,7 +186,7 @@ open class HandleManagerTestBase { @Test fun singleton_writeAndReadBack_unidirectional() = testRunner { // Write-only handle -> read-only handle - val writeHandle = writeHandleManager.createHandle( + val writeHandle = writeHandleManagerImpl.createHandle( HandleSpec( "writeOnlySingleton", HandleMode.Write, @@ -195,7 +195,7 @@ open class HandleManagerTestBase { ), singletonKey ).awaitReady() as WriteSingletonHandle - val readHandle = readHandleManager.createHandle( + val readHandle = readHandleManagerImpl.createHandle( HandleSpec( "readOnlySingleton", HandleMode.Read, @@ -228,7 +228,7 @@ open class HandleManagerTestBase { @Test fun singleton_writeAndReadBack_bidirectional() = testRunner { // Read/write handle <-> read/write handle - val handle1 = writeHandleManager.createHandle( + val handle1 = writeHandleManagerImpl.createHandle( HandleSpec( "readWriteSingleton1", HandleMode.ReadWrite, @@ -238,7 +238,7 @@ open class HandleManagerTestBase { singletonKey ).awaitReady() as ReadWriteSingletonHandle - val handle2 = readHandleManager.createHandle( + val handle2 = readHandleManagerImpl.createHandle( HandleSpec( "readWriteSingleton2", HandleMode.ReadWrite, @@ -278,13 +278,16 @@ open class HandleManagerTestBase { // Arrange: reference handle. 
val friendsStorageKey = ReferenceModeStorageKey(backingKey, RamDiskStorageKey("friends")) val friendsHandle = - writeHandleManager.createSingletonHandle(storageKey = friendsStorageKey, entitySpec = Friend) + writeHandleManagerImpl.createSingletonHandle( + storageKey = friendsStorageKey, + entitySpec = Friend + ) val friend1 = Friend("1") friendsHandle.dispatchStore(friend1) // Arrange: entity handle. - val writeHandle = writeHandleManager.createSingletonHandle() - val readHandle = readHandleManager.createSingletonHandle() + val writeHandle = writeHandleManagerImpl.createSingletonHandle() + val readHandle = readHandleManagerImpl.createSingletonHandle() val readHandleUpdated = readHandle.onUpdateDeferred() // Act. @@ -303,7 +306,7 @@ open class HandleManagerTestBase { @Test fun singleton_dereferenceEntity_nestedReference() = testRunner { // Create a stylish new hat, and create a reference to it. - val hatCollection = writeHandleManager.createHandle( + val hatCollection = writeHandleManagerImpl.createHandle( HandleSpec( "hatCollection", HandleMode.ReadWrite, @@ -330,8 +333,8 @@ open class HandleManagerTestBase { hat = fezRef ) ) - val writeHandle = writeHandleManager.createSingletonHandle() - val readHandle = readHandleManager.createSingletonHandle() + val writeHandle = writeHandleManagerImpl.createSingletonHandle() + val readHandle = readHandleManagerImpl.createSingletonHandle() val readOnUpdate = readHandle.onUpdateDeferred() writeHandle.dispatchStore(personWithHat) @@ -350,7 +353,7 @@ open class HandleManagerTestBase { @Test fun singleton_referenceForeign() = testRunner { val writeHandle = - writeHandleManager.createCollectionHandle(entitySpec = TestParticle_Entities) + writeHandleManagerImpl.createCollectionHandle(entitySpec = TestParticle_Entities) val reference = writeHandle.createForeignReference(AbstractTestParticle.Package, validPackageName) @@ -362,7 +365,7 @@ open class HandleManagerTestBase { writeHandle.dispatchStore(entity) val readHandle = - 
readHandleManager.createCollectionHandle(entitySpec = TestParticle_Entities) + readHandleManagerImpl.createCollectionHandle(entitySpec = TestParticle_Entities) assertThat(readHandle.dispatchFetchAll()).containsExactly(entity) val readBack = readHandle.dispatchFetchAll().single().app!! assertThat(readBack.entityId).isEqualTo(validPackageName) @@ -376,8 +379,8 @@ open class HandleManagerTestBase { @Test fun singleton_noTTL() = testRunner { - val handle = writeHandleManager.createSingletonHandle() - val handleB = readHandleManager.createSingletonHandle() + val handle = writeHandleManagerImpl.createSingletonHandle() + val handleB = readHandleManagerImpl.createSingletonHandle() val handleBUpdated = handleB.onUpdateDeferred() val expectedCreateTime = 123456789L @@ -394,8 +397,8 @@ open class HandleManagerTestBase { @Test fun singleton_withTTL() = testRunner { fakeTime.millis = 0 - val handle = writeHandleManager.createSingletonHandle(ttl = Ttl.Days(2)) - val handleB = readHandleManager.createSingletonHandle() + val handle = writeHandleManagerImpl.createSingletonHandle(ttl = Ttl.Days(2)) + val handleB = readHandleManagerImpl.createSingletonHandle() var handleBUpdated = handleB.onUpdateDeferred() handle.dispatchStore(entity1) @@ -405,7 +408,7 @@ open class HandleManagerTestBase { assertThat(readBack.creationTimestamp).isEqualTo(0) assertThat(readBack.expirationTimestamp).isEqualTo(2 * 24 * 3600 * 1000) - val handleC = readHandleManager.createSingletonHandle(ttl = Ttl.Minutes(2)) + val handleC = readHandleManagerImpl.createSingletonHandle(ttl = Ttl.Minutes(2)) handleBUpdated = handleB.onUpdateDeferred() handleC.dispatchStore(entity2) handleBUpdated.join() @@ -423,8 +426,8 @@ open class HandleManagerTestBase { fun referenceSingleton_withTtl() = testRunner { fakeTime.millis = 0 // Create and store an entity with no TTL. 
- val entityHandle = writeHandleManager.createSingletonHandle() - val refHandle = writeHandleManager.createReferenceSingletonHandle(ttl = Ttl.Minutes(2)) + val entityHandle = writeHandleManagerImpl.createSingletonHandle() + val refHandle = writeHandleManagerImpl.createReferenceSingletonHandle(ttl = Ttl.Minutes(2)) val updated = entityHandle.onUpdateDeferred() entityHandle.dispatchStore(entity1) updated.join() @@ -444,8 +447,8 @@ open class HandleManagerTestBase { @Test fun singleton_referenceLiveness() = testRunner { // Create and store an entity. - val writeEntityHandle = writeHandleManager.createCollectionHandle() - val monitorHandle = monitorHandleManager.createCollectionHandle() + val writeEntityHandle = writeHandleManagerImpl.createCollectionHandle() + val monitorHandle = monitorHandleManagerImpl.createCollectionHandle() val initialEntityStored = monitorHandle.onUpdateDeferred { it.size() == 1 } writeEntityHandle.dispatchStore(entity1) initialEntityStored.join() @@ -453,8 +456,8 @@ open class HandleManagerTestBase { // Create and store a reference to the entity. 
val entity1Ref = writeEntityHandle.dispatchCreateReference(entity1) - val writeRefHandle = writeHandleManager.createReferenceSingletonHandle() - val readRefHandle = readHandleManager.createReferenceSingletonHandle() + val writeRefHandle = writeHandleManagerImpl.createReferenceSingletonHandle() + val readRefHandle = readHandleManagerImpl.createReferenceSingletonHandle() val refHeard = readRefHandle.onUpdateDeferred() writeRefHandle.dispatchStore(entity1Ref) log("Created and stored a reference") @@ -503,7 +506,7 @@ open class HandleManagerTestBase { @Test fun singleton_referenceHandle_referenceModeNotSupported() = testRunner { val e = assertSuspendingThrows(IllegalArgumentException::class) { - writeHandleManager.createReferenceSingletonHandle( + writeHandleManagerImpl.createReferenceSingletonHandle( ReferenceModeStorageKey( backingKey = backingKey, storageKey = singletonRefKey @@ -517,7 +520,7 @@ open class HandleManagerTestBase { @Test fun collection_initialStateAndSingleHandleOperations() = testRunner { - val handle = writeHandleManager.createCollectionHandle() + val handle = writeHandleManagerImpl.createCollectionHandle() // Don't use the dispatchX helpers so we can test the immediate effect of the handle ops. withContext(handle.dispatcher) { @@ -561,7 +564,7 @@ open class HandleManagerTestBase { @Test fun collection_remove_needsId() = testRunner { - val handle = writeHandleManager.createCollectionHandle(entitySpec = TestParticle_Entities) + val handle = writeHandleManagerImpl.createCollectionHandle(entitySpec = TestParticle_Entities) val entity = TestParticle_Entities(text = "Hello") // Entity does not have an ID, it cannot be removed. 
assertSuspendingThrows(IllegalStateException::class) { @@ -575,7 +578,7 @@ open class HandleManagerTestBase { @Test fun removeByQuery_oneRemoved() = testRunner { - val handle = writeHandleManager.createCollectionHandle(entitySpec = TestParticle_Entities) + val handle = writeHandleManagerImpl.createCollectionHandle(entitySpec = TestParticle_Entities) val entity = TestParticle_Entities(text = "one") val entity2 = TestParticle_Entities(text = "two") handle.dispatchStore(entity, entity2) @@ -587,7 +590,7 @@ open class HandleManagerTestBase { @Test fun removeByQuery_zeroRemoved() = testRunner { - val handle = writeHandleManager.createCollectionHandle(entitySpec = TestParticle_Entities) + val handle = writeHandleManagerImpl.createCollectionHandle(entitySpec = TestParticle_Entities) val entity = TestParticle_Entities(text = "one") val entity2 = TestParticle_Entities(text = "two") handle.dispatchStore(entity, entity2) @@ -599,7 +602,7 @@ open class HandleManagerTestBase { @Test fun removeByQuery_emptyCollection() = testRunner { - val handle = writeHandleManager.createCollectionHandle(entitySpec = TestParticle_Entities) + val handle = writeHandleManagerImpl.createCollectionHandle(entitySpec = TestParticle_Entities) handle.dispatchRemoveByQuery("one") @@ -608,7 +611,7 @@ open class HandleManagerTestBase { @Test fun removeByQuery_allRemoved() = testRunner { - val handle = writeHandleManager.createCollectionHandle(entitySpec = TestParticle_Entities) + val handle = writeHandleManagerImpl.createCollectionHandle(entitySpec = TestParticle_Entities) val entity = TestParticle_Entities(text = "two") val entity2 = TestParticle_Entities(text = "two") handle.dispatchStore(entity, entity2) @@ -621,7 +624,7 @@ open class HandleManagerTestBase { @Test fun removeByQueryDisabled_throwsException() = testRunner { BuildFlags.REMOVE_BY_QUERY_HANDLE = false - val handle = writeHandleManager.createCollectionHandle(entitySpec = TestParticle_Entities) + val handle = 
writeHandleManagerImpl.createCollectionHandle(entitySpec = TestParticle_Entities) assertFailsWith { handle.dispatchRemoveByQuery("two") } } @@ -629,7 +632,7 @@ open class HandleManagerTestBase { @Test fun collection_writeAndReadBack_unidirectional() = testRunner { // Write-only handle -> read-only handle - val writeHandle = writeHandleManager.createHandle( + val writeHandle = writeHandleManagerImpl.createHandle( HandleSpec( "writeOnlyCollection", HandleMode.Write, @@ -639,7 +642,7 @@ open class HandleManagerTestBase { collectionKey ).awaitReady() as WriteCollectionHandle - val readHandle = readHandleManager.createHandle( + val readHandle = readHandleManagerImpl.createHandle( HandleSpec( "readOnlyCollection", HandleMode.Read, @@ -694,7 +697,7 @@ open class HandleManagerTestBase { @Test fun collection_writeAndReadBack_bidirectional() = testRunner { // Read/write handle <-> read/write handle - val handle1 = writeHandleManager.createHandle( + val handle1 = writeHandleManagerImpl.createHandle( HandleSpec( "readWriteCollection1", HandleMode.ReadWrite, @@ -704,7 +707,7 @@ open class HandleManagerTestBase { collectionKey ).awaitReady() as ReadWriteCollectionHandle - val handle2 = readHandleManager.createHandle( + val handle2 = readHandleManagerImpl.createHandle( HandleSpec( "readWriteCollection2", HandleMode.ReadWrite, @@ -766,7 +769,7 @@ open class HandleManagerTestBase { @Test fun collection_writeMutatedEntityReplaces() = testRunner { val entity = TestParticle_Entities(text = "Hello") - val handle = writeHandleManager.createCollectionHandle(entitySpec = TestParticle_Entities) + val handle = writeHandleManagerImpl.createCollectionHandle(entitySpec = TestParticle_Entities) handle.dispatchStore(entity) assertThat(handle.dispatchFetchAll()).containsExactly(entity) @@ -804,7 +807,9 @@ open class HandleManagerTestBase { ) ) val entity = TestInlineParticle_Entities(inline, inlineSet, inlineList) - val handle = writeHandleManager.createCollectionHandle(entitySpec = 
TestInlineParticle_Entities) + val handle = writeHandleManagerImpl.createCollectionHandle( + entitySpec = TestInlineParticle_Entities + ) handle.dispatchStore(entity) val modified = entity.mutate( @@ -839,10 +844,10 @@ open class HandleManagerTestBase { number = 1.0, list = listOf(1L, 2L, 4L, 2L) ) - val writeHandle = writeHandleManager.createCollectionHandle( + val writeHandle = writeHandleManagerImpl.createCollectionHandle( entitySpec = TestParticle_Entities ) - val readHandle = readHandleManager.createCollectionHandle( + val readHandle = readHandleManagerImpl.createCollectionHandle( entitySpec = TestParticle_Entities ) @@ -876,10 +881,10 @@ open class HandleManagerTestBase { ) val entity = TestInlineParticle_Entities(inline, inlineSet, inlineList) - val writeHandle = writeHandleManager.createCollectionHandle( + val writeHandle = writeHandleManagerImpl.createCollectionHandle( entitySpec = TestInlineParticle_Entities ) - val readHandle = readHandleManager.createCollectionHandle( + val readHandle = readHandleManagerImpl.createCollectionHandle( entitySpec = TestInlineParticle_Entities ) @@ -898,7 +903,7 @@ open class HandleManagerTestBase { storageKey = RamDiskStorageKey("set-referencedEntities") ) - val referencedEntityHandle = writeHandleManager.createCollectionHandle( + val referencedEntityHandle = writeHandleManagerImpl.createCollectionHandle( referencedEntitiesKey, entitySpec = TestReferencesParticle_Entities_References ) @@ -922,10 +927,10 @@ open class HandleManagerTestBase { toEntity(setOf(34, 2145, 1, 11), listOf(3, 4, 5)) ) - val writeHandle = writeHandleManager.createCollectionHandle( + val writeHandle = writeHandleManagerImpl.createCollectionHandle( entitySpec = TestReferencesParticle_Entities ) - val readHandle = readHandleManager.createCollectionHandle( + val readHandle = readHandleManagerImpl.createCollectionHandle( entitySpec = TestReferencesParticle_Entities ) @@ -947,7 +952,7 @@ open class HandleManagerTestBase { fakeTime.autoincrement = 1 val id = 
"MyId" val entity = TestParticle_Entities(text = "Hello", number = 1.0, entityId = id) - val handle = writeHandleManager.createCollectionHandle(entitySpec = TestParticle_Entities) + val handle = writeHandleManagerImpl.createCollectionHandle(entitySpec = TestParticle_Entities) handle.dispatchStore(entity) assertThat(handle.dispatchFetchAll()).containsExactly(entity) @@ -973,7 +978,7 @@ open class HandleManagerTestBase { number = 1.0, creationTimestamp = creationTime ) - val handle = writeHandleManager.createCollectionHandle(entitySpec = TestParticle_Entities) + val handle = writeHandleManagerImpl.createCollectionHandle(entitySpec = TestParticle_Entities) handle.dispatchStore(entity) assertThat(handle.dispatchFetchAll()).containsExactly(entity) @@ -996,14 +1001,17 @@ open class HandleManagerTestBase { // Arrange: reference handle. val friendsStorageKey = ReferenceModeStorageKey(backingKey, RamDiskStorageKey("friends")) val friendsHandle = - writeHandleManager.createCollectionHandle(storageKey = friendsStorageKey, entitySpec = Friend) + writeHandleManagerImpl.createCollectionHandle( + storageKey = friendsStorageKey, + entitySpec = Friend + ) val friend1 = Friend("1") friendsHandle.dispatchStore(friend1) // Arrange: entity handle. 
- val writeHandle = writeHandleManager.createCollectionHandle() - val readHandle = readHandleManager.createCollectionHandle() - val monitorHandle = monitorHandleManager.createCollectionHandle() + val writeHandle = writeHandleManagerImpl.createCollectionHandle() + val readHandle = readHandleManagerImpl.createCollectionHandle() + val monitorHandle = monitorHandleManagerImpl.createCollectionHandle() val monitorInitialized = monitorHandle.onUpdateDeferred { it.size() == 1 } val readUpdated = readHandle.onUpdateDeferred { it.size() == 1 } @@ -1031,16 +1039,16 @@ open class HandleManagerTestBase { CollectionType(EntityType(Hat.SCHEMA)), Hat ) - val hatCollection = writeHandleManager.createHandle( + val hatCollection = writeHandleManagerImpl.createHandle( hatSpec, hatCollectionKey ).awaitReady() as ReadWriteCollectionHandle - val hatMonitor = monitorHandleManager.createHandle( + val hatMonitor = monitorHandleManagerImpl.createHandle( hatSpec, hatCollectionKey ).awaitReady() as ReadWriteCollectionHandle - val writeHandle = writeHandleManager.createCollectionHandle() - val readHandle = readHandleManager.createCollectionHandle() + val writeHandle = writeHandleManagerImpl.createCollectionHandle() + val readHandle = readHandleManagerImpl.createCollectionHandle() val fez = Hat(entityId = "fez-id", style = "fez") val hatMonitorKnows = hatMonitor.onUpdateDeferred { @@ -1081,9 +1089,9 @@ open class HandleManagerTestBase { @Test fun collection_noTTL() = testRunner { - val monitor = monitorHandleManager.createCollectionHandle() - val handle = writeHandleManager.createCollectionHandle() - val handleB = readHandleManager.createCollectionHandle() + val monitor = monitorHandleManagerImpl.createCollectionHandle() + val handle = writeHandleManagerImpl.createCollectionHandle() + val handleB = readHandleManagerImpl.createCollectionHandle() val handleBChanged = handleB.onUpdateDeferred() val monitorNotified = monitor.onUpdateDeferred() @@ -1102,8 +1110,8 @@ open class HandleManagerTestBase { 
@Test fun collection_withTTL() = testRunner { fakeTime.millis = 0 - val handle = writeHandleManager.createCollectionHandle(ttl = Ttl.Days(2)) - val handleB = readHandleManager.createCollectionHandle() + val handle = writeHandleManagerImpl.createCollectionHandle(ttl = Ttl.Days(2)) + val handleB = readHandleManagerImpl.createCollectionHandle() var handleBChanged = handleB.onUpdateDeferred() handle.dispatchStore(entity1) handleBChanged.join() @@ -1112,7 +1120,7 @@ open class HandleManagerTestBase { assertThat(readBack.creationTimestamp).isEqualTo(0) assertThat(readBack.expirationTimestamp).isEqualTo(2 * 24 * 3600 * 1000) - val handleC = readHandleManager.createCollectionHandle(ttl = Ttl.Minutes(2)) + val handleC = readHandleManagerImpl.createCollectionHandle(ttl = Ttl.Minutes(2)) handleBChanged = handleB.onUpdateDeferred() handleC.dispatchStore(entity2) handleBChanged.join() @@ -1129,8 +1137,8 @@ open class HandleManagerTestBase { @Test fun referenceCollection_withTtl() = testRunner { fakeTime.millis = 0 - val entityHandle = writeHandleManager.createCollectionHandle() - val refHandle = writeHandleManager.createReferenceCollectionHandle(ttl = Ttl.Minutes(2)) + val entityHandle = writeHandleManagerImpl.createCollectionHandle() + val refHandle = writeHandleManagerImpl.createReferenceCollectionHandle(ttl = Ttl.Minutes(2)) // Create and store an entity with no TTL. 
val updated = entityHandle.onUpdateDeferred() @@ -1152,8 +1160,12 @@ open class HandleManagerTestBase { @Test fun collection_addingToA_showsUpInQueryOnB() = testRunner { - val writeHandle = writeHandleManager.createCollectionHandle(entitySpec = TestParticle_Entities) - val readHandle = readHandleManager.createCollectionHandle(entitySpec = TestParticle_Entities) + val writeHandle = writeHandleManagerImpl.createCollectionHandle( + entitySpec = TestParticle_Entities + ) + val readHandle = readHandleManagerImpl.createCollectionHandle( + entitySpec = TestParticle_Entities + ) val entity1 = TestParticle_Entities(text = "21.0") val entity2 = TestParticle_Entities(text = "22.0") val readUpdatedTwice = readHandle.onUpdateDeferred { it.size() == 2 } @@ -1176,7 +1188,7 @@ open class HandleManagerTestBase { entityId = "doctor1", coolnessIndex = CoolnessIndex(pairsOfShoesOwned = 0, isCool = false) ) - val handle = writeHandleManager.createCollectionHandle() + val handle = writeHandleManagerImpl.createCollectionHandle() handle.dispatchStore(entity1, entity2) assertThat(handle.dispatchFetchAll()).containsExactly(entity1, entity2) @@ -1189,7 +1201,7 @@ open class HandleManagerTestBase { @Test @Ignore("Need to patch ExpressionEvaluator to check types") fun collection_queryWithInvalidQueryThrows() = testRunner { - val handle = writeHandleManager.createCollectionHandle() + val handle = writeHandleManagerImpl.createCollectionHandle() handle.dispatchStore(entity1, entity2) assertThat(handle.dispatchFetchAll()).containsExactly(entity1, entity2) @@ -1204,8 +1216,8 @@ open class HandleManagerTestBase { @Test fun collection_referenceLiveness() = testRunner { // Create and store some entities. 
- val writeEntityHandle = writeHandleManager.createCollectionHandle() - val monitorHandle = monitorHandleManager.createCollectionHandle() + val writeEntityHandle = writeHandleManagerImpl.createCollectionHandle() + val monitorHandle = monitorHandleManagerImpl.createCollectionHandle() monitorHandle.onUpdate { log("Monitor Handle: $it") } @@ -1222,8 +1234,8 @@ open class HandleManagerTestBase { // Create a store a reference to the entity. val entity1Ref = writeEntityHandle.dispatchCreateReference(entity1) val entity2Ref = writeEntityHandle.dispatchCreateReference(entity2) - val writeRefHandle = writeHandleManager.createReferenceCollectionHandle() - val readRefHandle = readHandleManager.createReferenceCollectionHandle() + val writeRefHandle = writeHandleManagerImpl.createReferenceCollectionHandle() + val readRefHandle = readHandleManagerImpl.createReferenceCollectionHandle() val refWritesHappened = readRefHandle.onUpdateDeferred { log("References created so far: $it") it.size() == 2 @@ -1282,7 +1294,7 @@ open class HandleManagerTestBase { @Test fun collection_referenceHandle_referenceModeNotSupported() = testRunner { val e = assertSuspendingThrows(IllegalArgumentException::class) { - writeHandleManager.createReferenceCollectionHandle( + writeHandleManagerImpl.createReferenceCollectionHandle( ReferenceModeStorageKey( backingKey = backingKey, storageKey = collectionRefKey @@ -1296,7 +1308,7 @@ open class HandleManagerTestBase { @Test fun arcsStrictMode_handle_operation_fails() = testRunner { - val handle = writeHandleManager.createCollectionHandle() + val handle = writeHandleManagerImpl.createCollectionHandle() ArcsStrictMode.enableStrictHandlesForTest { assertFailsWith { handle.clear() @@ -1304,7 +1316,7 @@ open class HandleManagerTestBase { } } - private suspend fun EntityHandleManager.createSingletonHandle( + private suspend fun HandleManagerImpl.createSingletonHandle( storageKey: StorageKey = singletonKey, name: String = "singletonWriteHandle", ttl: Ttl = 
Ttl.Infinite(), @@ -1320,19 +1332,19 @@ open class HandleManagerTestBase { ttl ).awaitReady() as ReadWriteSingletonHandle - private suspend fun EntityHandleManager.createSingletonHandle( + private suspend fun HandleManagerImpl.createSingletonHandle( storageKey: StorageKey = singletonKey, name: String = "singletonWriteHandle", ttl: Ttl = Ttl.Infinite() ) = createSingletonHandle(storageKey, name, ttl, Person) - private suspend fun EntityHandleManager.createCollectionHandle( + private suspend fun HandleManagerImpl.createCollectionHandle( storageKey: StorageKey = collectionKey, name: String = "collectionReadHandle", ttl: Ttl = Ttl.Infinite() ) = createCollectionHandle(storageKey, name, ttl, Person) - private suspend fun EntityHandleManager.createCollectionHandle( + private suspend fun HandleManagerImpl.createCollectionHandle( storageKey: StorageKey = collectionKey, name: String = "collectionReadHandle", ttl: Ttl = Ttl.Infinite(), @@ -1348,7 +1360,7 @@ open class HandleManagerTestBase { ttl ).awaitReady() as ReadWriteQueryCollectionHandle - private suspend fun EntityHandleManager.createReferenceSingletonHandle( + private suspend fun HandleManagerImpl.createReferenceSingletonHandle( storageKey: StorageKey = singletonRefKey, name: String = "referenceSingletonWriteHandle", ttl: Ttl = Ttl.Infinite() @@ -1363,7 +1375,7 @@ open class HandleManagerTestBase { ttl ).awaitReady() as ReadWriteSingletonHandle> - private suspend fun EntityHandleManager.createReferenceCollectionHandle( + private suspend fun HandleManagerImpl.createReferenceCollectionHandle( storageKey: StorageKey = collectionRefKey, name: String = "referenceCollectionReadHandle", ttl: Ttl = Ttl.Infinite() diff --git a/javatests/arcs/core/entity/ReferenceTest.kt b/javatests/arcs/core/entity/ReferenceTest.kt index 608df5de4af..66c27d86c4c 100644 --- a/javatests/arcs/core/entity/ReferenceTest.kt +++ b/javatests/arcs/core/entity/ReferenceTest.kt @@ -8,7 +8,7 @@ import arcs.core.data.Schema import 
arcs.core.data.SchemaRegistry import arcs.core.entity.testutil.DummyEntity import arcs.core.entity.testutil.InlineDummyEntity -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.storage.RawEntityDereferencer import arcs.core.storage.Reference as StorageReference import arcs.core.storage.api.DriverAndKeyConfigurator @@ -42,7 +42,7 @@ class ReferenceTest { private lateinit var scheduler: Scheduler private lateinit var dereferencer: RawEntityDereferencer - private lateinit var entityHandleManager: EntityHandleManager + private lateinit var handleManagerImpl: HandleManagerImpl private lateinit var handle: ReadWriteCollectionHandle private val STORAGE_KEY = ReferenceModeStorageKey( @@ -60,7 +60,7 @@ class ReferenceTest { scheduler = Scheduler(Executors.newSingleThreadExecutor().asCoroutineDispatcher()) val storageEndpointManager = testStorageEndpointManager() dereferencer = RawEntityDereferencer(DummyEntity.SCHEMA, storageEndpointManager) - entityHandleManager = EntityHandleManager( + handleManagerImpl = HandleManagerImpl( "testArc", "", FakeTime(), @@ -69,7 +69,7 @@ class ReferenceTest { foreignReferenceChecker = ForeignReferenceCheckerImpl(emptyMap()) ) - handle = entityHandleManager.createHandle( + handle = handleManagerImpl.createHandle( HandleSpec( "testHandle", HandleMode.ReadWrite, @@ -83,7 +83,7 @@ class ReferenceTest { @After fun tearDown() = runTest { scheduler.waitForIdle() - entityHandleManager.close() + handleManagerImpl.close() scheduler.cancel() SchemaRegistry.clearForTest() diff --git a/javatests/arcs/core/entity/SameHandleManagerTest.kt b/javatests/arcs/core/entity/SameHandleManagerTest.kt index 62ade5caee4..e8caca130b5 100644 --- a/javatests/arcs/core/entity/SameHandleManagerTest.kt +++ b/javatests/arcs/core/entity/SameHandleManagerTest.kt @@ -1,6 +1,6 @@ package arcs.core.entity -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import 
arcs.core.storage.testutil.testStorageEndpointManager import org.junit.After import org.junit.Before @@ -14,7 +14,7 @@ class SameHandleManagerTest : HandleManagerTestBase() { override fun setUp() { super.setUp() monitorStorageEndpointManager = testStorageEndpointManager() - readHandleManager = EntityHandleManager( + readHandleManagerImpl = HandleManagerImpl( arcId = "testArc", hostId = "testHost", time = fakeTime, @@ -22,7 +22,7 @@ class SameHandleManagerTest : HandleManagerTestBase() { storageEndpointManager = testStorageEndpointManager(), foreignReferenceChecker = foreignReferenceChecker ) - writeHandleManager = readHandleManager + writeHandleManagerImpl = readHandleManagerImpl } @After diff --git a/javatests/arcs/core/host/HandleAdapterTest.kt b/javatests/arcs/core/host/HandleAdapterTest.kt index b867d3f11fe..273df8ce7cc 100644 --- a/javatests/arcs/core/host/HandleAdapterTest.kt +++ b/javatests/arcs/core/host/HandleAdapterTest.kt @@ -67,8 +67,8 @@ class HandleAdapterTest { @get:Rule val log = LogRule() - private lateinit var manager: EntityHandleManager - private lateinit var monitorManager: EntityHandleManager + private lateinit var managerImpl: HandleManagerImpl + private lateinit var monitorManagerImpl: HandleManagerImpl private val idGenerator = Id.Generator.newForTest("session") private lateinit var schedulerProvider: SimpleSchedulerProvider private lateinit var scheduler: Scheduler @@ -79,7 +79,7 @@ class HandleAdapterTest { DriverAndKeyConfigurator.configure(null) schedulerProvider = SimpleSchedulerProvider(EmptyCoroutineContext) scheduler = schedulerProvider("tests") - manager = EntityHandleManager( + managerImpl = HandleManagerImpl( "testArc", "", FakeTime(), @@ -87,7 +87,7 @@ class HandleAdapterTest { storageEndpointManager = testStorageEndpointManager(), foreignReferenceChecker = ForeignReferenceCheckerImpl(emptyMap()) ) - monitorManager = EntityHandleManager( + monitorManagerImpl = HandleManagerImpl( "testArc", "", FakeTime(), @@ -99,14 +99,14 @@ 
class HandleAdapterTest { @After fun tearDown() = runBlocking { - manager.close() - monitorManager.close() + managerImpl.close() + monitorManagerImpl.close() schedulerProvider.cancelAll() } @Test fun singletonHandleAdapter_readOnlyCantWrite() = runTest { - val readOnlyHandle = manager.createHandle( + val readOnlyHandle = managerImpl.createHandle( HandleSpec( READ_ONLY_HANDLE, HandleMode.Read, @@ -123,7 +123,7 @@ class HandleAdapterTest { @Test fun singletonHandleAdapter_writeOnlyCantRead() = runTest { - val writeOnlyHandle = manager.createHandle( + val writeOnlyHandle = managerImpl.createHandle( HandleSpec( WRITE_ONLY_HANDLE, HandleMode.Write, @@ -140,7 +140,7 @@ class HandleAdapterTest { @Test fun singletonHandleAdapter_createReference() = runTest { val handle = ( - manager.createHandle( + managerImpl.createHandle( HandleSpec( READ_WRITE_HANDLE, HandleMode.ReadWrite, @@ -179,7 +179,7 @@ class HandleAdapterTest { @Test fun singleton_noOpsAfterClose() = runTest { - val handle = manager.createHandle( + val handle = managerImpl.createHandle( HandleSpec( READ_WRITE_HANDLE, HandleMode.ReadWrite, @@ -199,7 +199,7 @@ class HandleAdapterTest { @Test fun collectionHandleAdapter_readOnlyCantWrite() = runTest { - val readOnlyHandle = manager.createHandle( + val readOnlyHandle = managerImpl.createHandle( HandleSpec( READ_ONLY_HANDLE, HandleMode.Read, @@ -216,7 +216,7 @@ class HandleAdapterTest { @Test fun collectionHandleAdapter_writeOnlyCantRead() = runTest { - val writeOnlyHandle = manager.createHandle( + val writeOnlyHandle = managerImpl.createHandle( HandleSpec( WRITE_ONLY_HANDLE, HandleMode.Write, @@ -233,7 +233,7 @@ class HandleAdapterTest { @Test fun collectionHandleAdapter_createReference() = runTest { - val handle = manager.createHandle( + val handle = managerImpl.createHandle( HandleSpec( READ_WRITE_HANDLE, HandleMode.ReadWrite, @@ -242,7 +242,7 @@ class HandleAdapterTest { ), STORAGE_KEY ) as ReadWriteCollectionHandle - val monitorHandle = 
monitorManager.createHandle( + val monitorHandle = monitorManagerImpl.createHandle( HandleSpec( READ_ONLY_HANDLE, HandleMode.ReadWrite, @@ -291,7 +291,7 @@ class HandleAdapterTest { @Test fun collection_noOpsAfterClose() = runTest { - val handle = manager.createHandle( + val handle = managerImpl.createHandle( HandleSpec( READ_WRITE_HANDLE, HandleMode.ReadWriteQuery, diff --git a/javatests/arcs/core/host/LifecycleTest.kt b/javatests/arcs/core/host/LifecycleTest.kt index 8699d63f08d..5db61fe95ff 100644 --- a/javatests/arcs/core/host/LifecycleTest.kt +++ b/javatests/arcs/core/host/LifecycleTest.kt @@ -50,7 +50,7 @@ class LifecycleTest { private lateinit var scheduler: Scheduler private lateinit var testHost: TestingHost private lateinit var hostRegistry: HostRegistry - private lateinit var entityHandleManager: EntityHandleManager + private lateinit var handleManagerImpl: HandleManagerImpl private lateinit var allocator: Allocator private val testScope = TestCoroutineScope() @@ -78,13 +78,13 @@ class LifecycleTest { ::StartupTimeoutParticle.toRegistration() ) hostRegistry = ExplicitHostRegistry().also { it.registerHost(testHost) } - entityHandleManager = EntityHandleManager( + handleManagerImpl = HandleManagerImpl( time = FakeTime(), scheduler = scheduler, storageEndpointManager = testStorageEndpointManager(), foreignReferenceChecker = ForeignReferenceCheckerImpl(emptyMap()) ) - allocator = Allocator.create(hostRegistry, entityHandleManager, testScope) + allocator = Allocator.create(hostRegistry, handleManagerImpl, testScope) testHost.setup() } @@ -92,7 +92,7 @@ class LifecycleTest { fun tearDown() = runBlocking { try { scheduler.waitForIdle() - entityHandleManager.close() + handleManagerImpl.close() } finally { schedulerProvider.cancelAll() } diff --git a/javatests/arcs/core/host/ReflectiveParticleConstructionTest.kt b/javatests/arcs/core/host/ReflectiveParticleConstructionTest.kt index d1937c39a22..5d5fc2ceaf5 100644 --- 
a/javatests/arcs/core/host/ReflectiveParticleConstructionTest.kt +++ b/javatests/arcs/core/host/ReflectiveParticleConstructionTest.kt @@ -81,7 +81,7 @@ class ReflectiveParticleConstructionTest { val allocator = Allocator.create( hostRegistry, - EntityHandleManager( + HandleManagerImpl( time = FakeTime(), scheduler = schedulerProvider("allocator"), storageEndpointManager = testStorageEndpointManager(), diff --git a/javatests/arcs/sdk/HandleUtilsTest.kt b/javatests/arcs/sdk/HandleUtilsTest.kt index 61c22f1675c..9626e1bc784 100644 --- a/javatests/arcs/sdk/HandleUtilsTest.kt +++ b/javatests/arcs/sdk/HandleUtilsTest.kt @@ -18,7 +18,7 @@ import arcs.core.entity.ForeignReferenceCheckerImpl import arcs.core.entity.HandleSpec import arcs.core.entity.ReadWriteSingletonHandle import arcs.core.entity.awaitReady -import arcs.core.host.EntityHandleManager +import arcs.core.host.HandleManagerImpl import arcs.core.host.HandleMode import arcs.core.storage.StorageKey import arcs.core.storage.api.DriverAndKeyConfigurator @@ -55,14 +55,14 @@ class HandleUtilsTest { val log = LogRule() private lateinit var scheduler: Scheduler - private lateinit var manager: EntityHandleManager + private lateinit var managerImpl: HandleManagerImpl @Before fun setUp() = runBlocking { RamDisk.clear() DriverAndKeyConfigurator.configure(null) scheduler = Scheduler(Executors.newSingleThreadExecutor().asCoroutineDispatcher() + Job()) - manager = EntityHandleManager( + managerImpl = HandleManagerImpl( arcId = "testArc", hostId = "testHost", time = FakeTime(), @@ -75,7 +75,7 @@ class HandleUtilsTest { @After fun tearDown() = runBlocking { scheduler.waitForIdle() - manager.close() + managerImpl.close() scheduler.cancel() } @@ -339,7 +339,7 @@ class HandleUtilsTest { private suspend fun createCollectionHandle( storageKey: StorageKey - ) = manager.createHandle( + ) = managerImpl.createHandle( HandleSpec( READ_WRITE_HANDLE, HandleMode.ReadWriteQuery, @@ -351,7 +351,7 @@ class HandleUtilsTest { private suspend fun 
createSingletonHandle( storageKey: StorageKey - ) = manager.createHandle( + ) = managerImpl.createHandle( HandleSpec( READ_WRITE_HANDLE, HandleMode.ReadWrite, From d5242e852e179e160b4ef932870fc6c1ff36d838 Mon Sep 17 00:00:00 2001 From: Yuan Gu Date: Tue, 8 Dec 2020 15:24:36 -0800 Subject: [PATCH 24/31] Add more unit tests for DirectStore. PiperOrigin-RevId: 346422565 --- java/arcs/core/storage/ActiveStore.kt | 1 - java/arcs/core/storage/DirectStore.kt | 3 - .../storage/testutil/TestStoreWriteBack.kt | 21 ++++ .../{StoreTest.kt => DirectStoreTest.kt} | 96 ++++++++++++++++++- 4 files changed, 114 insertions(+), 7 deletions(-) create mode 100644 java/arcs/core/storage/testutil/TestStoreWriteBack.kt rename javatests/arcs/core/storage/{StoreTest.kt => DirectStoreTest.kt} (77%) diff --git a/java/arcs/core/storage/ActiveStore.kt b/java/arcs/core/storage/ActiveStore.kt index 8323f39617d..48cc390e3f7 100644 --- a/java/arcs/core/storage/ActiveStore.kt +++ b/java/arcs/core/storage/ActiveStore.kt @@ -29,7 +29,6 @@ abstract class ActiveStore( ) : IStore { override val storageKey: StorageKey = options.storageKey override val type: Type = options.type - open val versionToken: String? = options.versionToken /** Suspends until all pending operations are complete. */ abstract suspend fun idle() diff --git a/java/arcs/core/storage/DirectStore.kt b/java/arcs/core/storage/DirectStore.kt index 4f4da466ab1..222030403ec 100644 --- a/java/arcs/core/storage/DirectStore.kt +++ b/java/arcs/core/storage/DirectStore.kt @@ -56,9 +56,6 @@ class DirectStore /* internal */ constru private val writeBack: WriteBack, private val devTools: DevToolsForDirectStore? ) : ActiveStore(options) { - override val versionToken: String? - get() = driver.token - // TODO(#5551): Consider including a hash of state.value and storage key in log prefix. 
private val log = TaggedLog { "DirectStore" } diff --git a/java/arcs/core/storage/testutil/TestStoreWriteBack.kt b/java/arcs/core/storage/testutil/TestStoreWriteBack.kt new file mode 100644 index 00000000000..6b295005233 --- /dev/null +++ b/java/arcs/core/storage/testutil/TestStoreWriteBack.kt @@ -0,0 +1,21 @@ +package arcs.core.storage.testutil + +import arcs.core.storage.StoreWriteBack +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.channels.Channel + +/** Test [StoreWriteBack] implementation for use in unit tests. */ +class TestStoreWriteBack(protocol: String, scope: CoroutineScope) : StoreWriteBack( + protocol = protocol, + queueSize = Channel.UNLIMITED, + forceEnable = false, + scope = scope +) { + var closed: Boolean = false + private set + + override fun close() { + super.close() + closed = true + } +} diff --git a/javatests/arcs/core/storage/StoreTest.kt b/javatests/arcs/core/storage/DirectStoreTest.kt similarity index 77% rename from javatests/arcs/core/storage/StoreTest.kt rename to javatests/arcs/core/storage/DirectStoreTest.kt index cbc281e6cd6..ca8d690f392 100644 --- a/javatests/arcs/core/storage/StoreTest.kt +++ b/javatests/arcs/core/storage/DirectStoreTest.kt @@ -28,6 +28,7 @@ import arcs.core.data.SchemaFields import arcs.core.data.util.toReferencable import arcs.core.storage.testutil.DummyStorageKey import arcs.core.storage.testutil.FakeDriverProvider +import arcs.core.storage.testutil.TestStoreWriteBack import arcs.core.storage.testutil.testWriteBackProvider import com.google.common.truth.Truth.assertThat import kotlinx.atomicfu.atomic @@ -35,18 +36,35 @@ import kotlinx.coroutines.CompletableDeferred import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.ExperimentalCoroutinesApi import kotlinx.coroutines.Job +import kotlinx.coroutines.test.TestCoroutineDispatcher +import kotlinx.coroutines.test.TestCoroutineScope import kotlinx.coroutines.test.runBlockingTest import org.junit.Assert.fail +import org.junit.Before 
import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -/** Tests for [ActiveStore]. */ +/** Tests for [DirectStore]. */ @Suppress("UNCHECKED_CAST", "UNUSED_VARIABLE") @OptIn(ExperimentalCoroutinesApi::class) @RunWith(JUnit4::class) -class StoreTest { +class DirectStoreTest { val testKey: StorageKey = DummyStorageKey("key") + lateinit var testScope: TestCoroutineScope + lateinit var testStoreWriteBack: TestStoreWriteBack + lateinit var testWriteBackProvider: WriteBackProvider + + @Before + fun setup() { + testScope = TestCoroutineScope(TestCoroutineDispatcher()) + testWriteBackProvider = object : WriteBackProvider { + override fun invoke(protocol: Protocol): WriteBack { + this@DirectStoreTest.testStoreWriteBack = TestStoreWriteBack(protocol, testScope) + return this@DirectStoreTest.testStoreWriteBack + } + } + } @Test(expected = CrdtException::class) fun throws_ifAppropriateDriverCantBeFound() = runBlockingTest { @@ -133,6 +151,58 @@ class StoreTest { assertThat(sentSyncRequest.value).isTrue() } + @Test + fun sendSyncRequest_fromInvalidOperation() = runBlockingTest { + val (driver, driverProvider) = setupFakes() + val store = createStore(driverProvider) as DirectStore + + val deferred = CompletableDeferred(coroutineContext[Job]) + val callback: ProxyCallback = { message -> + // Invalid ops will only cause a SyncRequest so any other message is considered unexpected. + when (message) { + is ProxyMessage.SyncRequest -> { + deferred.complete(Unit) + } + else -> { + deferred.completeExceptionally(AssertionError("Shouldn't ever get here.")) + } + } + } + + // Set up the callback + val cbid = store.on(callback) + + // Send an invalid message with wrong VersionInfo, which should result in + // a SyncRequest on the callback + store.onProxyMessage(ProxyMessage.Operations(listOf(Increment("me", 1 to 2)), cbid)) + + // Wait for our deferred to be completed. 
+ deferred.await() + } + + @Test + fun doesntSendResponse_to_offProxy() = runBlockingTest { + val (_, driverProvider) = setupFakes() + + val store = createStore(driverProvider) + + val listener1Finished = CompletableDeferred(coroutineContext[Job]) + val id1 = store.on { message -> + assertThat(message).isInstanceOf(ProxyMessage.ModelUpdate::class.java) + listener1Finished.complete(Unit) + } + val id2 = store.on { + // id2 will be turned off so this callback shouldn't be ever called. + fail("This callback should not be called.") + } + store.off(id2) + + store.onProxyMessage(ProxyMessage.SyncRequest(id2)) + store.onProxyMessage(ProxyMessage.SyncRequest(id1)) + + listener1Finished.await() + } + @Test fun sendsAModelResponse_onlyTo_theRequestingProxy() = runBlockingTest { val (_, driverProvider) = setupFakes() @@ -311,6 +381,21 @@ class StoreTest { assertThat(activeStore.getLocalData()).isEqualTo(driver.lastData) } + @Test + fun close_setClosed_andCloseDriver() = runBlockingTest { + val (driver, driverProvider) = setupFakes() + + val activeStore = createStore(driverProvider) as DirectStore + assertThat(activeStore.closed).isFalse() + assertThat(driver.closed).isFalse() + assertThat(testStoreWriteBack.closed).isFalse() + + activeStore.close() + assertThat(activeStore.closed).isTrue() + assertThat(driver.closed).isTrue() + assertThat(testStoreWriteBack.closed).isTrue() + } + private fun setupFakes(): Pair, FakeDriverProvider> { val fakeDriver = FakeDriver() val fakeProvider = FakeDriverProvider(testKey to fakeDriver) @@ -328,7 +413,7 @@ class StoreTest { StoreOptions(testKey, CountType()), this, FixedDriverFactory(*providers), - ::testWriteBackProvider, + this@DirectStoreTest.testWriteBackProvider, null ) @@ -342,6 +427,7 @@ class StoreTest { var lastReceiver: (suspend (data: T, version: Int) -> Unit)? = null var lastData: T? = null var lastVersion: Int? 
= null + var closed: Boolean = false override suspend fun registerReceiver( token: String?, @@ -356,5 +442,9 @@ class StoreTest { lastVersion = version return doOnSend?.invoke(data, version) ?: sendReturnValue } + + override suspend fun close() { + closed = true + } } } From f1092db4e6ff1f238467582c4d8e906d93789028 Mon Sep 17 00:00:00 2001 From: Alex Rosengarten Date: Tue, 8 Dec 2020 16:34:12 -0800 Subject: [PATCH 25/31] Caching RawEntity hashCode to improve overhead. PiperOrigin-RevId: 346436643 --- java/arcs/core/data/RawEntity.kt | 37 ++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/java/arcs/core/data/RawEntity.kt b/java/arcs/core/data/RawEntity.kt index 908cf29b435..edace32cfee 100644 --- a/java/arcs/core/data/RawEntity.kt +++ b/java/arcs/core/data/RawEntity.kt @@ -42,6 +42,9 @@ data class RawEntity( } ) + // Cached `hashCode` value. + private var hashCode: Int = UNINITIALIZED_HASH + /** Iterates over of all field data (both singletons and collections). */ val allData: Sequence> get() = sequence { @@ -64,8 +67,42 @@ data class RawEntity( expirationTimestamp ) + override fun equals(other: Any?): Boolean { + if (this === other) return true + if (javaClass != other?.javaClass) return false + + other as RawEntity + + if (id != other.id) return false + if (singletons != other.singletons) return false + if (collections != other.collections) return false + if (creationTimestamp != other.creationTimestamp) return false + if (expirationTimestamp != other.expirationTimestamp) return false + + return true + } + + /** Computes and caches `hashCode`. */ + override fun hashCode(): Int { + if (UNINITIALIZED_HASH == hashCode) { + var result = id.hashCode() + result = 31 * result + singletons.hashCode() + result = 31 * result + collections.hashCode() + result = 31 * result + creationTimestamp.hashCode() + result = 31 * result + expirationTimestamp.hashCode() + + // If the hash happens to be the sentinel value, choose a different value. 
+ if (UNINITIALIZED_HASH == result) { + result = 1 + } + hashCode = result + } + return hashCode + } + companion object { const val NO_REFERENCE_ID = "NO REFERENCE ID" const val UNINITIALIZED_TIMESTAMP: Long = -1 + const val UNINITIALIZED_HASH: Int = 0 } } From 408388c6f157f7b4118cf0f95d38964c6b108578 Mon Sep 17 00:00:00 2001 From: Sarah Heimlich Date: Tue, 8 Dec 2020 18:36:05 -0800 Subject: [PATCH 26/31] Add error message for CollectionHandle init (#6690) Add error message for CollectionHandle init Closes #6690 COPYBARA_INTEGRATE_REVIEW=https://github.com/polymerlabs/arcs/pull/6690 from SHeimlich:collectionHandle dd7e309d7d188d6bccf7a7d196a58e412c4f0772 PiperOrigin-RevId: 346455417 --- java/arcs/core/entity/CollectionHandle.kt | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/java/arcs/core/entity/CollectionHandle.kt b/java/arcs/core/entity/CollectionHandle.kt index 466950d58bf..2b35c45597c 100644 --- a/java/arcs/core/entity/CollectionHandle.kt +++ b/java/arcs/core/entity/CollectionHandle.kt @@ -45,7 +45,9 @@ class CollectionHandle( private val storageAdapter = config.storageAdapter init { - check(spec.containerType == HandleContainerType.Collection) + check(spec.containerType == HandleContainerType.Collection) { + "Collection containerType required for CollectionHandle $name, but got ${spec.containerType}." + } } // Filter out expired models. 
From 0557c0b14fed858eec9ad0a65ffecd6f6ee5d43c Mon Sep 17 00:00:00 2001 From: Maria Mandlis Date: Tue, 8 Dec 2020 18:36:37 -0800 Subject: [PATCH 27/31] Refactor mock helper methods from the Handle tests into testutil/ PiperOrigin-RevId: 346455476 --- java/arcs/core/entity/testutil/BUILD | 39 ++++++++- .../arcs/core/entity/testutil/StorageMocks.kt | 52 ++++++++++++ javatests/arcs/core/entity/BUILD | 2 +- javatests/arcs/core/entity/BaseHandleTest.kt | 55 ++++-------- .../arcs/core/entity/CollectionHandleTest.kt | 41 +++------ .../arcs/core/entity/SingletonHandleTest.kt | 84 ++++++++++--------- 6 files changed, 162 insertions(+), 111 deletions(-) create mode 100644 java/arcs/core/entity/testutil/StorageMocks.kt diff --git a/java/arcs/core/entity/testutil/BUILD b/java/arcs/core/entity/testutil/BUILD index 1e8150093f6..11885d620ee 100644 --- a/java/arcs/core/entity/testutil/BUILD +++ b/java/arcs/core/entity/testutil/BUILD @@ -4,24 +4,61 @@ licenses(["notice"]) package(default_visibility = ["//visibility:public"]) +MOCK_SRCS = [ + "StorageMocks.kt", +] + arcs_kt_jvm_library( name = "testutil", testonly = True, - srcs = glob(["*.kt"]), + srcs = glob( + ["*.kt"], + exclude = MOCK_SRCS, + ), + exports = [":fixture_arcs_gen"], + deps = [ + ":fixture_arcs_gen", + "//java/arcs/core/common", + "//java/arcs/core/crdt", + "//java/arcs/core/data:annotations", + "//java/arcs/core/data:data-kt", + "//java/arcs/core/data:rawentity", + "//java/arcs/core/data:schema_fields", + "//java/arcs/core/entity", + "//java/arcs/core/storage:reference", + "//java/arcs/core/storage:storage-kt", + "//java/arcs/core/storage:storage_key", + "//java/arcs/core/storage/testutil", + "//java/arcs/core/util", + "//java/arcs/core/util:utils-platform-dependencies", + ], +) + +arcs_kt_jvm_library( + name = "testutil_mock", + testonly = True, + srcs = MOCK_SRCS, + add_android_constraints = False, exports = [":fixture_arcs_gen"], deps = [ ":fixture_arcs_gen", + ":testutil", "//java/arcs/core/common", + 
"//java/arcs/core/crdt", "//java/arcs/core/data:annotations", "//java/arcs/core/data:data-kt", "//java/arcs/core/data:rawentity", "//java/arcs/core/data:schema_fields", "//java/arcs/core/entity", "//java/arcs/core/storage:reference", + "//java/arcs/core/storage:storage-kt", "//java/arcs/core/storage:storage_key", "//java/arcs/core/storage/testutil", "//java/arcs/core/util", "//java/arcs/core/util:utils-platform-dependencies", + "//third_party/java/mockito", + "//third_party/kotlin/kotlinx_coroutines", + "//third_party/kotlin/mockito_kotlin", ], ) diff --git a/java/arcs/core/entity/testutil/StorageMocks.kt b/java/arcs/core/entity/testutil/StorageMocks.kt new file mode 100644 index 00000000000..b49af18a466 --- /dev/null +++ b/java/arcs/core/entity/testutil/StorageMocks.kt @@ -0,0 +1,52 @@ +/* + * Copyright 2020 Google LLC. + * + * This code may only be used under the BSD style license found at + * http://polymer.github.io/LICENSE.txt + * + * Code distributed by Google as part of this project is also subject to an additional IP rights + * grant found at + * http://polymer.github.io/PATENTS.txt + */ +package arcs.core.entity.testutil + +import arcs.core.crdt.VersionMap +import arcs.core.entity.CollectionProxy +import arcs.core.entity.SingletonProxy +import arcs.core.entity.StorageAdapter +import com.nhaarman.mockitokotlin2.any +import com.nhaarman.mockitokotlin2.mock +import kotlinx.coroutines.CompletableDeferred + +fun mockStorageAdapter(): StorageAdapter { + return mock { + on { referencableToStorable(any()) }.then { it.arguments[0] as StorableReferencableEntity } + on { storableToReferencable(any()) }.then { it.arguments[0] as StorableReferencableEntity } + } +} + +fun mockSingletonStorageProxy(): SingletonProxy { + val proxyVersionMap = VersionMap() + return mock { + on { getVersionMap() }.then { proxyVersionMap } + on { applyOp(any()) }.then { CompletableDeferred(true) } + on { applyOps(any()) }.then { CompletableDeferred(true) } + on { prepareForSync() }.then { 
Unit } + on { addOnUpdate(any(), any()) }.then { Unit } + on { addOnResync(any(), any()) }.then { Unit } + on { addOnDesync(any(), any()) }.then { Unit } + } +} + +fun mockCollectionStorageProxy(): CollectionProxy { + val proxyVersionMap = VersionMap() + return mock { + on { getVersionMap() }.then { proxyVersionMap } + on { applyOp(any()) }.then { CompletableDeferred(true) } + on { applyOps(any()) }.then { CompletableDeferred(true) } + on { prepareForSync() }.then { Unit } + on { addOnUpdate(any(), any()) }.then { Unit } + on { addOnResync(any(), any()) }.then { Unit } + on { addOnDesync(any(), any()) }.then { Unit } + } +} diff --git a/javatests/arcs/core/entity/BUILD b/javatests/arcs/core/entity/BUILD index b5ace8fa08c..51d93da4a71 100644 --- a/javatests/arcs/core/entity/BUILD +++ b/javatests/arcs/core/entity/BUILD @@ -21,7 +21,6 @@ arcs_kt_jvm_test_suite( package = "arcs.core.entity", deps = [ ":lib", - ":test_arcs_gen", "//java/arcs/core/common", "//java/arcs/core/crdt", "//java/arcs/core/data", @@ -30,6 +29,7 @@ arcs_kt_jvm_test_suite( "//java/arcs/core/data/util:data-util", "//java/arcs/core/entity", "//java/arcs/core/entity/testutil", + "//java/arcs/core/entity/testutil:testutil_mock", "//java/arcs/core/host", "//java/arcs/core/storage", "//java/arcs/core/storage/api", diff --git a/javatests/arcs/core/entity/BaseHandleTest.kt b/javatests/arcs/core/entity/BaseHandleTest.kt index ddcb0b54ee4..a855f693e24 100644 --- a/javatests/arcs/core/entity/BaseHandleTest.kt +++ b/javatests/arcs/core/entity/BaseHandleTest.kt @@ -1,11 +1,12 @@ package arcs.core.entity -import arcs.core.crdt.VersionMap import arcs.core.data.EntityType import arcs.core.data.HandleMode import arcs.core.data.RawEntity import arcs.core.data.SingletonType import arcs.core.entity.testutil.StorableReferencableEntity +import arcs.core.entity.testutil.mockSingletonStorageProxy +import arcs.core.entity.testutil.mockStorageAdapter import arcs.core.storage.Dereferencer import arcs.core.storage.Reference 
as StorageReference import arcs.core.storage.StorageProxy.CallbackIdentifier @@ -20,7 +21,6 @@ import com.nhaarman.mockitokotlin2.never import com.nhaarman.mockitokotlin2.verify import com.nhaarman.mockitokotlin2.whenever import kotlin.test.assertFailsWith -import kotlinx.coroutines.CompletableDeferred import kotlinx.coroutines.CoroutineDispatcher import kotlinx.coroutines.ExperimentalCoroutinesApi import kotlinx.coroutines.test.runBlockingTest @@ -28,9 +28,6 @@ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -private typealias TestStorageAdapter = - StorageAdapter - private class TestBaseHandle( config: BaseHandleConfig ) : BaseHandle(config) { @@ -40,34 +37,14 @@ private class TestBaseHandle( @OptIn(ExperimentalCoroutinesApi::class) @RunWith(JUnit4::class) class BaseHandleTest { - // TODO(b/175070564): move the mocking methods into testutil. - private fun mockStorageProxy(): SingletonProxy { - val proxyVersionMap = VersionMap() - return mock { - on { getVersionMap() }.then { proxyVersionMap } - on { applyOp(any()) }.then { CompletableDeferred(true) } - on { applyOps(any()) }.then { CompletableDeferred(true) } - on { prepareForSync() }.then { Unit } - on { addOnUpdate(any(), any()) }.then { Unit } - on { addOnResync(any(), any()) }.then { Unit } - on { addOnDesync(any(), any()) }.then { Unit } - } - } - - private fun mockStorageAdapter(): TestStorageAdapter { - return mock { - on { referencableToStorable(any()) }.then { it.arguments[0] as StorableReferencableEntity } - on { storableToReferencable(any()) }.then { it.arguments[0] as StorableReferencableEntity } - } - } - private fun createHandle( handleName: String = "defaultHandle", particleName: String = "defaultParticle", type: Type = SingletonType(EntityType(StorableReferencableEntity.SCHEMA)), handleMode: HandleMode = HandleMode.ReadWriteQuery, - proxy: SingletonProxy = mockStorageProxy(), - storageAdapter: TestStorageAdapter = mockStorageAdapter(), + proxy: SingletonProxy 
= mockSingletonStorageProxy(), + storageAdapter: StorageAdapter = + mockStorageAdapter(), dereferencerFactory: EntityDereferencerFactory = mock() ): TestBaseHandle { val config = SingletonHandle.Config( @@ -100,28 +77,28 @@ class BaseHandleTest { @Test fun init_readSpec_storageProxyPrepareForSync() { - val proxy = mockStorageProxy() + val proxy = mockSingletonStorageProxy() createHandle(handleMode = HandleMode.Read, proxy = proxy) verify(proxy).prepareForSync() } @Test fun init_readWriteSpec_storageProxyPrepareForSync() { - val proxy = mockStorageProxy() + val proxy = mockSingletonStorageProxy() createHandle(handleMode = HandleMode.ReadWrite, proxy = proxy) verify(proxy).prepareForSync() } @Test fun init_writeOnlySpec_storageProxyDoesNotPrepareForSync() { - val proxy = mockStorageProxy() + val proxy = mockSingletonStorageProxy() createHandle(handleMode = HandleMode.Write, proxy = proxy) verify(proxy, never()).prepareForSync() } @Test fun registerForStorageEvents_callStorageProxyRegisterForStorageEvents() { - val proxy = mockStorageProxy() + val proxy = mockSingletonStorageProxy() val handle = createHandle(handleName = HANDLE_NAME, particleName = PARTICLE_NAME, proxy = proxy) handle.registerForStorageEvents({}) @@ -132,7 +109,7 @@ class BaseHandleTest { @Test fun maybeInitiateSync_callStorageProxyMaybeInitiateSync() { - val proxy = mockStorageProxy() + val proxy = mockSingletonStorageProxy() val handle = createHandle(proxy = proxy) handle.maybeInitiateSync() @@ -142,7 +119,7 @@ class BaseHandleTest { @Test fun getProxy() { - val proxy = mockStorageProxy() + val proxy = mockSingletonStorageProxy() val handle = createHandle(proxy = proxy) assertThat(handle.getProxy()).isEqualTo(proxy) @@ -150,7 +127,7 @@ class BaseHandleTest { @Test fun dispatcher_returnsStorageProxyDispatcher() { - val proxy = mockStorageProxy() + val proxy = mockSingletonStorageProxy() val handle = createHandle(proxy = proxy) val dispatcher: CoroutineDispatcher = mock {} 
whenever(proxy.dispatcher).thenReturn(dispatcher) @@ -160,7 +137,7 @@ class BaseHandleTest { @Test fun onReady() { - val proxy = mockStorageProxy() + val proxy = mockSingletonStorageProxy() val handle = createHandle(handleName = HANDLE_NAME, particleName = PARTICLE_NAME, proxy = proxy) handle.onReady({}) @@ -187,7 +164,7 @@ class BaseHandleTest { @Test fun unregisterForStorageEvents() { - val proxy = mockStorageProxy() + val proxy = mockSingletonStorageProxy() val handle = createHandle(handleName = HANDLE_NAME, particleName = PARTICLE_NAME, proxy = proxy) handle.unregisterForStorageEvents() @@ -197,7 +174,7 @@ class BaseHandleTest { @Test fun close() { - val proxy = mockStorageProxy() + val proxy = mockSingletonStorageProxy() val handle = createHandle(handleName = HANDLE_NAME, particleName = PARTICLE_NAME, proxy = proxy) handle.close() @@ -224,7 +201,7 @@ class BaseHandleTest { @Test fun createReferenceInternal_succeess() { - val proxy = mockStorageProxy() + val proxy = mockSingletonStorageProxy() val handle = createHandle(proxy = proxy) whenever(proxy.storageKey).thenReturn( ReferenceModeStorageKey(RamDiskStorageKey("x"), RamDiskStorageKey("y")) diff --git a/javatests/arcs/core/entity/CollectionHandleTest.kt b/javatests/arcs/core/entity/CollectionHandleTest.kt index f81150fc50d..c0b204f90ae 100644 --- a/javatests/arcs/core/entity/CollectionHandleTest.kt +++ b/javatests/arcs/core/entity/CollectionHandleTest.kt @@ -6,14 +6,13 @@ import arcs.core.data.CollectionType import arcs.core.data.EntityType import arcs.core.data.HandleMode import arcs.core.entity.testutil.StorableReferencableEntity -import com.nhaarman.mockitokotlin2.any +import arcs.core.entity.testutil.mockCollectionStorageProxy +import arcs.core.entity.testutil.mockStorageAdapter import com.nhaarman.mockitokotlin2.eq import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.verify -import kotlinx.coroutines.CompletableDeferred import kotlinx.coroutines.ExperimentalCoroutinesApi import 
kotlinx.coroutines.test.runBlockingTest -import org.junit.Before import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 @@ -22,31 +21,11 @@ import org.junit.runners.JUnit4 @Suppress("DeferredResultUnused") @RunWith(JUnit4::class) class CollectionHandleTest { - private lateinit var proxyVersionMap: VersionMap - private lateinit var dereferencerFactory: EntityDereferencerFactory - private lateinit var proxy: CollectionProxy - private lateinit var storageAdapter: - StorageAdapter - private lateinit var handle: - CollectionHandle - - @Before - fun setUp() { - proxyVersionMap = VersionMap() - - proxy = mock { - on { getVersionMap() }.then { proxyVersionMap.copy() } - on { applyOps(any()) }.then { CompletableDeferred(true) } - on { prepareForSync() }.then { Unit } - } - storageAdapter = mock { - on { referencableToStorable(any()) }.then { it.arguments[0] as StorableReferencableEntity } - on { storableToReferencable(any()) }.then { it.arguments[0] as StorableReferencableEntity } - } - dereferencerFactory = mock { - // Maybe add mock endpoints here, if needed. 
- } - + private fun createHandle( + proxy: CollectionProxy = mockCollectionStorageProxy(), + storageAdapter: StorageAdapter = + mockStorageAdapter() + ): CollectionHandle { val config = CollectionHandle.Config( HANDLE_NAME, HandleSpec( @@ -57,14 +36,16 @@ class CollectionHandleTest { ), proxy, storageAdapter, - dereferencerFactory, + mock(), "particle" ) - handle = CollectionHandle(config) + return CollectionHandle(config) } @Test fun storeAll() = runBlockingTest { + val proxy = mockCollectionStorageProxy() + val handle = createHandle(proxy = proxy) val entity1 = StorableReferencableEntity("1") val entity2 = StorableReferencableEntity("2") val entity3 = StorableReferencableEntity("3") diff --git a/javatests/arcs/core/entity/SingletonHandleTest.kt b/javatests/arcs/core/entity/SingletonHandleTest.kt index 64fff8b1226..c55957979ca 100644 --- a/javatests/arcs/core/entity/SingletonHandleTest.kt +++ b/javatests/arcs/core/entity/SingletonHandleTest.kt @@ -7,6 +7,8 @@ import arcs.core.data.EntityType import arcs.core.data.HandleMode import arcs.core.data.SingletonType import arcs.core.entity.testutil.StorableReferencableEntity +import arcs.core.entity.testutil.mockSingletonStorageProxy +import arcs.core.entity.testutil.mockStorageAdapter import arcs.core.storage.StorageProxy.CallbackIdentifier import arcs.core.storage.keys.RamDiskStorageKey import arcs.core.storage.referencemode.ReferenceModeStorageKey @@ -19,7 +21,6 @@ import com.nhaarman.mockitokotlin2.mock import com.nhaarman.mockitokotlin2.verify import com.nhaarman.mockitokotlin2.whenever import kotlin.test.assertFailsWith -import kotlinx.coroutines.CompletableDeferred import kotlinx.coroutines.ExperimentalCoroutinesApi import kotlinx.coroutines.test.runBlockingTest import org.junit.Test @@ -30,31 +31,13 @@ import org.junit.runners.JUnit4 @Suppress("DeferredResultUnused") @RunWith(JUnit4::class) class SingletonHandleTest { - private lateinit var proxy: SingletonProxy - private lateinit var storageAdapter: - 
StorageAdapter private fun createHandle( - type: Type = SingletonType(EntityType(StorableReferencableEntity.SCHEMA)) + type: Type = SingletonType(EntityType(StorableReferencableEntity.SCHEMA)), + proxy: SingletonProxy = mockSingletonStorageProxy(), + storageAdapter: StorageAdapter = + mockStorageAdapter() ): SingletonHandle { - val proxyVersionMap = VersionMap() - proxy = mock { - on { getVersionMap() }.then { proxyVersionMap } - on { applyOp(any()) }.then { CompletableDeferred(true) } - on { applyOps(any()) }.then { CompletableDeferred(true) } - on { prepareForSync() }.then { Unit } - on { addOnUpdate(any(), any()) }.then { Unit } - on { addOnResync(any(), any()) }.then { Unit } - on { addOnDesync(any(), any()) }.then { Unit } - } - storageAdapter = mock { - on { referencableToStorable(any()) }.then { it.arguments[0] as StorableReferencableEntity } - on { storableToReferencable(any()) }.then { it.arguments[0] as StorableReferencableEntity } - } - val dereferencerFactory: EntityDereferencerFactory = mock { - // Maybe add mock endpoints here, if needed. 
- } - val config = SingletonHandle.Config( HANDLE_NAME, HandleSpec( @@ -65,7 +48,7 @@ class SingletonHandleTest { ), proxy, storageAdapter, - dereferencerFactory, + mock(), PARTICLE_NAME ) @@ -81,7 +64,8 @@ class SingletonHandleTest { @Test fun onUpdate_proxyAddOnUpdateCalled() { - val handle = createHandle() + val proxy = mockSingletonStorageProxy() + val handle = createHandle(proxy = proxy) handle.onUpdate({}) verify(proxy).addOnUpdate(eq(CallbackIdentifier(HANDLE_NAME, PARTICLE_NAME)), any()) @@ -89,7 +73,8 @@ class SingletonHandleTest { @Test fun onUpdate_callbackInput_singletonDelta() { - val handle = createHandle() + val proxy = mockSingletonStorageProxy() + val handle = createHandle(proxy = proxy) val oldEntity = StorableReferencableEntity("1", "old") val newEntity = StorableReferencableEntity("2", "new") val captor = @@ -107,7 +92,9 @@ class SingletonHandleTest { @Test fun onUpdate_valuesAreAdapted() { - val handle = createHandle() + val proxy = mockSingletonStorageProxy() + val storageAdapter = mockStorageAdapter() + val handle = createHandle(proxy = proxy, storageAdapter = storageAdapter) val oldEntity = StorableReferencableEntity("1", "old") val newEntity = StorableReferencableEntity("2", "new") val captor = @@ -124,8 +111,9 @@ class SingletonHandleTest { @Test fun onDesync_callStorageProxyAddOnDesync() { + val proxy = mockSingletonStorageProxy() + val handle = createHandle(proxy = proxy) val action: () -> Unit = {} - val handle = createHandle() handle.onDesync(action) @@ -134,8 +122,9 @@ class SingletonHandleTest { @Test fun onResync_callStorageProxyAddOnResync() { + val proxy = mockSingletonStorageProxy() + val handle = createHandle(proxy = proxy) val action: () -> Unit = {} - val handle = createHandle() handle.onResync(action) @@ -164,7 +153,8 @@ class SingletonHandleTest { @Test fun createReference_wrongId_throws() = runBlockingTest { - val handle = createHandle() + val proxy = mockSingletonStorageProxy() + val handle = createHandle(proxy = proxy) 
whenever(proxy.getParticleViewUnsafe()).thenReturn(StorableReferencableEntity("1", "other-id")) val e = assertFailsWith { @@ -175,7 +165,8 @@ class SingletonHandleTest { @Test fun createReference_notReferenceModeStorageProxy_throws() = runBlockingTest { - val handle = createHandle() + val proxy = mockSingletonStorageProxy() + val handle = createHandle(proxy = proxy) whenever(proxy.getParticleViewUnsafe()).thenReturn(StorableReferencableEntity("1", "fake-id")) val e = assertFailsWith { @@ -188,7 +179,8 @@ class SingletonHandleTest { @Test fun createReference_success() = runBlockingTest { - val handle = createHandle() + val proxy = mockSingletonStorageProxy() + val handle = createHandle(proxy = proxy) whenever(proxy.getParticleViewUnsafe()).thenReturn(StorableReferencableEntity("1", "fake-id")) whenever(proxy.storageKey).thenReturn( ReferenceModeStorageKey(RamDiskStorageKey("x"), RamDiskStorageKey("y")) @@ -202,15 +194,19 @@ class SingletonHandleTest { @Test fun fetch_emptyHandle_null() { - val handle = createHandle() + val proxy = mockSingletonStorageProxy() + val handle = createHandle(proxy = proxy) + assertThat(handle.fetch()).isNull() + verify(proxy).getParticleViewUnsafe() } @Test fun fetch_initValues_success() { + val proxy = mockSingletonStorageProxy() + val handle = createHandle(proxy = proxy) val entity = StorableReferencableEntity("1", "id") - val handle = createHandle() whenever(proxy.getParticleViewUnsafe()).thenReturn(entity) assertThat(handle.fetch()).isEqualTo(entity) @@ -219,8 +215,10 @@ class SingletonHandleTest { @Test fun fetch_valueViaStorageAdapter_adapted() { + val proxy = mockSingletonStorageProxy() + val storageAdapter = mockStorageAdapter() + val handle = createHandle(proxy = proxy, storageAdapter = storageAdapter) val entity = StorableReferencableEntity("1", "id") - val handle = createHandle() whenever(proxy.getParticleViewUnsafe()).thenReturn(entity) handle.fetch() @@ -238,7 +236,9 @@ class SingletonHandleTest { @Test fun 
fetch_expiredEntities_filteredOut() { - val handle = createHandle() + val proxy = mockSingletonStorageProxy() + val storageAdapter = mockStorageAdapter() + val handle = createHandle(proxy = proxy, storageAdapter = storageAdapter) whenever(proxy.getParticleViewUnsafe()).thenReturn(StorableReferencableEntity("1", "id")) whenever(storageAdapter.isExpired(any())).thenReturn(true) @@ -249,8 +249,9 @@ class SingletonHandleTest { @Test fun store_validEntity_success() = runBlockingTest { + val proxy = mockSingletonStorageProxy() + val handle = createHandle(proxy = proxy) val entity = StorableReferencableEntity("1") - val handle = createHandle() handle.store(entity).join() @@ -269,8 +270,9 @@ class SingletonHandleTest { @Test fun store_incrementVersionMap() = runBlockingTest { + val proxy = mockSingletonStorageProxy() + val handle = createHandle(proxy = proxy) val entity1 = StorableReferencableEntity("1") - val handle = createHandle() handle.store(entity1).join() verify(proxy).applyOp( eq(Operation.Update(HANDLE_NAME, VersionMap(HANDLE_NAME to 1), entity1)) @@ -286,8 +288,9 @@ class SingletonHandleTest { @Test fun clear_handleWithValue_success() { + val proxy = mockSingletonStorageProxy() + val handle = createHandle(proxy = proxy) val entity = StorableReferencableEntity("1") - val handle = createHandle() handle.store(entity) val versionMap = VersionMap(HANDLE_NAME to 1) @@ -299,7 +302,8 @@ class SingletonHandleTest { @Test fun clear_emptyHandle_success() { - val handle = createHandle() + val proxy = mockSingletonStorageProxy() + val handle = createHandle(proxy = proxy) handle.clear() From be944aafdb1108b6a9540d61c9f7d0798d628bd7 Mon Sep 17 00:00:00 2001 From: Michael Martin Date: Tue, 8 Dec 2020 18:50:12 -0800 Subject: [PATCH 28/31] Remove unused methods from EntityHandleManager and StorageProxy. The latter in particular removes a decent amount of complicated, unused logic. 
PiperOrigin-RevId: 346456956 --- java/arcs/core/host/HandleManagerImpl.kt | 8 -- java/arcs/core/storage/StorageProxy.kt | 8 +- java/arcs/core/storage/StorageProxyImpl.kt | 122 ++++-------------- .../arcs/core/storage/StorageProxyImplTest.kt | 116 ----------------- 4 files changed, 26 insertions(+), 228 deletions(-) diff --git a/java/arcs/core/host/HandleManagerImpl.kt b/java/arcs/core/host/HandleManagerImpl.kt index 7ce8246222f..9553ad58d02 100644 --- a/java/arcs/core/host/HandleManagerImpl.kt +++ b/java/arcs/core/host/HandleManagerImpl.kt @@ -97,14 +97,6 @@ class HandleManagerImpl( override fun scheduler() = scheduler - @Deprecated("Will be replaced by ParticleContext lifecycle handling") - suspend fun initiateProxySync() { - proxyMutex.withLock { - singletonStorageProxies.values.forEach { it.maybeInitiateSync() } - collectionStorageProxies.values.forEach { it.maybeInitiateSync() } - } - } - override suspend fun allStorageProxies() = proxyMutex.withLock { singletonStorageProxies.values.plus(collectionStorageProxies.values) } diff --git a/java/arcs/core/storage/StorageProxy.kt b/java/arcs/core/storage/StorageProxy.kt index 50a0c672a81..94663a05f8f 100644 --- a/java/arcs/core/storage/StorageProxy.kt +++ b/java/arcs/core/storage/StorageProxy.kt @@ -118,13 +118,7 @@ interface StorageProxy { fun getVersionMap(): VersionMap /** - * Return the current local version of the model. Suspends until it has a synchronized view of - * the data. - */ - suspend fun getParticleView(): T - - /** - * Similar to [getParticleView], but requires the current proxy to have been synced at least + * Return the current local version of the model. Requires the proxy to have been synced at least * once, and also requires the caller to be running within the [Scheduler]'s thread. 
*/ fun getParticleViewUnsafe(): T diff --git a/java/arcs/core/storage/StorageProxyImpl.kt b/java/arcs/core/storage/StorageProxyImpl.kt index 06fe5dc940c..5604def8483 100644 --- a/java/arcs/core/storage/StorageProxyImpl.kt +++ b/java/arcs/core/storage/StorageProxyImpl.kt @@ -68,7 +68,7 @@ class StorageProxyImpl private con private val log = TaggedLog { "StorageProxy" } private val handleCallbacks = atomic(HandleCallbacks()) - private val stateHolder = atomic(StateHolder(ProxyState.NO_SYNC)) + private val state = atomic(ProxyState.NO_SYNC) // This will be initialized by the [create] method below. private lateinit var store: StorageEndpoint @@ -120,7 +120,7 @@ class StorageProxyImpl private con } .onCompletion { busySendingMessagesChannel.send(false) - stateHolder.update { it.setState(ProxyState.CLOSED) } + state.update { ProxyState.CLOSED } _crdt = null } .launchIn(scheduler.scope) @@ -139,13 +139,13 @@ class StorageProxyImpl private con } /* visible for testing */ - fun getStateForTesting(): ProxyState = stateHolder.value.state + fun getStateForTesting(): ProxyState = state.value override fun prepareForSync() { checkNotClosed() - stateHolder.update { - if (it.state == ProxyState.NO_SYNC) { - it.setState(ProxyState.READY_TO_SYNC) + state.update { + if (it == ProxyState.NO_SYNC) { + ProxyState.READY_TO_SYNC } else { it } @@ -155,14 +155,14 @@ class StorageProxyImpl private con override fun maybeInitiateSync() { checkNotClosed() var needsSync = false - stateHolder.update { + state.update { // TODO(b/157188866): remove reliance on ready signal for write-only handles in tests // If there are no readable handles observing this proxy, it will be in the NO_SYNC // state and will never deliver any onReady notifications, which breaks tests that // call awaitReady on write-only handles. 
- if (it.state == ProxyState.READY_TO_SYNC || it.state == ProxyState.NO_SYNC) { + if (it == ProxyState.READY_TO_SYNC || it == ProxyState.NO_SYNC) { needsSync = true - it.setState(ProxyState.AWAITING_SYNC) + ProxyState.AWAITING_SYNC } else { needsSync = false it @@ -189,7 +189,7 @@ class StorageProxyImpl private con checkNotClosed() checkWillSync() handleCallbacks.update { it.addOnReady(id, action) } - if (stateHolder.value.state == ProxyState.SYNC) { + if (state.value == ProxyState.SYNC) { scheduler.schedule(HandleCallbackTask(id, "onReady(immediate)", action)) } } @@ -204,7 +204,7 @@ class StorageProxyImpl private con checkNotClosed() checkWillSync() handleCallbacks.update { it.addOnDesync(id, action) } - if (stateHolder.value.state == ProxyState.DESYNC) { + if (state.value == ProxyState.DESYNC) { scheduler.schedule(HandleCallbackTask(id, "onDesync(immediate)", action)) } } @@ -220,7 +220,7 @@ class StorageProxyImpl private con } override suspend fun close() { - if (stateHolder.value.state == ProxyState.CLOSED) return + if (state.value == ProxyState.CLOSED) return scheduler.waitForIdle() @@ -274,7 +274,7 @@ class StorageProxyImpl private con // Don't send update notifications for local writes that occur prior to sync (these should // only be in onFirstStart and onStart, and as such particles aren't ready for updates yet). - if (stateHolder.value.state in arrayOf(ProxyState.SYNC, ProxyState.DESYNC)) { + if (state.value in arrayOf(ProxyState.SYNC, ProxyState.DESYNC)) { // TODO: the returned Deferred doesn't account for this update propagation; should it? 
notifyUpdate(oldValue, newValue) } @@ -283,63 +283,24 @@ class StorageProxyImpl private con override fun getVersionMap(): VersionMap = crdt.versionMap.copy() - override suspend fun getParticleView(): T = getParticleViewAsync().await() - override fun getParticleViewUnsafe(): T { checkNotClosed() checkInDispatcher() log.debug { "Getting particle view (lifecycle)" } - check(stateHolder.value.state in arrayOf(ProxyState.SYNC, ProxyState.DESYNC)) { - "Read operations are not valid before onReady (storage proxy state is " + - "${stateHolder.value.state})" + check(state.value in arrayOf(ProxyState.SYNC, ProxyState.DESYNC)) { + "Read operations are not valid before onReady (storage proxy state is ${state.value})" } return crdt.consumerView } - /** TODO(b/153560976): Enforce the scheduler thread requirement. */ - fun getParticleViewAsync(): Deferred { - checkNotClosed() - check(stateHolder.value.state != ProxyState.NO_SYNC) { - "getParticleView not valid on non-readable StorageProxy" - } - - log.debug { "Getting particle view" } - val future = CompletableDeferred() - - val priorState = stateHolder.getAndUpdate { - when (it.state) { - // Already synced, exit early to avoid adding a waiting sync. - ProxyState.SYNC -> return@getAndUpdate it - // Time to sync. - ProxyState.READY_TO_SYNC -> it.setState(ProxyState.AWAITING_SYNC) - // Either already awaiting first sync, or a re-sync at this point. - else -> it - }.addWaitingSync(future) - }.state - - // If this was our first state transition - it means we need to request sync. - if (priorState == ProxyState.READY_TO_SYNC) requestSynchronization() - - // If this was called while already synced, resolve the future with the current value. - if (priorState == ProxyState.SYNC) { - scheduler.scope.launch { - val result = crdt.consumerView - log.verbose { "Already synchronized, returning $result" } - future.complete(result) - } - } - - return future - } - /** * Applies messages from a [ActiveStore]. 
*/ suspend fun onMessage(message: ProxyMessage) = coroutineScope { log.verbose { "onMessage: $message" } - if (stateHolder.value.state == ProxyState.CLOSED) { + if (state.value == ProxyState.CLOSED) { log.verbose { "in closed state, received message: $message" } return@coroutineScope } @@ -404,7 +365,7 @@ class StorageProxyImpl private con // of operations). While we could use this to sync, we don't want to send ready notifications // until after maybeInitiateSync() has been called. Since this case is rare it's easiest to // just ignore the update and re-request it at the right time. - if (stateHolder.value.state == ProxyState.READY_TO_SYNC) { + if (state.value == ProxyState.READY_TO_SYNC) { log.verbose { "ignoring model update since proxy is in READY_TO_SYNC state" } return } @@ -412,28 +373,17 @@ class StorageProxyImpl private con val oldValue = crdt.consumerView crdt.merge(model) - val newValue = crdt.consumerView - val toResolve = mutableSetOf>() - val priorState = stateHolder.getAndUpdate { - toResolve.addAll(it.waitingSyncs) - - it.clearWaitingSyncs() - .setState(ProxyState.SYNC) - }.state - - log.debug { "Completing ${toResolve.size} waiting syncs" } - toResolve.forEach { it.complete(newValue) } - + val priorState = state.getAndUpdate { ProxyState.SYNC } when (priorState) { ProxyState.AWAITING_SYNC -> { notifyReady() applyPostSyncModelOps() } ProxyState.READY_TO_SYNC -> Unit // Unreachable; guarded above - ProxyState.SYNC -> notifyUpdate(oldValue, newValue) + ProxyState.SYNC -> notifyUpdate(oldValue, crdt.consumerView) ProxyState.DESYNC -> { notifyResync() - notifyUpdate(oldValue, newValue) + notifyUpdate(oldValue, crdt.consumerView) applyPostSyncModelOps() } ProxyState.NO_SYNC, @@ -458,7 +408,7 @@ class StorageProxyImpl private con private fun processModelOps(operations: List) { // Queue-up ops we receive while we're not-synced. 
- if (stateHolder.value.state != ProxyState.SYNC) { + if (state.value != ProxyState.SYNC) { modelOpsToApplyAfterSyncing.addAll(operations) return } @@ -467,24 +417,14 @@ class StorageProxyImpl private con val couldApplyAllOps = operations.all { crdt.applyOperation(it) } if (!couldApplyAllOps) { - stateHolder.update { it.setState(ProxyState.DESYNC) } + state.update { ProxyState.DESYNC } log.info { "Could not apply ops, notifying onDesync listeners and requesting Sync." } notifyDesync() requestSynchronization() } else { - val futuresToResolve = mutableSetOf>() - stateHolder.update { - futuresToResolve.addAll(it.waitingSyncs) - it.clearWaitingSyncs() - } - - val newValue = crdt.consumerView - futuresToResolve.forEach { it.complete(newValue) } - log.debug { "Notifying onUpdate listeners" } - - notifyUpdate(oldValue, newValue) + notifyUpdate(oldValue, crdt.consumerView) } } @@ -568,11 +508,11 @@ class StorageProxyImpl private con "Operations can only be used performed Scheduler's Dispatcher" } - private fun checkNotClosed() = check(stateHolder.value.state != ProxyState.CLOSED) { + private fun checkNotClosed() = check(state.value != ProxyState.CLOSED) { "Unexpected operation on closed StorageProxy" } - private fun checkWillSync() = check(stateHolder.value.state != ProxyState.NO_SYNC) { + private fun checkWillSync() = check(state.value != ProxyState.NO_SYNC) { "Action handlers are not valid on a StorageProxy that has not been set up to sync " + "(i.e. 
there are no readable handles observing this proxy)" } @@ -644,18 +584,6 @@ class StorageProxyImpl private con } } - private data class StateHolder( - val state: ProxyState, - val waitingSyncs: List> = emptyList() - ) { - fun setState(newState: ProxyState) = copy(state = newState) - - fun addWaitingSync(deferred: CompletableDeferred) = - copy(waitingSyncs = waitingSyncs + deferred) - - fun clearWaitingSyncs() = copy(waitingSyncs = emptyList()) - } - // Visible for testing enum class ProxyState { /** diff --git a/javatests/arcs/core/storage/StorageProxyImplTest.kt b/javatests/arcs/core/storage/StorageProxyImplTest.kt index 06da71779c6..1556443341f 100644 --- a/javatests/arcs/core/storage/StorageProxyImplTest.kt +++ b/javatests/arcs/core/storage/StorageProxyImplTest.kt @@ -28,7 +28,6 @@ import arcs.core.util.Scheduler import arcs.core.util.Time import arcs.core.util.testutil.LogRule import com.google.common.truth.Truth.assertThat -import com.google.common.truth.Truth.assertWithMessage import com.nhaarman.mockitokotlin2.any import com.nhaarman.mockitokotlin2.eq import com.nhaarman.mockitokotlin2.mock @@ -46,7 +45,6 @@ import kotlinx.coroutines.Job import kotlinx.coroutines.asCoroutineDispatcher import kotlinx.coroutines.channels.Channel import kotlinx.coroutines.runBlocking -import kotlinx.coroutines.withContext import kotlinx.coroutines.withTimeout import org.junit.After import org.junit.Before @@ -574,120 +572,6 @@ class StorageProxyImplTest { verifyNoMoreInteractions(onReady, onUpdate, onDesync, onResync) } - @Test - fun getParticleViewReturnsSyncedState() = runTest { - val proxy = mockProxy() - val notifyChannel = Channel(Channel.BUFFERED) - proxy.registerForStorageEvents(callbackId) { - runBlocking { notifyChannel.send(it) } - } - proxy.prepareForSync() - proxy.maybeInitiateSync() - - proxy.awaitOutgoingMessageQueueDrain() - fakeStoreEndpoint.waitFor(ProxyMessage.SyncRequest(null)) - fakeStoreEndpoint.clearProxyMessages() - - 
proxy.onMessage(ProxyMessage.ModelUpdate(mockCrdtData, null)) - scheduler.waitForIdle() - assertThat(notifyChannel.receiveOrTimeout()).isEqualTo(StorageEvent.READY) - assertThat(proxy.getStateForTesting()).isEqualTo(ProxyState.SYNC) - - withContext(proxy.dispatcher) { - assertThat(proxy.getParticleView()).isEqualTo("data") - } - assertThat(fakeStoreEndpoint.getProxyMessages()).isEmpty() - } - - @Test - fun getParticleViewWhenNotSyncingFails() = runTest { - val proxy = mockProxy() - assertThat(proxy.getStateForTesting()).isEqualTo(ProxyState.NO_SYNC) - - val exception = assertFailsWith { - @Suppress("DeferredResultUnused") - proxy.getParticleViewAsync() - } - assertThat(exception).hasMessageThat() - .isEqualTo("getParticleView not valid on non-readable StorageProxy") - } - - @Test - fun getParticleViewWhenReadyToSyncQueuesAndRequestsSync() = runTest { - val proxy = mockProxy() - val notifyChannel = Channel() - proxy.registerForStorageEvents(callbackId) { - runBlocking { notifyChannel.send(it) } - } - proxy.prepareForSync() - assertThat(proxy.getStateForTesting()).isEqualTo(ProxyState.READY_TO_SYNC) - - val future1 = proxy.getParticleViewAsync() - assertThat(future1.isCompleted).isFalse() - assertThat(proxy.getStateForTesting()).isEqualTo(ProxyState.AWAITING_SYNC) - - // Test that multiple futures can be returned and resolved. - val future2 = proxy.getParticleViewAsync() - assertThat(future2.isCompleted).isFalse() - - // Syncing the proxy should resolve the futures after triggering a READY StorageEvent. 
- proxy.onMessage(ProxyMessage.ModelUpdate(mockCrdtData, null)) - assertThat(notifyChannel.receiveOrTimeout()).isEqualTo(StorageEvent.READY) - - proxy.awaitOutgoingMessageQueueDrain() - assertWithMessage("Store should've received only one sync request the whole time") - .that(fakeStoreEndpoint.getProxyMessages()) - .containsExactly( - ProxyMessage.SyncRequest(null) - ) - - assertThat(proxy.getStateForTesting()).isEqualTo(ProxyState.SYNC) - assertThat(future1.await()).isEqualTo("data") - assertThat(future2.await()).isEqualTo("data") - } - - @Test - fun getParticleViewWhenDesyncedQueues() = runTest { - val proxy = mockProxy() - val notifyChannel = Channel(Channel.BUFFERED) - proxy.registerForStorageEvents(callbackId) { - runBlocking { notifyChannel.send(it) } - } - proxy.prepareForSync() - proxy.maybeInitiateSync() - - proxy.awaitOutgoingMessageQueueDrain() - fakeStoreEndpoint.waitFor(ProxyMessage.SyncRequest(null)) - fakeStoreEndpoint.clearProxyMessages() - - proxy.onMessage(ProxyMessage.ModelUpdate(mockCrdtData, null)) - assertThat(notifyChannel.receiveOrTimeout()).isEqualTo(StorageEvent.READY) - assertThat(proxy.getStateForTesting()).isEqualTo(ProxyState.SYNC) - - whenever(mockCrdtModel.applyOperation(mockCrdtOperation)).thenReturn(false) - proxy.onMessage(ProxyMessage.Operations(listOf(mockCrdtOperation), null)) - assertThat(notifyChannel.receiveOrTimeout()).isEqualTo(StorageEvent.DESYNC) - assertThat(proxy.getStateForTesting()).isEqualTo(ProxyState.DESYNC) - - proxy.awaitOutgoingMessageQueueDrain() - fakeStoreEndpoint.waitFor(ProxyMessage.SyncRequest(null)) - fakeStoreEndpoint.clearProxyMessages() - - val future = proxy.getParticleViewAsync() - assertThat(future.isCompleted).isFalse() - - proxy.awaitOutgoingMessageQueueDrain() - assertThat(fakeStoreEndpoint.getProxyMessages()).isEmpty() - - // Syncing the proxy should resolve the future. 
- proxy.onMessage(ProxyMessage.ModelUpdate(mockCrdtData, null)) - assertThat(notifyChannel.receiveOrTimeout()).isEqualTo(StorageEvent.RESYNC) - - assertThat(proxy.getStateForTesting()).isEqualTo(ProxyState.SYNC) - assertThat(future.isCompleted).isTrue() - assertThat(future.await()).isEqualTo("data") - } - @Test fun getVersionMap() = runTest { val proxy = mockProxy() From 0e2bf53e6e90a774f1ab6bff450c6e3d744b71e1 Mon Sep 17 00:00:00 2001 From: Josh Pratt Date: Tue, 8 Dec 2020 19:11:28 -0800 Subject: [PATCH 29/31] Create showcase & showcase test for queries PiperOrigin-RevId: 346459397 --- .../arcs/core/allocator/AllocatorTestBase.kt | 3 - javatests/arcs/showcase/queries/BUILD | 51 ++++++++++++++++ .../showcase/queries/ProductClassifier.kt | 60 +++++++++++++++++++ .../arcs/showcase/queries/ProductDatabase.kt | 27 +++++++++ .../showcase/queries/QueriesShowcaseTest.kt | 46 ++++++++++++++ javatests/arcs/showcase/queries/queries.arcs | 27 +++++++++ 6 files changed, 211 insertions(+), 3 deletions(-) create mode 100644 javatests/arcs/showcase/queries/BUILD create mode 100644 javatests/arcs/showcase/queries/ProductClassifier.kt create mode 100644 javatests/arcs/showcase/queries/ProductDatabase.kt create mode 100644 javatests/arcs/showcase/queries/QueriesShowcaseTest.kt create mode 100644 javatests/arcs/showcase/queries/queries.arcs diff --git a/javatests/arcs/core/allocator/AllocatorTestBase.kt b/javatests/arcs/core/allocator/AllocatorTestBase.kt index c003d935e95..f1eb6245752 100644 --- a/javatests/arcs/core/allocator/AllocatorTestBase.kt +++ b/javatests/arcs/core/allocator/AllocatorTestBase.kt @@ -62,9 +62,6 @@ open class AllocatorTestBase { private val schedulerProvider = SimpleSchedulerProvider(Dispatchers.Default) private lateinit var scope: CoroutineScope - /** - * Recipe hand translated from 'person.arcs' - */ protected lateinit var allocator: Allocator private lateinit var hostRegistry: HostRegistry private lateinit var writePersonParticle: Plan.Particle diff --git 
a/javatests/arcs/showcase/queries/BUILD b/javatests/arcs/showcase/queries/BUILD new file mode 100644 index 00000000000..3e3f54eeb75 --- /dev/null +++ b/javatests/arcs/showcase/queries/BUILD @@ -0,0 +1,51 @@ +load( + "//third_party/java/arcs/build_defs:build_defs.bzl", + "arcs_kt_android_library", + "arcs_kt_android_test_suite", + "arcs_kt_gen", +) + +licenses(["notice"]) + +arcs_kt_gen( + name = "codegen", + srcs = ["queries.arcs"], +) + +arcs_kt_android_library( + name = "particles", + testonly = 1, + srcs = [ + "ProductClassifier.kt", + "ProductDatabase.kt", + ], + deps = [ + ":codegen", + "//java/arcs/android/storage/database", + "//java/arcs/core/entity", + "//java/arcs/jvm/host", + "//java/arcs/sdk/android/storage", + "//java/arcs/sdk/android/storage/service", + "//javatests/arcs/android/integration", + ], +) + +arcs_kt_android_test_suite( + name = "tests", + size = "small", + srcs = glob(["*Test.kt"]), + manifest = "//java/arcs/android/common:AndroidManifest.xml", + package = "arcs.showcase.queries", + deps = [ + ":codegen", + ":particles", + "//java/arcs/core/host", + "//java/arcs/core/testutil", + "//java/arcs/core/testutil/handles", + "//javatests/arcs/android/integration", + "//third_party/android/androidx_test/ext/junit", + "//third_party/java/junit:junit-android", + "//third_party/java/truth:truth-android", + "//third_party/kotlin/kotlinx_coroutines", + ], +) diff --git a/javatests/arcs/showcase/queries/ProductClassifier.kt b/javatests/arcs/showcase/queries/ProductClassifier.kt new file mode 100644 index 00000000000..6d98b0fb172 --- /dev/null +++ b/javatests/arcs/showcase/queries/ProductClassifier.kt @@ -0,0 +1,60 @@ +package arcs.showcase.queries + +import arcs.jvm.host.TargetHost + +/** + * This particle is an example of using handles with associated queries. 
+ * It performs three different queries which select products based on values provided at runtime: + * - an arbitrary double [CHEAP_PRICE] + * - an arbitrary double [EXPENSIVE_PRICE], and + * - an arbitrary string [SEARCH_NAME]. + * + * It filters dummy data from [ProductDatabase], but could be used with any other source of data and + * performs a simple labelling task showing how data can be combined from different handles without + * loading the whole database into the Particle. + */ +@TargetHost(arcs.android.integration.IntegrationHost::class) +class ProductClassifier : AbstractProductClassifier() { + + override fun onReady() { + // This map from product names to 'tags' accumulates the output that this particle provides. + val productDescriptions = mutableMapOf>() + + // Here, 'cheap' products are defined as products from the [cheapProducts] handle that have a + // price less than or equal to [CHEAP_PRICE]. + // i.e. `price <= ?` see the definition in [queries.arcs] + handles.cheapProducts.query(CHEAP_PRICE).forEach { + productDescriptions.getOrPut(it.name) { mutableSetOf() }.add("cheap") + } + // Here, 'expensive' products are defined as products from the [expensiveProducts] handle that have a + // price greater than or equal to [EXPENSIVE_PRICE]. + // i.e. `price >= ?` see the definition in [queries.arcs] + handles.expensiveProducts.query(EXPENSIVE_PRICE).forEach { + productDescriptions.getOrPut(it.name) { mutableSetOf() }.add("expensive") + } + // Here, 'named' products are defined as products from the [namedProducts] handle that have the + // name equal to [SEARCH_NAME]. + // i.e. `name == ?` see the definition in [queries.arcs] + handles.namedProducts.query(SEARCH_NAME).forEach { + productDescriptions.getOrPut(it.name) { mutableSetOf() }.add("selected") + } + + // This renders the 'tags' and product names as strings and stores them using the + // [productDescriptions] handle. 
+ handles.productDescriptions.storeAll( + productDescriptions.map { (name, tags) -> + ProductDescription(description = "$name: ${tags.joinToString()}") + } + ) + } + + companion object { + /** + * These constants are arbitrary values to show that query arguments are provided at runtime. + * They do not actually need to be constants and could be changed at runtime. + */ + private const val CHEAP_PRICE = 3.0 + private const val EXPENSIVE_PRICE = 25.0 + private const val SEARCH_NAME = "Pencil" + } +} diff --git a/javatests/arcs/showcase/queries/ProductDatabase.kt b/javatests/arcs/showcase/queries/ProductDatabase.kt new file mode 100644 index 00000000000..5f75ce6cc0d --- /dev/null +++ b/javatests/arcs/showcase/queries/ProductDatabase.kt @@ -0,0 +1,27 @@ +package arcs.showcase.queries + +import arcs.jvm.host.TargetHost + +typealias Product = AbstractProductDatabase.Product + +/** + * This particle generates dummy data that is used in testing queries. + * @see ProductClassifier + */ +@TargetHost(arcs.android.integration.IntegrationHost::class) +class ProductDatabase : AbstractProductDatabase() { + override fun onFirstStart() { + handles.products.storeAll( + listOf( + Product(name = "Pencil", price = 2.5), + Product(name = "Ice cream", price = 3.0), + Product(name = "Chocolate", price = 3.0), + Product(name = "Blueberries", price = 4.0), + Product(name = "Sandwich", price = 4.50), + Product(name = "Scarf", price = 20.0), + Product(name = "Hat", price = 25.0), + Product(name = "Stop sign", price = 100.0) + ) + ) + } +} diff --git a/javatests/arcs/showcase/queries/QueriesShowcaseTest.kt b/javatests/arcs/showcase/queries/QueriesShowcaseTest.kt new file mode 100644 index 00000000000..f300764df37 --- /dev/null +++ b/javatests/arcs/showcase/queries/QueriesShowcaseTest.kt @@ -0,0 +1,46 @@ +package arcs.showcase.queries + +import androidx.test.ext.junit.runners.AndroidJUnit4 +import arcs.android.integration.IntegrationEnvironment +import arcs.core.host.toRegistration +import 
arcs.core.testutil.handles.dispatchFetchAll +import com.google.common.truth.Truth.assertThat +import kotlinx.coroutines.ExperimentalCoroutinesApi +import kotlinx.coroutines.runBlocking +import org.junit.Rule +import org.junit.Test +import org.junit.runner.RunWith + +@OptIn(ExperimentalCoroutinesApi::class) +@RunWith(AndroidJUnit4::class) +class QueriesShowcaseTest { + + @get:Rule + val env = IntegrationEnvironment( + ::ProductClassifier.toRegistration(), + ::ProductDatabase.toRegistration() + ) + + /** + * Tests that a set of known queries, on known runtime data produce expected results. + * - Products are defined in [ProductDatabase], + * - filtering criteria are defined in [ProductClassifier]. + */ + @Test + fun testQueries() = runBlocking { + val arc = env.startArc(ClassifyProductsPlan) + val productClassifier: ProductClassifier = env.getParticle(arc) + val productDescriptions = productClassifier.handles.productDescriptions.dispatchFetchAll() + val descriptions = productDescriptions.map { it.description } + + assertThat(descriptions).containsExactly( + "Pencil: cheap, selected", + "Ice cream: cheap", + "Chocolate: cheap", + "Hat: expensive", + "Stop sign: expensive" + ) + + env.stopArc(arc) + } +} diff --git a/javatests/arcs/showcase/queries/queries.arcs b/javatests/arcs/showcase/queries/queries.arcs new file mode 100644 index 00000000000..2f16f658030 --- /dev/null +++ b/javatests/arcs/showcase/queries/queries.arcs @@ -0,0 +1,27 @@ +meta + namespace: arcs.showcase.queries + +schema Product + name: Text + price: Number + +particle ProductDatabase in '.ProductDatabase' + products: writes [Product {name, price}] + +particle ProductClassifier in '.ProductClassifier' + cheapProducts: reads [Product {name, price} [price <= ?]] + expensiveProducts: reads [Product {name, price} [price >= ?]] + namedProducts: reads [Product {name, price} [name == ?]] + productDescriptions: reads writes [ProductDescription {description: Text}] // reads is only for testing. 
+ +@arcId('testArc') +recipe ClassifyProducts + products: create 'products' + productDescriptions: create 'productDescriptions' + ProductDatabase + products: products + ProductClassifier + cheapProducts: products + expensiveProducts: products + namedProducts: products + productDescriptions: productDescriptions From c9f416617bd0dda85bf3bb5843617ae88e788927 Mon Sep 17 00:00:00 2001 From: Cameron Silvestrini Date: Thu, 10 Dec 2020 15:02:53 +1100 Subject: [PATCH 30/31] Disable import ordering in ktlint (#6749) Due to the Copybara process, the correct import order is not always the same, so this lint check is proving to be very annoying. --- .editorconfig | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.editorconfig b/.editorconfig index ec55049ab9e..9d01000be2f 100644 --- a/.editorconfig +++ b/.editorconfig @@ -21,3 +21,5 @@ trim_trailing_whitespace = false continuation_indent_size = 2 insert_file_newline = true max_line_length = 100 +disabled_rules=import-ordering + From 75c791ec0fca82e26dcda8ccc0b98a5e1d5d5636 Mon Sep 17 00:00:00 2001 From: "Scott J. 
Miles" Date: Thu, 10 Dec 2020 12:38:53 -0800 Subject: [PATCH 31/31] remove dead code --- .../storage/tests/reference-mode-store-integration-test.ts | 1 - src/runtime/tests/capabilities-resolver-test.ts | 1 - 2 files changed, 2 deletions(-) diff --git a/src/runtime/storage/tests/reference-mode-store-integration-test.ts b/src/runtime/storage/tests/reference-mode-store-integration-test.ts index 4574704a224..1134b089570 100644 --- a/src/runtime/storage/tests/reference-mode-store-integration-test.ts +++ b/src/runtime/storage/tests/reference-mode-store-integration-test.ts @@ -52,7 +52,6 @@ describe('ReferenceModeStore Integration', async () => { }); console.warn('writeHandle.addFromData'); - //debugger; await writeHandle.addFromData({foo: 'This is text in foo'}); return returnPromise; }); diff --git a/src/runtime/tests/capabilities-resolver-test.ts b/src/runtime/tests/capabilities-resolver-test.ts index f7ff7b7c5a1..baffcf275ac 100644 --- a/src/runtime/tests/capabilities-resolver-test.ts +++ b/src/runtime/tests/capabilities-resolver-test.ts @@ -62,7 +62,6 @@ describe('Capabilities Resolver New', () => { it('creates volatile keys', Flags.withDefaultReferenceMode(async () => { // Register volatile storage key factory. // Verify only volatile (in-memory, no ttl) storage key can be created. - //VolatileStorageKey.register(); const resolver = new _CapabilitiesResolver({arcId: ArcId.newForTest('test')}); const createKey = resolver.createStorageKey.bind(resolver); verifyReferenceModeStorageKey(await createKey(unspecified, entityType, handleId), VolatileStorageKey);