From 737bdd4d9b2e2614ff58c4366205aacf60ee0d14 Mon Sep 17 00:00:00 2001 From: Chris Mutel Date: Tue, 7 Jan 2025 11:07:58 +0100 Subject: [PATCH 1/9] Remove SQLite-specific search functionality --- bw2data/__init__.py | 4 +- bw2data/backends/base.py | 40 ++-- bw2data/backends/proxies.py | 20 +- bw2data/search/__init__.py | 2 - bw2data/search/indices.py | 125 ---------- bw2data/search/schema.py | 16 -- bw2data/search/search.py | 88 ------- tests/database_querying.py | 5 - tests/search.py | 359 ----------------------------- tests/unit/test_database_events.py | 2 - 10 files changed, 35 insertions(+), 626 deletions(-) delete mode 100644 bw2data/search/__init__.py delete mode 100644 bw2data/search/indices.py delete mode 100644 bw2data/search/schema.py delete mode 100644 bw2data/search/search.py delete mode 100644 tests/search.py diff --git a/bw2data/__init__.py b/bw2data/__init__.py index 74eb20db..ca590e87 100644 --- a/bw2data/__init__.py +++ b/bw2data/__init__.py @@ -27,7 +27,7 @@ "prepare_lca_inputs", "ProcessedDataStore", "projects", - "Searcher", + # "Searcher", "set_data_dir", "Weighting", "weightings", @@ -71,7 +71,7 @@ from bw2data.utils import get_activity, get_node from bw2data.data_store import DataStore, ProcessedDataStore from bw2data.method import Method -from bw2data.search import Searcher, IndexManager +# from bw2data.search import Searcher, IndexManager from bw2data.weighting_normalization import Weighting, Normalization from bw2data.backends import convert_backend, get_id, Node, Edge from bw2data.compat import prepare_lca_inputs, Mapping, get_multilca_data_objs diff --git a/bw2data/backends/base.py b/bw2data/backends/base.py index 2c7908e3..ccf36f56 100644 --- a/bw2data/backends/base.py +++ b/bw2data/backends/base.py @@ -43,7 +43,7 @@ ) from bw2data.logs import stdout_feedback_logger from bw2data.query import Query -from bw2data.search import IndexManager, Searcher +# from bw2data.search import IndexManager, Searcher from bw2data.signals import on_database_reset, on_database_write from bw2data.utils import as_uncertainty_dict, get_geocollection, get_node, set_correct_process_type @@ -732,20 +732,24 @@ def new_node(self, code: str = None, **kwargs): return obj def make_searchable(self, reset: bool = False, signal: bool = True): - if self.name not in databases: - raise UnknownObject("This database is not yet registered") - if self._searchable and not reset: - stdout_feedback_logger.info("This database is already searchable") - return - databases[self.name]["searchable"] = True - databases.flush(signal=signal) - IndexManager(self.filename).create() - IndexManager(self.filename).add_datasets(self) + return + + # if self.name not in databases: + # raise UnknownObject("This database is not yet registered") + # if self._searchable and not reset: + # stdout_feedback_logger.info("This database is already searchable") + # return + # databases[self.name]["searchable"] = True + # databases.flush(signal=signal) + # IndexManager(self.filename).create() + # IndexManager(self.filename).add_datasets(self) def make_unsearchable(self, signal: bool = True): - databases[self.name]["searchable"] = False - databases.flush(signal=signal) - IndexManager(self.filename).delete_database() + return + + # databases[self.name]["searchable"] = False + # databases.flush(signal=signal) + # IndexManager(self.filename).delete_database() def delete( self, keep_params: bool = False, warn: bool = True, vacuum: bool = True, signal: bool = True @@ -785,7 +789,7 @@ def purge(dct: dict) -> dict: 
ActivityDataset.delete().where(ActivityDataset.database == self.name).execute() ExchangeDataset.delete().where(ExchangeDataset.output_database == self.name).execute() - IndexManager(self.filename).delete_database() + # IndexManager(self.filename).delete_database() if not keep_params: from bw2data.parameters import ( @@ -1003,9 +1007,11 @@ def search(self, string, **kwargs): * ``proxy``: Return ``Activity`` proxies instead of dictionary index Models. Default is ``True``. Returns a list of ``Activity`` datasets.""" - with Searcher(self.filename) as s: - results = s.search(string=string, **kwargs) - return results + raise NotImplementedError + + # with Searcher(self.filename) as s: + # results = s.search(string=string, **kwargs) + # return results def set_geocollections(self): """Set ``geocollections`` attribute for databases which don't currently have it.""" diff --git a/bw2data/backends/proxies.py b/bw2data/backends/proxies.py index 14a0b395..4e82e3af 100644 --- a/bw2data/backends/proxies.py +++ b/bw2data/backends/proxies.py @@ -19,7 +19,7 @@ from bw2data.errors import ValidityError from bw2data.logs import stdout_feedback_logger from bw2data.proxies import ActivityProxyBase, ExchangeProxyBase -from bw2data.search import IndexManager +# from bw2data.search import IndexManager from bw2data.signals import on_activity_code_change, on_activity_database_change @@ -281,7 +281,7 @@ def purge(obj: Activity, dct: dict) -> dict: ).execute() except ActivityParameter.DoesNotExist: pass - IndexManager(Database(self["database"]).filename).delete_dataset(self._data) + # IndexManager(Database(self["database"]).filename).delete_dataset(self._data) self.exchanges().delete(allow_in_sourced_project=True) self.upstream().delete(allow_in_sourced_project=True) @@ -352,8 +352,8 @@ def save(self, signal: bool = True, data_already_set: bool = False, force_insert if self.get("location") and self["location"] not in geomapping: geomapping.add([self["location"]]) - if databases[self["database"]].get("searchable", True): - IndexManager(Database(self["database"]).filename).update_dataset(self._data) + # if databases[self["database"]].get("searchable", True): + # IndexManager(Database(self["database"]).filename).update_dataset(self._data) def _change_code(self, new_code: str, signal: bool = True): if self["code"] == new_code: @@ -383,11 +383,11 @@ def _change_code(self, new_code: str, signal: bool = True): ).execute() if databases[self["database"]].get("searchable"): - from bw2data import Database + # from bw2data import Database - IndexManager(Database(self["database"]).filename).delete_dataset(self) + # IndexManager(Database(self["database"]).filename).delete_dataset(self) self._data["code"] = new_code - IndexManager(Database(self["database"]).filename).add_datasets([self]) + # IndexManager(Database(self["database"]).filename).add_datasets([self]) else: self._data["code"] = new_code @@ -420,11 +420,11 @@ def _change_database(self, new_database: str, signal: bool = True): ).execute() if databases[self["database"]].get("searchable"): - from bw2data import Database + # from bw2data import Database - IndexManager(Database(self["database"]).filename).delete_dataset(self) + # IndexManager(Database(self["database"]).filename).delete_dataset(self) self._data["database"] = new_database - IndexManager(Database(self["database"]).filename).add_datasets([self]) + # IndexManager(Database(self["database"]).filename).add_datasets([self]) else: self._data["database"] = new_database diff --git a/bw2data/search/__init__.py 
b/bw2data/search/__init__.py deleted file mode 100644 index 9fb7a0b3..00000000 --- a/bw2data/search/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from bw2data.search.indices import IndexManager -from bw2data.search.search import Searcher diff --git a/bw2data/search/indices.py b/bw2data/search/indices.py deleted file mode 100644 index 62019b54..00000000 --- a/bw2data/search/indices.py +++ /dev/null @@ -1,125 +0,0 @@ -import os -import warnings - -from playhouse.sqlite_ext import SqliteExtDatabase - -from bw2data import projects -from bw2data.search.schema import BW2Schema - -MODELS = (BW2Schema,) - - -class IndexManager: - def __init__(self, database_path): - self.path = os.path.join(projects.request_directory("search"), database_path) - self.db = SqliteExtDatabase(self.path) - if not os.path.exists(self.path): - self.create() - - def get(self): - return self - - def create(self): - self.delete_database() - with self.db.bind_ctx(MODELS): - self.db.create_tables(MODELS) - - def _format_dataset(self, ds): - def _fix_location(string): - if isinstance(string, tuple): - string = string[1] - if isinstance(string, str): - if string.lower() == "none": - return "" - else: - return string.lower().strip() - else: - return "" - - return dict( - name=(ds.get("name") or "").lower(), - comment=(ds.get("comment") or "").lower(), - product=(ds.get("reference product") or "").lower(), - categories=", ".join(ds.get("categories") or []).lower(), - synonyms=", ".join(ds.get("synonyms") or []).lower(), - location=_fix_location(ds.get("location") or ""), - database=ds["database"], - code=ds["code"], - ) - - def add_dataset(self, ds): - self.add_datasets([ds]) - - def add_datasets(self, datasets): - all_dataset = list(datasets) - with self.db.bind_ctx(MODELS): - for chunk_range in range(0, len(datasets), 100): - for model in MODELS: - model.insert_many( - [ - self._format_dataset(ds) - for ds in all_dataset[chunk_range : chunk_range + 100] - ] - ).execute() - - def update_dataset(self, ds): - with self.db.bind_ctx(MODELS): - for model in MODELS: - model.delete().where( - model.code == ds["code"], model.database == ds["database"] - ).execute() - model.insert(**self._format_dataset(ds)).execute() - - def delete_dataset(self, ds): - with self.db.bind_ctx(MODELS): - for model in MODELS: - model.delete().where( - model.code == ds["code"], model.database == ds["database"] - ).execute() - - def delete_database(self): - with self.db.bind_ctx(MODELS): - self.db.drop_tables(MODELS) - - def close(self): - self.db.close() - - def search(self, string, limit=None, weights=None, mask=None, filter=None): - if mask: - warnings.warn( - "`mask` functionality has been deleted, and now does nothing. This input argument will be removed in the future", - DeprecationWarning, - ) - if filter: - warnings.warn( - "`filter` functionality has been deleted, and now does nothing. 
This input argument will be removed in the future", - DeprecationWarning, - ) - - with self.db.bind_ctx(MODELS): - if string == "*": - query = BW2Schema - else: - query = BW2Schema.search_bm25( - string.replace(",", "") - .replace("(", "") - .replace(")", "") - .replace("{", "") - .replace("}", ""), - weights=weights, - ) - return list( - query.select( - BW2Schema.name, - BW2Schema.comment, - BW2Schema.product, - BW2Schema.categories, - BW2Schema.synonyms, - BW2Schema.location, - BW2Schema.database, - BW2Schema.code, - ) - .limit(limit) - .dicts() - .execute() - ) diff --git a/bw2data/search/schema.py b/bw2data/search/schema.py deleted file mode 100644 index d3c36446..00000000 --- a/bw2data/search/schema.py +++ /dev/null @@ -1,16 +0,0 @@ -from playhouse.sqlite_ext import FTS5Model, RowIDField, SearchField - - -class BW2Schema(FTS5Model): - rowid = RowIDField() - name = SearchField() - comment = SearchField() - product = SearchField() - categories = SearchField() - synonyms = SearchField() - location = SearchField() - database = SearchField() - code = SearchField() - - class Meta: - options = {"tokenize": "unicode61 tokenchars '''&:'"} diff --git a/bw2data/search/search.py b/bw2data/search/search.py deleted file mode 100644 index 2d1056cd..00000000 --- a/bw2data/search/search.py +++ /dev/null @@ -1,88 +0,0 @@ -from itertools import groupby - -import peewee - -from bw2data.search.indices import IndexManager - - -def keysplit(strng): - """Split an activity key joined into a single string using the magic sequence `⊡|⊡`""" - return tuple(strng.split("⊡|⊡")) - - -class Searcher: - search_fields = { - "name", - "comment", - "product", - "categories", - "synonyms", - "location", - } - - def __init__(self, database): - self._database = database - - def __enter__(self): - self.index = IndexManager(self._database).get() - return self - - def __exit__(self, type, value, traceback): - self.index.close() - - def search( - self, - string, - limit=25, - facet=None, - proxy=True, - boosts=None, - filter=None, - mask=None, - node_class=None, - ): - from bw2data import get_node - - lowercase = lambda x: x.lower() if hasattr(x, "lower") else x - string = lowercase(string) - - boosts = boosts or { - "name": 5, - "comment": 1, - "product": 3, - "categories": 2, - "synonyms": 3, - "location": 3, - } - - kwargs = {"limit": limit} - if facet: - kwargs.pop("limit") - - with self: - try: - results = self.index.search(string, weights=boosts, **kwargs) - except peewee.OperationalError as e: - if "no such table" in str(e): - results = None - else: - raise - - if facet: - results = {k: list(v) for k, v in groupby(results, lambda x: x.get(facet))} - - if proxy and facet is not None: - return { - key: [ - get_node(database=obj["database"], code=obj["code"], node_class=node_class) - for obj in value - ] - for key, value in results.items() - } - elif proxy: - return [ - get_node(database=obj["database"], code=obj["code"], node_class=node_class) - for obj in results - ] - else: - return results diff --git a/tests/database_querying.py b/tests/database_querying.py index a10ab13e..16c2deea 100644 --- a/tests/database_querying.py +++ b/tests/database_querying.py @@ -139,11 +139,6 @@ def test_len_respects_filters(self): self.db.filters = {"product": "widget"} self.assertEqual(len(self.db), 2) - def test_make_searchable_unknown_object(self): - db = DatabaseChooser("mysterious") - with self.assertRaises(UnknownObject): - db.make_searchable() - def test_convert_same_backend(self): database = DatabaseChooser("a database") 
database.write( diff --git a/tests/search.py b/tests/search.py deleted file mode 100644 index 204020e8..00000000 --- a/tests/search.py +++ /dev/null @@ -1,359 +0,0 @@ -from bw2data import databases -from bw2data.backends import SQLiteBackend -from bw2data.search import IndexManager, Searcher -from bw2data.tests import bw2test - - -@bw2test -def test_search_dataset_containing_stop_word(): - im = IndexManager("foo") - im.add_dataset({"database": "foo", "code": "bar", "name": "foo of bar, high voltage"}) - with Searcher("foo") as s: - assert s.search("foo of bar, high voltage", proxy=False) - - -@bw2test -def test_add_dataset(): - im = IndexManager("foo") - im.add_dataset({"database": "foo", "code": "bar", "name": "lollipop"}) - with Searcher("foo") as s: - assert s.search("lollipop", proxy=False) - - -@bw2test -def test_search_dataset(): - im = IndexManager("foo") - im.add_dataset({"database": "foo", "code": "bar", "name": "lollipop"}) - with Searcher("foo") as s: - assert s.search("lollipop", proxy=False) == [ - { - "comment": "", - "product": "", - "name": "lollipop", - "database": "foo", - "location": "", - "code": "bar", - "categories": "", - "synonyms": "", - } - ] - - -@bw2test -def test_search_geocollection_location(): - im = IndexManager("foo") - im.add_dataset( - { - "database": "foo", - "code": "bar", - "name": "lollipop", - "location": ("foo", "Here"), - } - ) - with Searcher("foo") as s: - assert s.search("lollipop", proxy=False) == [ - { - "comment": "", - "product": "", - "name": "lollipop", - "database": "foo", - "location": "here", - "code": "bar", - "categories": "", - "synonyms": "", - } - ] - - -@bw2test -def test_update_dataset(): - im = IndexManager("foo") - ds = {"database": "foo", "code": "bar", "name": "lollipop"} - im.add_dataset(ds) - ds["name"] = "lemon cake" - im.update_dataset(ds) - with Searcher("foo") as s: - assert s.search("lemon", proxy=False) == [ - { - "comment": "", - "product": "", - "name": "lemon cake", - "database": "foo", - "location": "", - "code": "bar", - "categories": "", - "synonyms": "", - } - ] - - -@bw2test -def test_delete_dataset(): - im = IndexManager("foo") - ds = {"database": "foo", "code": "bar", "name": "lollipop"} - im.add_dataset(ds) - with Searcher("foo") as s: - assert s.search("lollipop", proxy=False) - im.delete_dataset(ds) - with Searcher("foo") as s: - assert not s.search("lollipop", proxy=False) - - -@bw2test -def test_add_datasets(): - im = IndexManager("foo") - ds = [{"database": "foo", "code": "bar", "name": "lollipop"}] - im.add_datasets(ds) - with Searcher("foo") as s: - assert s.search("lollipop", proxy=False) - - -@bw2test -def test_add_database(): - db = SQLiteBackend("foo") - ds = {("foo", "bar"): {"database": "foo", "code": "bar", "name": "lollipop"}} - db.write(ds) - with Searcher(db.filename) as s: - assert s.search("lollipop", proxy=False) - db.make_unsearchable() - with Searcher(db.filename) as s: - assert not s.search("lollipop", proxy=False) - - -@bw2test -def test_add_searchable_database(): - db = SQLiteBackend("foo") - ds = {("foo", "bar"): {"database": "foo", "code": "bar", "name": "lollipop"}} - db.write(ds) - with Searcher(db.filename) as s: - assert s.search("lollipop", proxy=False) - - -@bw2test -def test_modify_database(): - db = SQLiteBackend("foo") - ds = {("foo", "bar"): {"database": "foo", "code": "bar", "name": "lollipop"}} - db.write(ds) - with Searcher(db.filename) as s: - assert not s.search("cream", proxy=False) - assert s.search("lollipop", proxy=False) - ds2 = {("foo", "bar"): {"database": 
"foo", "code": "bar", "name": "ice cream"}} - db.write(ds2) - with Searcher(db.filename) as s: - assert s.search("cream", proxy=False) - - -@bw2test -def test_delete_database(): - db = SQLiteBackend("foo") - ds = {("foo", "bar"): {"database": "foo", "code": "bar", "name": "lollipop"}} - db.write(ds) - with Searcher(db.filename) as s: - assert s.search("lollipop", proxy=False) - db.make_unsearchable() - with Searcher(db.filename) as s: - assert not s.search("lollipop", proxy=False) - db.make_searchable() - with Searcher(db.filename) as s: - assert s.search("lollipop", proxy=False) - del databases["foo"] - with Searcher(db.filename) as s: - assert not s.search("lollipop", proxy=False) - - -@bw2test -def test_reset_index(): - im = IndexManager("foo") - ds = {"database": "foo", "code": "bar", "name": "lollipop"} - im.add_dataset(ds) - im.create() - with Searcher("foo") as s: - assert not s.search("lollipop", proxy=False) - - -@bw2test -def test_basic_search(): - im = IndexManager("foo") - im.add_dataset({"database": "foo", "code": "bar", "name": "lollipop"}) - with Searcher("foo") as s: - assert s.search("lollipop", proxy=False) - - -@bw2test -def test_product_term(): - im = IndexManager("foo") - im.add_dataset({"database": "foo", "code": "bar", "reference product": "lollipop"}) - with Searcher("foo") as s: - assert s.search("lollipop", proxy=False) - - -@bw2test -def test_comment_term(): - im = IndexManager("foo") - im.add_dataset({"database": "foo", "code": "bar", "comment": "lollipop"}) - with Searcher("foo") as s: - assert s.search("lollipop", proxy=False) - - -@bw2test -def test_categories_term(): - im = IndexManager("foo") - im.add_dataset({"database": "foo", "code": "bar", "categories": ("lollipop",)}) - with Searcher("foo") as s: - assert s.search("lollipop", proxy=False) - - -@bw2test -def test_limit(): - im = IndexManager("foo") - im.add_datasets( - [{"database": "foo", "code": "bar", "name": "lollipop {}".format(x)} for x in range(50)] - ) - with Searcher("foo") as s: - assert len(s.search("lollipop", limit=25, proxy=False)) == 25 - - -@bw2test -def test_star_search(): - im = IndexManager("foo") - im.add_datasets( - [{"database": "foo", "code": "bar", "name": "lollipop {}".format(x)} for x in range(50)] - ) - with Searcher("foo") as s: - assert len(s.search("*", limit=25, proxy=False)) == 25 - - -@bw2test -def test_search_faceting(): - im = IndexManager("foo") - ds = [ - {"database": "foo", "code": "bar", "name": "lollipop", "location": "CH"}, - {"database": "foo", "code": "bar", "name": "ice lollipop", "location": "FR"}, - ] - im.add_datasets(ds) - with Searcher("foo") as s: - res = s.search("lollipop", proxy=False, facet="location") - assert res == { - "fr": [ - { - "comment": "", - "product": "", - "name": "ice lollipop", - "database": "foo", - "location": "fr", - "code": "bar", - "categories": "", - "synonyms": "", - } - ], - "ch": [ - { - "comment": "", - "product": "", - "name": "lollipop", - "database": "foo", - "location": "ch", - "code": "bar", - "categories": "", - "synonyms": "", - } - ], - } - - -@bw2test -def test_copy_save_propogates_to_search_index(): - db = SQLiteBackend("foo") - ds = {("foo", "bar"): {"database": "foo", "code": "bar", "name": "lollipop"}} - db.write(ds) - assert db.search("lollipop") - cp = db.get("bar").copy(code="baz") - cp["name"] = "candy" - cp.save() - assert db.search("candy") - - -@bw2test -def test_case_sensitivity_convert_lowercase(): - db = SQLiteBackend("foo") - ds = {("foo", "bar"): {"database": "foo", "code": "bar", "name": "LOLLIpop"}} 
-    db.write(ds)
-    assert db.search("LOLLIpop".lower())
-    assert db.search("lollipop")
-    assert db.search("LOLLipop")
-    assert db.search("LOLL*")
-    assert db.search("Lollipop")
-    assert not db.search("nope")
-
-
-@bw2test
-def test_synonym_search():
-    im = IndexManager("foo")
-    im.add_dataset(
-        {
-            "database": "foo",
-            "code": "bar",
-            "name": "polytetrafluoroethylene",
-            "synonyms": ["PTFE", "Teflon"],
-        }
-    )
-    with Searcher("foo") as s:
-        assert s.search("Teflon", proxy=False) == [
-            {
-                "comment": "",
-                "product": "",
-                "name": "polytetrafluoroethylene",
-                "database": "foo",
-                "location": "",
-                "code": "bar",
-                "categories": "",
-                "synonyms": "ptfe, teflon",
-            }
-        ]
-
-
-@bw2test
-def test_search_single_char():
-    """Check we can disambiguate between "system 1", "system 2" and "system 3" """
-    im = IndexManager("foo")
-    for i in [1, 2, 3]:
-        im.add_dataset(
-            {
-                "database": "foo",
-                "code": "bar",
-                "name": "Milk organic system %s" % i,
-            }
-        )
-    with Searcher("foo") as s:
-        assert s.search("milk organic system 2", proxy=False) == [
-            {
-                "comment": "",
-                "product": "",
-                "name": "milk organic system 2",
-                "database": "foo",
-                "location": "",
-                "code": "bar",
-                "categories": "",
-                "synonyms": "",
-            }
-        ]
-
-
-@bw2test
-def test_search_with_parentheses():
-    """Test that searching with parentheses works correctly"""
-    im = IndexManager("foo")
-    im.add_dataset({"database": "foo", "code": "bar", "name": "beam dried (u=10%) planed"})
-    with Searcher("foo") as s:
-        assert s.search("dried (u=10%)", proxy=False) == [
-            {
-                "comment": "",
-                "product": "",
-                "name": "beam dried (u=10%) planed",
-                "database": "foo",
-                "location": "",
-                "code": "bar",
-                "categories": "",
-                "synonyms": "",
-            }
-        ]
diff --git a/tests/unit/test_database_events.py b/tests/unit/test_database_events.py
index a7a38c6c..a740e2f0 100644
--- a/tests/unit/test_database_events.py
+++ b/tests/unit/test_database_events.py
@@ -1007,7 +1007,6 @@ def test_database_copy_revision_expected_format():
                 "depends": ["biosphere"],
                 "backend": "sqlite",
                 "geocollections": ["world"],
-                "searchable": True,
                 "format": "Copied from 'food'",
             }
         }
@@ -1306,7 +1305,6 @@ def test_database_rename_revision_expected_format():
                 "depends": ["biosphere"],
                 "backend": "sqlite",
                 "geocollections": ["world"],
-                "searchable": True,
             }
         }
     },

From 8e0b5a3fb4e06379653e0f5c59fb2d675ddb411a Mon Sep 17 00:00:00 2001
From: Chris Mutel
Date: Tue, 7 Jan 2025 11:49:12 +0100
Subject: [PATCH 2/9] Fix tests with changed geocollections

I don't understand why disabling search functionality would affect these
tests, but don't care enough to find out as we want to change the events
in any case.
--- tests/unit/test_database_events.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_database_events.py b/tests/unit/test_database_events.py index a740e2f0..c8010262 100644 --- a/tests/unit/test_database_events.py +++ b/tests/unit/test_database_events.py @@ -481,7 +481,14 @@ def test_database_write_revision_expected_format(): "type": "lci_database", "id": None, "change_type": "database_metadata_change", - "delta": {"iterable_item_added": {"root['food']['depends'][0]": "biosphere"}}, + "delta": { + "dictionary_item_added": { + "root['food']['geocollections']": ["world"] + }, + "iterable_item_added": { + "root['food']['depends'][0]": "biosphere" + }, + }, } ], }, @@ -1066,7 +1073,12 @@ def test_database_copy_revision_expected_format(): "type": "lci_database", "id": None, "change_type": "database_metadata_change", - "delta": {"iterable_item_added": {"root['yum']['depends'][0]": "biosphere"}}, + "delta": { + "iterable_item_added": { + "root['yum']['depends'][0]": "biosphere", + "root['yum']['geocollections'][0]": "world", + } + }, } ], }, From d3eff7adb5a7f3b27fb9580db7412d3a9c539b38 Mon Sep 17 00:00:00 2001 From: Chris Mutel Date: Tue, 7 Jan 2025 12:09:12 +0100 Subject: [PATCH 3/9] Remove `bw2data` parameterization --- bw2data/__init__.py | 4 +- bw2data/backends/base.py | 36 +- bw2data/backends/proxies.py | 26 +- bw2data/parameters.py | 1661 -------------- bw2data/project.py | 87 - bw2data/revisions.py | 143 -- bw2data/signals.py | 115 - bw2data/tests.py | 2 - docs/content/events_and_changes.md | 8 - tests/activity_proxy.py | 41 - tests/database.py | 53 - tests/exchange_proxy.py | 38 - tests/parameters.py | 1939 ----------------- tests/unit/test_activity_parameter_events.py | 778 ------- tests/unit/test_database_parameter_events.py | 564 ----- tests/unit/test_group.py | 252 --- .../test_parameterized_exchange_events.py | 341 --- tests/unit/test_project_parameter_events.py | 451 ---- 18 files changed, 33 insertions(+), 6506 deletions(-) delete mode 100644 bw2data/parameters.py delete mode 100644 tests/parameters.py delete mode 100644 tests/unit/test_activity_parameter_events.py delete mode 100644 tests/unit/test_database_parameter_events.py delete mode 100644 tests/unit/test_group.py delete mode 100644 tests/unit/test_parameterized_exchange_events.py delete mode 100644 tests/unit/test_project_parameter_events.py diff --git a/bw2data/__init__.py b/bw2data/__init__.py index ca590e87..24b8cff8 100644 --- a/bw2data/__init__.py +++ b/bw2data/__init__.py @@ -22,7 +22,7 @@ "Node", "Normalization", "normalizations", - "parameters", + # "parameters", "preferences", "prepare_lca_inputs", "ProcessedDataStore", @@ -80,7 +80,7 @@ mapping = Mapping() from bw2data.updates import Updates -from bw2data.parameters import parameters +# from bw2data.parameters import parameters Updates.check_status() diff --git a/bw2data/backends/base.py b/bw2data/backends/base.py index ccf36f56..dcf69164 100644 --- a/bw2data/backends/base.py +++ b/bw2data/backends/base.py @@ -791,24 +791,24 @@ def purge(dct: dict) -> dict: ExchangeDataset.delete().where(ExchangeDataset.output_database == self.name).execute() # IndexManager(self.filename).delete_database() - if not keep_params: - from bw2data.parameters import ( - ActivityParameter, - DatabaseParameter, - ParameterizedExchange, - ) - - groups = tuple( - { - o[0] - for o in ActivityParameter.select(ActivityParameter.group) - .where(ActivityParameter.database == self.name) - .tuples() - } - ) - 
ParameterizedExchange.delete().where(ParameterizedExchange.group << groups).execute() - ActivityParameter.delete().where(ActivityParameter.database == self.name).execute() - DatabaseParameter.delete().where(DatabaseParameter.database == self.name).execute() + # if not keep_params: + # from bw2data.parameters import ( + # ActivityParameter, + # DatabaseParameter, + # ParameterizedExchange, + # ) + + # groups = tuple( + # { + # o[0] + # for o in ActivityParameter.select(ActivityParameter.group) + # .where(ActivityParameter.database == self.name) + # .tuples() + # } + # ) + # ParameterizedExchange.delete().where(ParameterizedExchange.group << groups).execute() + # ActivityParameter.delete().where(ActivityParameter.database == self.name).execute() + # DatabaseParameter.delete().where(DatabaseParameter.database == self.name).execute() if vacuum_needed: sqlite3_lci_db.vacuum() diff --git a/bw2data/backends/proxies.py b/bw2data/backends/proxies.py index 4e82e3af..2ed39499 100644 --- a/bw2data/backends/proxies.py +++ b/bw2data/backends/proxies.py @@ -264,7 +264,7 @@ def key(self): def delete(self, signal: bool = True): from bw2data import Database, calculation_setups - from bw2data.parameters import ActivityParameter, ParameterizedExchange + # from bw2data.parameters import ActivityParameter, ParameterizedExchange def purge(obj: Activity, dct: dict) -> dict: return { @@ -273,14 +273,14 @@ def purge(obj: Activity, dct: dict) -> dict: if key != obj._data["id"] and key != (obj._data["database"], obj._data["code"]) } - try: - ap = ActivityParameter.get(database=self[0], code=self[1]) - ParameterizedExchange.delete().where(ParameterizedExchange.group == ap.group).execute() - ActivityParameter.delete().where( - ActivityParameter.database == self[0], ActivityParameter.code == self[1] - ).execute() - except ActivityParameter.DoesNotExist: - pass + # try: + # ap = ActivityParameter.get(database=self[0], code=self[1]) + # ParameterizedExchange.delete().where(ParameterizedExchange.group == ap.group).execute() + # ActivityParameter.delete().where( + # ActivityParameter.database == self[0], ActivityParameter.code == self[1] + # ).execute() + # except ActivityParameter.DoesNotExist: + # pass # IndexManager(Database(self["database"]).filename).delete_dataset(self._data) self.exchanges().delete(allow_in_sourced_project=True) self.upstream().delete(allow_in_sourced_project=True) @@ -603,11 +603,11 @@ def save(self, signal: bool = True, data_already_set: bool = False, force_insert self._document.save(signal=signal, force_insert=force_insert) def delete(self, signal: bool = True): - from bw2data.parameters import ParameterizedExchange + # from bw2data.parameters import ParameterizedExchange - ParameterizedExchange.delete().where( - ParameterizedExchange.exchange == self._document.id - ).execute() + # ParameterizedExchange.delete().where( + # ParameterizedExchange.exchange == self._document.id + # ).execute() self._document.delete_instance(signal=signal) databases.set_dirty(self["output"][0]) self = None diff --git a/bw2data/parameters.py b/bw2data/parameters.py deleted file mode 100644 index 80a4f69d..00000000 --- a/bw2data/parameters.py +++ /dev/null @@ -1,1661 +0,0 @@ -__all__ = ( - "ActivityParameter", - "DatabaseParameter", - "Group", - "GroupDependency", - "ParameterizedExchange", - "parameters", - "ProjectParameter", -) - -import datetime -import itertools -import re -import uuid -from typing import Optional - -import asteval -from asteval import Interpreter -from bw2parameters import ParameterSet -from 
bw2parameters.errors import MissingName -from peewee import BooleanField, Check, DateTimeField, FloatField, IntegerField, Model, TextField - -from bw2data import config, databases, get_activity, projects -from bw2data.backends.schema import ExchangeDataset -from bw2data.signals import ( - on_activity_parameter_recalculate, - on_activity_parameter_recalculate_exchanges, - on_activity_parameter_update_formula_activity_parameter_name, - on_activity_parameter_update_formula_database_parameter_name, - on_activity_parameter_update_formula_project_parameter_name, - on_database_parameter_recalculate, - on_database_parameter_update_formula_database_parameter_name, - on_database_parameter_update_formula_project_parameter_name, - on_project_parameter_recalculate, - on_project_parameter_update_formula_parameter_name, -) -from bw2data.snowflake_ids import SnowflakeIDBaseClass -from bw2data.sqlite import PickleField, SubstitutableDatabase - -# https://stackoverflow.com/questions/34544784/arbitrary-string-to-valid-python-name -clean = lambda x: re.sub(r"\W|^(?=\d)", "_", x) -nonempty = lambda dct: {k: v for k, v in dct.items() if v is not None} - -"""Autoupdate `updated` field in Group when parameters change""" -AUTOUPDATE_TRIGGER = """CREATE TRIGGER IF NOT EXISTS {table}_{action}_trigger AFTER {action} ON {table} BEGIN - UPDATE group_table SET updated = datetime('now') WHERE name = {name}; -END;""" - -"""Activity parameter groups can't cross databases""" -_CROSSDATABASE_TEMPLATE = """CREATE TRIGGER IF NOT EXISTS ap_crossdatabase_{action} BEFORE {action} ON activityparameter BEGIN - SELECT CASE WHEN - ((SELECT COUNT(*) FROM activityparameter WHERE "group" = NEW."group") > 0) - AND (NEW.database NOT IN (SELECT DISTINCT "database" FROM activityparameter where "group" = NEW."group")) - THEN RAISE(ABORT,'Cross database group') - END; -END;""" -CROSSDATASE_INSERT_TRIGGER = _CROSSDATABASE_TEMPLATE.format(action="INSERT") -CROSSDATASE_UPDATE_TRIGGER = _CROSSDATABASE_TEMPLATE.format(action="UPDATE") - -"""Activities can't be in multiple activity parameter groups""" -_CROSSGROUP_TEMPLATE = """CREATE TRIGGER IF NOT EXISTS ap_crossgroup_{action} BEFORE {action} ON activityparameter BEGIN - SELECT CASE WHEN EXISTS (SELECT * FROM activityparameter AS a WHERE - a.database = NEW.database AND - a.code = NEW.code AND - a."group" != NEW."group") - THEN RAISE(ABORT,'Cross group activity') - END; -END;""" -CROSSGROUP_INSERT_TRIGGER = _CROSSGROUP_TEMPLATE.format(action="INSERT") -CROSSGROUP_UPDATE_TRIGGER = _CROSSGROUP_TEMPLATE.format(action="UPDATE") - -"""No circular dependences in activity parameter group dependencies""" -_CLOSURE_TEMPLATE = """CREATE TRIGGER IF NOT EXISTS gd_circular_{action} BEFORE {action} ON groupdependency BEGIN - SELECT CASE WHEN EXISTS (SELECT * FROM groupdependency AS g WHERE g."group" = NEW.depends AND g.depends = NEW."group") - THEN RAISE(ABORT,'Circular dependency') - END; -END; -""" -GD_INSERT_TRIGGER = _CLOSURE_TEMPLATE.format(action="INSERT") -GD_UPDATE_TRIGGER = _CLOSURE_TEMPLATE.format(action="UPDATE") - -"""Parameterized exchange groups must be in activityparameters table""" -_PE_GROUP_TEMPLATE = """CREATE TRIGGER IF NOT EXISTS pe_group_{action} BEFORE {action} ON parameterizedexchange BEGIN - SELECT CASE WHEN - ((SELECT COUNT(*) FROM activityparameter WHERE "group" = NEW."group") < 1) - THEN RAISE(ABORT,'Missing activity parameter group') - END; -END; -""" -PE_INSERT_TRIGGER = _PE_GROUP_TEMPLATE.format(action="INSERT") -PE_UPDATE_TRIGGER = _PE_GROUP_TEMPLATE.format(action="UPDATE") - - 
-class ParameterBase(SnowflakeIDBaseClass): - __repr__ = lambda x: str(x) - - def __lt__(self, other): - if type(self) != type(other): - raise TypeError - else: - return self.name.lower() < other.name.lower() - - @classmethod - def create_table(cls): - super(ParameterBase, cls).create_table() - cls._meta.database.execute_sql( - AUTOUPDATE_TRIGGER.format(action="INSERT", name=cls._new_name, table=cls._db_table) - ) - for action in ("UPDATE", "DELETE"): - cls._meta.database.execute_sql( - AUTOUPDATE_TRIGGER.format(action=action, name=cls._old_name, table=cls._db_table) - ) - - @staticmethod - def expire_downstream(group): - """Expire any activity parameters that depend on this group""" - Group.update(fresh=False).where( - Group.name - << GroupDependency.select(GroupDependency.group).where(GroupDependency.depends == group) - ).execute() - - -class ProjectParameter(ParameterBase): - """Parameter set for a project. Group name is 'project'. - - Columns: - - name: str, unique - - formula: str, optional - - amount: float, optional - - data: object, optional. Used for any other metadata. - - Note that there is no magic for reading and writing to ``data`` (unlike ``Activity`` objects) - it must be used directly. - - """ - - name = TextField(index=True, unique=True) - formula = TextField(null=True) - amount = FloatField(null=True) - data = PickleField(default={}) - - _old_name = "'project'" - _new_name = "'project'" - _db_table = "projectparameter" - - def __str__(self): - return "Project parameter: {}".format(self.name) - - def save(self, *args, **kwargs): - Group.get_or_create(name="project")[0].expire() - super(ProjectParameter, self).save(*args, **kwargs) - - @staticmethod - def load(group=None): - """Return dictionary of parameter data with names as keys and ``.dict()`` as values.""" - - def reformat(o): - o = o.dict - return (o.pop("name"), o) - - return dict([reformat(o) for o in ProjectParameter.select()]) - - @staticmethod - def static(ignored="project", only=None): - """Get dictionary of ``{name: amount}`` for all project parameters. - - ``only`` restricts returned names to ones found in ``only``. ``ignored`` included for API compatibility with other ``recalculate`` methods. - """ - result = dict( - ProjectParameter.select(ProjectParameter.name, ProjectParameter.amount).tuples() - ) - if only is not None: - result = {k: v for k, v in result.items() if k in only} - return result - - @staticmethod - def expired(): - """Return boolean - is this group expired?""" - try: - return not Group.get(name="project").fresh - except Group.DoesNotExist: - return False - - @staticmethod - def recalculate(ignored: Optional[bool] = None, signal: bool = True): - """Recalculate all parameters. - - ``ignored`` included for API compatibility with other ``recalculate`` methods - it will really be ignored. - """ - if not ProjectParameter.expired(): - return - data = ProjectParameter.load() - if not data: - return - ParameterSet(data).evaluate_and_set_amount_field() - with parameters.db.atomic() as _: - for key, value in data.items(): - ProjectParameter.update( - amount=value["amount"], - ).where(ProjectParameter.name == key).execute() - Group.get_or_create(name="project")[0].freshen() - ProjectParameter.expire_downstream("project") - - if signal: - on_project_parameter_recalculate.send() - - @staticmethod - def dependency_chain(): - """Determine if ```ProjectParameter`` parameters have dependencies - within the group. - - Returns: - - .. 
code-block:: python - - [ - { - 'kind': 'project', - 'group': 'project', - 'names': set of variables names - } - ] - - """ - data = ProjectParameter.load() - if not data: - return [] - - # Parse all formulas, find missing variables - needed = get_new_symbols(data.values()) - if not needed: - return [] - - missing = needed.difference(data) - if missing: - raise MissingName( - "The following variables aren't defined:\n{}".format("|".join(missing)) - ) - - return [{"kind": "project", "group": "project", "names": needed}] - - @staticmethod - def is_dependency_within_group(name): - own_group = next(iter(ProjectParameter.dependency_chain()), {}) - return True if name in own_group.get("names", set()) else False - - def is_deletable(self): - """Perform a test to see if the current parameter can be deleted.""" - if ProjectParameter.is_dependency_within_group(self.name): - return False - # Test the database parameters - if DatabaseParameter.is_dependent_on(self.name): - return False - # Test activity parameters - if ActivityParameter.is_dependent_on(self.name, "project"): - return False - return True - - @classmethod - def update_formula_parameter_name(cls, old: str, new: str, signal: bool = True): - """Performs an update of the formula of relevant parameters. - - NOTE: Make sure to wrap this in an .atomic() statement! - """ - data = ( - alter_parameter_formula(p, old, new) - for p in cls.select().where(cls.formula.contains(old)) - ) - cls.bulk_update(data, fields=[cls.formula], batch_size=50) - Group.get_or_create(name="project")[0].expire() - - if signal: - on_project_parameter_update_formula_parameter_name.send( - old={"old": old}, new={"new": new} - ) - - @property - def dict(self): - """Parameter data as a standardized dictionary""" - obj = nonempty( - { - "name": self.name, - "formula": self.formula, - "amount": self.amount, - } - ) - obj.update(self.data) - return obj - - -class DatabaseParameter(ParameterBase): - """Parameter set for a database. Group name is the name of the database. - - Columns: - - * database: str - * name: str, unique within a database - * formula: str, optional - * amount: float, optional - * data: object, optional. Used for any other metadata. - - Note that there is no magic for reading and writing to ``data`` (unlike ``Activity`` objects) - it must be used directly. 
- - """ - - database = TextField(index=True) - name = TextField(index=True) - formula = TextField(null=True) - amount = FloatField(null=True) - data = PickleField(default={}) - - _old_name = "OLD.database" - _new_name = "NEW.database" - _db_table = "databaseparameter" - - class Meta: - indexes = ((("database", "name"), True),) - constraints = [Check("database != 'project'")] - - def __str__(self): - return "Database parameter: {}:{}".format(self.database, self.name) - - @staticmethod - def load(database): - """Return dictionary of parameter data with names as keys and ``.dict()`` as values.""" - - def reformat(o): - o = o.dict - return (o.pop("name"), o) - - return dict( - [ - reformat(o) - for o in DatabaseParameter.select().where(DatabaseParameter.database == database) - ] - ) - - @staticmethod - def expired(database): - """Return boolean - is this group expired?""" - try: - return not Group.get(name=database).fresh - except Group.DoesNotExist: - return False - - @staticmethod - def static(database, only=None): - """Return dictionary of {name: amount} for database group.""" - result = dict( - DatabaseParameter.select(DatabaseParameter.name, DatabaseParameter.amount) - .where(DatabaseParameter.database == database) - .tuples() - ) - if only is not None: - result = {k: v for k, v in result.items() if k in only} - return result - - @staticmethod - def recalculate(database: str, signal: bool = True): - """Recalculate all database parameters for ``database``, if expired.""" - if ProjectParameter.expired(): - ProjectParameter.recalculate() - - # Can we avoid doing anything? - if not DatabaseParameter.expired(database): - return - data = DatabaseParameter.load(database) - if not data: - return - - # Parse all formulas, find missing variables - new_symbols = get_new_symbols(data.values(), set(data)) - found_symbols = {x[0] for x in ProjectParameter.select(ProjectParameter.name).tuples()} - missing = new_symbols.difference(found_symbols) - if missing: - raise MissingName( - "The following variables aren't defined:\n{}".format("|".join(missing)) - ) - - # Add or delete `project` dependency if needed - if new_symbols: - GroupDependency.get_or_create(group=database, depends="project") - # Load needed project variables as {'foo': 42} dict - glo = ProjectParameter.static(only=new_symbols) - else: - GroupDependency.delete().where( - GroupDependency.group == database, GroupDependency.depends == "project" - ).execute() - glo = None - - # Update database parameter values - ParameterSet(data, glo).evaluate_and_set_amount_field() - with parameters.db.atomic(): - for key, value in data.items(): - DatabaseParameter.update( - amount=value["amount"], - ).where( - DatabaseParameter.name == key, - DatabaseParameter.database == database, - ).execute() - Group.get(name=database).freshen() - DatabaseParameter.expire_downstream(database) - - if signal: - on_database_parameter_recalculate.send(name=database) - - @staticmethod - def dependency_chain(group, include_self=False): - """Find where each missing variable is defined in dependency chain. - - If ``include_self`` is True will include parameters within the group as possible dependencies - - Returns: - - .. 
code-block:: python - - [ - { - 'kind': one of 'project', 'database', 'activity', - 'group': group name, - 'names': set of variables names - } - ] - - """ - data = DatabaseParameter.load(group) - if not data: - return [] - - # Parse all formulas, find missing variables - context = set(data) if not include_self else set() - needed = get_new_symbols(data.values(), context=context) - if not needed: - return [] - - names, chain = set(), [] - if include_self: - included = needed.intersection(data) - for name in included: - names.add(name) - needed.remove(name) - if names: - chain.append({"kind": "database", "group": group, "names": names}) - - if needed: - names = set() - for name in ProjectParameter.static(only=needed): - names.add(name) - needed.remove(name) - if names: - chain.insert(0, {"kind": "project", "group": "project", "names": names}) - - if needed: - raise MissingName( - "The following variables aren't defined:\n{}".format("|".join(needed)) - ) - - return chain - - @staticmethod - def is_dependency_within_group(name, database): - own_group = next( - ( - x - for x in DatabaseParameter.dependency_chain(database, include_self=True) - if x.get("group") == database - ), - {}, - ) - return True if name in own_group.get("names", set()) else False - - def save(self, *args, **kwargs): - """Save this model instance""" - Group.get_or_create(name=self.database)[0].expire() - super(DatabaseParameter, self).save(*args, **kwargs) - - def is_deletable(self): - """Perform a test to see if the current parameter can be deleted.""" - # Test if the current parameter is used by other database parameters - if DatabaseParameter.is_dependency_within_group(self.name, self.database): - return False - # Then test all relevant activity parameters - if ActivityParameter.is_dependent_on(self.name, self.database): - return False - return True - - @staticmethod - def is_dependent_on(name): - """Test if any database parameters are dependent on the given - project parameter name. - """ - query = ( - GroupDependency.select(GroupDependency.group) - .where(GroupDependency.depends == "project") - .distinct() - ) - - for row in query.execute(): - chain = DatabaseParameter.dependency_chain(row.group) - own_group = next((x for x in chain if x.get("group") == "project"), {}) - if name in own_group.get("names", set()): - return True - - return False - - @classmethod - def update_formula_project_parameter_name(cls, old: str, new: str, signal: bool = True): - """Performs an update of the formula of relevant parameters. 
- - This method specifically targets project parameters used in database - formulas - """ - data = ( - alter_parameter_formula(p, old, new) - for p in ( - cls.select() - .join(GroupDependency, on=(GroupDependency.group == cls.database)) - .where(cls.formula.contains(old)) - ) - if not DatabaseParameter.is_dependency_within_group(old, p.database) - ) - dbs = set( - p.database - for p in ( - cls.select(cls.database) - .join(GroupDependency, on=(GroupDependency.group == cls.database)) - .where(cls.formula.contains(old)) - .distinct() - ) - if not DatabaseParameter.is_dependency_within_group(old, p.database) - ) - cls.bulk_update(data, fields=[cls.formula], batch_size=50) - for db in dbs: - Group.get_or_create(name=db)[0].expire() - - if signal: - on_database_parameter_update_formula_project_parameter_name.send( - old={"old": old}, new={"new": new} - ) - - @classmethod - def update_formula_database_parameter_name(cls, old: str, new: str, signal: bool = True): - """Performs an update of the formula of relevant parameters. - - This method specifically targets database parameters used in database - formulas - """ - data = ( - alter_parameter_formula(p, old, new) - for p in cls.select().where(cls.formula.contains(old)) - if DatabaseParameter.is_dependency_within_group(old, p.database) - ) - dbs = set( - p.database - for p in (cls.select(cls.database).where(cls.formula.contains(old)).distinct()) - if DatabaseParameter.is_dependency_within_group(old, p.database) - ) - cls.bulk_update(data, fields=[cls.formula], batch_size=50) - for db in dbs: - Group.get_or_create(name=db)[0].expire() - - if signal: - on_database_parameter_update_formula_database_parameter_name.send( - old={"old": old}, new={"new": new} - ) - - @property - def dict(self): - """Parameter data as a standardized dictionary""" - obj = nonempty( - { - "database": self.database, - "name": self.name, - "formula": self.formula, - "amount": self.amount, - } - ) - obj.update(self.data) - return obj - - -class ActivityParameter(ParameterBase): - """Parameter set for a group of activities. - - Columns: - - - group: str - - database: str - - code: str. Code and database define the linked activity for this parameter. - - name: str, unique within a group - - formula: str, optional - - amount: float, optional - - data: object, optional. Used for any other metadata. - - Activities can only have parameters in one group. Group names cannot be 'project' or the name of any existing database. - - Activity parameter groups can depend on other activity parameter groups, so that a formula in group "a" can depend on a variable in group "b". This dependency information is stored in ``Group.order`` - in our small example, we could define the following: - - .. code-block:: python - - a = Group.get(name="a") - a.order = ["b", "c"] - a.save() - - In this case, a variable not found in "a" would be searched for in "b" and then "c", in that order. Database and then project parameters are also implicitly included at the end of ``Group.order``. - - Note that there is no magic for reading and writing to ``data`` (unlike ``Activity`` objects) - it must be used directly. 
- - """ - - group = TextField() - database = TextField() - code = TextField() - name = TextField() - formula = TextField(null=True) - amount = FloatField(null=True) - data = PickleField(default={}) - - _old_name = 'OLD."group"' - _new_name = 'NEW."group"' - _db_table = "activityparameter" - - class Meta: - indexes = [(("group", "name"), True)] - constraints = [Check("""("group" != 'project') AND ("group" != database)""")] - - def __str__(self): - return "Activity parameter: {}:{}".format(self.group, self.name) - - @staticmethod - def load(group): - """Return dictionary of parameter data with names as keys and ``.dict()`` as values.""" - - def reformat(o): - o = o.dict - return (o.pop("name"), o) - - return dict( - [ - reformat(o) - for o in ActivityParameter.select().where(ActivityParameter.group == group) - ] - ) - - @staticmethod - def static(group, only=None, full=False): - """Get dictionary of ``{name: amount}`` for parameters defined in ``group``. - - ``only`` restricts returned names to ones found in ``only``. ``full`` returns all names, including those found in the dependency chain. - """ - result = dict( - ActivityParameter.select(ActivityParameter.name, ActivityParameter.amount) - .where(ActivityParameter.group == group) - .tuples() - ) - if full: - temp = ActivityParameter._static_dependencies(group) - temp.update(result) - result = temp - if only is not None: - result = {k: v for k, v in result.items() if k in only} - return result - - @staticmethod - def _static_dependencies(group): - """Get dictionary of ``{name: amount}`` for all variables defined in dependency chain. - - Be careful! This could have variables which overlap with local variable names. Designed for internal use. - """ - database = ActivityParameter.get(group=group).database - - chain = [ProjectParameter.static(), DatabaseParameter.static(database)] + [ - ActivityParameter.static(g) for g in Group.get(name=group).order[::-1] - ] - - result = {} - for dct in chain: - result.update(dct) - return result - - @staticmethod - def insert_dummy(group: str, activity: tuple, signal: bool = True): - code, database = activity[1], activity[0] - if ( - not ActivityParameter.select() - .where( - ActivityParameter.group == group, - ActivityParameter.code == code, - ActivityParameter.database == database, - ) - .count() - ): - ActivityParameter( - group=group, - name="__dummy_{}__".format(uuid.uuid4().hex), - code=code, - database=database, - amount=0, - ).save(signal=signal) - - @staticmethod - def expired(group): - """Return boolean - is this group expired?""" - try: - return not Group.get(name=group).fresh - except Group.DoesNotExist: - return False - - @staticmethod - def dependency_chain(group, include_self=False): - """Find where each missing variable is defined in dependency chain. - - Will also load in all parameters needed to resolve the ``ParameterizedExchanges`` for this group. - - If ``include_self`` is True will include parameters within the group as possible dependencies - - Returns: - - .. 
code-block:: python - - [ - { - 'kind': one of 'project', 'database', 'activity', - 'group': group name, - 'names': set of variables names - } - ] - - """ - data = ActivityParameter.load(group) - if not data: - return [] - - # Parse all formulas, find missing variables - context = set(data) if not include_self else None - activity_needed = get_new_symbols(data.values(), context=context) - exchanges_needed = get_new_symbols( - ParameterizedExchange.load(group).values(), context=context - ) - needed = activity_needed.union(exchanges_needed) - - if not needed: - return [] - - chain = [] - - # Iteratively search through other activity params, - # then db params, then project params - for new_group in Group.get(name=group).order: - names = set() - for name in ActivityParameter.static(new_group, only=needed): - names.add(name) - needed.remove(name) - if names: - chain.append({"kind": "activity", "group": new_group, "names": names}) - - if needed and include_self: - names = set() - included = needed.intersection(data) - for name in included: - names.add(name) - needed.remove(name) - if names: - chain.append({"kind": "activity", "group": group, "names": names}) - - if needed: - database = ActivityParameter.get(group=group).database - names = set() - for name in DatabaseParameter.static(database, only=needed): - names.add(name) - needed.remove(name) - if names: - chain.append({"kind": "database", "group": database, "names": names}) - if needed: - names = set() - for name in ProjectParameter.static(only=needed): - names.add(name) - needed.remove(name) - if names: - chain.append({"kind": "project", "group": "project", "names": names}) - if needed: - raise MissingName( - "The following variables aren't defined:\n{}".format("|".join(needed)) - ) - - return chain - - @staticmethod - def is_dependency_within_group(name, group, include_order=False): - """Determine if the given parameter `name` is a dependency within - the given activity `group`. - - The optional ``include_order`` parameter will include dependencies - from groups found in the the ``Group``.`order` field. - """ - chain = ActivityParameter.dependency_chain(group, include_self=True) - own_group = next((x for x in chain if x.get("group") == group), {}) - names = own_group.get("names", set()) - if include_order: - for new_group in Group.get(name=group).order: - order_group = next((x for x in chain if x.get("group") == new_group), {}) - names = names.union(order_group.get("names", set())) - return True if name in names else False - - @staticmethod - def recalculate(group: str, signal: bool = True): - """Recalculate all values for activity parameters in this group, and update their underlying `Activity` and `Exchange` values.""" - # Start by traversing and updating the list of dependencies - if not ActivityParameter.expired(group): - return - - chain = ActivityParameter.dependency_chain(group) - - # Reset dependencies and dependency order - if chain: - obj = Group.get(name=group) - obj.order = [o["group"] for o in chain if o["kind"] == "activity"] - obj.save() - GroupDependency.delete().where(GroupDependency.group == group).execute() - GroupDependency.insert_many( - [{"group": group, "depends": o["group"]} for o in chain] - ).execute() - - # Update all upstream groups - mapping = { - "project": ProjectParameter, - "database": DatabaseParameter, - "activity": ActivityParameter, - } - - # Not guaranteed to be the most efficient, - # but definitely simplest for now. 
- # Could be smarter here in the future - # Shouldn't be any race conditions because check for - # circular dependencies - for row in chain[::-1]: - mapping[row["kind"]].recalculate(row["group"]) - - # Update activity parameter values - data = ActivityParameter.load(group) - static = { - k: v for k, v in ActivityParameter._static_dependencies(group).items() if k not in data - } - ParameterSet(data, static).evaluate_and_set_amount_field() - with parameters.db.atomic(): - for key, value in data.items(): - ActivityParameter.update( - amount=value["amount"], - ).where( - ActivityParameter.name == key, - ActivityParameter.group == group, - ).execute() - Group.get(name=group).freshen() - ActivityParameter.expire_downstream(group) - - ActivityParameter.recalculate_exchanges(group, signal=False) - - if signal: - on_activity_parameter_recalculate.send(name=group) - - @staticmethod - def recalculate_exchanges(group: str, signal: bool = True): - """Recalculate formulas for all parameterized exchanges in group ``group``.""" - if ActivityParameter.expired(group): - return ActivityParameter.recalculate(group) - - interpreter = Interpreter() - for k, v in ActivityParameter.static(group, full=True).items(): - interpreter.symtable[k] = v - # TODO: Remove uncertainty from exchanges? - for obj in ParameterizedExchange.select().where(ParameterizedExchange.group == group): - exc = ExchangeDataset.get(id=obj.exchange) - exc.data["amount"] = interpreter(obj.formula) - exc.save(signal=False) - - databases.set_dirty(ActivityParameter.get(group=group).database) - - if signal: - on_activity_parameter_recalculate_exchanges.send(name=group) - - def save(self, *args, **kwargs): - """Save this model instance""" - Group.get_or_create(name=self.group)[0].expire() - super().save(*args, **kwargs) - - def is_deletable(self): - """Perform a test to see if the current parameter can be deleted.""" - # First check own group - if ActivityParameter.is_dependency_within_group(self.name, self.group): - return False - # Then test other relevant activity groups. - if ActivityParameter.is_dependent_on(self.name, self.group): - return False - return True - - @staticmethod - def is_dependent_on(name, group): - """Test if any activity parameters are dependent on the given - parameter name from the given group. - """ - query = ( - GroupDependency.select(GroupDependency.group) - .where(GroupDependency.depends == group) - .distinct() - ) - - for row in query.execute(): - chain = ActivityParameter.dependency_chain(row.group) - own_group = next((x for x in chain if x.get("group") == group), {}) - if name in own_group.get("names", set()): - return True - - return False - - @classmethod - def update_formula_project_parameter_name(cls, old: str, new: str, signal: bool = True): - """Performs an update of the formula of relevant parameters. 
- - This method specifically targets project parameters used in activity - formulas - """ - data = ( - alter_parameter_formula(p, old, new) - for p in ( - cls.select() - .join(GroupDependency, on=(GroupDependency.group == cls.group)) - .where((GroupDependency.depends == "project") & (cls.formula.contains(old))) - ) - if not ActivityParameter.is_dependency_within_group(old, p.group) - ) - group_parameters = itertools.chain( - ( - cls.select(cls.group) - .join(GroupDependency, on=(GroupDependency.group == cls.group)) - .where((GroupDependency.depends == "project") & (cls.formula.contains(old))) - .distinct() - ), - ( - ParameterizedExchange.select(ParameterizedExchange.group) - .where(ParameterizedExchange.formula.contains(old)) - .distinct() - ), - ) - groups = set( - p.group - for p in group_parameters - if not ActivityParameter.is_dependency_within_group(old, p.group) - ) - exchanges = ( - alter_parameter_formula(p, old, new) - for p in ParameterizedExchange.select().where(ParameterizedExchange.group << groups) - ) - cls.bulk_update(data, fields=[cls.formula], batch_size=50) - for param_exc in exchanges: - param_exc.save(signal=False) - Group.update(fresh=False).where(Group.name << groups).execute() - - if signal: - on_activity_parameter_update_formula_project_parameter_name.send( - old={"old": old}, new={"new": new} - ) - - @classmethod - def update_formula_database_parameter_name(cls, old: str, new: str, signal: bool = True): - """Performs an update of the formula of relevant parameters. - - This method specifically targets database parameters used in activity - formulas - """ - data = ( - alter_parameter_formula(p, old, new) - for p in ( - cls.select() - .join(GroupDependency, on=(GroupDependency.group == cls.group)) - .where((GroupDependency.depends == cls.database) & (cls.formula.contains(old))) - ) - if not ActivityParameter.is_dependency_within_group(old, p.group) - ) - group_parameters = itertools.chain( - ( - cls.select(cls.group) - .join(GroupDependency, on=(GroupDependency.group == cls.group)) - .where((GroupDependency.depends == cls.database) & (cls.formula.contains(old))) - .distinct() - ), - ( - ParameterizedExchange.select(ParameterizedExchange.group) - .where(ParameterizedExchange.formula.contains(old)) - .distinct() - ), - ) - groups = set( - p.group - for p in group_parameters - if not ActivityParameter.is_dependency_within_group(old, p.group) - ) - exchanges = ( - alter_parameter_formula(p, old, new) - for p in ParameterizedExchange.select().where(ParameterizedExchange.group << groups) - ) - cls.bulk_update(data, fields=[cls.formula], batch_size=50) - for param_exc in exchanges: - param_exc.save(signal=False) - Group.update(fresh=False).where(Group.name << groups).execute() - - if signal: - on_activity_parameter_update_formula_database_parameter_name.send( - old={"old": old}, new={"new": new} - ) - - @classmethod - def update_formula_activity_parameter_name( - cls, old: str, new: str, include_order: bool = False, signal: bool = True - ): - """Performs an update of the formula of relevant parameters. 
- - This method specifically targets activity parameters used in activity - formulas - """ - data = ( - alter_parameter_formula(p, old, new) - for p in cls.select().where(cls.formula.contains(old)) - if ActivityParameter.is_dependency_within_group(old, p.group, include_order) - ) - group_parameters = itertools.chain( - cls.select(cls.group).where(cls.formula.contains(old)).distinct(), - ( - ParameterizedExchange.select(ParameterizedExchange.group) - .where(ParameterizedExchange.formula.contains(old)) - .distinct() - ), - ) - groups = set( - p.group - for p in group_parameters - if ActivityParameter.is_dependency_within_group(old, p.group, include_order) - ) - exchanges = ( - alter_parameter_formula(p, old, new) - for p in ParameterizedExchange.select().where(ParameterizedExchange.group << groups) - ) - cls.bulk_update(data, fields=[cls.formula], batch_size=50) - for param_exc in exchanges: - param_exc.save(signal=False) - Group.update(fresh=False).where(Group.name << groups).execute() - - if signal: - on_activity_parameter_update_formula_activity_parameter_name.send( - old={"old": old}, new={"new": new, "include_order": include_order} - ) - - @classmethod - def create_table(cls): - super(ActivityParameter, cls).create_table() - cls._meta.database.execute_sql(CROSSDATASE_UPDATE_TRIGGER) - cls._meta.database.execute_sql(CROSSDATASE_INSERT_TRIGGER) - cls._meta.database.execute_sql(CROSSGROUP_UPDATE_TRIGGER) - cls._meta.database.execute_sql(CROSSGROUP_INSERT_TRIGGER) - - @property - def dict(self): - """Parameter data as a standardized dictionary""" - obj = nonempty( - { - "database": self.database, - "code": self.code, - "name": self.name, - "formula": self.formula, - "amount": self.amount, - } - ) - obj.update(self.data) - return obj - - -class ParameterizedExchange(SnowflakeIDBaseClass): - group = TextField() - exchange = IntegerField(unique=True) - formula = TextField() - - @classmethod - def create_table(cls): - super(ParameterizedExchange, cls).create_table() - cls._meta.database.execute_sql(PE_UPDATE_TRIGGER) - cls._meta.database.execute_sql(PE_INSERT_TRIGGER) - - def save(self, *args, **kwargs): - Group.get_or_create(name=self.group)[0].expire() - super().save(*args, **kwargs) - # Push the changed formula to the Exchange. - exc = ExchangeDataset.get_or_none(id=self.exchange) - if exc and exc.data.get("formula") != self.formula: - exc.data["formula"] = self.formula - exc.save(signal=False) - - @staticmethod - def load(group): - """Return dictionary of parameter data with names as keys and ``.dict()`` as values.""" - return { - o.exchange: o.formula - for o in ParameterizedExchange.select().where(ParameterizedExchange.group == group) - } - - @staticmethod - def recalculate(group): - """Shortcut for ``ActivityParameter.recalculate_exchanges``.""" - return ActivityParameter.recalculate_exchanges(group) - - -class Group(SnowflakeIDBaseClass): - name = TextField(unique=True) - fresh = BooleanField(default=True) - updated = DateTimeField(default=datetime.datetime.now) - order = PickleField(default=[]) - - def expire(self): - """Set ``fresh`` to ``False``""" - self.fresh = False - self.save() - - def freshen(self): - """Set ``fresh`` to ``True``""" - self.fresh = True - self.save() - - def save(self, *args, **kwargs): - """Save this model instance. 
Will remove 'project' and database names from ``order``.""" - self.purge_order() - super(Group, self).save(*args, **kwargs) - - def purge_order(self): - reserved = set(databases).union(set(["project"])) - self.order = [x for x in self.order if x not in reserved] - - class Meta: - table_name = "group_table" - - -class GroupDependency(Model): - group = TextField() - depends = TextField() - - class Meta: - indexes = ((("group", "depends"), True),) - constraints = [Check('"group" != depends')] - - def save(self, *args, **kwargs): - if self.group == "project": - raise ValueError("`project` group can't have dependencies") - elif self.group in databases and self.depends != "project": - raise ValueError("Database groups can only depend on `project`") - super(GroupDependency, self).save(*args, **kwargs) - - @classmethod - def create_table(cls): - super(GroupDependency, cls).create_table() - cls._meta.database.execute_sql(GD_UPDATE_TRIGGER) - cls._meta.database.execute_sql(GD_INSERT_TRIGGER) - - -class ParameterManager: - def __init__(self): - self.db = SubstitutableDatabase( - projects.dir / "parameters.db", - [ - DatabaseParameter, - ProjectParameter, - ActivityParameter, - ParameterizedExchange, - Group, - GroupDependency, - ], - ) - config.sqlite3_databases.append(("parameters.db", self.db)) - - def add_to_group(self, group, activity): - """Add `activity` to group. - - Creates ``group`` if needed. - - Will delete any existing ``ActivityParameter`` for this activity. - - Deletes `parameters` key from `Activity`.""" - Group.get_or_create(name=group) - - activity = get_activity((activity[0], activity[1])) - if "parameters" not in activity: - return - - # Avoid duplicate by deleting existing parameters - # Call in loop to get event handling - for ap in ActivityParameter.select().where( - ActivityParameter.database == activity["database"], - ActivityParameter.code == activity["code"], - ): - ap.delete_instance() - - def reformat(o): - skipped = ("name", "amount", "formula") - return [ - nonempty( - { - "group": group, - "database": o["database"], - "code": o["code"], - "name": p["name"], - "formula": p.get("formula"), - "amount": p.get("amount", 0), - "data": {k: v for k, v in p.items() if k not in skipped}, - } - ) - for p in o.get("parameters", []) - ] - - # Get formatted parameters - with self.db.atomic(): - for row in reformat(activity): - ActivityParameter.create(**row) - - # Parameters are now "active", remove from `Activity` - del activity["parameters"] - activity.save() - - self.add_exchanges_to_group(group, activity) - - return ( - ActivityParameter.select() - .where( - ActivityParameter.database == activity["database"], - ActivityParameter.code == activity["code"], - ) - .count() - ) - - def remove_from_group(self, group, activity, restore_amounts=True): - """Remove `activity` from `group`. - - Will delete any existing ``ActivityParameter`` and ``ParameterizedExchange`` for this activity. - - Restores `parameters` key to this `Activity`. - By default, restores `amount` value of each parameterized exchange - of the `Activity` to the original value. This can be avoided by using - the ``restore_amounts`` parameter. 
- - """ - - def drop_fields(dct): - dct = {k: v for k, v in dct.items() if k not in ("database", "code")} - return dct.pop("name"), dct - - activity = get_activity((activity[0], activity[1])) - activity["parameters"] = dict( - [ - drop_fields(o.dict) - for o in ActivityParameter.select().where( - ActivityParameter.database == activity[0], - ActivityParameter.code == activity[1], - ) - ] - ) - - with self.db.atomic(): - self.remove_exchanges_from_group(group, activity, restore_amounts) - # Call in loop to get event handling - for ap in ActivityParameter.select().where( - ActivityParameter.database == activity[0], - ActivityParameter.code == activity[1], - ): - ap.delete_instance() - activity.save() - - def add_exchanges_to_group(self, group, activity): - """Add exchanges with formulas from ``activity`` to ``group``. - - Every exchange with a formula field will have its original `amount` - value stored as `original_amount`. This original value can be - restored when parameterization is removed from the activity with - `remove_from_group`. - - """ - count = 0 - if ( - not ActivityParameter.select() - .where( - ActivityParameter.database == activity[0], - ActivityParameter.code == activity[1], - ) - .count() - ): - ActivityParameter.insert_dummy(group, activity) - - for exc in get_activity((activity[0], activity[1])).exchanges(): - if "formula" in exc: - try: - obj = ParameterizedExchange.get(exchange=exc._document.id) - except ParameterizedExchange.DoesNotExist: - obj = ParameterizedExchange(exchange=exc._document.id) - obj.group = group - obj.formula = exc["formula"] - obj.save() - if "original_amount" not in exc: - exc["original_amount"] = exc["amount"] - exc.save() - count += 1 - - return count - - def remove_exchanges_from_group(self, group, activity, restore_original=True): - """Takes a group and activity and removes all ``ParameterizedExchange`` - objects from the group. - - The ``restore_original`` parameter determines if the original amount - values will be restored to those exchanges where a formula was used - to alter the amount. - - """ - if restore_original: - for exc in (ex for ex in activity.exchanges() if "original_amount" in ex): - exc["amount"] = exc["original_amount"] - del exc["original_amount"] - exc.save() - - # Call in loop to get event handling - for pe in ParameterizedExchange.select().where(ParameterizedExchange.group == group): - pe.delete_instance() - - def new_project_parameters(self, data, overwrite=True): - """Correctly enter multiple parameters. - - Will overwrite existing project parameters with the same name, unless ``overwrite`` is false, in which case a ``ValueError`` is raised. - - ``data`` should be a list of dictionaries: - - .. code-block:: python - - [{ - 'name': name of variable (unique), - 'amount': numeric value of variable (optional), - 'formula': formula in Python as string (optional), - optional keys like uncertainty, etc. 
(no limitations) - }] - - """ - potentially_non_unique_names = [ds["name"] for ds in data] - unique_names = list(set(potentially_non_unique_names)) - assert len(unique_names) == len(potentially_non_unique_names), "Nonunique names: {}".format( - [p for p in unique_names if potentially_non_unique_names.count(p) > 1] - ) - - def reformat(ds): - return { - "name": ds.pop("name"), - "amount": ds.pop("amount", 0), - "formula": ds.pop("formula", None), - "data": ds, - } - - data = [reformat(ds) for ds in data] - new = {o["name"] for o in data} - existing = {o[0] for o in ProjectParameter.select(ProjectParameter.name).tuples()} - - if new.intersection(existing) and not overwrite: - raise ValueError( - "The following parameters already exist:\n{}".format( - "|".join(new.intersection(existing)) - ) - ) - - with self.db.atomic(): - # Remove existing values - # Call in loop to get event handling - for pp in ProjectParameter.select().where(ProjectParameter.name << tuple(new)): - pp.delete_instance() - for dataset in data: - ProjectParameter.create(**dataset) - Group.get_or_create(name="project")[0].expire() - ProjectParameter.recalculate() - - def new_database_parameters(self, data, database, overwrite=True): - """Correctly enter multiple parameters. Deletes **all** existing database parameters for this database. - - Will overwrite existing database parameters with the same name, unless ``overwrite`` is false, in which case a ``ValueError`` is raised. - - ``database`` should be an existing database. ``data`` should be a list of dictionaries: - - .. code-block:: python - - [{ - 'name': name of variable (unique), - 'amount': numeric value of variable (optional), - 'formula': formula in Python as string (optional), - optional keys like uncertainty, etc. (no limitations) - }] - - """ - assert database in databases, "Unknown database" - - potentially_non_unique_names = [ds["name"] for ds in data] - unique_names = list(set(potentially_non_unique_names)) - assert len(unique_names) == len(potentially_non_unique_names), "Nonunique names: {}".format( - [p for p in unique_names if potentially_non_unique_names.count(p) > 1] - ) - - def reformat(ds): - return { - "database": database, - "name": ds.pop("name"), - "amount": ds.pop("amount", 0), - "formula": ds.pop("formula", None), - "data": ds, - } - - data = [reformat(ds) for ds in data] - new = {o["name"] for o in data} - existing = { - o[0] - for o in DatabaseParameter.select(DatabaseParameter.name) - .where(DatabaseParameter.database == database) - .tuples() - } - - if new.intersection(existing) and not overwrite: - raise ValueError( - "The following parameters already exist:\n{}".format( - "|".join(new.intersection(existing)) - ) - ) - - with self.db.atomic(): - # Remove existing values - for dp in DatabaseParameter.select().where( - DatabaseParameter.database == database, - DatabaseParameter.name << tuple(new), - ): - dp.delete_instance() - for dataset in data: - DatabaseParameter.create(**dataset) - Group.get_or_create(name=database)[0].expire() - DatabaseParameter.recalculate(database) - - def new_activity_parameters(self, data, group, overwrite=True): - """Correctly enter multiple parameters. Deletes **all** existing activity parameters for this group. - - Will overwrite existing parameters in the same group with the same name, unless ``overwrite`` is false, in which case a ``ValueError`` is raised. - - Input parameters must refer to a single, existing database. - - ``group`` is the group name; will be autocreated if necessary. 
``data`` should be a list of dictionaries: - - .. code-block:: python - - [{ - 'name': name of variable (unique), - 'database': activity database, - 'code': activity code, - 'amount': numeric value of variable (optional), - 'formula': formula in Python as string (optional), - optional keys like uncertainty, etc. (no limitations) - }] - - """ - database = {o["database"] for o in data} - assert len(database) == 1, "Multiple databases" - assert database.pop() in databases, "Unknown database" - - potentially_non_unique_names = [o["name"] for o in data] - unique_names = list(set(potentially_non_unique_names)) - assert len(unique_names) == len(potentially_non_unique_names), "Nonunique names: {}".format( - [p for p in unique_names if potentially_non_unique_names.count(p) > 1] - ) - - Group.get_or_create(name=group) - - def reformat(ds): - return { - "group": group, - "database": ds.pop("database"), - "code": ds.pop("code"), - "name": ds.pop("name"), - "formula": ds.pop("formula", None), - "amount": ds.pop("amount", 0), - "data": ds, - } - - data = [reformat(ds) for ds in data] - new = {o["name"] for o in data} - existing = { - o[0] - for o in ActivityParameter.select(ActivityParameter.name) - .where(ActivityParameter.group == group) - .tuples() - } - - if new.intersection(existing) and not overwrite: - raise ValueError( - "The following parameters already exist:\n{}".format( - "|".join(new.intersection(existing)) - ) - ) - - with self.db.atomic(): - # Remove existing values - for ap in ActivityParameter.select().where( - ActivityParameter.group == group, ActivityParameter.name << new - ): - ap.delete_instance() - for dataset in data: - ActivityParameter.create(**dataset) - Group.get_or_create(name=group)[0].expire() - ActivityParameter.recalculate(group) - - def rename_project_parameter(self, parameter, new_name, update_dependencies=False): - """Given a parameter and a new name, safely update the parameter. - - Will raise a TypeError if the given parameter is of the incorrect type. - Will raise a ValueError if other parameters depend on the given one - and ``update_dependencies`` is False. - - """ - if not isinstance(parameter, ProjectParameter): - raise TypeError("Incorrect parameter type for this method.") - if parameter.name == new_name: - return - - project = ProjectParameter.is_dependency_within_group(parameter.name) - database = DatabaseParameter.is_dependent_on(parameter.name) - activity = ActivityParameter.is_dependent_on(parameter.name, "project") - - if not update_dependencies and any([project, database, activity]): - raise ValueError( - "Parameter '{}' is used in other (downstream) formulas".format(parameter.name) - ) - - with self.db.atomic(): - if project: - ProjectParameter.update_formula_parameter_name(parameter.name, new_name) - if database: - DatabaseParameter.update_formula_project_parameter_name(parameter.name, new_name) - if activity: - ActivityParameter.update_formula_project_parameter_name(parameter.name, new_name) - parameter.name = new_name - parameter.save() - self.recalculate() - - def rename_database_parameter(self, parameter, new_name, update_dependencies=False): - """Given a parameter and a new name, safely update the parameter. - - Will raise a TypeError if the given parameter is of the incorrect type. - Will raise a ValueError if other parameters depend on the given one - and ``update_dependencies`` is False. 
- - """ - if not isinstance(parameter, DatabaseParameter): - raise TypeError("Incorrect parameter type for this method.") - if parameter.name == new_name: - return - - database = DatabaseParameter.is_dependency_within_group(parameter.name, parameter.database) - activity = ActivityParameter.is_dependent_on(parameter.name, parameter.database) - - if not update_dependencies and any([database, activity]): - raise ValueError( - "Parameter '{}' is used in other (downstream) formulas".format(parameter.name) - ) - - with self.db.atomic(): - if database: - DatabaseParameter.update_formula_database_parameter_name(parameter.name, new_name) - if activity: - ActivityParameter.update_formula_database_parameter_name(parameter.name, new_name) - parameter.name = new_name - parameter.save() - self.recalculate() - - def rename_activity_parameter(self, parameter, new_name, update_dependencies=False): - """Given a parameter and a new name, safely update the parameter. - - Will raise a TypeError if the given parameter is of the incorrect type. - Will raise a ValueError if other parameters depend on the given one - and ``update_dependencies`` is False. - - """ - if not isinstance(parameter, ActivityParameter): - raise TypeError("Incorrect parameter type for this method.") - if parameter.name == new_name: - return - - activity = any( - [ - ActivityParameter.is_dependency_within_group( - parameter.name, parameter.group, include_order=True - ), - ActivityParameter.is_dependent_on(parameter.name, parameter.group), - ] - ) - - if not update_dependencies and activity: - raise ValueError( - "Parameter '{}' is used in other (downstream) formulas".format(parameter.name) - ) - - with self.db.atomic(): - if activity: - ActivityParameter.update_formula_activity_parameter_name( - parameter.name, new_name, include_order=True - ) - parameter.name = new_name - parameter.save() - self.recalculate() - - def recalculate(self): - """Recalculate all expired project, database, and activity parameters, as well as exchanges.""" - if ProjectParameter.expired(): - ProjectParameter.recalculate() - for db in databases: - if DatabaseParameter.expired(db): - DatabaseParameter.recalculate(db) - for obj in Group.select().where(Group.fresh == False): - # Shouldn't be possible? Maybe concurrent access? - if obj.name in databases or obj.name == "project": - continue - ActivityParameter.recalculate(obj.name) - - def __len__(self): - return ( - DatabaseParameter.select().count() - + ProjectParameter.select().count() - + ActivityParameter.select().count() - ) - - def __repr__(self): - return "Parameters manager with {} objects".format(len(self)) - - -parameters = ParameterManager() - - -def get_new_symbols(data, context=None): - interpreter = asteval.Interpreter() - BUILTIN_SYMBOLS = set(interpreter.symtable).union(set(context or set())) - found = set() - for ds in data: - if isinstance(ds, str): - formula = ds - elif "formula" in ds: - formula = ds["formula"] - else: - continue - - nf = asteval.NameFinder() - nf.generic_visit(interpreter.parse(formula)) - found.update(set(nf.names)) - return found.difference(BUILTIN_SYMBOLS) - - -def alter_parameter_formula(parameter, old, new): - """Replace the `old` part with `new` in the formula field and return - the parameter itself. 
- """ - if hasattr(parameter, "formula"): - parameter.formula = re.sub(r"\b{}\b".format(old), new, parameter.formula) - return parameter diff --git a/bw2data/project.py b/bw2data/project.py index a27b5fd7..556e47f5 100644 --- a/bw2data/project.py +++ b/bw2data/project.py @@ -632,15 +632,6 @@ def signal_dispatcher_generic_no_diff( signal_dispatcher_on_database = partial( signal_dispatcher_generic_no_diff, prefix="database", obj_type="lci_database" ) -signal_dispatcher_on_project_parameter = partial( - signal_dispatcher_generic_no_diff, prefix="project_parameter", obj_type="project_parameter" -) -signal_dispatcher_on_database_parameter = partial( - signal_dispatcher_generic_no_diff, prefix="database_parameter", obj_type="database_parameter" -) -signal_dispatcher_on_activity_parameter = partial( - signal_dispatcher_generic_no_diff, prefix="activity_parameter", obj_type="activity_parameter" -) def signal_dispatcher_on_database_write(sender, name: str) -> int: @@ -656,41 +647,6 @@ def signal_dispatcher_on_database_write(sender, name: str) -> int: ] return projects.dataset.add_revision(deltas) - -def signal_dispatcher_on_update_formula_parameter_name( - sender, old: str, new: str, kind: str, extra: str = "" -) -> int: - from bw2data import revisions - - delta = revisions.Delta( - delta=deepdiff.Delta(deepdiff.DeepDiff(old, new, verbose_level=2)), - obj_type=f"{kind}_parameter", - obj_id="__update_formula_parameter_name_dummy__", - change_type=f"{kind}_parameter_update_formula_{extra}parameter_name", - ) - return projects.dataset.add_revision((delta,)) - - -signal_dispatcher_on_project_parameter_update_formula_parameter_name = partial( - signal_dispatcher_on_update_formula_parameter_name, - kind="project", -) -signal_dispatcher_on_database_parameter_update_formula_project_parameter_name = partial( - signal_dispatcher_on_update_formula_parameter_name, kind="database", extra="project_" -) -signal_dispatcher_on_database_parameter_update_formula_database_parameter_name = partial( - signal_dispatcher_on_update_formula_parameter_name, kind="database", extra="database_" -) -signal_dispatcher_on_activity_parameter_update_formula_project_parameter_name = partial( - signal_dispatcher_on_update_formula_parameter_name, kind="activity", extra="project_" -) -signal_dispatcher_on_activity_parameter_update_formula_database_parameter_name = partial( - signal_dispatcher_on_update_formula_parameter_name, kind="activity", extra="database_" -) -signal_dispatcher_on_activity_parameter_update_formula_activity_parameter_name = partial( - signal_dispatcher_on_update_formula_parameter_name, kind="activity", extra="activity_" -) - # `.connect()` directly just fails silently... 
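The comment above refers to the pattern used for the dispatchers that follow: each `partial` is bound to a module-level name before being passed to `connect()`, most likely because the signal library keeps only weak references to receivers, so an anonymous `partial` passed straight to `connect()` would be garbage-collected and the connection would be lost silently. A minimal sketch of the same pattern, assuming the blinker-style API that `bw2data.signals` appears to build on (the `demo_*` names are illustrative only, not part of bw2data):

.. code-block:: python

    from functools import partial

    from blinker import signal

    demo_signal = signal("demo.on_save")

    def demo_dispatcher(sender, operation=None):
        print(f"{sender}: {operation}")

    # Keep a strong, module-level reference to the partial, then connect it;
    # an unnamed partial handed directly to connect() could be dropped silently.
    demo_dispatcher_on_save = partial(demo_dispatcher, operation="save")
    demo_signal.connect(demo_dispatcher_on_save)

    demo_signal.send("some-object")  # prints "some-object: save"
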
signal_dispatcher_on_activity_database_change = partial( signal_dispatcher, operation="activity_database_change" @@ -703,18 +659,6 @@ def signal_dispatcher_on_update_formula_parameter_name( ) signal_dispatcher_on_database_reset = partial(signal_dispatcher_on_database, verb="reset") signal_dispatcher_on_database_delete = partial(signal_dispatcher_on_database, verb="delete") -signal_dispatcher_on_project_parameter_recalculate = partial( - signal_dispatcher_on_project_parameter, verb="recalculate", name="__recalculate_dummy__" -) -signal_dispatcher_on_database_parameter_recalculate = partial( - signal_dispatcher_on_database_parameter, verb="recalculate" -) -signal_dispatcher_on_activity_parameter_recalculate = partial( - signal_dispatcher_on_activity_parameter, verb="recalculate" -) -signal_dispatcher_on_activity_parameter_recalculate_exchanges = partial( - signal_dispatcher_on_activity_parameter, verb="recalculate_exchanges" -) projects = ProjectManager() bw2signals.signaleddataset_on_save.connect(signal_dispatcher) @@ -725,37 +669,6 @@ def signal_dispatcher_on_update_formula_parameter_name( bw2signals.on_database_reset.connect(signal_dispatcher_on_database_reset) bw2signals.on_database_delete.connect(signal_dispatcher_on_database_delete) bw2signals.on_database_write.connect(signal_dispatcher_on_database_write) -bw2signals.on_project_parameter_recalculate.connect( - signal_dispatcher_on_project_parameter_recalculate -) -bw2signals.on_database_parameter_recalculate.connect( - signal_dispatcher_on_database_parameter_recalculate -) -bw2signals.on_activity_parameter_recalculate.connect( - signal_dispatcher_on_activity_parameter_recalculate -) -bw2signals.on_activity_parameter_recalculate_exchanges.connect( - signal_dispatcher_on_activity_parameter_recalculate_exchanges -) - -bw2signals.on_project_parameter_update_formula_parameter_name.connect( - signal_dispatcher_on_project_parameter_update_formula_parameter_name -) -bw2signals.on_database_parameter_update_formula_project_parameter_name.connect( - signal_dispatcher_on_database_parameter_update_formula_project_parameter_name -) -bw2signals.on_database_parameter_update_formula_database_parameter_name.connect( - signal_dispatcher_on_database_parameter_update_formula_database_parameter_name -) -bw2signals.on_activity_parameter_update_formula_project_parameter_name.connect( - signal_dispatcher_on_activity_parameter_update_formula_project_parameter_name -) -bw2signals.on_activity_parameter_update_formula_database_parameter_name.connect( - signal_dispatcher_on_activity_parameter_update_formula_database_parameter_name -) -bw2signals.on_activity_parameter_update_formula_activity_parameter_name.connect( - signal_dispatcher_on_activity_parameter_update_formula_activity_parameter_name -) @wrapt.decorator diff --git a/bw2data/revisions.py b/bw2data/revisions.py index 235a8d3f..bf49a5d0 100644 --- a/bw2data/revisions.py +++ b/bw2data/revisions.py @@ -9,14 +9,6 @@ from bw2data.backends.utils import dict_as_activitydataset, dict_as_exchangedataset from bw2data.database import DatabaseChooser from bw2data.errors import DifferentObjects, IncompatibleClasses -from bw2data.parameters import ( - ActivityParameter, - DatabaseParameter, - Group, - ParameterBase, - ParameterizedExchange, - ProjectParameter, -) from bw2data.signals import SignaledDataset from bw2data.snowflake_ids import snowflake_id_generator from bw2data.utils import get_node @@ -285,131 +277,6 @@ def create(cls, revision_data: dict) -> None: ) -class RevisionedParameter(RevisionedORMProxy): - 
@classmethod - def _state_as_dict(cls, obj: ParameterBase) -> dict: - return {key: getattr(obj, key) for key in cls.KEYS} - - @classmethod - def current_state_as_dict(cls, obj: ParameterBase) -> dict: - return cls._state_as_dict(obj) - - @classmethod - def previous_state_as_dict(cls, revision_data: dict) -> dict: - orm_object = cls.ORM_CLASS.get_by_id(revision_data["id"]) - return cls._state_as_dict(orm_object) - - @classmethod - def update(cls, revision_data: dict) -> None: - previous = cls.previous_state_as_dict(revision_data) - updated_data = Delta.from_dict(revision_data["delta"]).apply(previous) - updated_orm_object = cls.ORM_CLASS(**updated_data) - updated_orm_object.id = revision_data["id"] - updated_orm_object.save(signal=False) - - @classmethod - def delete(cls, revision_data: dict) -> None: - cls.ORM_CLASS.get_by_id(revision_data["id"]).delete_instance(signal=False) - - @classmethod - def create(cls, revision_data: dict) -> None: - data = Delta.from_dict(revision_data["delta"]).apply({}) - orm_object = cls.ORM_CLASS(**data) - orm_object.id = revision_data["id"] - # Force insert because we specify the primary key already but object not in database - orm_object.save(signal=False, force_insert=True) - - @classmethod - def _unwrap_diff_dict(cls, data: dict) -> dict: - return { - "old": data["delta"]["dictionary_item_removed"]["root['old']"], - "new": data["delta"]["dictionary_item_added"]["root['new']"], - } - - -class RevisionedGroup(RevisionedParameter): - KEYS = ("id", "name", "order") - ORM_CLASS = Group - # Implicitly skips `fresh` and `updated` fields because they are in `KEYS`. - - -class RevisionedParameterizedExchange(RevisionedParameter): - KEYS = ("id", "group", "formula", "exchange") - ORM_CLASS = ParameterizedExchange - - -class RevisionedProjectParameter(RevisionedParameter): - KEYS = ("id", "name", "formula", "amount", "data") - ORM_CLASS = ProjectParameter - - @classmethod - def project_parameter_recalculate(cls, revision_data: dict) -> None: - cls.ORM_CLASS.recalculate(signal=False) - - @classmethod - def project_parameter_update_formula_parameter_name(cls, revision_data: dict) -> None: - cls.ORM_CLASS.update_formula_parameter_name( - signal=False, **cls._unwrap_diff_dict(revision_data) - ) - - -class RevisionedDatabaseParameter(RevisionedParameter): - KEYS = ("id", "database", "name", "formula", "amount", "data") - ORM_CLASS = DatabaseParameter - - @classmethod - def database_parameter_recalculate(cls, revision_data: dict) -> None: - cls.ORM_CLASS.recalculate(database=revision_data["id"], signal=False) - - @classmethod - def database_parameter_update_formula_project_parameter_name(cls, revision_data: dict) -> None: - cls.ORM_CLASS.update_formula_project_parameter_name( - signal=False, **cls._unwrap_diff_dict(revision_data) - ) - - @classmethod - def database_parameter_update_formula_database_parameter_name(cls, revision_data: dict) -> None: - cls.ORM_CLASS.update_formula_database_parameter_name( - signal=False, **cls._unwrap_diff_dict(revision_data) - ) - - -class RevisionedActivityParameter(RevisionedParameter): - KEYS = ("id", "group", "database", "code", "name", "formula", "amount", "data") - ORM_CLASS = ActivityParameter - - @classmethod - def activity_parameter_recalculate(cls, revision_data: dict) -> None: - cls.ORM_CLASS.recalculate(group=revision_data["id"], signal=False) - - @classmethod - def activity_parameter_recalculate_exchanges(cls, revision_data: dict) -> None: - cls.ORM_CLASS.recalculate_exchanges(group=revision_data["id"], signal=False) - - 
@classmethod - def activity_parameter_update_formula_project_parameter_name(cls, revision_data: dict) -> None: - cls.ORM_CLASS.update_formula_project_parameter_name( - signal=False, **cls._unwrap_diff_dict(revision_data) - ) - - @classmethod - def activity_parameter_update_formula_database_parameter_name(cls, revision_data: dict) -> None: - cls.ORM_CLASS.update_formula_database_parameter_name( - signal=False, **cls._unwrap_diff_dict(revision_data) - ) - - @classmethod - def activity_parameter_update_formula_activity_parameter_name(cls, revision_data: dict) -> None: - dct = { - "old": revision_data["delta"]["dictionary_item_removed"]["root['old']"], - "new": revision_data["delta"]["dictionary_item_added"]["root['new']"], - "include_order": revision_data["delta"]["dictionary_item_added"][ - "root['include_order']" - ], - } - cls.ORM_CLASS.update_formula_activity_parameter_name(signal=False, **dct) - - class RevisionedNode(RevisionedORMProxy): PROXY_CLASS = Activity ORM_CLASS = Activity.ORMDataset @@ -472,20 +339,10 @@ def handle(cls, revision_data: dict) -> None: SIGNALLEDOBJECT_TO_LABEL = { ActivityDataset: "lci_node", ExchangeDataset: "lci_edge", - ProjectParameter: "project_parameter", - DatabaseParameter: "database_parameter", - ActivityParameter: "activity_parameter", - ParameterizedExchange: "parameterized_exchange", - Group: "group", } REVISIONED_LABEL_AS_OBJECT = { "lci_node": RevisionedNode, "lci_edge": RevisionedEdge, "lci_database": RevisionedDatabase, - "project_parameter": RevisionedProjectParameter, - "database_parameter": RevisionedDatabaseParameter, - "activity_parameter": RevisionedActivityParameter, - "parameterized_exchange": RevisionedParameterizedExchange, - "group": RevisionedGroup, } REVISIONS_OBJECT_AS_LABEL = {v: k for k, v in REVISIONED_LABEL_AS_OBJECT.items()} diff --git a/bw2data/signals.py b/bw2data/signals.py index bacf0a01..6dfbf91a 100644 --- a/bw2data/signals.py +++ b/bw2data/signals.py @@ -100,121 +100,6 @@ """, ) -on_project_parameter_recalculate = signal( - "bw2data.on_project_parameter_recalculate", - doc="""Emitted *after* a call to `bw2data.parameters.ProjectParameter.recalculate()`. - -No expected inputs. - -No expected return value. -""", -) - -on_project_parameter_update_formula_parameter_name = signal( - "bw2data.on_project_parameter_update_formula_parameter_name", - doc="""Emitted *after* a call to `bw2data.parameters.ProjectParameter.update_formula_parameter_name()`. - -Expected inputs: - * `old` - dict like {"old": str} with *previous* parameter name - * `new` - dict like {"new": str} with *new* parameter name - -No expected return value. -""", -) - -on_database_parameter_recalculate = signal( - "bw2data.on_database_parameter_recalculate", - doc="""Emitted *after* a call to `bw2data.parameters.DatabaseParameter.recalculate()`. - -Expected inputs: - * `name`: str - database name - -No expected return value. -""", -) - -on_database_parameter_update_formula_project_parameter_name = signal( - "bw2data.on_database_parameter_update_formula_project_parameter_name", - doc="""Emitted *after* a call to `bw2data.parameters.DatabaseParameter.update_formula_project_parameter_name()`. - -Expected inputs: - * `old` - dict like {"old": str} with *previous* parameter name - * `new` - dict like {"new": str} with *new* parameter name - -No expected return value. 
-""", -) - -on_database_parameter_update_formula_database_parameter_name = signal( - "bw2data.on_database_parameter_update_formula_database_parameter_name", - doc="""Emitted *after* a call to `bw2data.parameters.DatabaseParameter.update_formula_database_parameter_name()`. - -Expected inputs: - * `old` - dict like {"old": str} with *previous* parameter name - * `new` - dict like {"new": str} with *new* parameter name - -No expected return value. -""", -) - -on_activity_parameter_recalculate = signal( - "bw2data.on_activity_parameter_recalculate", - doc="""Emitted *after* a call to `bw2data.parameters.ActivityParameter.recalculate()`. - -Expected inputs: - * `name`: str - group name - -No expected return value. -""", -) - -on_activity_parameter_recalculate_exchanges = signal( - "bw2data.on_activity_parameter_recalculate_exchanges", - doc="""Emitted *after* a call to `bw2data.parameters.ActivityParameter.recalculate_exchanges()`. - -Expected inputs: - * `name`: str - group name - -No expected return value. -""", -) - -on_activity_parameter_update_formula_project_parameter_name = signal( - "bw2data.on_activity_parameter_update_formula_project_parameter_name", - doc="""Emitted *after* a call to `bw2data.parameters.ActivityParameter.update_formula_project_parameter_name()`. - -Expected inputs: - * `old` - dict like {"old": str} with *previous* parameter name - * `new` - dict like {"new": str} with *new* parameter name - -No expected return value. -""", -) - -on_activity_parameter_update_formula_database_parameter_name = signal( - "bw2data.on_activity_parameter_update_formula_database_parameter_name", - doc="""Emitted *after* a call to `bw2data.parameters.ActivityParameter.update_formula_database_parameter_name()`. - -Expected inputs: - * `old` - dict like {"old": str} with *previous* parameter name - * `new` - dict like {"new": str} with *new* parameter name - -No expected return value. -""", -) - -on_activity_parameter_update_formula_activity_parameter_name = signal( - "bw2data.on_activity_parameter_update_formula_activity_parameter_name", - doc="""Emitted *after* a call to `bw2data.parameters.ActivityParameter.update_formula_activity_parameter_name()`. - -Expected inputs: - * `old` - dict like {"old": str} with *previous* parameter name - * `new` - dict like {"new": str, "include_order": bool} with *new* parameter name - -No expected return value. 
-""", -) - project_changed = signal( "bw2data.project_changed", doc=""" diff --git a/bw2data/tests.py b/bw2data/tests.py index fbfc8306..add25839 100644 --- a/bw2data/tests.py +++ b/bw2data/tests.py @@ -9,7 +9,6 @@ import wrapt from bw2data import config, databases, geomapping, methods -from bw2data.parameters import parameters from bw2data.project import projects @@ -40,7 +39,6 @@ def test_setup_clean(self): self.assertTrue("GLO" in geomapping) self.assertEqual(len(projects), 1) # Default project self.assertTrue("default" not in projects) - self.assertFalse(len(parameters)) @wrapt.decorator diff --git a/docs/content/events_and_changes.md b/docs/content/events_and_changes.md index 2e774d94..320e6080 100644 --- a/docs/content/events_and_changes.md +++ b/docs/content/events_and_changes.md @@ -7,10 +7,6 @@ * `bw2data.backends.proxies.Exchange` (via `bw2data.backends.schema.ExchangeDataset`) * `bw2data.backends.schema.ExchangeDataset` * `bw2data.meta.databases` -* `bw2data.parameters.ProjectParameter` -* `bw2data.parameters.DatabaseParameter` -* `bw2data.parameters.ActivityParameter` -* `bw2data.parameters.ParameterizedExchange` These change events can grouped into three types: `create`, `update`, and `delete`. @@ -23,10 +19,6 @@ Higher-level objects can cause changes to lower level objects. The object hierar 1. Database 2. Activity 3. Exchange -4a. ProjectParameter -4b. DatabaseParameter -4c. ActivityParameter -5. ParameterizedExchange (but a change to a `ParameterizedExchange` can change the amount in an `Exchange`) So changing the name of a `Database` will cause changes to `Activity` and `Exchange` objects, deleting an `Activity` object will delete `Exchange` objects and possibly `ParameterizedExchange` objects, and changing the value of a `DatabaseParameter` could change a `ParameterizedExchange` which would then cause a change in an `Exchange`. 
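The change-event documentation above still applies to the objects that remain signalled after this patch (databases, activities, and exchanges). A minimal sketch of observing one of the surviving events, assuming the `on_database_write` signal and the `(sender, name)` handler signature shown for its dispatcher in `bw2data/project.py`; the handler and project/database names are illustrative only:

.. code-block:: python

    from bw2data import Database, projects
    from bw2data.signals import on_database_write

    def report_database_write(sender, name=None, **kwargs):
        # `name` is expected to be the database name, per the dispatcher
        # signature retained in this patch.
        print(f"database written: {name}")

    on_database_write.connect(report_database_write)

    # Writing a database is expected to emit the signal (sketch only).
    projects.set_current("signals-demo")
    Database("example").write({("example", "a"): {"name": "an activity", "exchanges": []}})
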
diff --git a/tests/activity_proxy.py b/tests/activity_proxy.py index c79a80bd..3a99c353 100644 --- a/tests/activity_proxy.py +++ b/tests/activity_proxy.py @@ -4,7 +4,6 @@ from bw2data.backends import ActivityDataset, ExchangeDataset from bw2data.database import DatabaseChooser from bw2data.errors import UnknownObject, ValidityError -from bw2data.parameters import ActivityParameter, ParameterizedExchange, parameters from bw2data.tests import bw2test from bw2data.utils import get_activity @@ -298,46 +297,6 @@ def test_copy_with_kwargs(activity): assert ActivityDataset.select().count() == 2 -@bw2test -def test_delete_activity_parameters(): - db = DatabaseChooser("example") - db.register() - - a = db.new_activity(code="A", name="An activity") - a.save() - b = db.new_activity(code="B", name="Another activity") - b.save() - a.new_exchange(amount=0, input=b, type="technosphere", formula="foo * bar + 4").save() - - assert ExchangeDataset.select().count() == 1 - - activity_data = [ - { - "name": "reference_me", - "formula": "sqrt(25)", - "database": "example", - "code": "B", - }, - { - "name": "bar", - "formula": "reference_me + 2", - "database": "example", - "code": "A", - }, - ] - parameters.new_activity_parameters(activity_data, "my group") - parameters.add_exchanges_to_group("my group", a) - - assert ExchangeDataset.select().count() == 1 - - assert ActivityParameter.select().count() == 2 - assert ParameterizedExchange.select().count() == 1 - - a.delete() - assert ActivityParameter.select().count() == 1 - assert not ParameterizedExchange.select().count() - - @bw2test def test_delete_calculation_setups(capsys): db = DatabaseChooser("example") diff --git a/tests/database.py b/tests/database.py index 7ac20476..cae91197 100644 --- a/tests/database.py +++ b/tests/database.py @@ -26,12 +26,6 @@ UntypedExchange, WrongDatabase, ) -from bw2data.parameters import ( - ActivityParameter, - DatabaseParameter, - ParameterizedExchange, - parameters, -) from bw2data.snowflake_ids import EPOCH_START_MS from bw2data.tests import bw2test @@ -900,53 +894,6 @@ def test_no_distributions_if_no_uncertainty(): package.get_resource("a_database_technosphere_matrix.distributions") -@bw2test -def test_database_delete_parameters(): - db = Database("example") - db.register() - - a = db.new_activity(code="A", name="An activity") - a.save() - b = db.new_activity(code="B", name="Another activity") - b.save() - a.new_exchange(amount=0, input=b, type="technosphere", formula="foo * bar + 4").save() - - database_data = [ - { - "name": "red", - "formula": "(blue ** 2) / 5", - }, - {"name": "blue", "amount": 12}, - ] - parameters.new_database_parameters(database_data, "example") - - activity_data = [ - { - "name": "reference_me", - "formula": "sqrt(red - 20)", - "database": "example", - "code": "B", - }, - { - "name": "bar", - "formula": "reference_me + 2", - "database": "example", - "code": "A", - }, - ] - parameters.new_activity_parameters(activity_data, "my group") - parameters.add_exchanges_to_group("my group", a) - - assert ActivityParameter.select().count() == 2 - assert ParameterizedExchange.select().count() == 1 - assert DatabaseParameter.select().count() == 2 - assert len(parameters) == 4 - - del databases["example"] - assert not len(parameters) - assert not ParameterizedExchange.select().count() - - @bw2test def test_delete_duplicate_exchanges(): all_exchanges = lambda db: [exc for ds in db for exc in ds.exchanges()] diff --git a/tests/exchange_proxy.py b/tests/exchange_proxy.py index 8c9d0d1b..c4396058 100644 --- 
a/tests/exchange_proxy.py +++ b/tests/exchange_proxy.py @@ -1,7 +1,6 @@ from bw2data import Method, databases, geomapping, get_activity, get_node, methods, projects from bw2data.configuration import labels from bw2data.database import DatabaseChooser -from bw2data.parameters import ActivityParameter, ParameterizedExchange, parameters from bw2data.tests import bw2test try: @@ -369,43 +368,6 @@ def test_lca(activity_and_method): assert lca.score == 2 * 42 -@bw2test -def test_delete_parameterized_exchange(): - db = DatabaseChooser("example") - db.register() - - a = db.new_activity(code="A", name="An activity") - a.save() - b = db.new_activity(code="B", name="Another activity") - b.save() - exc = a.new_exchange(amount=0, input=b, type="technosphere", formula="foo * bar + 4") - exc.save() - - activity_data = [ - { - "name": "reference_me", - "formula": "sqrt(25)", - "database": "example", - "code": "B", - }, - { - "name": "bar", - "formula": "reference_me + 2", - "database": "example", - "code": "A", - }, - ] - parameters.new_activity_parameters(activity_data, "my group") - parameters.add_exchanges_to_group("my group", a) - - assert ActivityParameter.select().count() == 2 - assert ParameterizedExchange.select().count() == 1 - - exc.delete() - assert ActivityParameter.select().count() == 2 - assert not ParameterizedExchange.select().count() - - def test_exchange_eq(activity): ex = list(activity.exchanges())[0] assert ex == ex diff --git a/tests/parameters.py b/tests/parameters.py deleted file mode 100644 index af6b55dc..00000000 --- a/tests/parameters.py +++ /dev/null @@ -1,1939 +0,0 @@ -import re -import time - -import pytest -from bw2parameters.errors import MissingName -from peewee import IntegrityError - -from bw2data import Database, get_activity, parameters -from bw2data.parameters import ( - ActivityParameter, - DatabaseParameter, - Group, - GroupDependency, - ParameterizedExchange, - ProjectParameter, - parameters, -) -from bw2data.tests import bw2test - -# Regex to search for UUID: https://stackoverflow.com/a/18359032 -uuid4hex = re.compile(r"[0-9a-f]{8}[0-9a-f]{4}4[0-9a-f]{3}[89ab][0-9a-f]{3}[0-9a-f]{12}", re.I) - -###################### -### Project parameters -###################### - - -@bw2test -def test_project_parameters(): - assert not len(parameters) - obj = ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - assert obj.name == "foo" - assert obj.amount == 3.14 - assert obj.data == {"uncertainty type": 0} - assert str(obj) - assert isinstance(str(obj), str) - - -@bw2test -def test_project_parameter_autocreate_group(): - assert not Group.select().count() - obj = ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - assert Group.get(name="project") - assert not Group.get(name="project").fresh - - -@bw2test -def test_expire_downstream(): - Group.create(fresh=True, name="A") - Group.create(fresh=True, name="B") - GroupDependency.create(group="B", depends="A") - assert Group.get(name="A").fresh - assert Group.get(name="B").fresh - ProjectParameter.expire_downstream("A") - assert not Group.get(name="B").fresh - - -@bw2test -def test_project_parameters_ordering(): - obj = ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - with pytest.raises(TypeError): - obj < 0 - assert not (obj < obj) - another = ProjectParameter.create( - name="bar", - formula="2 * foo", - ) - assert another < obj - - -@bw2test -def test_project_parameters_dict(): - obj = ProjectParameter.create(name="foo", amount=3.14, 
data={"uncertainty type": 0}) - expected = { - "name": "foo", - "amount": 3.14, - "uncertainty type": 0, - } - assert obj.dict == expected - - -@bw2test -def test_project_parameters_load(): - obj = ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - another = ProjectParameter.create( - name="bar", - formula="2 * foo", - ) - expected = { - "foo": {"amount": 3.14, "uncertainty type": 0}, - "bar": {"formula": "2 * foo"}, - } - assert ProjectParameter.load() == expected - assert ProjectParameter.load("project") == expected - assert ProjectParameter.load("foo") == expected - - -@bw2test -def test_project_parameters_static(): - obj = ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - another = ProjectParameter.create( - name="bar", - formula="2 * foo", - ) - assert ProjectParameter.static() == {"foo": 3.14, "bar": None} - assert ProjectParameter.static(only=["foo"]) == {"foo": 3.14} - ProjectParameter.recalculate() - assert ProjectParameter.static() == {"foo": 3.14, "bar": 2 * 3.14} - assert ProjectParameter.static(only=["bar"]) == {"bar": 2 * 3.14} - - -@bw2test -def test_project_parameters_expired(): - assert not ProjectParameter.expired() - obj = ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - assert ProjectParameter.expired() - ProjectParameter.recalculate() - assert not ProjectParameter.expired() - - -@bw2test -def test_project_parameters_recalculate(): - ProjectParameter.recalculate() - Group.create(name="project") - ProjectParameter.recalculate() - obj = ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - another = ProjectParameter.create( - name="bar", - formula="2 * foo", - ) - ProjectParameter.recalculate() - obj = ProjectParameter.get(name="bar") - assert obj.amount == 2 * 3.14 - - -@bw2test -def test_project_parameters_expire_downstream(): - obj = ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - Group.create(name="bar") - GroupDependency.create(group="bar", depends="project") - assert Group.get(name="bar").fresh - ProjectParameter.recalculate() - assert not Group.get(name="bar").fresh - - -@bw2test -def test_project_autoupdate_triggers(): - obj = ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - first = Group.get(name="project").updated - time.sleep(1.1) - another = ProjectParameter.create( - name="bar", - formula="2 * foo", - ) - second = Group.get(name="project").updated - assert first != second - time.sleep(1.1) - ProjectParameter.update(amount=7).execute() - third = Group.get(name="project").updated - assert second != third - time.sleep(1.1) - ProjectParameter.get(name="foo").delete_instance() - fourth = Group.get(name="project").updated - assert fourth != third - - -@bw2test -def test_project_name_uniqueness(): - obj = ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - with pytest.raises(IntegrityError): - ProjectParameter.create( - name="foo", - amount=7, - ) - - -@bw2test -def test_project_parameter_dependency_chain(): - ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - ProjectParameter.create(name="bar", amount=6.28, formula="foo * 2") - expected = [ - {"kind": "project", "group": "project", "names": set(["foo"])}, - ] - assert ProjectParameter.dependency_chain() == expected - - -@bw2test -def test_project_parameter_dependency_chain_missing(): - ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - 
ProjectParameter.create(name="baz", amount=8, formula="foo * bar") - with pytest.raises(MissingName): - ProjectParameter.dependency_chain() - - -@bw2test -def test_project_parameter_depend_within_group(): - ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - ProjectParameter.create(name="baz", amount=8, formula="foo * 2") - assert ProjectParameter.is_dependency_within_group("foo") - assert not ProjectParameter.is_dependency_within_group("baz") - - -@bw2test -def test_project_parameter_is_deletable(): - """Project parameters can be deleted if they are no dependencies.""" - ProjectParameter.create(name="foo", amount=3.14) - assert ProjectParameter.get(name="foo").is_deletable() - - -@bw2test -def test_project_parameter_is_not_deletable_project(): - ProjectParameter.create(name="foo", amount=3.14) - ProjectParameter.create(name="bar", amount=1, formula="foo * 2") - assert not ProjectParameter.get(name="foo").is_deletable() - - -@bw2test -def test_project_parameter_is_not_deletable_database(): - ProjectParameter.create(name="foo", amount=3.14) - Database("B").register() - DatabaseParameter.create(database="B", name="bar", amount=1, formula="foo * 5") - # Recalculate to build GroupDependencies. - parameters.recalculate() - assert not ProjectParameter.get(name="foo").is_deletable() - - -@bw2test -def test_project_parameter_is_not_deletable_activity(): - ProjectParameter.create(name="foo", amount=3.14) - Database("B").register() - ActivityParameter.create( - group="baz", database="B", code="first", name="bar", amount=0, formula="foo + 4" - ) - # Recalculate to build GroupDependencies. - parameters.recalculate() - assert not ProjectParameter.get(name="foo").is_deletable() - - -@bw2test -def test_project_parameter_formula_update(): - """Update formulas only where the name of the parameter is an exact match.""" - ProjectParameter.create(name="foo", amount=3.14) - ProjectParameter.create(name="foobar", amount=6.28) - ProjectParameter.create(name="bar", amount=1, formula="foo + 2") - ProjectParameter.create(name="baz", amount=1, formula="foobar * 3") - assert ProjectParameter.select().where(ProjectParameter.formula.contains("foo")).count() == 2 - ProjectParameter.update_formula_parameter_name("foo", "efficiency") - assert ProjectParameter.get(name="bar").formula == "efficiency + 2" - assert ProjectParameter.get(name="baz").formula == "foobar * 3" - - -####################### -### Database parameters -####################### - - -@bw2test -def test_create_database_parameters(): - assert not len(parameters) - obj = DatabaseParameter.create( - database="bar", - name="foo", - amount=3.14, - ) - assert obj.name == "foo" - assert obj.database == "bar" - assert obj.amount == 3.14 - assert str(obj) - assert isinstance(str(obj), str) - assert len(parameters) - - -@bw2test -def test_database_parameters_group_autocreated(): - assert not Group.select().count() - obj = DatabaseParameter.create( - database="bar", - name="foo", - amount=3.14, - ) - assert Group.get(name="bar") - assert not Group.get(name="bar").fresh - - -@bw2test -def test_database_parameters_expired(): - assert not DatabaseParameter.expired("bar") - DatabaseParameter.create( - database="bar", - name="foo", - amount=3.14, - ) - assert DatabaseParameter.expired("bar") - - -@bw2test -def test_database_parameters_dict(): - obj = DatabaseParameter.create( - database="bar", - name="foo", - amount=3.14, - ) - expected = { - "database": "bar", - "name": "foo", - "amount": 3.14, - } - assert obj.dict == expected - - -@bw2test 
-def test_database_parameters_load(): - DatabaseParameter.create( - database="bar", - name="foo", - amount=3.14, - ) - DatabaseParameter.create(database="bar", name="baz", formula="foo + baz") - expected = { - "foo": {"database": "bar", "amount": 3.14}, - "baz": {"database": "bar", "formula": "foo + baz"}, - } - assert DatabaseParameter.load("bar") == expected - - -@bw2test -def test_database_parameters_static(): - DatabaseParameter.create( - database="bar", - name="foo", - amount=3.14, - ) - DatabaseParameter.create(database="bar", name="baz", amount=7, formula="foo + baz") - expected = {"foo": 3.14, "baz": 7} - assert DatabaseParameter.static("bar") == expected - assert DatabaseParameter.static("bar", only=["baz"]) == {"baz": 7} - - -@bw2test -def test_database_parameters_check(): - with pytest.raises(IntegrityError): - DatabaseParameter.create( - database="project", - name="foo", - amount=3.14, - ) - - -@bw2test -def test_database_autoupdate_triggers(): - obj = DatabaseParameter.create( - database="A", - name="foo", - amount=3.14, - ) - first = Group.get(name="A").updated - time.sleep(1.1) - another = DatabaseParameter.create( - database="A", - name="bar", - formula="2 * foo", - ) - second = Group.get(name="A").updated - assert first != second - time.sleep(1.1) - DatabaseParameter.update(amount=7).execute() - third = Group.get(name="A").updated - assert second != third - time.sleep(1.1) - DatabaseParameter.get(name="foo").delete_instance() - fourth = Group.get(name="A").updated - assert fourth != third - - -@bw2test -def test_database_uniqueness_constraint(): - DatabaseParameter.create(database="A", name="foo", amount=3.14, data={"uncertainty type": 0}) - with pytest.raises(IntegrityError): - DatabaseParameter.create( - database="A", - name="foo", - amount=7, - ) - - -@bw2test -def test_database_parameter_cross_database_constraint(): - """Database parameters cannot use parameters on other databases.""" - Database("B").register() - Database("C").register() - DatabaseParameter.create( - database="B", - name="car", - amount=8, - ) - DatabaseParameter.create( - database="C", - name="plane", - formula="car ** 5", - ) - with pytest.raises(MissingName): - DatabaseParameter.recalculate("C") - - -@bw2test -def test_update_database_parameters(): - assert not Group.select().count() - assert not GroupDependency.select().count() - - DatabaseParameter.create( - database="A", - name="B", - amount=5, - ) - o = DatabaseParameter.create( - database="A", - name="C", - formula="B * 2 + foo", - ) - Group.create(name="Zed") - GroupDependency.create(group="Zed", depends="A") - assert Group.get(name="A") - with pytest.raises(MissingName): - DatabaseParameter.recalculate("A") - o.formula = "B * 2" - o.save() - DatabaseParameter.recalculate("A") - assert Group.get(name="A").fresh - assert DatabaseParameter.get(name="C").amount == 10 - assert not Group.get(name="Zed").fresh - - o.formula = "B * 2 + foo + bar" - o.save() - ProjectParameter.create(name="foo", amount=3.14, data={"uncertainty type": 0}) - ProjectParameter.create( - name="bar", - formula="2 * foo", - ) - assert Group.get(name="project") - Database("A").register() - - obj = DatabaseParameter.get(name="C") - assert obj.amount != 3.14 * 3 + 10 - with pytest.raises(GroupDependency.DoesNotExist): - GroupDependency.get(group="A", depends="project") - - DatabaseParameter.recalculate("A") - assert GroupDependency.get(group="A", depends="project") - assert Group.get(name="A") - assert Group.get(name="project") - obj = DatabaseParameter.get(name="C") - assert 
obj.amount == 3.14 * 3 + 10 - - -@bw2test -def test_database_parameter_dependency_chain(): - Database("B").register() - DatabaseParameter.create( - database="B", - name="car", - formula="2 ** fly", - amount=8, - ) - DatabaseParameter.create( - database="B", - name="bike", - formula="car - hike", - amount=2, - ) - ProjectParameter.create( - name="hike", - formula="2 * 2 * 2", - amount=6, - ) - ProjectParameter.create( - name="fly", - formula="3", - amount=3, - ) - expected = [ - {"kind": "project", "group": "project", "names": set(["fly", "hike"])}, - ] - assert DatabaseParameter.dependency_chain("B") == expected - assert DatabaseParameter.dependency_chain("missing") == [] - - -@bw2test -def test_database_parameter_dependency_chain_missing(): - Database("B").register() - DatabaseParameter.create( - database="B", - name="car", - formula="2 ** fly", - amount=8, - ) - ProjectParameter.create( - name="hike", - formula="2 * 2 * 2", - amount=6, - ) - with pytest.raises(MissingName): - DatabaseParameter.dependency_chain("B") - - -@bw2test -def test_database_parameter_dependency_chain_include_self(): - Database("B").register() - DatabaseParameter.create( - database="B", - name="car", - formula="2 ** fly", - amount=8, - ) - DatabaseParameter.create( - database="B", - name="truck", - formula="car * 5", - ) - ProjectParameter.create( - name="fly", - formula="3", - amount=3, - ) - expected = [ - {"kind": "project", "group": "project", "names": set(["fly"])}, - {"kind": "database", "group": "B", "names": set(["car"])}, - ] - # Method now also includes required names within group - assert DatabaseParameter.dependency_chain("B", include_self=True) == expected - - -@bw2test -def test_database_parameter_depend_within_group(): - Database("B").register() - Database("C").register() - DatabaseParameter.create( - database="B", - name="car", - formula="2 ** fly", - amount=8, - ) - DatabaseParameter.create( - database="B", - name="truck", - amount=2, - formula="car * 5", - ) - DatabaseParameter.create( - database="C", - name="fly", - amount=7, - ) - DatabaseParameter.create( - database="C", - name="parade", - amount=1, - formula="fly * 2.7", - ) - ProjectParameter.create( - name="fly", - formula="3", - amount=3, - ) - parameters.recalculate() - assert DatabaseParameter.is_dependency_within_group("car", "B") - assert not DatabaseParameter.is_dependency_within_group("truck", "B") - assert DatabaseParameter.is_dependency_within_group("fly", "C") - - -@bw2test -def test_database_parameter_is_deletable(): - """Database parameters can be deleted if they are no dependencies.""" - Database("B").register() - DatabaseParameter.create(database="B", name="car", amount=8) - assert DatabaseParameter.get(name="car").is_deletable() - - -@bw2test -def test_database_parameter_is_not_deletable_database(): - Database("B").register() - DatabaseParameter.create(database="B", name="car", amount=8) - DatabaseParameter.create(database="B", name="truck", formula="car * 5", amount=4) - assert not DatabaseParameter.get(name="car").is_deletable() - - -@bw2test -def test_database_parameter_is_not_deletable_activity(): - Database("B").register() - DatabaseParameter.create(database="B", name="car", amount=8) - ActivityParameter.create( - group="cars", - database="B", - code="first", - name="bar", - amount=0, - formula="car + 4", - ) - # Build GroupDependencies - parameters.recalculate() - assert not DatabaseParameter.get(name="car").is_deletable() - - -@bw2test -def test_database_parameter_is_dependent_on(): - """Databases parameters can be 
dependent on project parameters.""" - Database("B").register() - ProjectParameter.create(name="foo", amount=2) - ProjectParameter.create(name="bar", amount=5) - DatabaseParameter.create(database="B", name="baz", amount=1, formula="foo + 2") - parameters.recalculate() - assert DatabaseParameter.is_dependent_on("foo") - assert not DatabaseParameter.is_dependent_on("bar") - - -@bw2test -def test_database_parameter_formula_update_project(): - """Update formulas of database parameters, only update the formulas - where the actual ProjectParameter is referenced. - """ - ProjectParameter.create(name="foo", amount=2) - ProjectParameter.create(name="tracks", amount=14) - Database("B").register() - Database("C").register() - DatabaseParameter.create(database="B", name="bar", amount=1, formula="foo + 2") - DatabaseParameter.create(database="C", name="bing", amount=1, formula="foo + 2") - DatabaseParameter.create(database="C", name="foo", amount=8, formula="tracks * 2") - parameters.recalculate() - assert DatabaseParameter.get(name="bar").formula == "foo + 2" - assert DatabaseParameter.get(name="bing").formula == "foo + 2" - DatabaseParameter.update_formula_project_parameter_name("foo", "baz") - assert DatabaseParameter.get(name="bar").formula == "baz + 2" - assert DatabaseParameter.get(name="bing").formula == "foo + 2" - - -@bw2test -def test_database_parameter_formula_update_database(): - """Update formulas of database parameters, only update the formulas - where the actual DatabaseParameter is referenced. - """ - ProjectParameter.create(name="foo", amount=2) - Database("B").register() - Database("C").register() - DatabaseParameter.create(database="B", name="bar", amount=1, formula="foo + 2") - DatabaseParameter.create(database="C", name="bing", amount=1, formula="foo + 2") - DatabaseParameter.create( - database="C", - name="foo", - amount=8, - ) - parameters.recalculate() - assert DatabaseParameter.get(name="bar").formula == "foo + 2" - assert DatabaseParameter.get(name="bing").formula == "foo + 2" - DatabaseParameter.update_formula_database_parameter_name("foo", "baz") - assert DatabaseParameter.get(name="bar").formula == "foo + 2" - assert DatabaseParameter.get(name="bing").formula == "baz + 2" - - -########################### -### Parameterized exchanges -########################### - - -@bw2test -def test_create_parameterized_exchange_missing_group(): - with pytest.raises(IntegrityError): - obj = ParameterizedExchange.create(group="A", exchange=42, formula="foo + bar") - - -@bw2test -def test_create_parameterized_exchange(): - assert not ParameterizedExchange.select().count() - ActivityParameter.insert_dummy("A", ("b", "c")) - obj = ParameterizedExchange.create(group="A", exchange=42, formula="foo + bar") - assert obj.group == "A" - assert obj.exchange == 42 - assert obj.formula == "foo + bar" - assert ParameterizedExchange.select().count() - - -@bw2test -def test_create_parameterized_exchange_nonunique(): - ActivityParameter.insert_dummy("A", ("b", "c")) - ParameterizedExchange.create(group="A", exchange=42, formula="foo + bar") - with pytest.raises(IntegrityError): - ParameterizedExchange.create(group="B", exchange=42, formula="2 + 3") - - -####################### -### Activity parameters -####################### - - -@pytest.fixture -@bw2test -def chain(): - Database("B").register() - Database("K").register() - Group.create(name="G", order=["A"]) - ActivityParameter.create( - group="A", - database="B", - code="C", - name="D", - formula="2 ** 3", - amount=1, - ) - ActivityParameter.create( - 
group="A", - database="B", - code="E", - name="F", - formula="foo + bar + D", - amount=2, - ) - ActivityParameter.create( - group="G", - database="K", - code="H", - name="J", - formula="F + D * 2", - amount=3, - ) - DatabaseParameter.create( - database="B", - name="foo", - formula="2 ** 2", - amount=5, - ) - ProjectParameter.create( - name="bar", - formula="2 * 2 * 2", - amount=6, - ) - - -@bw2test -def test_create_activity_parameter(): - assert not ActivityParameter.select().count() - obj = ActivityParameter.create(group="A", database="B", code="C", name="D", amount=3.14) - assert obj.group == "A" - assert obj.database == "B" - assert obj.code == "C" - assert obj.name == "D" - assert obj.amount == 3.14 - assert str(obj) - assert isinstance(str(obj), str) - assert ActivityParameter.select().count() - assert len(parameters) - - -@bw2test -def test_activity_parameters_group_autocreated(): - assert not Group.select().count() - ActivityParameter.create(group="A", database="B", code="C", name="D", amount=3.14) - assert Group.get(name="A") - assert not Group.get(name="A").fresh - - -@bw2test -def test_activity_parameter_expired(): - assert not ActivityParameter.expired("A") - ActivityParameter.create(group="A", database="B", code="C", name="D", amount=3.14) - assert ActivityParameter.expired("A") - Group.get(name="A").freshen() - assert not ActivityParameter.expired("A") - - -@bw2test -def test_activity_parameter_dict(): - a = ActivityParameter.create(group="A", database="B", code="C", name="D", amount=3.14) - expected = {"database": "B", "code": "C", "name": "D", "amount": 3.14} - assert a.dict == expected - b = ActivityParameter.create( - group="A", - database="B", - code="E", - name="F", - amount=7, - data={"foo": "bar"}, - formula="7 * 1", - ) - expected = { - "database": "B", - "code": "E", - "name": "F", - "amount": 7, - "foo": "bar", - "formula": "7 * 1", - } - assert b.dict == expected - - -@bw2test -def test_activity_parameter_load(): - ActivityParameter.create( - group="A", - database="B", - code="E", - name="F", - amount=7, - data={"foo": "bar"}, - formula="7 * 1", - ) - expected = { - "F": { - "database": "B", - "code": "E", - "amount": 7, - "foo": "bar", - "formula": "7 * 1", - } - } - assert ActivityParameter.load("A") == expected - - -def test_activity_parameter_static(chain): - expected = {"D": 1, "F": 2} - assert ActivityParameter.static("A") == expected - expected = {} - assert ActivityParameter.static("A", only=[]) == expected - expected = {"D": 1} - assert ActivityParameter.static("A", only=["D"]) == expected - expected = {"D": 1, "F": 2, "foo": 5, "bar": 6} - assert ActivityParameter.static("A", full=True) == expected - expected = {"foo": 5, "bar": 6} - assert ActivityParameter.static("A", full=True, only=["foo", "bar"]) == expected - - -@bw2test -def test_activity_parameter_recalculate_shortcut(): - assert not ActivityParameter.recalculate("A") - ActivityParameter.create(group="A", database="B", code="C", name="D", amount=3.14) - Group.get(name="A").freshen() - assert not ActivityParameter.recalculate("A") - - -def test_activity_parameter_dependency_chain(chain): - expected = [{"kind": "activity", "group": "A", "names": set(["D", "F"])}] - assert ActivityParameter.dependency_chain("G") == expected - expected = [ - {"kind": "database", "group": "B", "names": set(["foo"])}, - {"kind": "project", "group": "project", "names": set(["bar"])}, - ] - assert ActivityParameter.dependency_chain("A") == expected - - -def test_activity_parameter_dependency_chain_missing(chain): - """Use 
unknown parameter 'K' in formula to test for MissingName error.""" - ActivityParameter.create( - group="G", - database="K", - code="L", - name="M", - formula="foo + bar / K", - amount=7, - ) - with pytest.raises(MissingName): - ActivityParameter.dependency_chain("G") - - -def test_activity_parameter_dependency_chain_includes_exchanges(chain): - ProjectParameter.create(name="something_new", amount=10) - db = Database("K") - a = db.new_activity(code="something something danger zone", name="An activity") - a.save() - a.new_exchange(amount=0, input=a, type="production", formula="something_new + 4 - J").save() - parameters.add_exchanges_to_group("G", a) - - expected = [ - {"kind": "activity", "group": "A", "names": {"D", "F"}}, - {"group": "project", "kind": "project", "names": {"something_new"}}, - ] - assert ActivityParameter.dependency_chain("G") == expected - - -def test_activity_parameter_dependency_chain_include_self(chain): - """Out of the parameters 'D' and 'F' in group 'A', only 'D' counts - as a dependency for group 'A'. - - This means that 'F' can be freely deleted, after which 'D' is no longer - a dependency for group 'A' (as 'D' was a dependency of 'F') and can now - also be deleted. - """ - expected = [ - {"kind": "database", "group": "B", "names": set(["foo"])}, - {"kind": "project", "group": "project", "names": set(["bar"])}, - ] - assert ActivityParameter.dependency_chain("A") == expected - expected = [ - {"kind": "activity", "group": "A", "names": set(["D"])}, - {"kind": "database", "group": "B", "names": set(["foo"])}, - {"kind": "project", "group": "project", "names": set(["bar"])}, - ] - assert ActivityParameter.dependency_chain("A", include_self=True) == expected - - -def test_activity_parameter_dependency_chain_include_self_exchanges(chain): - """Out of the parameters 'J' and 'H' in group 'G', only 'H' counts - as a dependency as 'J' is not used by either 'H' or by any exchanges. - """ - ActivityParameter.create( - group="G", - database="K", - code="L", - name="H", - amount=7, - ) - db = Database("K") - a = db.new_activity(code="not a robot", name="actually an activity") - a.save() - a.new_exchange(amount=0, input=a, type="production", formula="15 / H").save() - parameters.add_exchanges_to_group("G", a) - - expected = [ - {"kind": "activity", "group": "A", "names": set(["D", "F"])}, - ] - assert ActivityParameter.dependency_chain("G") == expected - expected = [ - {"kind": "activity", "group": "A", "names": set(["D", "F"])}, - {"kind": "activity", "group": "G", "names": set(["H"])}, - ] - assert ActivityParameter.dependency_chain("G", include_self=True) == expected - - -def test_activity_parameter_depend_within_group(chain): - """When considering only dependencies within the given group. 'D' is - a dependency within the group 'A', while 'F' is not. - """ - assert ActivityParameter.is_dependency_within_group("D", "A") - assert not ActivityParameter.is_dependency_within_group("F", "A") - - -def test_activity_parameter_depend_within_group_include(chain): - """The 'J' parameter in group 'G' depends on the 'F' parameter in group - 'A'. 'F' doesn't exist within the 'G' group but is instead linked to the - 'J' parameter through the 'G' group order. 
- """ - parameters.recalculate() - assert ActivityParameter.is_dependent_on("F", "A") - assert not ActivityParameter.is_dependency_within_group("F", "G") - assert ActivityParameter.is_dependency_within_group("F", "G", include_order=True) - - -@bw2test -def test_activity_parameter_dummy(): - assert not ActivityParameter.select().count() - ActivityParameter.insert_dummy("A", ("B", "C")) - assert ActivityParameter.select().count() == 1 - a = ActivityParameter.get() - assert a.name.startswith("__dummy_") and uuid4hex.search(a.name) - assert a.database == "B" - assert a.amount == 0 - - ActivityParameter.insert_dummy("A", ("B", "C")) - assert ActivityParameter.select().count() == 1 - - -@bw2test -def test_activity_parameter_multiple_dummies(): - assert not ActivityParameter.select().count() - ActivityParameter.insert_dummy("A", ("B", "C")) - ActivityParameter.insert_dummy("A", ("B", "D")) - assert ActivityParameter.select().count() == 2 - assert all( - ap.name.startswith("__dummy_") and uuid4hex.search(ap.name) - for ap in ActivityParameter.select() - ) - - -def test_activity_parameter_static_dependencies(chain): - expected = {"foo": 5, "bar": 6} - assert ActivityParameter._static_dependencies("A") == expected - expected = {"bar": 6, "D": 1, "F": 2} - assert ActivityParameter._static_dependencies("G") == expected - - -@bw2test -def test_activity_parameter_recalculate_exchanges(): - db = Database("example") - db.register() - assert not len(parameters) - assert not len(db) - - a = db.new_activity(code="A", name="An activity") - a.save() - b = db.new_activity(code="B", name="Another activity") - b.save() - a.new_exchange(amount=0, input=b, type="technosphere", formula="foo * bar + 4").save() - - project_data = [ - { - "name": "foo", - "formula": "green / 7", - }, - {"name": "green", "amount": 7}, - ] - parameters.new_project_parameters(project_data) - - database_data = [ - { - "name": "red", - "formula": "(foo + blue ** 2) / 5", - }, - {"name": "blue", "amount": 12}, - ] - parameters.new_database_parameters(database_data, "example") - - activity_data = [ - { - "name": "reference_me", - "formula": "sqrt(red - 20)", - "database": "example", - "code": "B", - }, - { - "name": "bar", - "formula": "reference_me + 2", - "database": "example", - "code": "A", - }, - ] - parameters.new_activity_parameters(activity_data, "my group") - - parameters.add_exchanges_to_group("my group", a) - ActivityParameter.recalculate_exchanges("my group") - - for exc in a.exchanges(): - # (((1 + 12 ** 2) / 5 - 20) ** 0.5 + 2) + 4 - assert exc.amount == 9 - - -@bw2test -def test_pe_no_activities_parameter_group_error(): - db = Database("example") - db.register() - assert not len(parameters) - assert not len(db) - - a = db.new_activity(code="A", name="An activity") - a.save() - a.new_exchange(amount=0, input=a, type="production").save() - - for exc in a.exchanges(): - obj = ParameterizedExchange( - exchange=exc._document.id, - group="my group", - formula="1 + 1", - ) - with pytest.raises(IntegrityError): - obj.save() - - -@bw2test -def test_recalculate_exchanges_no_activities_parameters(): - db = Database("example") - db.register() - assert not len(parameters) - assert not len(db) - - a = db.new_activity(code="A", name="An activity") - a.save() - a.new_exchange(amount=0, input=a, type="production", formula="foo + 4").save() - - project_data = [ - { - "name": "foo", - "formula": "green / 7", - }, - {"name": "green", "amount": 7}, - ] - parameters.new_project_parameters(project_data) - - assert ActivityParameter.select().count() 
== 0 - parameters.add_exchanges_to_group("my group", a) - ActivityParameter.recalculate_exchanges("my group") - - for exc in a.exchanges(): - assert exc.amount == 5 - assert exc.get("formula") - - assert ActivityParameter.select().count() == 1 - a = ActivityParameter.get() - assert a.name.startswith("__dummy_") and uuid4hex.search(a.name) - - -@bw2test -def test_activity_parameter_recalculate(): - Database("B").register() - ActivityParameter.create(group="A", database="B", code="C", name="D", formula="2 ** 3") - ActivityParameter.create(group="A", database="B", code="E", name="F", formula="2 * D") - assert not Group.get(name="A").fresh - ActivityParameter.recalculate("A") - assert ActivityParameter.get(name="D").amount == 8 - assert ActivityParameter.get(name="F").amount == 16 - assert Group.get(name="A").fresh - - Database("K").register() - ActivityParameter.create(group="G", database="K", code="H", name="J", formula="F + D * 2") - ActivityParameter.create( - group="G", - database="K", - code="E", - name="F", - amount=3, - ) - assert not Group.get(name="G").fresh - with pytest.raises(MissingName): - ActivityParameter.recalculate("G") - - assert not Group.get(name="G").fresh - g = Group.get(name="G") - g.order = ["A"] - g.save() - ActivityParameter.recalculate("G") - assert Group.get(name="G").fresh - assert ActivityParameter.get(name="J").amount == 19 - assert ActivityParameter.get(name="F", database="K").amount == 3 - - DatabaseParameter.create( - database="B", - name="foo", - formula="2 ** 2", - ) - ProjectParameter.create( - name="bar", - formula="2 * 2 * 2", - ) - a = ActivityParameter.get(database="B", code="E") - a.formula = "foo + bar + D" - a.save() - assert not Group.get(name="A").fresh - ActivityParameter.recalculate("A") - assert ActivityParameter.get(database="B", code="E").amount == 4 + 8 + 8 - assert Group.get(name="A").fresh - - -def test_activity_parameter_is_deletable(chain): - """An activity parameter is deletable if it is not a dependency of another - activity parameter. - """ - # Ensure that all GroupDependencies exist. - Group.get(name="G").expire() - parameters.recalculate() - - # Is not used by any other activity parameter - assert ActivityParameter.get(name="J", group="G").is_deletable() - # Is used by the 'F' activity parameter from the same group. - assert not ActivityParameter.get(name="D", group="A").is_deletable() - # Is used by the 'J' activity parameter from group 'G' - assert not ActivityParameter.get(name="F", group="A").is_deletable() - - -def test_activity_parameter_is_dependent_on(chain): - """An activity parameter can be dependent on any other type of parameter.""" - # Ensure that GroupDependencies exist. 
- parameters.recalculate() - - # Some activity parameter is using the "bar" project parameter - assert ActivityParameter.is_dependent_on("bar", "project") - # Some activity parameter is using the "foo" database parameter - assert ActivityParameter.is_dependent_on("foo", "B") - # Some activity parameter is using the "F" activity parameter - assert ActivityParameter.is_dependent_on("F", "A") - # No activity parameter is dependent on the "J" activity parameter - assert not ActivityParameter.is_dependent_on("J", "G") - - -def test_activity_parameter_formula_update_project(chain): - ActivityParameter.create( - group="G", - database="K", - code="AA", - name="bar", - amount=2, - ) - ActivityParameter.create( - group="G", database="K", code="BB", name="baz", amount=5, formula="bar * 6" - ) - parameters.recalculate() - assert ActivityParameter.get(name="F", group="A").formula == "foo + bar + D" - assert ActivityParameter.get(name="baz", group="G").formula == "bar * 6" - ActivityParameter.update_formula_project_parameter_name("bar", "banana") - assert ActivityParameter.get(name="F", group="A").formula == "foo + banana + D" - assert ActivityParameter.get(name="baz", group="G").formula == "bar * 6" - - -def test_activity_parameter_formula_update_database(chain): - ActivityParameter.create( - group="G", - database="K", - code="AA", - name="foo", - amount=2, - ) - ActivityParameter.create( - group="G", database="K", code="BB", name="baz", amount=5, formula="foo * 6" - ) - parameters.recalculate() - assert ActivityParameter.get(name="F", group="A").formula == "foo + bar + D" - assert ActivityParameter.get(name="baz", group="G").formula == "foo * 6" - ActivityParameter.update_formula_database_parameter_name("foo", "mango") - assert ActivityParameter.get(name="F", group="A").formula == "mango + bar + D" - assert ActivityParameter.get(name="baz", group="G").formula == "foo * 6" - - -def test_activity_parameter_formula_update_activity(chain): - parameters.recalculate() - assert ActivityParameter.get(name="F", group="A").formula == "foo + bar + D" - assert ActivityParameter.get(name="J", group="G").formula == "F + D * 2" - ActivityParameter.update_formula_activity_parameter_name("D", "dingo") - assert ActivityParameter.get(name="F", group="A").formula == "foo + bar + dingo" - assert ActivityParameter.get(name="J", group="G").formula == "F + D * 2" - - -def test_activity_parameter_formula_update_activity_include(chain): - parameters.recalculate() - assert ActivityParameter.get(name="F", group="A").formula == "foo + bar + D" - assert ActivityParameter.get(name="J", group="G").formula == "F + D * 2" - ActivityParameter.update_formula_activity_parameter_name("D", "dingo", include_order=True) - assert ActivityParameter.get(name="F", group="A").formula == "foo + bar + dingo" - assert ActivityParameter.get(name="J", group="G").formula == "F + dingo * 2" - - -@bw2test -def test_activity_parameter_crossdatabase_triggers(): - ActivityParameter.create(group="A", database="B", name="C", code="D") - with pytest.raises(IntegrityError): - ActivityParameter.create(group="A", database="E", name="F", code="G") - with pytest.raises(IntegrityError): - a = ActivityParameter.get(name="C") - a.database = "E" - a.save() - with pytest.raises(IntegrityError): - ActivityParameter.update(database="C").execute() - - -@bw2test -def test_activity_parameter_crossgroup_triggers(): - ActivityParameter.create( - group="A", - database="B", - name="C", - code="D", - amount=11, - ) - with pytest.raises(IntegrityError): - ActivityParameter.create( 
- group="E", - database="B", - name="C", - code="D", - amount=1, - ) - ActivityParameter.create( - group="E", - database="B", - name="C", - code="F", - amount=1, - ) - - -@bw2test -def test_activity_parameter_autoupdate_triggers(): - obj = ActivityParameter.create( - group="A", - database="B", - name="C", - code="D", - amount=11, - ) - first = Group.get(name="A").updated - time.sleep(1.1) - another = ActivityParameter.create( - group="A", - database="B", - code="E", - name="F", - formula="2 * foo", - ) - second = Group.get(name="A").updated - assert first != second - time.sleep(1.1) - ActivityParameter.update(amount=7).execute() - third = Group.get(name="A").updated - assert second != third - time.sleep(1.1) - ActivityParameter.get(name="F").delete_instance() - fourth = Group.get(name="A").updated - assert fourth != third - - -@bw2test -def test_activity_parameter_checks_uniqueness_constraints(): - ActivityParameter.create( - group="A", - database="B", - name="C", - code="D", - amount=11, - ) - with pytest.raises(IntegrityError): - ActivityParameter.create( - group="A", - database="B", - name="C", - code="G", - amount=111, - ) - - -@bw2test -def test_activity_parameter_checks(): - with pytest.raises(IntegrityError): - ActivityParameter.create(group="project", database="E", name="F", code="G") - with pytest.raises(IntegrityError): - ActivityParameter.create(group="E", database="E", name="F", code="G") - - -########## -### Groups -########## - - -@bw2test -def test_group(): - o = Group.create(name="foo") - assert o.fresh - o.expire() - assert not o.fresh - o = Group.get(name="foo") - assert not o.fresh - o.freshen() - assert o.fresh - o = Group.get(name="foo") - assert o.fresh - with pytest.raises(IntegrityError): - Group.create(name="foo") - Group.create(name="bar") - - -@bw2test -def test_group_purging(): - Database("A").register() - Database("B").register() - o = Group.create(name="one", order=["C", "project", "B", "D", "A"]) - expected = ["C", "D"] - assert o.updated - assert o.fresh - assert o.order == expected - assert Group.get(name="one").order == expected - - -###################### -### Group dependencies -###################### - - -@bw2test -def test_group_dependency(): - d = GroupDependency.create(group="foo", depends="bar") - assert d.group == "foo" - assert d.depends == "bar" - - -@bw2test -def test_group_dependency_save_checks(): - with pytest.raises(ValueError): - GroupDependency.create(group="project", depends="foo") - Database("A").register() - GroupDependency.create(group="A", depends="project") - with pytest.raises(ValueError): - GroupDependency.create(group="A", depends="foo") - - -@bw2test -def test_group_dependency_constraints(): - GroupDependency.create(group="foo", depends="bar") - with pytest.raises(IntegrityError): - GroupDependency.create(group="foo", depends="bar") - with pytest.raises(IntegrityError): - GroupDependency.create(group="foo", depends="foo") - - -@bw2test -def test_group_dependency_circular(): - GroupDependency.create(group="foo", depends="bar") - with pytest.raises(IntegrityError): - GroupDependency.create(group="bar", depends="foo") - - -@bw2test -def test_group_dependency_override(): - """GroupDependency can be overridden by having a parameter with the same - name within the group. 
- """ - ProjectParameter.create(name="foo", amount=2) - Database("B").register() - DatabaseParameter.create(database="B", name="bar", amount=1, formula="foo * 5") - parameters.recalculate() - assert GroupDependency.select().where(GroupDependency.depends == "project").count() == 1 - assert DatabaseParameter.get(name="bar").amount == 10 - DatabaseParameter.create( - database="B", - name="foo", - amount=8, - ) - parameters.recalculate() - assert GroupDependency.select().where(GroupDependency.depends == "project").count() == 0 - assert DatabaseParameter.get(name="bar").amount == 40 - - -###################### -### Parameters manager -###################### - - -@bw2test -def test_parameters_new_project_parameters_uniqueness(): - with pytest.raises(AssertionError): - parameters.new_project_parameters([{"name": "foo"}, {"name": "foo"}]) - - -@bw2test -def test_parameters_new_project_parameters(): - assert not len(parameters) - ProjectParameter.create(name="foo", amount=17) - ProjectParameter.create(name="baz", amount=10) - assert len(parameters) == 2 - assert ProjectParameter.get(name="foo").amount == 17 - data = [ - {"name": "foo", "amount": 4}, - {"name": "bar", "formula": "foo + 3"}, - ] - parameters.new_project_parameters(data) - assert len(parameters) == 3 - assert ProjectParameter.get(name="foo").amount == 4 - assert ProjectParameter.get(name="bar").amount == 7 - assert ProjectParameter.get(name="baz").amount == 10 - assert Group.get(name="project").fresh - - -@bw2test -def test_parameters_new_project_parameters_no_overwrite(): - ProjectParameter.create(name="foo", amount=17) - data = [ - {"name": "foo", "amount": 4}, - {"name": "bar", "formula": "foo + 3"}, - ] - with pytest.raises(ValueError): - parameters.new_project_parameters(data, overwrite=False) - - -@bw2test -def test_parameters_repr(): - assert repr(parameters) == "Parameters manager with 0 objects" - - -@bw2test -def test_parameters_recalculate(): - Database("B").register() - ActivityParameter.create(group="A", database="B", code="C", name="D", formula="2 ** 3") - ActivityParameter.create(group="A", database="B", code="E", name="F", formula="foo + bar + D") - DatabaseParameter.create( - database="B", - name="foo", - formula="2 ** 2", - ) - ProjectParameter.create( - name="bar", - formula="2 * 2 * 2", - ) - parameters.recalculate() - assert ProjectParameter.get(name="bar").amount == 8 - assert DatabaseParameter.get(name="foo").amount == 4 - assert ActivityParameter.get(name="F").amount == 20 - assert ActivityParameter.get(name="D").amount == 8 - - -@bw2test -def test_parameters_new_database_parameters(): - with pytest.raises(AssertionError): - parameters.new_database_parameters([], "another") - Database("another").register() - with pytest.raises(AssertionError): - parameters.new_database_parameters([{"name": "foo"}, {"name": "foo"}], "another") - DatabaseParameter.create(name="foo", database="another", amount=0) - DatabaseParameter.create(name="baz", database="another", amount=21) - assert len(parameters) == 2 - assert DatabaseParameter.get(name="foo").amount == 0 - data = [ - {"name": "foo", "amount": 4}, - {"name": "bar", "formula": "foo + 3"}, - ] - parameters.new_database_parameters(data, "another") - assert len(parameters) == 3 - assert DatabaseParameter.get(name="foo").amount == 4 - assert DatabaseParameter.get(name="bar").amount == 7 - assert DatabaseParameter.get(name="baz").amount == 21 - assert Group.get(name="another").fresh - - -@bw2test -def test_parameters_new_database_parameters_no_overwrite(): - 
Database("another").register() - DatabaseParameter.create(name="foo", database="another", amount=0) - with pytest.raises(ValueError): - parameters.new_database_parameters( - [{"name": "foo", "amount": 4}], "another", overwrite=False - ) - - -@bw2test -def test_parameters_new_activity_parameters_errors(): - with pytest.raises(AssertionError): - parameters.new_activity_parameters([], "example") - with pytest.raises(AssertionError): - parameters.new_activity_parameters([{"database": 1}, {"database": 2}], "example") - - with pytest.raises(AssertionError): - parameters.new_activity_parameters([{"database": "unknown"}], "example") - - Database("A").register() - with pytest.raises(AssertionError): - parameters.new_activity_parameters( - [{"database": "A", "name": "foo"}, {"database": "A", "name": "foo"}], - "example", - ) - - -@bw2test -def test_parameters_new_activity_parameters(): - assert not len(parameters) - assert not Group.select().count() - Database("A").register() - ActivityParameter.create(group="another", database="A", name="baz", code="D", amount=49) - ActivityParameter.create(group="another", database="A", name="foo", code="E", amount=101) - assert len(parameters) == 2 - assert ActivityParameter.get(name="foo").amount == 101 - assert ActivityParameter.get(name="baz").amount == 49 - data = [ - {"database": "A", "code": "B", "name": "foo", "amount": 4}, - { - "database": "A", - "code": "C", - "name": "bar", - "formula": "foo + 3", - "uncertainty type": 0, - }, - ] - parameters.new_activity_parameters(data, "another") - assert len(parameters) == 3 - assert ActivityParameter.get(name="foo").amount == 4 - assert ActivityParameter.get(name="foo").code == "B" - assert ActivityParameter.get(name="baz").amount == 49 - a = ActivityParameter.get(code="C") - assert a.database == "A" - assert a.name == "bar" - assert a.formula == "foo + 3" - assert a.data == {"uncertainty type": 0} - assert a.amount == 7 - assert ActivityParameter.get(name="foo").amount == 4 - assert Group.get(name="another").fresh - - -@bw2test -def test_parameters_new_activity_parameters_no_overlap(): - Database("A").register() - ActivityParameter.create(group="another", database="A", name="foo", code="D", amount=49) - data = [ - {"database": "A", "code": "B", "name": "foo", "amount": 4}, - { - "database": "A", - "code": "C", - "name": "bar", - "formula": "foo + 3", - "uncertainty type": 0, - }, - ] - with pytest.raises(ValueError): - parameters.new_activity_parameters(data, "another", overwrite=False) - - -@bw2test -def test_parameters_rename_project_parameter(): - """Project parameters can be renamed.""" - param = ProjectParameter.create( - name="foo", - amount=7, - ) - assert ProjectParameter.select().where(ProjectParameter.name == "foo").count() == 1 - parameters.rename_project_parameter(param, "foobar") - with pytest.raises(ProjectParameter.DoesNotExist): - ProjectParameter.get(name="foo") - assert ProjectParameter.select().where(ProjectParameter.name == "foobar").count() == 1 - - -@bw2test -def test_parameters_rename_project_parameter_incorrect_type(): - Database("B").register() - param = DatabaseParameter.create( - database="B", - name="foo", - amount=5, - ) - with pytest.raises(TypeError): - parameters.rename_project_parameter(param, "bar") - - -@bw2test -def test_parameters_rename_project_parameter_dependencies(): - """Updating downstream parameters will update all relevant formulas - to use the new name for the parameter. 
- """ - param = ProjectParameter.create( - name="foo", - amount=7, - ) - ProjectParameter.create(name="bar", amount=1, formula="foo * 2") - assert ProjectParameter.is_dependency_within_group("foo") - parameters.rename_project_parameter(param, "baz", update_dependencies=True) - assert ProjectParameter.get(name="bar").formula == "baz * 2" - - -@bw2test -def test_parameters_rename_project_parameter_dependencies_fail(): - """An exception is raised if rename is attempted without updating - downstream if other parameters depend on that parameter. - """ - param = ProjectParameter.create( - name="foo", - amount=7, - ) - ProjectParameter.create(name="bar", amount=1, formula="foo * 2") - with pytest.raises(ValueError): - parameters.rename_project_parameter(param, "baz") - - -def test_parameters_rename_project_parameter_dependencies_full(chain): - """Updating downstream parameters will update all relevant formulas - to use the new name for the parameter. - - Parameter amounts do no change as only the name is altered. - """ - ProjectParameter.create(name="double_bar", amount=12, formula="bar * 2") - DatabaseParameter.create(database="B", name="bing", amount=2, formula="bar ** 5") - parameters.recalculate() - assert ProjectParameter.is_dependency_within_group("bar") - assert DatabaseParameter.is_dependent_on("bar") - assert ActivityParameter.is_dependent_on("bar", "project") - assert ProjectParameter.get(name="double_bar").amount == 16 - assert DatabaseParameter.get(name="bing", database="B").amount == 32768 - assert ActivityParameter.get(name="F", group="A").amount == 20 - - param = ProjectParameter.get(name="bar") - parameters.rename_project_parameter(param, "new_bar", update_dependencies=True) - - assert ProjectParameter.get(name="double_bar").formula == "new_bar * 2" - assert DatabaseParameter.get(name="bing", database="B").formula == "new_bar ** 5" - assert ActivityParameter.get(name="F", group="A").formula == "foo + new_bar + D" - assert ProjectParameter.get(name="double_bar").amount == 16 - assert DatabaseParameter.get(name="bing", database="B").amount == 32768 - assert ActivityParameter.get(name="F", group="A").amount == 20 - - -@bw2test -def test_parameters_rename_database_parameter(): - Database("B").register() - param = DatabaseParameter.create( - database="B", - name="foo", - amount=5, - ) - assert DatabaseParameter.select().where(DatabaseParameter.name == "foo").count() == 1 - parameters.rename_database_parameter(param, "bar") - with pytest.raises(DatabaseParameter.DoesNotExist): - DatabaseParameter.get(name="foo") - assert DatabaseParameter.select().where(DatabaseParameter.name == "bar").count() == 1 - - -def test_parameters_rename_database_parameter_dependencies(chain): - DatabaseParameter.create(database="B", name="baz", amount=1, formula="foo + 2") - parameters.recalculate() - param = DatabaseParameter.get(name="foo") - parameters.rename_database_parameter(param, "foobar", True) - assert DatabaseParameter.get(name="baz").formula == "foobar + 2" - assert ActivityParameter.get(name="F", group="A").formula == "foobar + bar + D" - - -def test_parameters_rename_activity_parameter(chain): - parameters.recalculate() - param = ActivityParameter.get(name="J", group="G") - parameters.rename_activity_parameter(param, "John") - with pytest.raises(ActivityParameter.DoesNotExist): - ActivityParameter.get(name="J", group="G") - assert ( - ActivityParameter.select() - .where(ActivityParameter.name == "John", ActivityParameter.group == "G") - .count() - == 1 - ) - - -def 
test_parameters_rename_activity_parameter_dependencies(chain): - parameters.recalculate() - param = ActivityParameter.get(name="D", group="A") - parameters.rename_activity_parameter(param, "Dirk", True) - assert ActivityParameter.get(name="F", group="A").formula == "foo + bar + Dirk" - assert ActivityParameter.get(name="J", group="G").formula == "F + Dirk * 2" - - -@bw2test -def test_parameters_rename_activity_parameter_group_exchange(): - """Rename 'D' from group 'A' updates ParameterizedExchange and - underlying exchange. - """ - db = Database("B") - db.register() - ActivityParameter.create( - group="A", - database="B", - code="C", - name="D", - formula="2 ** 3", - amount=1, - ) - a = db.new_activity(code="newcode", name="new activity") - a.save() - a.new_exchange(amount=1, input=a, type="production", formula="D + 2").save() - parameters.add_exchanges_to_group("A", a) - ActivityParameter.recalculate_exchanges("A") - - param = ActivityParameter.get(name="D", group="A") - parameters.rename_activity_parameter(param, "Correct", True) - assert ParameterizedExchange.get(group="A").formula == "Correct + 2" - exc = next(iter(a.production())) - assert exc.amount == 10 - assert exc.get("formula") == "Correct + 2" - - -@bw2test -def test_parameters_rename_activity_parameter_order_exchange(): - """Rename 'D' from group 'A' updates ParameterizedExchange and - underlying exchange in group 'G' - """ - db = Database("K") - db.register() - ActivityParameter.create( - group="A", - database="K", - code="C", - name="D", - formula="2 ** 3", - amount=1, - ) - a = db.new_activity(code="newcode", name="new activity") - a.save() - a.new_exchange(amount=1, input=a, type="production", formula="D + 2").save() - Group.create(name="G", order=["A"], fresh=False) - parameters.add_exchanges_to_group("G", a) - ActivityParameter.recalculate_exchanges("G") - - param = ActivityParameter.get(name="D", group="A") - parameters.rename_activity_parameter(param, "Correct", update_dependencies=True) - - assert ParameterizedExchange.get(group="G").formula == "Correct + 2" - exc = next(iter(a.production())) - assert exc.amount == 10 - assert exc.get("formula") == "Correct + 2" - - -@bw2test -def test_parameters_add_to_group_empty(): - db = Database("example") - db.register() - assert not len(parameters) - assert not len(db) - assert not Group.select().count() - a = db.new_activity( - code="A", - name="An activity", - ) - a.save() - assert parameters.add_to_group("my group", a) is None - assert Group.get(name="my group") - assert not len(parameters) - - -@bw2test -def test_parameters_add_to_group(): - db = Database("example") - db.register() - assert not len(parameters) - assert not len(db) - assert not Group.select().count() - - ActivityParameter.create( - group="my group", - database="example", - name="bye bye", - code="A", - amount=1, - ) - - a = db.new_activity( - code="A", - name="An activity", - parameters=[ - {"amount": 4, "name": "one", "foo": "bar"}, - {"amount": 42, "name": "two", "formula": "this + that"}, - ], - ) - a.save() - assert "parameters" in get_activity(("example", "A")) - - assert parameters.add_to_group("my group", a) == 2 - assert Group.get(name="my group") - assert not ActivityParameter.select().where(ActivityParameter.name == "bye bye").count() - expected = ( - ("one", 4, None, {"foo": "bar"}), - ("two", 42, "this + that", {}), - ) - for ap in ActivityParameter.select(): - assert (ap.name, ap.amount, ap.formula, ap.data) in expected - assert "parameters" not in get_activity(("example", "A")) - - -@bw2test 
-def test_parameters_remove_from_group(): - db = Database("example") - db.register() - a = db.new_activity(code="A", name="An activity") - a.save() - b = db.new_activity(code="B", name="Another activity") - b.save() - a.new_exchange(amount=0, input=b, type="technosphere", formula="bar + 4").save() - activity_data = [ - { - "name": "reference_me", - "formula": "sqrt(25)", - "database": "example", - "code": "B", - }, - { - "name": "bar", - "formula": "reference_me + 2", - "database": "example", - "code": "A", - }, - ] - parameters.new_activity_parameters(activity_data, "my group") - parameters.add_exchanges_to_group("my group", a) - assert not get_activity(("example", "A")).get("parameters") - assert ActivityParameter.select().count() == 2 - assert ParameterizedExchange.select().count() == 1 - - parameters.remove_from_group("my group", a) - assert ActivityParameter.select().count() == 1 - assert not ParameterizedExchange.select().count() - assert get_activity(("example", "A"))["parameters"] - - -@bw2test -def test_parameters_save_restore_exchange_amount(): - """The original amount of the exchange is restored when it is no - longer parameterized. - """ - db = Database("example") - db.register() - a = db.new_activity(code="A", name="An activity") - a.save() - b = db.new_activity(code="B", name="Another activity") - b.save() - a.new_exchange(amount=5, input=b, type="technosphere", formula="bing + 5").save() - - activity_data = [ - { - "name": "bing", - "amount": "7", - "database": "example", - "code": "A", - } - ] - parameters.new_activity_parameters(activity_data, "calculate") - parameters.add_exchanges_to_group("calculate", a) - # The original amount and current amount is 5 - for exc in a.exchanges(): - assert exc["amount"] == 5 - assert "original_amount" in exc and exc["original_amount"] == 5 - - ActivityParameter.recalculate_exchanges("calculate") - # Parameterization has caused the amount to change. - for exc in a.exchanges(): - assert exc["amount"] == 12 - assert "original_amount" in exc - - # Remove parameterization from the activity, restoring the original amount - parameters.remove_from_group("calculate", a) - for exc in a.exchanges(): - assert exc["amount"] == 5 - assert "original_amount" not in exc - - -@bw2test -def test_parameters_save_keep_changed_exchange_amount(): - db = Database("example") - db.register() - a = db.new_activity(code="A", name="An activity") - a.save() - b = db.new_activity(code="B", name="Another activity") - b.save() - a.new_exchange(amount=5, input=b, type="technosphere", formula="bing + 5").save() - - activity_data = [ - { - "name": "bing", - "amount": "7", - "database": "example", - "code": "A", - } - ] - parameters.new_activity_parameters(activity_data, "calculate") - parameters.add_exchanges_to_group("calculate", a) - # The original amount and current amount is 5 - for exc in a.exchanges(): - assert exc["amount"] == 5 - assert "original_amount" in exc and exc["original_amount"] == 5 - - ActivityParameter.recalculate_exchanges("calculate") - # Parameterization has caused the amount to change. 
- for exc in a.exchanges(): - assert exc["amount"] == 12 - assert "original_amount" in exc - - # Remove parameterization from the activity, keeping the changed amount - parameters.remove_from_group("calculate", a, restore_amounts=False) - for exc in a.exchanges(): - assert exc["amount"] == 12 - assert "original_amount" in exc diff --git a/tests/unit/test_activity_parameter_events.py b/tests/unit/test_activity_parameter_events.py deleted file mode 100644 index e6033645..00000000 --- a/tests/unit/test_activity_parameter_events.py +++ /dev/null @@ -1,778 +0,0 @@ -import json - -from bw2data.database import DatabaseChooser -from bw2data.parameters import ActivityParameter, Group -from bw2data.project import projects -from bw2data.snowflake_ids import snowflake_id_generator -from bw2data.tests import bw2test - - -@bw2test -def test_activity_parameter_revision_expected_format_create(num_revisions, monkeypatch): - def no_signal_save(self, *args, **kwargs): - kwargs["signal"] = False - return super(Group, self).save(*args, **kwargs) - - monkeypatch.setattr(Group, "save", no_signal_save) - - projects.set_current("activity-event") - - assert not ActivityParameter.select().count() - assert projects.dataset.revision is None - - DatabaseChooser("test-database").register() - - projects.dataset.set_sourced() - - dp = ActivityParameter.create( - database="test-database", - code="test-code", - group="test-group", - name="example", - formula="1 * 2 + 3", - amount=5, - data={"foo": "bar"}, - ) - - from pprint import pprint - - pprint( - [ - json.load(open(fp)) - for fp in (projects.dataset.dir / "revisions").iterdir() - if fp.stem.lower() != "head" and fp.is_file() - ] - ) - - assert dp.id > 1e6 - assert num_revisions(projects) == 1 - - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": dp.id, - "change_type": "create", - "delta": { - "type_changes": { - "root": { - "old_type": "NoneType", - "new_type": "dict", - "new_value": { - "id": dp.id, - "database": "test-database", - "group": "test-group", - "code": "test-code", - "name": "example", - "formula": "1 * 2 + 3", - "amount": 5, - "data": {"foo": "bar"}, - }, - } - } - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_activity_parameter_revision_apply_create(num_revisions, monkeypatch): - def no_signal_save(self, *args, **kwargs): - kwargs["signal"] = False - return super(Group, self).save(*args, **kwargs) - - monkeypatch.setattr(Group, "save", no_signal_save) - - projects.set_current("activity-event") - DatabaseChooser("test-database").register() - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - dp_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": dp_id, - "change_type": "create", - "delta": { - "type_changes": { - "root": { - "old_type": "NoneType", - "new_type": "dict", - "new_value": { - "id": dp_id, - "database": "test-database", - "group": "test-group", - "code": "test-code", - "name": "example", - 
"formula": "1 * 2 + 3", - "amount": 5, - "data": {"foo": "bar"}, - }, - } - } - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - assert ActivityParameter.select().count() == 1 - dp = ActivityParameter.get(id=dp_id) - assert dp.data == {"foo": "bar"} - assert dp.database == "test-database" - assert dp.code == "test-code" - assert dp.group == "test-group" - assert dp.amount == 5 - assert dp.formula == "1 * 2 + 3" - assert dp.name == "example" - - -@bw2test -def test_activity_parameter_revision_expected_format_update(num_revisions): - projects.set_current("activity-event") - - DatabaseChooser("test-database").register() - dp = ActivityParameter.create( - database="test-database", - code="test-code", - group="test-group", - name="example", - formula="1 * 2 + 3", - amount=5, - data={"foo": "bar"}, - ) - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - dp.name = "another" - dp.amount = 7 - dp.save() - - assert num_revisions(projects) == 1 - - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": dp.id, - "change_type": "update", - "delta": { - "type_changes": { - "root['amount']": {"old_type": "float", "new_type": "int", "new_value": 7} - }, - "values_changed": {"root['name']": {"new_value": "another"}}, - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_activity_parameter_revision_apply_update(num_revisions): - projects.set_current("activity-event") - - DatabaseChooser("test-database").register() - dp = ActivityParameter.create( - database="test-database", - code="test-code", - group="test-group", - name="example", - formula="1 * 2 + 3", - amount=5, - data={"foo": "bar"}, - ) - - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": dp.id, - "change_type": "update", - "delta": { - "type_changes": { - "root['amount']": {"old_type": "float", "new_type": "int", "new_value": 7} - }, - "values_changed": {"root['name']": {"new_value": "another"}}, - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - assert ActivityParameter.select().count() == 1 - dp = ActivityParameter.get(id=dp.id) - assert dp.data == {"foo": "bar"} - assert dp.amount == 7 - assert dp.formula == "1 * 2 + 3" - assert dp.name == "another" - - -@bw2test -def test_activity_parameter_revision_expected_format_delete(num_revisions): - projects.set_current("activity-event") - - DatabaseChooser("test-database").register() - dp = ActivityParameter.create( - database="test-database", - code="test-code", - group="test-group", - name="example", - formula="1 * 2 + 3", - amount=5, - data={"foo": "bar"}, - ) - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - dp.delete_instance() - - assert num_revisions(projects) == 1 - - assert 
projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": dp.id, - "change_type": "delete", - "delta": { - "type_changes": { - "root": {"old_type": "dict", "new_type": "NoneType", "new_value": None} - } - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_activity_parameter_revision_apply_delete(num_revisions): - projects.set_current("activity-event") - - DatabaseChooser("test-database").register() - dp = ActivityParameter.create( - database="test-database", - code="test-code", - group="test-group", - name="example", - formula="1 * 2 + 3", - amount=5, - data={"foo": "bar"}, - ) - assert ActivityParameter.select().count() == 1 - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": dp.id, - "change_type": "delete", - "delta": { - "type_changes": { - "root": {"old_type": "dict", "new_type": "NoneType", "new_value": None} - } - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - assert not ActivityParameter.select().count() - - -@bw2test -def test_activity_parameter_revision_expected_format_recalculate(num_revisions): - projects.set_current("activity-event") - - # Needed to have a parameter which could be obsolete - otherwise `recalculate` just - # no-op exits - DatabaseChooser("test-database").register() - ActivityParameter.create( - database="test-database", - code="test-code", - group="test-group", - name="example", - formula="1 * 2 + 3", - amount=5, - data={"foo": "bar"}, - ) - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - ActivityParameter.recalculate("test-group") - - assert num_revisions(projects) == 1 - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": "test-group", - "change_type": "activity_parameter_recalculate", - "delta": {}, - } - ], - } - - assert revision == expected - - -@bw2test -def test_activity_parameter_revision_apply_recalculate(num_revisions, monkeypatch): - def fake_recalculate(group, signal=True): - assert group == "test-group" - assert not signal - - monkeypatch.setattr(ActivityParameter, "recalculate", fake_recalculate) - - projects.set_current("activity-event") - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": "test-group", - "change_type": 
"activity_parameter_recalculate", - "delta": {}, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - -@bw2test -def test_activity_parameter_revision_expected_format_recalculate_exchanges(num_revisions): - projects.set_current("activity-event") - - # Needed to have a parameter which could be obsolete - otherwise `recalculate` just - # no-op exits - DatabaseChooser("test-database").register() - ActivityParameter.create( - database="test-database", - code="test-code", - group="test-group", - name="example", - formula="1 * 2 + 3", - amount=5, - data={"foo": "bar"}, - ) - ActivityParameter.recalculate("test-group") - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - ActivityParameter.recalculate_exchanges("test-group") - - assert num_revisions(projects) == 1 - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": "test-group", - "change_type": "activity_parameter_recalculate_exchanges", - "delta": {}, - } - ], - } - - assert revision == expected - - -@bw2test -def test_activity_parameter_revision_apply_recalculate_exchanges(num_revisions, monkeypatch): - def fake_recalculate(group, signal=True): - assert group == "test-group" - assert not signal - - monkeypatch.setattr(ActivityParameter, "recalculate_exchanges", fake_recalculate) - - projects.set_current("activity-event") - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": "test-group", - "change_type": "activity_parameter_recalculate_exchanges", - "delta": {}, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - -@bw2test -def test_activity_parameter_revision_expected_format_update_formula_project_parameter_name( - num_revisions, -): - projects.set_current("activity-event") - DatabaseChooser("test-database").register() - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - ActivityParameter.update_formula_project_parameter_name(old="one2three", new="123") - - assert num_revisions(projects) == 1 - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": "__update_formula_parameter_name_dummy__", - "change_type": "activity_parameter_update_formula_project_parameter_name", - "delta": { - "dictionary_item_added": {"root['new']": "123"}, - "dictionary_item_removed": {"root['old']": "one2three"}, - }, - } - ], - } - - assert revision == expected - - -@bw2test -def 
test_activity_parameter_revision_apply_update_formula_project_parameter_name( - num_revisions, monkeypatch -): - def fake_update(old, new, signal=True): - assert old == "one2three" - assert new == "123" - assert not signal - - monkeypatch.setattr(ActivityParameter, "update_formula_project_parameter_name", fake_update) - - projects.set_current("activity-event") - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": "__update_formula_parameter_name_dummy__", - "change_type": "activity_parameter_update_formula_project_parameter_name", - "delta": { - "dictionary_item_added": {"root['new']": "123"}, - "dictionary_item_removed": {"root['old']": "one2three"}, - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - -@bw2test -def test_activity_parameter_revision_expected_format_update_formula_database_parameter_name( - num_revisions, -): - projects.set_current("activity-event") - DatabaseChooser("test-database").register() - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - ActivityParameter.update_formula_database_parameter_name(old="one2three", new="123") - - assert num_revisions(projects) == 1 - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": "__update_formula_parameter_name_dummy__", - "change_type": "activity_parameter_update_formula_database_parameter_name", - "delta": { - "dictionary_item_added": {"root['new']": "123"}, - "dictionary_item_removed": {"root['old']": "one2three"}, - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_activity_parameter_revision_apply_update_formula_database_parameter_name( - num_revisions, monkeypatch -): - def fake_update(old, new, signal=True): - assert old == "one2three" - assert new == "123" - assert not signal - - monkeypatch.setattr(ActivityParameter, "update_formula_database_parameter_name", fake_update) - - projects.set_current("activity-event") - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": "__update_formula_parameter_name_dummy__", - "change_type": "activity_parameter_update_formula_database_parameter_name", - "delta": { - "dictionary_item_added": {"root['new']": "123"}, - "dictionary_item_removed": {"root['old']": "one2three"}, - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - -@bw2test -def test_activity_parameter_revision_expected_format_update_formula_activity_parameter_name( - num_revisions, -): - projects.set_current("activity-event") - DatabaseChooser("test-database").register() 
- - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - ActivityParameter.update_formula_activity_parameter_name(old="one2three", new="123") - - assert num_revisions(projects) == 1 - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": "__update_formula_parameter_name_dummy__", - "change_type": "activity_parameter_update_formula_activity_parameter_name", - "delta": { - "dictionary_item_added": {"root['new']": "123", "root['include_order']": False}, - "dictionary_item_removed": {"root['old']": "one2three"}, - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_activity_parameter_revision_apply_update_formula_activity_parameter_name( - num_revisions, monkeypatch -): - def fake_update(old, new, include_order, signal=True): - assert old == "one2three" - assert new == "123" - assert not signal - - monkeypatch.setattr(ActivityParameter, "update_formula_activity_parameter_name", fake_update) - - projects.set_current("activity-event") - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "activity_parameter", - "id": "__update_formula_parameter_name_dummy__", - "change_type": "activity_parameter_update_formula_activity_parameter_name", - "delta": { - "dictionary_item_added": {"root['new']": "123", "root['include_order']": False}, - "dictionary_item_removed": {"root['old']": "one2three"}, - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) diff --git a/tests/unit/test_database_parameter_events.py b/tests/unit/test_database_parameter_events.py deleted file mode 100644 index 223d01a1..00000000 --- a/tests/unit/test_database_parameter_events.py +++ /dev/null @@ -1,564 +0,0 @@ -import json - -from bw2data.database import DatabaseChooser -from bw2data.parameters import DatabaseParameter, Group -from bw2data.project import projects -from bw2data.snowflake_ids import snowflake_id_generator -from bw2data.tests import bw2test - - -@bw2test -def test_database_parameter_revision_expected_format_create(num_revisions, monkeypatch): - def no_signal_save(self, *args, **kwargs): - kwargs["signal"] = False - return super(Group, self).save(*args, **kwargs) - - monkeypatch.setattr(Group, "save", no_signal_save) - - projects.set_current("activity-event") - - assert not DatabaseParameter.select().count() - assert projects.dataset.revision is None - - DatabaseChooser("test-example").register() - - projects.dataset.set_sourced() - - dp = DatabaseParameter.create( - database="test-example", name="example", formula="1 * 2 + 3", amount=5, data={"foo": "bar"} - ) - assert dp.id > 1e6 - assert num_revisions(projects) == 1 - - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": 
projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "database_parameter", - "id": dp.id, - "change_type": "create", - "delta": { - "type_changes": { - "root": { - "old_type": "NoneType", - "new_type": "dict", - "new_value": { - "id": dp.id, - "database": "test-example", - "name": "example", - "formula": "1 * 2 + 3", - "amount": 5, - "data": {"foo": "bar"}, - }, - } - } - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_database_parameter_revision_apply_create(num_revisions, monkeypatch): - def no_signal_save(self, *args, **kwargs): - kwargs["signal"] = False - return super(Group, self).save(*args, **kwargs) - - monkeypatch.setattr(Group, "save", no_signal_save) - - projects.set_current("activity-event") - DatabaseChooser("test-example").register() - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - dp_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "database_parameter", - "id": dp_id, - "change_type": "create", - "delta": { - "type_changes": { - "root": { - "old_type": "NoneType", - "new_type": "dict", - "new_value": { - "id": dp_id, - "database": "test-example", - "name": "example", - "formula": "1 * 2 + 3", - "amount": 5, - "data": {"foo": "bar"}, - }, - } - } - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - assert DatabaseParameter.select().count() == 1 - dp = DatabaseParameter.get(id=dp_id) - assert dp.data == {"foo": "bar"} - assert dp.database == "test-example" - assert dp.amount == 5 - assert dp.formula == "1 * 2 + 3" - assert dp.name == "example" - - -@bw2test -def test_database_parameter_revision_expected_format_update(num_revisions): - projects.set_current("activity-event") - - DatabaseChooser("test-example").register() - dp = DatabaseParameter.create( - name="example", database="test-example", formula="1 * 2 + 3", amount=5, data={"foo": "bar"} - ) - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - dp.name = "another" - dp.amount = 7 - dp.save() - - assert num_revisions(projects) == 1 - - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "database_parameter", - "id": dp.id, - "change_type": "update", - "delta": { - "type_changes": { - "root['amount']": {"old_type": "float", "new_type": "int", "new_value": 7} - }, - "values_changed": {"root['name']": {"new_value": "another"}}, - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_database_parameter_revision_apply_update(num_revisions): - projects.set_current("activity-event") - - DatabaseChooser("test-example").register() - dp = DatabaseParameter.create( - name="example", database="test-example", formula="1 * 2 + 3", amount=5, data={"foo": "bar"} - ) - - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - 
"parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "database_parameter", - "id": dp.id, - "change_type": "update", - "delta": { - "type_changes": { - "root['amount']": {"old_type": "float", "new_type": "int", "new_value": 7} - }, - "values_changed": {"root['name']": {"new_value": "another"}}, - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - assert DatabaseParameter.select().count() == 1 - dp = DatabaseParameter.get(id=dp.id) - assert dp.data == {"foo": "bar"} - assert dp.amount == 7 - assert dp.formula == "1 * 2 + 3" - assert dp.name == "another" - - -@bw2test -def test_database_parameter_revision_expected_format_delete(num_revisions): - projects.set_current("activity-event") - - DatabaseChooser("test-example").register() - dp = DatabaseParameter.create( - name="example", database="test-example", formula="1 * 2 + 3", amount=5, data={"foo": "bar"} - ) - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - dp.delete_instance() - - assert num_revisions(projects) == 1 - - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "database_parameter", - "id": dp.id, - "change_type": "delete", - "delta": { - "type_changes": { - "root": {"old_type": "dict", "new_type": "NoneType", "new_value": None} - } - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_database_parameter_revision_apply_delete(num_revisions): - projects.set_current("activity-event") - - DatabaseChooser("test-example").register() - dp = DatabaseParameter.create( - name="example", database="test-example", formula="1 * 2 + 3", amount=5, data={"foo": "bar"} - ) - assert DatabaseParameter.select().count() == 1 - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "database_parameter", - "id": dp.id, - "change_type": "delete", - "delta": { - "type_changes": { - "root": {"old_type": "dict", "new_type": "NoneType", "new_value": None} - } - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - assert not DatabaseParameter.select().count() - - -@bw2test -def test_database_parameter_revision_expected_format_recalculate(num_revisions): - projects.set_current("activity-event") - - # Needed to have a parameter which could be obsolete - otherwise `recalculate` just - # no-op exits - DatabaseChooser("test-example").register() - DatabaseParameter.create( - name="example", database="test-example", formula="1 * 2 + 3", amount=5, data={"foo": "bar"} - ) - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - DatabaseParameter.recalculate("test-example") - - assert num_revisions(projects) == 1 - assert projects.dataset.revision is not None - with open(projects.dataset.dir / 
"revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "database_parameter", - "id": "test-example", - "change_type": "database_parameter_recalculate", - "delta": {}, - } - ], - } - - assert revision == expected - - -@bw2test -def test_database_parameter_revision_apply_recalculate(num_revisions, monkeypatch): - def fake_recalculate(database, signal=True): - assert database == "test-example" - assert not signal - - monkeypatch.setattr(DatabaseParameter, "recalculate", fake_recalculate) - - projects.set_current("activity-event") - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "database_parameter", - "id": "test-example", - "change_type": "database_parameter_recalculate", - "delta": {}, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - -@bw2test -def test_database_parameter_revision_expected_format_update_formula_project_parameter_name( - num_revisions, -): - projects.set_current("activity-event") - DatabaseChooser("test-example").register() - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - DatabaseParameter.update_formula_project_parameter_name(old="one2three", new="123") - - # from pprint import pprint - # pprint([ - # json.load(open(fp)) - # for fp in (projects.dataset.dir / "revisions").iterdir() - # if fp.stem.lower() != "head" and fp.is_file() - # ]) - - assert num_revisions(projects) == 1 - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "database_parameter", - "id": "__update_formula_parameter_name_dummy__", - "change_type": "database_parameter_update_formula_project_parameter_name", - "delta": { - "dictionary_item_added": {"root['new']": "123"}, - "dictionary_item_removed": {"root['old']": "one2three"}, - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_database_parameter_revision_apply_update_formula_project_parameter_name( - num_revisions, monkeypatch -): - def fake_update(old, new, signal=True): - assert old == "one2three" - assert new == "123" - assert not signal - - monkeypatch.setattr(DatabaseParameter, "update_formula_project_parameter_name", fake_update) - - projects.set_current("activity-event") - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "database_parameter", - "id": "__update_formula_parameter_name_dummy__", - "change_type": "database_parameter_update_formula_project_parameter_name", - "delta": { - "dictionary_item_added": 
{"root['new']": "123"}, - "dictionary_item_removed": {"root['old']": "one2three"}, - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - -@bw2test -def test_database_parameter_revision_expected_format_update_formula_database_parameter_name( - num_revisions, -): - projects.set_current("activity-event") - DatabaseChooser("test-example").register() - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - DatabaseParameter.update_formula_database_parameter_name(old="one2three", new="123") - - assert num_revisions(projects) == 1 - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "database_parameter", - "id": "__update_formula_parameter_name_dummy__", - "change_type": "database_parameter_update_formula_database_parameter_name", - "delta": { - "dictionary_item_added": {"root['new']": "123"}, - "dictionary_item_removed": {"root['old']": "one2three"}, - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_database_parameter_revision_apply_update_formula_database_parameter_name( - num_revisions, monkeypatch -): - def fake_update(old, new, signal=True): - assert old == "one2three" - assert new == "123" - assert not signal - - monkeypatch.setattr(DatabaseParameter, "update_formula_database_parameter_name", fake_update) - - projects.set_current("activity-event") - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "database_parameter", - "id": "__update_formula_parameter_name_dummy__", - "change_type": "database_parameter_update_formula_database_parameter_name", - "delta": { - "dictionary_item_added": {"root['new']": "123"}, - "dictionary_item_removed": {"root['old']": "one2three"}, - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) diff --git a/tests/unit/test_group.py b/tests/unit/test_group.py deleted file mode 100644 index 4f0d8427..00000000 --- a/tests/unit/test_group.py +++ /dev/null @@ -1,252 +0,0 @@ -import json - -from bw2data.parameters import Group -from bw2data.project import projects -from bw2data.snowflake_ids import snowflake_id_generator -from bw2data.tests import bw2test - - -@bw2test -def test_group_revision_expected_format_create(num_revisions): - projects.set_current("activity-event") - - projects.dataset.set_sourced() - assert projects.dataset.revision is None - - group = Group.create(name="A", order=[]) - - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "group", - "id": group.id, - "change_type": "create", - "delta": { - 
"type_changes": { - "root": { - "old_type": "NoneType", - "new_type": "dict", - "new_value": {"id": group.id, "name": "A", "order": []}, - } - } - }, - } - ], - } - - assert revision == expected - assert num_revisions(projects) == 1 - - -@bw2test -def test_group_revision_apply_create(num_revisions): - projects.set_current("activity-event") - - projects.dataset.set_sourced() - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - group_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "group", - "id": group_id, - "change_type": "create", - "delta": { - "type_changes": { - "root": { - "old_type": "NoneType", - "new_type": "dict", - "new_value": {"id": group_id, "name": "A", "order": []}, - } - } - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - assert not num_revisions(projects) - - group = Group.get(Group.id == group_id) - assert group.name == "A" - assert group.order == [] - - -@bw2test -def test_group_revision_expected_format_delete(num_revisions): - projects.set_current("activity-event") - - group = Group.create(name="A", order=[]) - - projects.dataset.set_sourced() - assert projects.dataset.revision is None - - group.delete_instance() - - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "group", - "id": group.id, - "change_type": "delete", - "delta": { - "type_changes": { - "root": {"old_type": "dict", "new_type": "NoneType", "new_value": None} - } - }, - } - ], - } - - assert revision == expected - assert num_revisions(projects) == 1 - - -@bw2test -def test_group_revision_apply_delete(num_revisions): - projects.set_current("activity-event") - - group = Group.create(name="A", order=[]) - - projects.dataset.set_sourced() - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "group", - "id": group.id, - "change_type": "delete", - "delta": { - "type_changes": { - "root": {"old_type": "dict", "new_type": "NoneType", "new_value": None} - } - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - assert not Group.select().count() - assert not num_revisions(projects) - - -@bw2test -def test_group_revision_expected_format_update(num_revisions): - projects.set_current("activity-event") - - group = Group.create(name="A", order=[]) - - projects.dataset.set_sourced() - assert projects.dataset.revision is None - - group.order = ["foo", "bar"] - group.save() - - parent = projects.dataset.revision - assert parent is not None - - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": 
"No description", - }, - "data": [ - { - "type": "group", - "id": group.id, - "change_type": "update", - "delta": { - "iterable_item_added": {"root['order'][0]": "foo", "root['order'][1]": "bar"} - }, - } - ], - } - - assert revision == expected - assert num_revisions(projects) == 1 - - -@bw2test -def test_group_revision_apply_update(num_revisions): - projects.set_current("activity-event") - - group = Group.create(name="A", order=[]) - - projects.dataset.set_sourced() - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "group", - "id": group.id, - "change_type": "update", - "delta": { - "iterable_item_added": {"root['order'][0]": "foo", "root['order'][1]": "bar"} - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - group = Group.get(Group.id == group.id) - assert group.order == ["foo", "bar"] - assert not num_revisions(projects) diff --git a/tests/unit/test_parameterized_exchange_events.py b/tests/unit/test_parameterized_exchange_events.py deleted file mode 100644 index 6f937f88..00000000 --- a/tests/unit/test_parameterized_exchange_events.py +++ /dev/null @@ -1,341 +0,0 @@ -import json - -from bw2data.database import DatabaseChooser -from bw2data.parameters import ActivityParameter, ParameterizedExchange -from bw2data.project import projects -from bw2data.snowflake_ids import snowflake_id_generator -from bw2data.tests import bw2test - - -@bw2test -def test_parameterized_exchange_revision_expected_format_create(num_revisions): - projects.set_current("activity-event") - - database = DatabaseChooser("db") - database.register() - node = database.new_node(code="A", name="A") - node.save() - other = database.new_node(code="B", name="B2", type="product") - other.save() - edge = node.new_edge(input=other, type="technosphere", amount=0.1, arbitrary="foo") - edge.save() - ActivityParameter.insert_dummy("test-group", (node["database"], node["code"])) - - assert not ParameterizedExchange.select().count() - assert projects.dataset.revision is None - - projects.dataset.set_sourced() - - pe = ParameterizedExchange.create( - group="test-group", - exchange=edge.id, - formula="1 * 2 + 3", - ) - assert pe.id > 1e6 - assert num_revisions(projects) == 1 - - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "parameterized_exchange", - "id": pe.id, - "change_type": "create", - "delta": { - "type_changes": { - "root": { - "old_type": "NoneType", - "new_type": "dict", - "new_value": { - "id": pe.id, - "group": "test-group", - "formula": "1 * 2 + 3", - "exchange": edge.id, - }, - } - } - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_parameterized_exchange_revision_apply_create(num_revisions): - projects.set_current("activity-event") - - database = DatabaseChooser("db") - database.register() - node = database.new_node(code="A", name="A") - node.save() - other = database.new_node(code="B", name="B2", type="product") - other.save() - edge = 
node.new_edge(input=other, type="technosphere", amount=0.1, arbitrary="foo") - edge.save() - ActivityParameter.insert_dummy("test-group", (node["database"], node["code"])) - - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - pe_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "parameterized_exchange", - "id": pe_id, - "change_type": "create", - "delta": { - "type_changes": { - "root": { - "old_type": "NoneType", - "new_type": "dict", - "new_value": { - "id": pe_id, - "group": "test-group", - "formula": "1 * 2 + 3", - "exchange": edge.id, - }, - } - } - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - assert ParameterizedExchange.select().count() == 1 - pe = ParameterizedExchange.get(id=pe_id) - assert pe.group == "test-group" - assert pe.formula == "1 * 2 + 3" - assert pe.exchange == edge.id - - -@bw2test -def test_parameterized_exchange_revision_expected_format_update(num_revisions): - projects.set_current("activity-event") - - database = DatabaseChooser("db") - database.register() - node = database.new_node(code="A", name="A") - node.save() - other = database.new_node(code="B", name="B2", type="product") - other.save() - edge = node.new_edge(input=other, type="technosphere", amount=0.1, arbitrary="foo") - edge.save() - ActivityParameter.insert_dummy("test-group", (node["database"], node["code"])) - pe = ParameterizedExchange.create( - group="test-group", - exchange=edge.id, - formula="1 * 2 + 3", - ) - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - pe.formula = "7 / 3.141" - pe.save() - - assert num_revisions(projects) == 1 - - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "parameterized_exchange", - "id": pe.id, - "change_type": "update", - "delta": {"values_changed": {"root['formula']": {"new_value": "7 / 3.141"}}}, - } - ], - } - - assert revision == expected - - -@bw2test -def test_parameterized_exchange_revision_apply_update(num_revisions): - projects.set_current("activity-event") - - database = DatabaseChooser("db") - database.register() - node = database.new_node(code="A", name="A") - node.save() - other = database.new_node(code="B", name="B2", type="product") - other.save() - edge = node.new_edge(input=other, type="technosphere", amount=0.1, arbitrary="foo") - edge.save() - ActivityParameter.insert_dummy("test-group", (node["database"], node["code"])) - pe = ParameterizedExchange.create( - group="test-group", - exchange=edge.id, - formula="1 * 2 + 3", - ) - - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "parameterized_exchange", - "id": pe.id, - "change_type": "update", - "delta": {"values_changed": {"root['formula']": 
{"new_value": "7 / 3.141"}}}, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - assert ParameterizedExchange.select().count() == 1 - dp = ParameterizedExchange.get(id=pe.id) - assert dp.formula == "7 / 3.141" - assert pe.group == "test-group" - assert pe.exchange == edge.id - - -@bw2test -def test_parameterized_exchange_revision_expected_format_delete(num_revisions): - projects.set_current("activity-event") - - database = DatabaseChooser("db") - database.register() - node = database.new_node(code="A", name="A") - node.save() - other = database.new_node(code="B", name="B2", type="product") - other.save() - edge = node.new_edge(input=other, type="technosphere", amount=0.1, arbitrary="foo") - edge.save() - ActivityParameter.insert_dummy("test-group", (node["database"], node["code"])) - pe = ParameterizedExchange.create( - group="test-group", - exchange=edge.id, - formula="1 * 2 + 3", - ) - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - pe.delete_instance() - - assert num_revisions(projects) == 1 - - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "parameterized_exchange", - "id": pe.id, - "change_type": "delete", - "delta": { - "type_changes": { - "root": {"old_type": "dict", "new_type": "NoneType", "new_value": None} - } - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_parameterized_exchange_revision_apply_delete(num_revisions): - projects.set_current("activity-event") - - database = DatabaseChooser("db") - database.register() - node = database.new_node(code="A", name="A") - node.save() - other = database.new_node(code="B", name="B2", type="product") - other.save() - edge = node.new_edge(input=other, type="technosphere", amount=0.1, arbitrary="foo") - edge.save() - ActivityParameter.insert_dummy("test-group", (node["database"], node["code"])) - pe = ParameterizedExchange.create( - group="test-group", - exchange=edge.id, - formula="1 * 2 + 3", - ) - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "parameterized_exchange", - "id": pe.id, - "change_type": "delete", - "delta": { - "type_changes": { - "root": {"old_type": "dict", "new_type": "NoneType", "new_value": None} - } - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - assert not ParameterizedExchange.select().count() diff --git a/tests/unit/test_project_parameter_events.py b/tests/unit/test_project_parameter_events.py deleted file mode 100644 index 9c944c3f..00000000 --- a/tests/unit/test_project_parameter_events.py +++ /dev/null @@ -1,451 +0,0 @@ -import json - -from bw2data.parameters import Group, ProjectParameter -from bw2data.project import projects -from bw2data.snowflake_ids import snowflake_id_generator -from bw2data.tests import bw2test - - -@bw2test -def test_project_parameter_revision_expected_format_create(num_revisions, 
monkeypatch): - def no_signal_save(self, *args, **kwargs): - kwargs["signal"] = False - return super(Group, self).save(*args, **kwargs) - - monkeypatch.setattr(Group, "save", no_signal_save) - - projects.set_current("activity-event") - - assert not ProjectParameter.select().count() - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - pp = ProjectParameter.create(name="example", formula="1 * 2 + 3", amount=5, data={"foo": "bar"}) - assert pp.id > 1e6 - assert num_revisions(projects) == 1 - - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "project_parameter", - "id": pp.id, - "change_type": "create", - "delta": { - "type_changes": { - "root": { - "old_type": "NoneType", - "new_type": "dict", - "new_value": { - "id": pp.id, - "name": "example", - "formula": "1 * 2 + 3", - "amount": 5, - "data": {"foo": "bar"}, - }, - } - } - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_project_parameter_revision_apply_create(num_revisions, monkeypatch): - def no_signal_save(self, *args, **kwargs): - kwargs["signal"] = False - return super(Group, self).save(*args, **kwargs) - - monkeypatch.setattr(Group, "save", no_signal_save) - - projects.set_current("activity-event") - projects.dataset.set_sourced() - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - pp_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "project_parameter", - "id": pp_id, - "change_type": "create", - "delta": { - "type_changes": { - "root": { - "old_type": "NoneType", - "new_type": "dict", - "new_value": { - "id": pp_id, - "name": "example", - "formula": "1 * 2 + 3", - "amount": 5, - "data": {"foo": "bar"}, - }, - } - } - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - assert ProjectParameter.select().count() == 1 - pp = ProjectParameter.get(id=pp_id) - assert pp.data == {"foo": "bar"} - assert pp.amount == 5 - assert pp.formula == "1 * 2 + 3" - assert pp.name == "example" - - -@bw2test -def test_project_parameter_revision_expected_format_update(num_revisions): - projects.set_current("activity-event") - - pp = ProjectParameter.create(name="example", formula="1 * 2 + 3", amount=5, data={"foo": "bar"}) - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - pp.name = "another" - pp.amount = 7 - pp.save() - - assert num_revisions(projects) == 1 - - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "project_parameter", - "id": pp.id, - "change_type": "update", - "delta": { - "type_changes": { - "root['amount']": {"old_type": "float", "new_type": "int", 
"new_value": 7} - }, - "values_changed": {"root['name']": {"new_value": "another"}}, - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_project_parameter_revision_apply_update(num_revisions): - projects.set_current("activity-event") - pp = ProjectParameter.create(name="example", formula="1 * 2 + 3", amount=5, data={"foo": "bar"}) - - projects.dataset.set_sourced() - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "project_parameter", - "id": pp.id, - "change_type": "update", - "delta": { - "type_changes": { - "root['amount']": {"old_type": "float", "new_type": "int", "new_value": 7} - }, - "values_changed": {"root['name']": {"new_value": "another"}}, - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - assert ProjectParameter.select().count() == 1 - pp = ProjectParameter.get(id=pp.id) - assert pp.data == {"foo": "bar"} - assert pp.amount == 7 - assert pp.formula == "1 * 2 + 3" - assert pp.name == "another" - - -@bw2test -def test_project_parameter_revision_expected_format_delete(num_revisions): - projects.set_current("activity-event") - - pp = ProjectParameter.create(name="example", formula="1 * 2 + 3", amount=5, data={"foo": "bar"}) - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - pp.delete_instance() - - assert num_revisions(projects) == 1 - - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "project_parameter", - "id": pp.id, - "change_type": "delete", - "delta": { - "type_changes": { - "root": {"old_type": "dict", "new_type": "NoneType", "new_value": None} - } - }, - } - ], - } - - assert revision == expected - - -@bw2test -def test_project_parameter_revision_apply_delete(num_revisions): - projects.set_current("activity-event") - pp = ProjectParameter.create(name="example", formula="1 * 2 + 3", amount=5, data={"foo": "bar"}) - assert ProjectParameter.select().count() == 1 - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "project_parameter", - "id": pp.id, - "change_type": "delete", - "delta": { - "type_changes": { - "root": {"old_type": "dict", "new_type": "NoneType", "new_value": None} - } - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - assert not ProjectParameter.select().count() - - -@bw2test -def test_project_parameter_revision_expected_format_recalculate(num_revisions): - projects.set_current("activity-event") - - # Needed to have a parameter which could be obsolete - otherwise `recalculate` just - # no-op exits - ProjectParameter.create(name="example", formula="1 * 2 + 3", amount=5, 
data={"foo": "bar"}) - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - ProjectParameter.recalculate() - - assert num_revisions(projects) == 1 - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "project_parameter", - "id": "__recalculate_dummy__", - "change_type": "project_parameter_recalculate", - "delta": {}, - } - ], - } - - assert revision == expected - - -@bw2test -def test_project_parameter_revision_apply_recalculate(num_revisions, monkeypatch): - def fake_recalculate(ignored=None, signal=True): - assert not signal - - monkeypatch.setattr(ProjectParameter, "recalculate", fake_recalculate) - - projects.set_current("activity-event") - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "project_parameter", - "id": "__recalculate_dummy__", - "change_type": "project_parameter_recalculate", - "delta": {}, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) - - -@bw2test -def test_project_parameter_revision_expected_format_update_formula_parameter_name( - num_revisions, monkeypatch -): - def no_signal_save(self, *args, **kwargs): - kwargs["signal"] = False - return super(Group, self).save(*args, **kwargs) - - monkeypatch.setattr(Group, "save", no_signal_save) - - projects.set_current("activity-event") - - assert projects.dataset.revision is None - projects.dataset.set_sourced() - - ProjectParameter.update_formula_parameter_name(old="one2three", new="123") - - assert num_revisions(projects) == 1 - assert projects.dataset.revision is not None - with open(projects.dataset.dir / "revisions" / f"{projects.dataset.revision}.rev", "r") as f: - revision = json.load(f) - - expected = { - "metadata": { - "parent_revision": None, - "revision": projects.dataset.revision, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "project_parameter", - "id": "__update_formula_parameter_name_dummy__", - "change_type": "project_parameter_update_formula_parameter_name", - "delta": { - "dictionary_item_added": {"root['new']": "123"}, - "dictionary_item_removed": {"root['old']": "one2three"}, - }, - } - ], - } - - assert revision == expected - - -@bw2test -def project_parameter_revision_apply_update_formula_parameter_name(num_revisions, monkeypatch): - def fake_update(old, new, signal=True): - assert old == "one2three" - assert new == "123" - assert not signal - - monkeypatch.setattr(ProjectParameter, "update_formula_parameter_name", fake_update) - - projects.set_current("activity-event") - assert projects.dataset.revision is None - - revision_id = next(snowflake_id_generator) - revision = { - "metadata": { - "parent_revision": None, - "revision": revision_id, - "authors": "Anonymous", - "title": "Untitled revision", - "description": "No description", - }, - "data": [ - { - "type": "project_parameter", - "id": 
"__update_formula_parameter_name_dummy__", - "change_type": "project_parameter_update_formula_parameter_name", - "delta": { - "dictionary_item_added": {"root['new']": "123"}, - "dictionary_item_removed": {"root['old']": "one2three"}, - }, - } - ], - } - - projects.dataset.apply_revision(revision) - assert projects.dataset.revision == revision_id - - assert not num_revisions(projects) From fa24b28bea55f72c86f8d4b24e674530a243d6e9 Mon Sep 17 00:00:00 2001 From: Chris Mutel Date: Tue, 7 Jan 2025 15:51:22 +0100 Subject: [PATCH 4/9] Postgres enforces proper boolean types --- bw2data/project.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bw2data/project.py b/bw2data/project.py index 556e47f5..2e84b32a 100644 --- a/bw2data/project.py +++ b/bw2data/project.py @@ -46,8 +46,8 @@ def lockable(): class ProjectDataset(Model): # Event sourcing - is_sourced = BooleanField(default=False, constraints=[SQL("DEFAULT 0")]) revision = IntegerField(null=True) + is_sourced = BooleanField(default=False, constraints=[SQL("DEFAULT false")]) data = PickleField() name = TextField(index=True, unique=True) @@ -55,7 +55,7 @@ class ProjectDataset(Model): # But for backwards compatibility we need a default `True` value # and this hack is the recommended way to get this behaviour. # See https://docs.peewee-orm.com/en/latest/peewee/models.html?highlight=table%20generation - full_hash = BooleanField(default=True, constraints=[SQL("DEFAULT 1")]) + full_hash = BooleanField(default=True, constraints=[SQL("DEFAULT true")]) def __str__(self): return "Project: {}".format(self.name) From 477009efc53000b0dc428167a46f2f205e29f75a Mon Sep 17 00:00:00 2001 From: Chris Mutel Date: Tue, 7 Jan 2025 15:52:17 +0100 Subject: [PATCH 5/9] Need BigIntegerField for 64-bit integers on Postgres --- bw2data/project.py | 4 ++-- bw2data/snowflake_ids.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bw2data/project.py b/bw2data/project.py index 2e84b32a..a617dc9d 100644 --- a/bw2data/project.py +++ b/bw2data/project.py @@ -12,7 +12,7 @@ import deepdiff import wrapt from bw_processing import safe_filename -from peewee import SQL, BooleanField, DoesNotExist, IntegerField, Model, SqliteDatabase, TextField +from peewee import SQL, BooleanField, DoesNotExist, BigIntegerField, Model, SqliteDatabase, TextField from platformdirs import PlatformDirs import bw2data.signals as bw2signals @@ -46,8 +46,8 @@ def lockable(): class ProjectDataset(Model): # Event sourcing - revision = IntegerField(null=True) is_sourced = BooleanField(default=False, constraints=[SQL("DEFAULT false")]) + revision = BigIntegerField(null=True) data = PickleField() name = TextField(index=True, unique=True) diff --git a/bw2data/snowflake_ids.py b/bw2data/snowflake_ids.py index 3872cafe..c6a398b1 100644 --- a/bw2data/snowflake_ids.py +++ b/bw2data/snowflake_ids.py @@ -1,6 +1,6 @@ import uuid -from peewee import IntegerField +from peewee import BigIntegerField from snowflake import SnowflakeGenerator from bw2data.signals import SignaledDataset @@ -25,7 +25,7 @@ class SnowflakeIDBaseClass(SignaledDataset): - id = IntegerField(primary_key=True) + id = BigIntegerField(primary_key=True) def save(self, **kwargs): if self.id is None: From ca4e1d3c8ee3104a602d3eeaf20d3e723ec0e865 Mon Sep 17 00:00:00 2001 From: Chris Mutel Date: Tue, 7 Jan 2025 15:56:23 +0100 Subject: [PATCH 6/9] Postgres primary key BigIntegerField isn't auto-incrementing --- bw2data/backends/base.py | 2 +- bw2data/backends/utils.py | 9 +++++++-- 2 files changed, 8 insertions(+), 3 
deletions(-) diff --git a/bw2data/backends/base.py b/bw2data/backends/base.py index dcf69164..12e403d3 100644 --- a/bw2data/backends/base.py +++ b/bw2data/backends/base.py @@ -526,7 +526,7 @@ def _efficient_write_dataset( if "output" not in exchange: exchange["output"] = (ds["database"], ds["code"]) - exchanges.append(dict_as_exchangedataset(exchange)) + exchanges.append(dict_as_exchangedataset(exchange, add_snowflake_id=True)) # Query gets passed as INSERT INTO x VALUES ('?', '?'...) # SQLite3 has a limit of 999 variables, diff --git a/bw2data/backends/utils.py b/bw2data/backends/utils.py index d3287564..b95b56a0 100644 --- a/bw2data/backends/utils.py +++ b/bw2data/backends/utils.py @@ -85,8 +85,8 @@ def dict_as_activitydataset(ds: Any, add_snowflake_id: bool = False) -> dict: return val -def dict_as_exchangedataset(ds: Any) -> dict: - return { +def dict_as_exchangedataset(ds: Any, add_snowflake_id: bool = False) -> dict: + val = { "data": ds, "input_database": ds["input"][0], "input_code": ds["input"][1], @@ -94,6 +94,11 @@ def dict_as_exchangedataset(ds: Any) -> dict: "output_code": ds["output"][1], "type": ds["type"], } + # Use during `insert_many` calls as these skip auto id generation because they don't call + # `.save()` + if add_snowflake_id: + val["id"] = next(snowflake_id_generator) + return val def get_obj_as_dict(cls: SignaledDataset, obj_id: Optional[int]) -> dict: From e5f7f54497954e1eb4cfa254985d175f574d86b7 Mon Sep 17 00:00:00 2001 From: Chris Mutel Date: Tue, 7 Jan 2025 15:57:56 +0100 Subject: [PATCH 7/9] Allow Postgres usage with psycopg2 psycopg2 is default for Peewee --- bw2data/sqlite.py | 33 ++++++++++++++++++++++++++++++--- pyproject.toml | 3 ++- 2 files changed, 32 insertions(+), 4 deletions(-) diff --git a/bw2data/sqlite.py b/bw2data/sqlite.py index 392152f9..350148ca 100644 --- a/bw2data/sqlite.py +++ b/bw2data/sqlite.py @@ -1,7 +1,9 @@ import json import pickle +import os +from pathlib import Path -from peewee import BlobField, SqliteDatabase, TextField +from peewee import BlobField, SqliteDatabase, TextField, PostgresqlDatabase from bw2data.logs import stdout_feedback_logger @@ -15,13 +17,38 @@ def python_value(self, value): class SubstitutableDatabase: - def __init__(self, filepath, tables): + def __init__(self, filepath: Path, tables: list): self._filepath = filepath self._tables = tables self._database = self._create_database() + def check_postgres(self) -> dict: + if not os.environ.get('BW_DATA_POSTGRES'): + return {} + return { + "db": os.environ['BW_DATA_POSTGRES_DATABASE'], + "user": os.environ.get('BW_DATA_POSTGRES_USER'), + "password": os.environ.get('BW_DATA_POSTGRES_PASSWORD'), + "port": int(os.environ.get('BW_DATA_POSTGRES_PORT', 5432)), + "url": os.environ.get('BW_DATA_POSTGRES_URL', "localhost"), + } + def _create_database(self): - db = SqliteDatabase(self._filepath) + pg_config = self.check_postgres() + if not pg_config: + db = SqliteDatabase(self._filepath) + stdout_feedback_logger.info("Using SQLite driver") + else: + db = PostgresqlDatabase( + pg_config['db'], + user=pg_config['user'], + password=pg_config['password'], + host=pg_config['url'], + port=pg_config['port'], + ) + stdout_feedback_logger.info( + f"Using Postgres driver with database {pg_config['db']} and user {pg_config['user']}" + ) for model in self._tables: model.bind(db, bind_refs=False, bind_backrefs=False) db.connect() diff --git a/pyproject.toml b/pyproject.toml index 7dda9751..f29677d1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,7 @@ dependencies = [ 
"peewee>=3.9.4", "pint<0.24", "platformdirs", + "psycopg2", "pydantic-settings", "rapidfuzz; sys_platform != 'emscripten'", "scipy", @@ -62,7 +63,7 @@ tracker = "https://github.com/brightway-lca/bw2data/issues" # seems to work, at least for now testing = [ "bw2data", - "bw2calc>=2.0.dev17", + "bw2calc>=2.0", "pytest", "pytest-cov", "python-coveralls" From 0ae9e224bd04ee0b3515534999103086ebc3b5cf Mon Sep 17 00:00:00 2001 From: Chris Mutel Date: Tue, 7 Jan 2025 16:00:13 +0100 Subject: [PATCH 8/9] Update tests to support Postgres via Docker Uses testcontainers and changes decorator to pytest parameterized fixture which runs on both SQLite and Postgres. Testcontainers requires a working installation of Docker. See https://java.testcontainers.org/supported_docker_environment/ --- bw2data/tests.py | 47 ++++++++++++++++++++++++++++++++++++++++------- pyproject.toml | 6 +++++- tests/conftest.py | 2 ++ 3 files changed, 47 insertions(+), 8 deletions(-) diff --git a/bw2data/tests.py b/bw2data/tests.py index add25839..c5c3f725 100644 --- a/bw2data/tests.py +++ b/bw2data/tests.py @@ -1,4 +1,5 @@ import atexit +import os import random import shutil import string @@ -6,7 +7,12 @@ import unittest from pathlib import Path -import wrapt +try: + from testcontainers.postgres import PostgresContainer +except ImportError: + PostgresContainer = None + +import pytest from bw2data import config, databases, geomapping, methods from bw2data.project import projects @@ -41,15 +47,42 @@ def test_setup_clean(self): self.assertTrue("default" not in projects) -@wrapt.decorator -def bw2test(wrapped, instance, args, kwargs): +@pytest.fixture(params=[True, False]) +def bw_test_fixture(request, tmp_path) -> None: config.dont_warn = True config.is_test = True - tempdir = Path(tempfile.mkdtemp()) project_name = "".join(random.choices(string.ascii_lowercase, k=18)) + if request.param and PostgresContainer: + postgres = PostgresContainer("postgres:16") + postgres.start() + + def remove_container(): + postgres.stop() + + del os.environ['BW_DATA_POSTGRES_URL'] + del os.environ['BW_DATA_POSTGRES_PORT'] + del os.environ['BW_DATA_POSTGRES_USER'] + del os.environ['BW_DATA_POSTGRES_PASSWORD'] + del os.environ['BW_DATA_POSTGRES_DATABASE'] + del os.environ['BW_DATA_POSTGRES'] + + request.addfinalizer(remove_container) + + os.environ['BW_DATA_POSTGRES'] = "1" + os.environ['BW_DATA_POSTGRES_URL'] = postgres.get_container_host_ip() + os.environ['BW_DATA_POSTGRES_PORT'] = postgres.get_exposed_port(5432) + os.environ['BW_DATA_POSTGRES_USER'] = postgres.username + os.environ['BW_DATA_POSTGRES_PASSWORD'] = postgres.password + os.environ['BW_DATA_POSTGRES_DATABASE'] = postgres.dbname + else: + os.environ['BW_DATA_POSTGRES'] = "" + + def remove_envvar(): + del os.environ['BW_DATA_POSTGRES'] + + request.addfinalizer(remove_envvar) + projects.change_base_directories( - base_dir=tempdir, base_logs_dir=tempdir, project_name=project_name, update=False + base_dir=tmp_path, base_logs_dir=tmp_path, project_name=project_name, update=False ) projects._is_temp_dir = True - atexit.register(shutil.rmtree, tempdir) - return wrapped(*args, **kwargs) diff --git a/pyproject.toml b/pyproject.toml index f29677d1..48115082 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,7 +66,9 @@ testing = [ "bw2calc>=2.0", "pytest", "pytest-cov", - "python-coveralls" + "python-coveralls", + "pytest-randomly", + "testcontainers[postgres]", ] dev = [ "build", @@ -74,8 +76,10 @@ dev = [ "pylint", "pytest", "pytest-cov", + "python-coveralls", "pytest-randomly", "setuptools", + 
"testcontainers[postgres]", ] [tool.setuptools] diff --git a/tests/conftest.py b/tests/conftest.py index 6d93ebed..eed97609 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,6 +4,8 @@ import pytest +from bw2data.tests import bw_test_fixture + sqlite3.enable_callback_tracebacks(True) From 88660699db019019171c8a26c0255023823ab1b0 Mon Sep 17 00:00:00 2001 From: Chris Mutel Date: Tue, 7 Jan 2025 16:00:29 +0100 Subject: [PATCH 9/9] Adapt `tests/database.py` to new fixture --- tests/database.py | 176 +++++++++++++++------------------------------- 1 file changed, 58 insertions(+), 118 deletions(-) diff --git a/tests/database.py b/tests/database.py index cae91197..f6eee341 100644 --- a/tests/database.py +++ b/tests/database.py @@ -27,7 +27,6 @@ WrongDatabase, ) from bw2data.snowflake_ids import EPOCH_START_MS -from bw2data.tests import bw2test from .fixtures import biosphere from .fixtures import food as food_data @@ -35,8 +34,7 @@ @pytest.fixture -@bw2test -def food(): +def food(bw_test_fixture): d = Database("biosphere") d.write(biosphere) d = Database("food") @@ -51,8 +49,7 @@ def test_food(food): ### Basic functions -@bw2test -def test_get_code(): +def test_get_code(bw_test_fixture): d = Database("biosphere") d.write(biosphere) activity = d.get("1") @@ -61,8 +58,7 @@ def test_get_code(): assert activity.id > EPOCH_START_MS -@bw2test -def test_get_kwargs(): +def test_get_kwargs(bw_test_fixture): d = Database("biosphere") d.write(biosphere) activity = d.get(name="an emission") @@ -71,8 +67,7 @@ def test_get_kwargs(): assert activity.id > EPOCH_START_MS -@bw2test -def test_iter(): +def test_iter(bw_test_fixture): d = Database("biosphere") d.write(biosphere) activity = next(iter(d)) @@ -80,8 +75,7 @@ def test_iter(): assert activity["name"] in ("an emission", "another emission") -@bw2test -def test_get_random(): +def test_get_random(bw_test_fixture): d = Database("biosphere") d.write(biosphere) activity = d.random() @@ -105,8 +99,7 @@ def test_copy_metadata(food): assert databases["repas"]["custom"] == "something" -@bw2test -def test_copy_does_deepcopy(): +def test_copy_does_deepcopy(bw_test_fixture): data = { ("old name", "1"): { "exchanges": [{"input": ("old name", "1"), "amount": 1.0, "type": "technosphere"}] @@ -121,16 +114,14 @@ def test_copy_does_deepcopy(): assert list(d.load().values())[0]["exchanges"][0]["input"] == ("old name", "1") -@bw2test -def test_raise_wrong_database(): +def test_raise_wrong_database(bw_test_fixture): data = {("foo", "1"): {}} d = Database("bar") with pytest.raises(WrongDatabase): d.write(data) -@bw2test -def test_deletes_from_database(): +def test_deletes_from_database(bw_test_fixture): d = Database("biosphere") d.write(biosphere) assert "biosphere" in databases @@ -147,8 +138,7 @@ def test_deletes_from_database(): ) == (0,) -@bw2test -def test_deletes_from_calculation_setups(): +def test_deletes_from_calculation_setups(bw_test_fixture): d = Database("biosphere") d.write(biosphere) @@ -159,16 +149,14 @@ def test_deletes_from_calculation_setups(): assert calculation_setups["foo"]["inv"] == [] -@bw2test -def test_delete_warning(): +def test_delete_warning(bw_test_fixture): d = Database("biosphere") d.write(biosphere) with pytest.warns(UserWarning): d.delete() -@bw2test -def test_relabel_data(): +def test_relabel_data(bw_test_fixture): old_data = { ("old and boring", "1"): { "exchanges": [ @@ -208,8 +196,7 @@ def test_relabel_data(): ### Metadata -@bw2test -def test_find_graph_dependents(): +def test_find_graph_dependents(bw_test_fixture): databases["one"] 
= {"depends": ["two", "three"]} databases["two"] = {"depends": ["four", "five"]} databases["three"] = {"depends": ["four"]} @@ -226,16 +213,14 @@ def test_find_graph_dependents(): } -@bw2test -def test_register(): +def test_register(bw_test_fixture): database = Database("testy") database.register() assert "testy" in databases assert "depends" in databases["testy"] -@bw2test -def test_deregister(): +def test_deregister(bw_test_fixture): d = Database("food") d.register() assert "food" in databases @@ -243,8 +228,7 @@ def test_deregister(): assert "food" not in databases -@bw2test -def test_write_sets_databases_number_attribute(): +def test_write_sets_databases_number_attribute(bw_test_fixture): d = Database("biosphere") d.write(biosphere) assert databases["biosphere"]["number"] == len(biosphere) @@ -253,8 +237,7 @@ def test_write_sets_databases_number_attribute(): ### Processed arrays -@bw2test -def test_process_unknown_object(): +def test_process_unknown_object(bw_test_fixture): database = Database("testy") data = { ("testy", "A"): {}, @@ -272,24 +255,21 @@ def test_process_unknown_object(): ### String handling -@bw2test -def test_naughty_activity_codes(): +def test_naughty_activity_codes(bw_test_fixture): db = Database("foo") data = {("foo", str(i)): {"name": x} for i, x in enumerate(get_naughty())} db.write(data) assert set(get_naughty()) == set(x["name"] for x in db) -@bw2test -def test_setup(): +def test_setup(bw_test_fixture): d = Database("biosphere") d.write(biosphere) d = Database("food") d.write(food_data) -@bw2test -def test_rename(): +def test_rename(bw_test_fixture): d = Database("biosphere") d.write(biosphere) d = Database("food") @@ -305,8 +285,7 @@ def test_rename(): assert exc["input"][0] in ("biosphere", "buildings") -@bw2test -def test_exchange_save(): +def test_exchange_save(bw_test_fixture): database = Database("testy") data = { ("testy", "A"): {}, @@ -333,7 +312,6 @@ def test_exchange_save(): assert exc["amount"] == 2 -@bw2test @pytest.mark.skip() def test_dirty_activities(): database = Database("testy") @@ -359,8 +337,7 @@ def test_dirty_activities(): assert lca.supply_array[lca.activity_dict[("testy", "A")]] == 0.5 -@bw2test -def test_process_invalid_exchange_value(): +def test_process_invalid_exchange_value(bw_test_fixture): database = Database("testy") data = { ("testy", "A"): {}, @@ -375,8 +352,7 @@ def test_process_invalid_exchange_value(): database.write(data) -@bw2test -def test_untyped_exchange_error(): +def test_untyped_exchange_error(bw_test_fixture): database = Database("testy") database_data = { ("testy", "A"): {"exchanges": [{"amount": 1, "input": ("testy", "A")}]}, @@ -385,8 +361,7 @@ def test_untyped_exchange_error(): database.write(database_data, process=False) -@bw2test -def test_no_input_raises_invalid_exchange(): +def test_no_input_raises_invalid_exchange(bw_test_fixture): database = Database("testy") database_data = { ("testy", "A"): {"exchanges": [{"amount": 1}]}, @@ -395,8 +370,7 @@ def test_no_input_raises_invalid_exchange(): database.write(database_data, process=False) -@bw2test -def test_no_amount_raises_invalid_exchange(): +def test_no_amount_raises_invalid_exchange(bw_test_fixture): database = Database("testy") database_data = { ("testy", "A"): {"exchanges": [{"input": ("testy", "A"), "type": "technosphere"}]}, @@ -405,8 +379,7 @@ def test_no_amount_raises_invalid_exchange(): database.write(database_data, process=False) -@bw2test -def test_zero_amount_is_valid_exchange(): +def test_zero_amount_is_valid_exchange(bw_test_fixture): database = 
Database("testy") database_data = { ("testy", "A"): { @@ -416,8 +389,7 @@ def test_zero_amount_is_valid_exchange(): database.write(database_data, process=False) -@bw2test -def test_process_checks_process_type(): +def test_process_checks_process_type(bw_test_fixture): database = Database("a database") database.write( { @@ -430,8 +402,7 @@ def test_process_checks_process_type(): assert database.process() is None -@bw2test -def test_geomapping_array_includes_only_processes(): +def test_geomapping_array_includes_only_processes(bw_test_fixture): database = Database("a database") database.write( { @@ -449,8 +420,7 @@ def test_geomapping_array_includes_only_processes(): assert array[0]["col"] == geomapping["bar"] -@bw2test -def test_geomapping_array_normalization(): +def test_geomapping_array_normalization(bw_test_fixture): database = Database("a database") database.register(location_normalization={"RoW": "GLO"}) database.write( @@ -470,8 +440,7 @@ def test_geomapping_array_normalization(): assert array[1]["col"] == geomapping["GLO"] -@bw2test -def test_processed_array(): +def test_processed_array(bw_test_fixture): database = Database("a database") database.write( { @@ -499,8 +468,7 @@ def test_processed_array(): assert array[0]["uncertainty_type"] == 7 -@bw2test -def test_processed_array_with_metadata(): +def test_processed_array_with_metadata(bw_test_fixture): database = Database("a database") database.write( { @@ -547,8 +515,7 @@ def test_processed_array_with_metadata(): ) -@bw2test -def test_processed_array_with_non_process_nodes(): +def test_processed_array_with_non_process_nodes(bw_test_fixture): database = Database("a database") database.write( { @@ -587,14 +554,12 @@ def test_processed_array_with_non_process_nodes(): assert array.shape == (1,) -@bw2test -def test_base_class(): +def test_base_class(bw_test_fixture): database = Database("a database") assert database._metadata is databases -@bw2test -def test_find_dependents(): +def test_find_dependents(bw_test_fixture): database = Database("a database") database.write( { @@ -648,8 +613,7 @@ def test_find_dependents(): assert database.find_dependents(ignore={"awkward"}) == ["biosphere", "foo"] -@bw2test -def test_set_dependents(): +def test_set_dependents(bw_test_fixture): foo = Database("foo") foo.write( { @@ -704,8 +668,7 @@ def test_set_dependents(): assert databases["a database"]["depends"] == ["biosphere", "foo"] -@bw2test -def test_process_without_exchanges_still_in_processed_array(): +def test_process_without_exchanges_still_in_processed_array(bw_test_fixture): database = Database("a database") database.write({("a database", "foo"): {}}) @@ -715,8 +678,7 @@ def test_process_without_exchanges_still_in_processed_array(): assert array.shape == (1,) -@bw2test -def test_random_empty(): +def test_random_empty(bw_test_fixture): database = Database("a database") database.write({}) with warnings.catch_warnings() as w: @@ -724,8 +686,7 @@ def test_random_empty(): assert database.random() is None -@bw2test -def test_new_node(): +def test_new_node(bw_test_fixture): database = Database("a database") database.register() act = database.new_node("foo", this="that", name="something") @@ -738,8 +699,7 @@ def test_new_node(): assert act["this"] == "that" -@bw2test -def test_new_node_code_optional(): +def test_new_node_code_optional(bw_test_fixture): database = Database("a database") database.register() act = database.new_node(this="that", name="something") @@ -752,16 +712,14 @@ def test_new_node_code_optional(): assert act["this"] == "that" -@bw2test 
-def test_new_node_warn_type_technosphere(): +def test_new_node_warn_type_technosphere(bw_test_fixture): database = Database("a database") database.register() with pytest.warns(UserWarning, match="\nEdge type label used for node"): database.new_node(this="that", name="something", type="technosphere").save() -@bw2test -def test_new_node_error(): +def test_new_node_error(bw_test_fixture): database = Database("a database") database.register() act = database.new_node("foo", this="that", name="something") @@ -771,8 +729,7 @@ def test_new_node_error(): database.new_node("foo") -@bw2test -def test_new_activity(): +def test_new_activity(bw_test_fixture): database = Database("a database") database.register() act = database.new_activity("foo", this="that", name="something") @@ -785,8 +742,7 @@ def test_new_activity(): assert act["this"] == "that" -@bw2test -def test_can_split_processes_products(): +def test_can_split_processes_products(bw_test_fixture): database = Database("a database") database.write( { @@ -815,8 +771,7 @@ def test_can_split_processes_products(): assert array["row"][0] == get_id(("a database", "product")) -@bw2test -def test_sqlite_processed_array_order(): +def test_sqlite_processed_array_order(bw_test_fixture): database = Database("testy_new") data = { ("testy_new", "C"): {}, @@ -871,8 +826,7 @@ def test_sqlite_processed_array_order(): assert np.allclose(array["col"], [x[1] for x in b]) -@bw2test -def test_no_distributions_if_no_uncertainty(): +def test_no_distributions_if_no_uncertainty(bw_test_fixture): database = Database("a database") database.write( { @@ -894,8 +848,7 @@ def test_no_distributions_if_no_uncertainty(): package.get_resource("a_database_technosphere_matrix.distributions") -@bw2test -def test_delete_duplicate_exchanges(): +def test_delete_duplicate_exchanges(bw_test_fixture): all_exchanges = lambda db: [exc for ds in db for exc in ds.exchanges()] db = Database("test-case") @@ -923,8 +876,7 @@ def test_delete_duplicate_exchanges(): assert len(all_exchanges(db)) == 3 -@bw2test -def test_add_geocollections_dict(capsys): +def test_add_geocollections_dict(bw_test_fixture, capsys): db = Database("test-case") db.write( { @@ -941,8 +893,7 @@ def test_add_geocollections_dict(capsys): assert "Not able" in capsys.readouterr().out -@bw2test -def test_add_geocollections_list(capsys): +def test_add_geocollections_list(bw_test_fixture, capsys): db = Database("test-case") db.write( [ @@ -972,8 +923,7 @@ def test_add_geocollections_list(capsys): assert "Not able" in capsys.readouterr().out -@bw2test -def test_set_geocollections(capsys): +def test_set_geocollections(bw_test_fixture, capsys): db = Database("test-case") db.write( { @@ -1007,8 +957,7 @@ def test_set_geocollections(capsys): assert db.metadata["geocollections"] == ["foo", "this", "world"] -@bw2test -def test_add_geocollections_unable(capsys): +def test_add_geocollections_unable(bw_test_fixture, capsys): db = Database("test-case") db.write( { @@ -1020,8 +969,7 @@ def test_add_geocollections_unable(capsys): assert "Not able" in capsys.readouterr().out -@bw2test -def test_add_geocollections_no_unable_for_product(capsys): +def test_add_geocollections_no_unable_for_product(bw_test_fixture, capsys): db = Database("test-case") db.write( { @@ -1038,8 +986,7 @@ def test_add_geocollections_no_unable_for_product(capsys): @pytest.fixture -@bw2test -def df_fixture(): +def df_fixture(bw_test_fixture): Database("biosphere").write(biosphere) Database("food").write(food_data) @@ -1212,8 +1159,7 @@ def 
test_nodes_to_dataframe_unsorted(df_fixture): assert df.shape == (2, 8) -@bw2test -def test_warn_activity_type(): +def test_warn_activity_type(bw_test_fixture): db = Database("test-case") data = { ("test-case", "1"): { @@ -1228,8 +1174,7 @@ def test_warn_activity_type(): db.write(data) -@bw2test -def test_warn_activity_key(): +def test_warn_activity_key(bw_test_fixture): db = Database("test-case") data = { ("test-case", "1"): { @@ -1246,8 +1191,7 @@ def test_warn_activity_key(): db.write(data) -@bw2test -def test_no_warn_activity_key_no_check_typos(recwarn): +def test_no_warn_activity_key_no_check_typos(bw_test_fixture, recwarn): db = Database("test-case") data = { ("test-case", "1"): { @@ -1263,8 +1207,7 @@ def test_no_warn_activity_key_no_check_typos(recwarn): assert not any("Possible incorrect activity key" in str(rec.message) for rec in recwarn) -@bw2test -def test_warn_activity_key_yes_check_typos(recwarn): +def test_warn_activity_key_yes_check_typos(bw_test_fixture, recwarn): db = Database("test-case") data = { ("test-case", "1"): { @@ -1280,8 +1223,7 @@ def test_warn_activity_key_yes_check_typos(recwarn): assert any("Possible incorrect activity key" in str(rec.message) for rec in recwarn) -@bw2test -def test_warn_exchange_type(): +def test_warn_exchange_type(bw_test_fixture): db = Database("test-case") data = { ("test-case", "0"): { @@ -1308,8 +1250,7 @@ def test_warn_exchange_type(): db.write(data) -@bw2test -def test_warn_exchange_key(): +def test_warn_exchange_key(bw_test_fixture): db = Database("test-case") data = { ("test-case", "0"): { @@ -1335,8 +1276,7 @@ def test_warn_exchange_key(): db.write(data) -@bw2test -def test_iotable_sourced_project(): +def test_iotable_sourced_project(bw_test_fixture): projects.dataset.set_sourced() with pytest.raises(ValueError): Database("foo", backend="iotable")
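
Note on the pattern applied throughout PATCH 9/9: each test drops the
`@bw2test` decorator and instead requests the `bw_test_fixture` pytest
fixture by argument name (the import added to tests/conftest.py is what
makes the fixture discoverable to the test suite). The sketch below is
illustrative only -- the real fixture lives in `bw2data.tests` and its body
is not shown anywhere in this diff; the `_use_temp_directory()` /
`set_current()` calls are an assumption about what it does, inferred from
the decorator it replaces.

    # Hypothetical sketch of a project-isolating fixture; not the shipped
    # bw2data implementation.
    import pytest

    from bw2data import projects


    @pytest.fixture
    def bw_test_fixture():
        # Assumed behaviour: run each test against a throwaway temporary
        # project so no state leaks between tests.
        projects._use_temp_directory()
        projects.set_current("default")
        yield projects


    # Usage mirrors the adapted tests above: request the fixture by name
    # instead of decorating the function.
    def test_register(bw_test_fixture):
        from bw2data import Database, databases

        database = Database("testy")
        database.register()
        assert "testy" in databases

One practical advantage of the fixture form, visible in the hunks above, is
that tests can combine it freely with other fixtures (`capsys`, `recwarn`,
or the module-level `food` and `df_fixture` fixtures) without any decorator
ordering concerns.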