Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions bw2data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,12 @@
"Node",
"Normalization",
"normalizations",
"parameters",
# "parameters",
"preferences",
"prepare_lca_inputs",
"ProcessedDataStore",
"projects",
"Searcher",
# "Searcher",
"set_data_dir",
"Weighting",
"weightings",
Expand Down Expand Up @@ -71,7 +71,7 @@
from bw2data.utils import get_activity, get_node
from bw2data.data_store import DataStore, ProcessedDataStore
from bw2data.method import Method
from bw2data.search import Searcher, IndexManager
# from bw2data.search import Searcher, IndexManager
from bw2data.weighting_normalization import Weighting, Normalization
from bw2data.backends import convert_backend, get_id, Node, Edge
from bw2data.compat import prepare_lca_inputs, Mapping, get_multilca_data_objs
Expand All @@ -80,7 +80,7 @@
mapping = Mapping()

from bw2data.updates import Updates
from bw2data.parameters import parameters
# from bw2data.parameters import parameters

Updates.check_status()

Expand Down
80 changes: 43 additions & 37 deletions bw2data/backends/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
)
from bw2data.logs import stdout_feedback_logger
from bw2data.query import Query
from bw2data.search import IndexManager, Searcher
# from bw2data.search import IndexManager, Searcher
from bw2data.signals import on_database_reset, on_database_write
from bw2data.utils import as_uncertainty_dict, get_geocollection, get_node, set_correct_process_type

Expand Down Expand Up @@ -526,7 +526,7 @@ def _efficient_write_dataset(

if "output" not in exchange:
exchange["output"] = (ds["database"], ds["code"])
exchanges.append(dict_as_exchangedataset(exchange))
exchanges.append(dict_as_exchangedataset(exchange, add_snowflake_id=True))

# Query gets passed as INSERT INTO x VALUES ('?', '?'...)
# SQLite3 has a limit of 999 variables,
Expand Down Expand Up @@ -732,20 +732,24 @@ def new_node(self, code: str = None, **kwargs):
return obj

def make_searchable(self, reset: bool = False, signal: bool = True):
if self.name not in databases:
raise UnknownObject("This database is not yet registered")
if self._searchable and not reset:
stdout_feedback_logger.info("This database is already searchable")
return
databases[self.name]["searchable"] = True
databases.flush(signal=signal)
IndexManager(self.filename).create()
IndexManager(self.filename).add_datasets(self)
return

# if self.name not in databases:
# raise UnknownObject("This database is not yet registered")
# if self._searchable and not reset:
# stdout_feedback_logger.info("This database is already searchable")
# return
# databases[self.name]["searchable"] = True
# databases.flush(signal=signal)
# IndexManager(self.filename).create()
# IndexManager(self.filename).add_datasets(self)

def make_unsearchable(self, signal: bool = True):
databases[self.name]["searchable"] = False
databases.flush(signal=signal)
IndexManager(self.filename).delete_database()
return

# databases[self.name]["searchable"] = False
# databases.flush(signal=signal)
# IndexManager(self.filename).delete_database()

def delete(
self, keep_params: bool = False, warn: bool = True, vacuum: bool = True, signal: bool = True
Expand Down Expand Up @@ -785,26 +789,26 @@ def purge(dct: dict) -> dict:

ActivityDataset.delete().where(ActivityDataset.database == self.name).execute()
ExchangeDataset.delete().where(ExchangeDataset.output_database == self.name).execute()
IndexManager(self.filename).delete_database()

if not keep_params:
from bw2data.parameters import (
ActivityParameter,
DatabaseParameter,
ParameterizedExchange,
)

groups = tuple(
{
o[0]
for o in ActivityParameter.select(ActivityParameter.group)
.where(ActivityParameter.database == self.name)
.tuples()
}
)
ParameterizedExchange.delete().where(ParameterizedExchange.group << groups).execute()
ActivityParameter.delete().where(ActivityParameter.database == self.name).execute()
DatabaseParameter.delete().where(DatabaseParameter.database == self.name).execute()
# IndexManager(self.filename).delete_database()

# if not keep_params:
# from bw2data.parameters import (
# ActivityParameter,
# DatabaseParameter,
# ParameterizedExchange,
# )

# groups = tuple(
# {
# o[0]
# for o in ActivityParameter.select(ActivityParameter.group)
# .where(ActivityParameter.database == self.name)
# .tuples()
# }
# )
# ParameterizedExchange.delete().where(ParameterizedExchange.group << groups).execute()
# ActivityParameter.delete().where(ActivityParameter.database == self.name).execute()
# DatabaseParameter.delete().where(DatabaseParameter.database == self.name).execute()

if vacuum_needed:
sqlite3_lci_db.vacuum()
Expand Down Expand Up @@ -1003,9 +1007,11 @@ def search(self, string, **kwargs):
* ``proxy``: Return ``Activity`` proxies instead of dictionary index Models. Default is ``True``.

Returns a list of ``Activity`` datasets."""
with Searcher(self.filename) as s:
results = s.search(string=string, **kwargs)
return results
raise NotImplementedError

# with Searcher(self.filename) as s:
# results = s.search(string=string, **kwargs)
# return results

def set_geocollections(self):
"""Set ``geocollections`` attribute for databases which don't currently have it."""
Expand Down
46 changes: 23 additions & 23 deletions bw2data/backends/proxies.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from bw2data.errors import ValidityError
from bw2data.logs import stdout_feedback_logger
from bw2data.proxies import ActivityProxyBase, ExchangeProxyBase
from bw2data.search import IndexManager
# from bw2data.search import IndexManager
from bw2data.signals import on_activity_code_change, on_activity_database_change


Expand Down Expand Up @@ -264,7 +264,7 @@ def key(self):

def delete(self, signal: bool = True):
from bw2data import Database, calculation_setups
from bw2data.parameters import ActivityParameter, ParameterizedExchange
# from bw2data.parameters import ActivityParameter, ParameterizedExchange

def purge(obj: Activity, dct: dict) -> dict:
return {
Expand All @@ -273,15 +273,15 @@ def purge(obj: Activity, dct: dict) -> dict:
if key != obj._data["id"] and key != (obj._data["database"], obj._data["code"])
}

try:
ap = ActivityParameter.get(database=self[0], code=self[1])
ParameterizedExchange.delete().where(ParameterizedExchange.group == ap.group).execute()
ActivityParameter.delete().where(
ActivityParameter.database == self[0], ActivityParameter.code == self[1]
).execute()
except ActivityParameter.DoesNotExist:
pass
IndexManager(Database(self["database"]).filename).delete_dataset(self._data)
# try:
# ap = ActivityParameter.get(database=self[0], code=self[1])
# ParameterizedExchange.delete().where(ParameterizedExchange.group == ap.group).execute()
# ActivityParameter.delete().where(
# ActivityParameter.database == self[0], ActivityParameter.code == self[1]
# ).execute()
# except ActivityParameter.DoesNotExist:
# pass
# IndexManager(Database(self["database"]).filename).delete_dataset(self._data)
self.exchanges().delete(allow_in_sourced_project=True)
self.upstream().delete(allow_in_sourced_project=True)

Expand Down Expand Up @@ -352,8 +352,8 @@ def save(self, signal: bool = True, data_already_set: bool = False, force_insert
if self.get("location") and self["location"] not in geomapping:
geomapping.add([self["location"]])

if databases[self["database"]].get("searchable", True):
IndexManager(Database(self["database"]).filename).update_dataset(self._data)
# if databases[self["database"]].get("searchable", True):
# IndexManager(Database(self["database"]).filename).update_dataset(self._data)

def _change_code(self, new_code: str, signal: bool = True):
if self["code"] == new_code:
Expand Down Expand Up @@ -383,11 +383,11 @@ def _change_code(self, new_code: str, signal: bool = True):
).execute()

if databases[self["database"]].get("searchable"):
from bw2data import Database
# from bw2data import Database

IndexManager(Database(self["database"]).filename).delete_dataset(self)
# IndexManager(Database(self["database"]).filename).delete_dataset(self)
self._data["code"] = new_code
IndexManager(Database(self["database"]).filename).add_datasets([self])
# IndexManager(Database(self["database"]).filename).add_datasets([self])
else:
self._data["code"] = new_code

Expand Down Expand Up @@ -420,11 +420,11 @@ def _change_database(self, new_database: str, signal: bool = True):
).execute()

if databases[self["database"]].get("searchable"):
from bw2data import Database
# from bw2data import Database

IndexManager(Database(self["database"]).filename).delete_dataset(self)
# IndexManager(Database(self["database"]).filename).delete_dataset(self)
self._data["database"] = new_database
IndexManager(Database(self["database"]).filename).add_datasets([self])
# IndexManager(Database(self["database"]).filename).add_datasets([self])
else:
self._data["database"] = new_database

Expand Down Expand Up @@ -603,11 +603,11 @@ def save(self, signal: bool = True, data_already_set: bool = False, force_insert
self._document.save(signal=signal, force_insert=force_insert)

def delete(self, signal: bool = True):
from bw2data.parameters import ParameterizedExchange
# from bw2data.parameters import ParameterizedExchange

ParameterizedExchange.delete().where(
ParameterizedExchange.exchange == self._document.id
).execute()
# ParameterizedExchange.delete().where(
# ParameterizedExchange.exchange == self._document.id
# ).execute()
self._document.delete_instance(signal=signal)
databases.set_dirty(self["output"][0])
self = None
9 changes: 7 additions & 2 deletions bw2data/backends/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,15 +85,20 @@ def dict_as_activitydataset(ds: Any, add_snowflake_id: bool = False) -> dict:
return val


def dict_as_exchangedataset(ds: Any) -> dict:
return {
def dict_as_exchangedataset(ds: Any, add_snowflake_id: bool = False) -> dict:
    """Map an exchange dictionary onto the keyword fields of ``ExchangeDataset``.

    ``ds`` must provide ``"input"`` and ``"output"`` as ``(database, code)``
    pairs plus a ``"type"`` key; the raw dict itself is stored under ``"data"``.

    When ``add_snowflake_id`` is true, an ``"id"`` is drawn from
    ``snowflake_id_generator`` — needed for ``insert_many`` bulk writes,
    which bypass ``.save()`` and therefore skip automatic id generation.
    """
    record = dict(
        data=ds,
        input_database=ds["input"][0],
        input_code=ds["input"][1],
        output_database=ds["output"][0],
        output_code=ds["output"][1],
        type=ds["type"],
    )
    if add_snowflake_id:
        record["id"] = next(snowflake_id_generator)
    return record


def get_obj_as_dict(cls: SignaledDataset, obj_id: Optional[int]) -> dict:
Expand Down
Loading