25 changes: 18 additions & 7 deletions pyproject.toml
@@ -4,14 +4,24 @@ build-backend = "setuptools.build_meta"

[project]
name = "temporal-mapper"
version = "1.1.0"
authors = [
{name = "Kaleb D Ruscitti", email="kaleb.ruscitti@uwaterloo.ca"},
]
version = "1.1.0"
description = "Implementation of density-based Mapper for temporal topic modelling."
readme = "README.md"
license = {text = "BSD-3-Clause license"}
keywords = ["Mapper", "TDA", "Morse Theory", "Temporal Topic Modeling"]
license = {text = "BSD-3-Clause"}
keywords = ["mapper", "tda", "morse theory", "temporal topic modeling"]

classifiers = [
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 3",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
]

dependencies = [
"adjustText",
"numpy",
"matplotlib",
"pandas",
@@ -20,11 +30,12 @@ dependencies = [
"scikit-learn",
"scipy",
"networkx",
"numba",
]
[project.optional-dependencies]
visualizations = [
"datamapplot",
"datashader",
"vectorizers",
]

[project.urls]
Homepage = "https://github.com/TutteInstitute/temporal-mapper"
Repository = "https://github.com/TutteInstitute/temporal-mapper"
Documentation = "https://temporal-mapper.readthedocs.io"
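With the plotting stack split into a visualizations extra, the base install stays lean. A minimal sketch of what that split implies at import time; the guard and message below are illustrative, not package code, and the PyPI name is assumed to match the "temporal-mapper" above:

try:
    import datamapplot  # provided only by the extra
    import datashader
    import vectorizers
except ImportError as err:
    raise ImportError(
        'plotting requires the extra: pip install "temporal-mapper[visualizations]"'
    ) from err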
5 changes: 3 additions & 2 deletions src/temporalmapper/__init__.py
@@ -1,5 +1,6 @@
import temporalmapper.temporal_mapper as tm_main
import temporalmapper.utilities as tm_utils

TemporalMapper = tm_main.TemporalMapper
centroid_datamap = tm_main.centroid_datamap
time_semantic_plot = tm_main.time_semantic_plot
centroid_datamap = tm_utils.centroid_datamap
time_semantic_plot = tm_utils.time_semantic_plot
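Since centroid_datamap and time_semantic_plot now come from utilities rather than temporal_mapper, the top-level API is unchanged. A quick sanity check, assuming the package is importable as temporalmapper:

import temporalmapper as tm

# The public names still resolve; only their home module moved.
assert tm.centroid_datamap is tm.utilities.centroid_datamap
assert tm.time_semantic_plot is tm.utilities.time_semantic_plot
assert tm.TemporalMapper is tm.temporal_mapper.TemporalMapper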
57 changes: 22 additions & 35 deletions src/temporalmapper/temporal_mapper.py
@@ -1,41 +1,49 @@
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
from temporalmapper.utilities import *
from temporalmapper.weighted_clustering import *
from tqdm import tqdm, trange
from sklearn.metrics import pairwise_distances
from tqdm import trange
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
from scipy.sparse import issparse
from sklearn.neighbors import NearestNeighbors
from sklearn.base import ClusterMixin
from scipy.sparse import issparse
from datamapplot.palette_handling import palette_from_datamap
import matplotlib as mpl
from copy import deepcopy
import plotly.graph_objects as go

from temporalmapper.utilities import (
std_sigmoid,
time_semantic_plot,
compute_time_semantic_positions,
prepare_plotly_graph_objects,
)
from temporalmapper.weighted_clustering import (
square,
cosine_window,
weighted_clusters,
)

"""TemporalMapper class
minimal usage example:

# load from your data file:
data : (n_dim, N_data) array-like
time : (N_data,) array-like
semantic_dist : (N_data,) array-like
# choose an sklearn clusterer:
clusterer = HDBSCAN()

# init and build the graph:
TG = TemporalGraph(
mapper = TemporalMapper(
time,
data,
clusterer,
N_checkpoints = 10,
)

TG.build()
myGraph = TG.G
mapper.build()
myGraph = mapper.G

# generate a matplotlib figure
mapper.temporal_plot()
"""


@@ -54,6 +62,9 @@ class TemporalMapper:
Run the density-based mapper algorithm to construct the temporal graph.
get_vertex_data(str node):
Returns the index of elements of ``data`` which are in vertex ``node``.
get_dir_subvertices(str node, float threshold = 0.0, bool backwards=False):
Returns the vertices that descend from ``node`` with out-edge weight at least ``threshold``.
If ``backwards=True``, returns the ancestors instead of the descendants.
temporal_plot():
Returns a matplotlib axis containing a temporal plot
interactive_temporal_plot():
@@ -185,30 +196,6 @@ def _compute_checkpoints(self):
self._compute_critical_points()
return checkpoints

def _compute_critical_points(self):
if self.distance is None:
self._compute_knn()
if verbose:
print("Computing morse critical points...")

std_time = np.copy(self.time)
std_time = self.scaler.fit_transform(std_time.reshape(-1, 1))
temporal_delta = [
np.mean(std_time[indx] - std_time[indx[0]]) for indx in TG.dist_indices
]
temporal_delta = np.squeeze(np.vstack(temporal_delta))
event_strength = temporal_delta / self.distance[:, -1]
## smooth it out a bit
smooth_strength = np.zeros(self.n_samples)
for k in trange(self.time, disable=self.disable):
smooth_strength += (
tmwc.square(self.time[k], self.time, 1, 0.05) * event_strength[k]
)
## find peaks & troughs
peaks = find_peaks(smooth_vals, prominence=0.8, height=0.5)[0]
troughs = find_peaks(-smooth_vals, prominence=0.8, height=0.5)[0]
critical_points = np.hstack((peaks, troughs))

def _compute_knn(self):
"""Run sklearn NearestNeighbours to compute knns."""
if self.verbose:
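Assembled from the updated module docstring, a self-contained version of the minimal example. The data is synthetic, and the positional order, keyword names, and array shapes follow the docstring, so treat the exact signature as an assumption:

import numpy as np
from sklearn.cluster import HDBSCAN  # any sklearn-style clusterer, per the docstring
import temporalmapper as tm

rng = np.random.default_rng(0)
N = 500
data = rng.normal(size=(2, N))            # (n_dim, N_data), as in the docstring
time = np.sort(rng.uniform(0.0, 1.0, N))  # (N_data,)

mapper = tm.TemporalMapper(
    time,
    data,
    HDBSCAN(),
    N_checkpoints=10,
)
mapper.build()
G = mapper.G            # the temporal graph (networkx)
mapper.temporal_plot()  # matplotlib figure, per the docstring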
51 changes: 0 additions & 51 deletions src/temporalmapper/utilities.py
@@ -16,7 +16,6 @@ def std_sigmoid(x):
transform = (x - mu) / (std)
return 1 / (1 + np.exp(-1 * transform))


def cluster_avg_1D(cluster_data, y_data):
"""Average out the y_data in each cluster,
to use as y-axis positions for the graph visualization"""
@@ -67,56 +66,6 @@ def epsilon_balls(data, epsilon):
indices.append(idx)
return distances, indices
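Only the tail of epsilon_balls survives the fold above; it evidently gathers, per point, the distances and indices of neighbours within epsilon. A hedged equivalent built on scikit-learn (already a dependency) rather than the hidden loop body:

import numpy as np
from sklearn.neighbors import NearestNeighbors

def epsilon_balls_sketch(data, epsilon):
    # Per-point epsilon-ball distances and indices; an assumed
    # equivalent of the mostly-hidden epsilon_balls above.
    nn = NearestNeighbors(radius=epsilon).fit(data)
    distances, indices = nn.radius_neighbors(data, return_distance=True)
    return list(distances), list(indices)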


def graph_to_holoviews(G, dataset_func=None):
"""Take TemporalGraph.G and output the required HoloViews objects for a modified Sankey diagram."""
nxNodes = G.nodes()
nodes = nxNodes # lol
cnt = 0
orphans = []
idx = 0
for node in nxNodes:
if G.degree(node) == 0:
cnt += 1
orphans.append(node)
continue
G.nodes()[node]["index"] = idx
idx += 1

for node in orphans:
G.remove_node(node)
nxNodes = G.nodes()
if cnt != 0:
print(f"Warning: removed {cnt} orphan nodes from the graph.")
nodes_ = {"index": [], "size": [], "label": [], "colour": [], "column": []}
for i, node in enumerate(nxNodes):
nodes_["index"].append(i)
nodes_["size"].append(nodes[node]["count"])
try:
nodes_["label"].append(nodes[node]["label"])
except KeyError:
nodes_["label"].append(nodes[node]["index"])
nodes_["colour"].append("#ffffff")
nodes_["column"].append(nodes[node]["slice_no"])

cmap = {nodes[node]["index"]: nodes[node]["colour"] for node in nodes}
try:
nodes = hv.Dataset(nodes_, "index", ["size", "label", "colour", "column"])
except NameError:
nodes = dataset_func(nodes_, "index", ["size", "label", "colour", "column"])

edges = []

for u, v, d in G.edges(data=True):
uidx = nxNodes[u]["index"]
vidx = nxNodes[v]["index"]
u_size = nxNodes[u]["count"]
v_size = nxNodes[v]["count"]
edges.append((uidx, vidx, (u_size * d["src_weight"], v_size * d["dst_weight"])))

return nodes, edges, cmap


def compute_cluster_yaxis(clusters, semantic_dist, func=cluster_avg_1D):
y_data = []
for tslice in clusters:
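For reference, the visible body of std_sigmoid standardizes its input and then applies the logistic; the folded lines above the hunk presumably compute mu and std, so a self-contained equivalent would be:

import numpy as np

def std_sigmoid_sketch(x):
    # Assumes mu and std are the mean and standard deviation of x,
    # which the folded lines above appear to compute.
    x = np.asarray(x, dtype=float)
    transform = (x - x.mean()) / x.std()
    return 1.0 / (1.0 + np.exp(-transform))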
8 changes: 1 addition & 7 deletions src/temporalmapper/weighted_clustering.py
@@ -1,11 +1,5 @@
import sys
import numpy as np
import pandas as pd
import math
import numba
from tqdm import tqdm, trange
from warnings import warn

from tqdm import tqdm

def gaussian(t0, t, density, binwidth, epsilon=0.1, params=None):
""" Returns weights for samples at times t for a Gaussian kernel centered at t0 """
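The body of gaussian is folded away in this diff; purely as an illustration of the shape such a kernel takes, a sketch with only the arguments whose role is visible. The density, epsilon, and params arguments are omitted because their use is hidden:

import numpy as np

def gaussian_sketch(t0, t, binwidth):
    # Illustrative Gaussian weights centered at t0; not the package's
    # actual implementation, whose body is collapsed above.
    t = np.asarray(t, dtype=float)
    return np.exp(-0.5 * ((t - t0) / binwidth) ** 2)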