From c5872e046e4bc6fab516a24c9f923b87d97a5b72 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Tue, 12 Jan 2021 14:45:10 -0800 Subject: [PATCH 01/36] Initial commit, added header file for dqm --- dimod/include/dimod/adjvectordqm.h | 55 ++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 dimod/include/dimod/adjvectordqm.h diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h new file mode 100644 index 000000000..ac3c21985 --- /dev/null +++ b/dimod/include/dimod/adjvectordqm.h @@ -0,0 +1,55 @@ +// Copyright 2020 D-Wave Systems Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef DIMOD_ADJVECTORDQM_H_ +#define DIMOD_ADJVECTORDQM_H_ + +#include +#include +#include +#include + +#include "dimod/utils.h" +#include "dimod/adjvectorbqm.h" + +namespace dimod { + +template +class AdjVectorDQM { +public: + AdjVectorBQM _bqm; + std::vector _case_starts; + std::vector> _adj; + + AdjVectorBQM() { _case_starts.push_back(0); } + + void add_variable(size_t num_cases) { + assert(num_cases > 0); + auto v = _adj.resize(_adj.size() + 1); + for(auto n = 0; n < num_cases; n++){ + _bqm.add_variable(); + } + _case_starts.push_back(_bqm.num_variables()); + return v; + } + + // Skipping copy routine since a default copy constructor will work. + // No deep copying is needed. + + std::vector energies(std::vector< + + +} // namespace dimod + +#endif // DIMOD_ADJVECTORDQM_H_ From b9161874ce3382e39bcef55ba1695883de49f636 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Tue, 12 Jan 2021 18:42:22 -0800 Subject: [PATCH 02/36] Added the energy function --- dimod/include/dimod/adjvectordqm.h | 36 +++++++++++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index ac3c21985..fa38c60e7 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -44,12 +44,46 @@ class AdjVectorDQM { return v; } + void get_linear(V v, B* biases) { + + + + } + + // Skipping copy routine since a default copy constructor will work. // No deep copying is needed. - std::vector energies(std::vector< + void energies(C* p_samples, int num_variables, int num_samples, B* p_energies) { + assert(num_variables == _bqm.num_variables()); + memset(p_energies, 0, num_variables * sizeof(C)); + for(auto si = 0; si < num_samples; si++) { + C* p_curr_sample = samples + si * num_variables; + for(auto u = 0; u < num_variables; u++) { + auto case_u = p_curr_sample_es[u]; + assert(case_u < num_cases(u)); + auto cu = _case_starts[u] + case_u; + p_energies[si] += _bqm.get_linear(cu); + for(auto vi = 0; vi < _adj[u].size(); vi++) { + auto v = _adj[u][vi]; + // We only care about lower triangle. 
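// (Illustrative reader note, not part of the patch: for a sample that picks case c_v
// for every variable v, the quantity accumulated in this loop is
//     E = sum over v of linear(v, c_v)
//       + sum over interacting pairs with u > v of quadratic((u, c_u), (v, c_v)).
// Each interaction is stored symmetrically in the underlying BQM, so visiting only
// the neighbours with v < u, the lower triangle, counts every interaction exactly once.)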
+ if( v > u) { + break; + } + auto case_v = p_cur_sample[v]; + auto cv = _case_starts[v] + case_v; + auto out = _bqm.get_quadratic(cu,cv); + if(out.second) { + p_energies[si]+= out.first; + } + } + } + } + } + + } } // namespace dimod #endif // DIMOD_ADJVECTORDQM_H_ From 3dc630d52776e2f8e6d7b1b1c0c6be50c24e1f32 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Tue, 12 Jan 2021 19:44:57 -0800 Subject: [PATCH 03/36] Added more functions --- dimod/include/dimod/adjvectordqm.h | 42 +++++++++++++++++++++++++----- 1 file changed, 36 insertions(+), 6 deletions(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index fa38c60e7..c936d96e8 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -34,8 +34,10 @@ class AdjVectorDQM { AdjVectorBQM() { _case_starts.push_back(0); } - void add_variable(size_t num_cases) { - assert(num_cases > 0); + int add_variable(size_t num_cases) { + if(num_cases <=0 ) { + return -1; + } auto v = _adj.resize(_adj.size() + 1); for(auto n = 0; n < num_cases; n++){ _bqm.add_variable(); @@ -45,9 +47,39 @@ class AdjVectorDQM { } void get_linear(V v, B* biases) { - - + for(int case_v = 0; case_v < num_cases(v); case_v++) { + biases[case_v] = _bqm.get_linear(_case_starts[v] + case_v); + } + } + B get_linear_case(V v, C case_v) { + assert(case_v >= 0 && case_v < num_cases(v)); + return _bqm.get_linear(case_starts[v] + case_v); + } + + int get_quadratic(V u, V v, std::vector>& quadratic_biases) { + assert(u >=0 && u < _adj.size()); + assert(v >=0 && v < _adj.size()); + auto it = std::lower_bound(_adj[u].begin(), _adj[u].end(), v); + if( it == _adj[u].end() || *it != v) { + return -1; + } + auto num_cases_u = num_cases(u); + auto num_cases_v = num_cases(v); + quadratic_biases.resize(num_cases_u); + for(int i = 0; i < u ; i++){ + quadratic_biases.resize(num_cases_v, 0); + } + + for(auto case_u = 0; case_u < num_cases_u; case_u++) { + auto span = _bqm.neighborhood(case_u + _case_starts[u], _case_starts[v]); + while(span.first != span.second && *(span.first) < _case_starts[v+1]) { + case_v = *(span.first) - _case_starts[v]; + quadratic_biases[case_u][case_v] = *(span.first).second; + span.first++; + } + } + return 0; } @@ -81,8 +113,6 @@ class AdjVectorDQM { } } - - } } // namespace dimod From e98a393cd9e78445a470b0348bbece51e8d01dd6 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Wed, 13 Jan 2021 15:57:41 -0800 Subject: [PATCH 04/36] Refinements non-specific --- dimod/include/dimod/adjvectordqm.h | 75 +++++++++++++++++++++--------- 1 file changed, 52 insertions(+), 23 deletions(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index c936d96e8..ea90de3f3 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -28,17 +28,30 @@ namespace dimod { template class AdjVectorDQM { public: + using bias_type = B; + using case_type = C; + using variable_type = V; + using size_type = std::size_t; + AdjVectorBQM _bqm; - std::vector _case_starts; - std::vector> _adj; + std::vector _case_starts; + std::vector> _adj; AdjVectorBQM() { _case_starts.push_back(0); } - int add_variable(size_t num_cases) { - if(num_cases <=0 ) { - return -1; + variable_type num_cases(variable_type v = -1) { + assert(v < this->num_variables()); + if(v < 0) { + return _bqm.num_variables(); + } else { + return (_case_starts[v+1] - _case_starts[v]); } - auto v = _adj.resize(_adj.size() + 1); + } + + variable_type add_variable(size_t num_cases) { + assert(num_cases > 0); + auto v = 
_adj.size(); + _adj.resize(v+1); for(auto n = 0; n < num_cases; n++){ _bqm.add_variable(); } @@ -46,46 +59,62 @@ class AdjVectorDQM { return v; } - void get_linear(V v, B* biases) { - for(int case_v = 0; case_v < num_cases(v); case_v++) { - biases[case_v] = _bqm.get_linear(_case_starts[v] + case_v); - } + void get_linear(variable_type v, bias_type* biases) { + assert(v >= 0 && v < this->num_variables()); + for(auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { + biases[case_v] = _bqm.get_linear(_case_starts[v] + case_v); + } } - B get_linear_case(V v, C case_v) { + bias_type get_linear_case(variable_type v, case_type case_v) { + assert(v >= 0 && v < this->num_variables()); assert(case_v >= 0 && case_v < num_cases(v)); - return _bqm.get_linear(case_starts[v] + case_v); + return _bqm.get_linear(_case_starts[v] + case_v); } - int get_quadratic(V u, V v, std::vector>& quadratic_biases) { + bool get_quadratic(variable_type u, variable_type v, bias_type* quadratic_biases) { assert(u >=0 && u < _adj.size()); assert(v >=0 && v < _adj.size()); auto it = std::lower_bound(_adj[u].begin(), _adj[u].end(), v); if( it == _adj[u].end() || *it != v) { - return -1; + return false; } auto num_cases_u = num_cases(u); auto num_cases_v = num_cases(v); - quadratic_biases.resize(num_cases_u); - for(int i = 0; i < u ; i++){ - quadratic_biases.resize(num_cases_v, 0); - } - for(auto case_u = 0; case_u < num_cases_u; case_u++) { - auto span = _bqm.neighborhood(case_u + _case_starts[u], _case_starts[v]); + auto span = _bqm.neighborhood(_case_starts[u] + case_u, _case_starts[v]); while(span.first != span.second && *(span.first) < _case_starts[v+1]) { case_v = *(span.first) - _case_starts[v]; quadratic_biases[case_u][case_v] = *(span.first).second; span.first++; } } - return 0; + return true; + } + + bias_type get_quadratic_case(variable_type u, case_type case_u, variable_type v, case_type case_v) { + assert(u >= 0 && u < this->num_variables()); + assert(case_u >= 0 && case_u < num_cases(v)); + assert(v >= 0 && v < this->num_variables()); + assert(case_v >= 0 && case_v < num_cases(v)); + + auto cu = _case_starts[u] + case_u; + auto cv = _case_starts[v] + case_v; + return _bqm.get_quadratic(cu , cv).first; + } + + size_type num_case_interactions() { + return _bqm.num_interactions(); } + size_type num_variaables_interactions() { + size_type num_intearctions = 0; + for(auto v = 0, vend = this->num_variables(); v++) { - // Skipping copy routine since a default copy constructor will work. - // No deep copying is needed. 
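// (Illustrative layout note: every case of every variable is flattened into the
// underlying BQM, and variable v owns the contiguous block of BQM indices
// [_case_starts[v], _case_starts[v + 1]). For example, three variables with
// 2, 3 and 2 cases give _case_starts = {0, 2, 5, 7}, so case 2 of variable 1
// maps to BQM index _case_starts[1] + 2 == 4, and
// num_cases(1) == _case_starts[2] - _case_starts[1] == 3.)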
+ } +} + void energies(C* p_samples, int num_variables, int num_samples, B* p_energies) { assert(num_variables == _bqm.num_variables()); memset(p_energies, 0, num_variables * sizeof(C)); From 2497a30d872b376728956917fd9bbf1c227f3e5d Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Wed, 13 Jan 2021 20:39:15 -0800 Subject: [PATCH 05/36] Finished dqm basic functions except to-from conversion --- dimod/include/dimod/adjvectordqm.h | 88 +++++++++++++++++++++++++----- 1 file changed, 73 insertions(+), 15 deletions(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index ea90de3f3..409063a3d 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -38,6 +38,10 @@ class AdjVectorDQM { std::vector> _adj; AdjVectorBQM() { _case_starts.push_back(0); } + + variable_type num_variables() { + return _adj.size(); + } variable_type num_cases(variable_type v = -1) { assert(v < this->num_variables()); @@ -71,10 +75,11 @@ class AdjVectorDQM { assert(case_v >= 0 && case_v < num_cases(v)); return _bqm.get_linear(_case_starts[v] + case_v); } - + + // Returns false if there is no interaction among the variables. bool get_quadratic(variable_type u, variable_type v, bias_type* quadratic_biases) { - assert(u >=0 && u < _adj.size()); - assert(v >=0 && v < _adj.size()); + assert(u >=0 && u < this->num_variables()); + assert(v >=0 && v < this->num_variables()); auto it = std::lower_bound(_adj[u].begin(), _adj[u].end(), v); if( it == _adj[u].end() || *it != v) { return false; @@ -97,7 +102,7 @@ class AdjVectorDQM { assert(case_u >= 0 && case_u < num_cases(v)); assert(v >= 0 && v < this->num_variables()); assert(case_v >= 0 && case_v < num_cases(v)); - + // should add assert for u != v ? auto cu = _case_starts[u] + case_u; auto cv = _case_starts[v] + case_v; return _bqm.get_quadratic(cu , cv).first; @@ -108,27 +113,80 @@ class AdjVectorDQM { } size_type num_variaables_interactions() { - size_type num_intearctions = 0; - for(auto v = 0, vend = this->num_variables(); v++) { - - + size_type num = 0; + for(auto v = 0, vend = this->num_variables(); v < vend; v++) { + num+= _adj[v].size(); } + return (num/2); } - void energies(C* p_samples, int num_variables, int num_samples, B* p_energies) { - assert(num_variables == _bqm.num_variables()); - memset(p_energies, 0, num_variables * sizeof(C)); + void set_linear(variable_type v, bias_type* p_biases) { + for(auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { + _bqm.set_linear(_case_starts[v] + case_v, p_biases[case_v]); + } + } + + void set_linear_case(variable_type v, case_type case_v, bias_type b) { + assert(case_v >= 0 && case_v < this->num_cases(v)); + _bqm.set_linear(_case_starts[v] + case_v, b); + } + + bool set_quadratic(variable_type u, variable_type v, bias_type* p_biases) { + assert(u >=0 && u < this->num_variables()); + assert(v >=0 && v < this->num_variables()); + assert(u != v); + num_cases_u = num_cases(u); + num_cases_v = num_cases(v); + auto num_cases_u = num_cases(u); + auto num_cases_v = num_cases(v); + for(auto case_u = 0; case_u < num_cases_u; case_u++) { + cu = _case_starts[u] + case_u; + for(auto case_v = 0; case_v < num_cases_v; case_v++) { + cv = _case_starts[v] + case_v; + auto bias = p_biases[cu * num_cases_v + case_v]; + _bqm.set_quadratic(cu, cv, bias); + } + } + auto low = std::lower_bound(_adj[u].begin(), _adj[u].end(), v); + if( low == _adj[u].end() || *low != v) { + _adj[u].insert(low, v); + _adj[v].insert(std::lower_bound(_adj[v].begin(), 
_adj[v].end(), u), u); + } + return true; + } + + bool set_quadratic_case(variable_type u, case_type case_u, variable_type v, case_type case_v, bias_type bias) { + assert(u >= 0 && u < this->num_variables()); + assert(case_u >= 0 && case_u < num_cases(v)); + assert(v >= 0 && v < this->num_variables()); + assert(case_v >= 0 && case_v < num_cases(v)); + auto cu = _case_starts[u] + case_u; + auto cv = _case_starts[v] + case_v; + _bqm.set_quadratic(cu, cv, bias); + auto low = std::lower_bound(_adj[u].begin(), _adj[u].end(), v); + if( low == _adj[u].end() || *low != v) { + _adj[u].insert(low, v); + _adj[v].insert(std::lower_bound(_adj[v].begin(), _adj[v].end(), u), u); + } + return true; + } + + void energies(case_type* p_samples, int num_samples, variable_type num_variables, bias_type* p_energies) { + assert(num_variables == this->num_variables()); + memset(p_energies, 0, num_samples * sizeof(bias_type)); + #pragma omp parallel for for(auto si = 0; si < num_samples; si++) { - C* p_curr_sample = samples + si * num_variables; + case_type* p_curr_sample = samples + (si * num_variables); + bias_type* p_curr_energy = p_energies + si; for(auto u = 0; u < num_variables; u++) { - auto case_u = p_curr_sample_es[u]; + auto case_u = p_curr_sample[u]; assert(case_u < num_cases(u)); auto cu = _case_starts[u] + case_u; - p_energies[si] += _bqm.get_linear(cu); + *p_curr_energy += _bqm.get_linear(cu); for(auto vi = 0; vi < _adj[u].size(); vi++) { auto v = _adj[u][vi]; // We only care about lower triangle. - if( v > u) { + if(v > u) { break; } auto case_v = p_cur_sample[v]; From d66cb301e9e3c290cd73aaf5474d8b2f869b523a Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Thu, 14 Jan 2021 14:10:02 -0800 Subject: [PATCH 06/36] Changed names to be compatible with current cython code --- dimod/include/dimod/adjvectordqm.h | 105 ++++++++++++++--------------- 1 file changed, 52 insertions(+), 53 deletions(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 409063a3d..e78a24614 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -25,71 +25,70 @@ namespace dimod { -template +template class AdjVectorDQM { public: using bias_type = B; - using case_type = C; using variable_type = V; using size_type = std::size_t; - AdjVectorBQM _bqm; - std::vector _case_starts; - std::vector> _adj; + AdjVectorBQM bqm_; + std::vector case_starts_; + std::vector> adj_; - AdjVectorBQM() { _case_starts.push_back(0); } + AdjVectorBQM() { case_starts_.push_back(0); } variable_type num_variables() { - return _adj.size(); + return adj_.size(); } variable_type num_cases(variable_type v = -1) { assert(v < this->num_variables()); if(v < 0) { - return _bqm.num_variables(); + return bqm_.num_variables(); } else { - return (_case_starts[v+1] - _case_starts[v]); + return (case_starts_[v+1] - case_starts_[v]); } } variable_type add_variable(size_t num_cases) { assert(num_cases > 0); - auto v = _adj.size(); - _adj.resize(v+1); + auto v = adj_.size(); + adj_.resize(v+1); for(auto n = 0; n < num_cases; n++){ - _bqm.add_variable(); + bqm_.add_variable(); } - _case_starts.push_back(_bqm.num_variables()); + case_starts_.push_back(bqm_.num_variables()); return v; } void get_linear(variable_type v, bias_type* biases) { assert(v >= 0 && v < this->num_variables()); for(auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { - biases[case_v] = _bqm.get_linear(_case_starts[v] + case_v); + biases[case_v] = bqm_.get_linear(case_starts_[v] + case_v); } } - 
bias_type get_linear_case(variable_type v, case_type case_v) { + bias_type get_linear_case(variable_type v, variable_type case_v) { assert(v >= 0 && v < this->num_variables()); assert(case_v >= 0 && case_v < num_cases(v)); - return _bqm.get_linear(_case_starts[v] + case_v); + return bqm_.get_linear(case_starts_[v] + case_v); } // Returns false if there is no interaction among the variables. bool get_quadratic(variable_type u, variable_type v, bias_type* quadratic_biases) { assert(u >=0 && u < this->num_variables()); assert(v >=0 && v < this->num_variables()); - auto it = std::lower_bound(_adj[u].begin(), _adj[u].end(), v); - if( it == _adj[u].end() || *it != v) { + auto it = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); + if( it == adj_[u].end() || *it != v) { return false; } auto num_cases_u = num_cases(u); auto num_cases_v = num_cases(v); for(auto case_u = 0; case_u < num_cases_u; case_u++) { - auto span = _bqm.neighborhood(_case_starts[u] + case_u, _case_starts[v]); - while(span.first != span.second && *(span.first) < _case_starts[v+1]) { - case_v = *(span.first) - _case_starts[v]; + auto span = bqm_.neighborhood(case_starts_[u] + case_u, case_starts_[v]); + while(span.first != span.second && *(span.first) < case_starts_[v+1]) { + case_v = *(span.first) - case_starts_[v]; quadratic_biases[case_u][case_v] = *(span.first).second; span.first++; } @@ -97,38 +96,38 @@ class AdjVectorDQM { return true; } - bias_type get_quadratic_case(variable_type u, case_type case_u, variable_type v, case_type case_v) { + bias_type get_quadratic_case(variable_type u, variable_type case_u, variable_type v, variable_type case_v) { assert(u >= 0 && u < this->num_variables()); assert(case_u >= 0 && case_u < num_cases(v)); assert(v >= 0 && v < this->num_variables()); assert(case_v >= 0 && case_v < num_cases(v)); // should add assert for u != v ? 
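// (Reader note on the question above: a quadratic bias between two cases of the
// same variable can never contribute to an energy, because a sample selects
// exactly one case per variable, so rejecting u == v is the natural choice;
// later revisions in this series add an is_self_loop_present() check for the
// same reason.)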
- auto cu = _case_starts[u] + case_u; - auto cv = _case_starts[v] + case_v; - return _bqm.get_quadratic(cu , cv).first; + auto cu = case_starts_[u] + case_u; + auto cv = case_starts_[v] + case_v; + return bqm_.get_quadratic(cu , cv).first; } size_type num_case_interactions() { - return _bqm.num_interactions(); + return bqm_.num_interactions(); } size_type num_variaables_interactions() { size_type num = 0; for(auto v = 0, vend = this->num_variables(); v < vend; v++) { - num+= _adj[v].size(); + num+= adj_[v].size(); } return (num/2); } void set_linear(variable_type v, bias_type* p_biases) { for(auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { - _bqm.set_linear(_case_starts[v] + case_v, p_biases[case_v]); + bqm_.set_linear(case_starts_[v] + case_v, p_biases[case_v]); } } - void set_linear_case(variable_type v, case_type case_v, bias_type b) { + void set_linear_case(variable_type v, variable_type case_v, bias_type b) { assert(case_v >= 0 && case_v < this->num_cases(v)); - _bqm.set_linear(_case_starts[v] + case_v, b); + bqm_.set_linear(case_starts_[v] + case_v, b); } bool set_quadratic(variable_type u, variable_type v, bias_type* p_biases) { @@ -140,58 +139,58 @@ class AdjVectorDQM { auto num_cases_u = num_cases(u); auto num_cases_v = num_cases(v); for(auto case_u = 0; case_u < num_cases_u; case_u++) { - cu = _case_starts[u] + case_u; + cu = case_starts_[u] + case_u; for(auto case_v = 0; case_v < num_cases_v; case_v++) { - cv = _case_starts[v] + case_v; + cv = case_starts_[v] + case_v; auto bias = p_biases[cu * num_cases_v + case_v]; - _bqm.set_quadratic(cu, cv, bias); + bqm_.set_quadratic(cu, cv, bias); } } - auto low = std::lower_bound(_adj[u].begin(), _adj[u].end(), v); - if( low == _adj[u].end() || *low != v) { - _adj[u].insert(low, v); - _adj[v].insert(std::lower_bound(_adj[v].begin(), _adj[v].end(), u), u); + auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); + if( low == adj_[u].end() || *low != v) { + adj_[u].insert(low, v); + adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), u); } return true; } - bool set_quadratic_case(variable_type u, case_type case_u, variable_type v, case_type case_v, bias_type bias) { + bool set_quadratic_case(variable_type u, variable_type case_u, variable_type v, variable_type case_v, bias_type bias) { assert(u >= 0 && u < this->num_variables()); assert(case_u >= 0 && case_u < num_cases(v)); assert(v >= 0 && v < this->num_variables()); assert(case_v >= 0 && case_v < num_cases(v)); - auto cu = _case_starts[u] + case_u; - auto cv = _case_starts[v] + case_v; - _bqm.set_quadratic(cu, cv, bias); - auto low = std::lower_bound(_adj[u].begin(), _adj[u].end(), v); - if( low == _adj[u].end() || *low != v) { - _adj[u].insert(low, v); - _adj[v].insert(std::lower_bound(_adj[v].begin(), _adj[v].end(), u), u); + auto cu = case_starts_[u] + case_u; + auto cv = case_starts_[v] + case_v; + bqm_.set_quadratic(cu, cv, bias); + auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); + if( low == adj_[u].end() || *low != v) { + adj_[u].insert(low, v); + adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), u); } return true; } - void energies(case_type* p_samples, int num_samples, variable_type num_variables, bias_type* p_energies) { + void energies(variable_type* p_samples, int num_samples, variable_type num_variables, bias_type* p_energies) { assert(num_variables == this->num_variables()); memset(p_energies, 0, num_samples * sizeof(bias_type)); #pragma omp parallel for for(auto si = 0; si < 
num_samples; si++) { - case_type* p_curr_sample = samples + (si * num_variables); + variable_type* p_curr_sample = samples + (si * num_variables); bias_type* p_curr_energy = p_energies + si; for(auto u = 0; u < num_variables; u++) { auto case_u = p_curr_sample[u]; assert(case_u < num_cases(u)); - auto cu = _case_starts[u] + case_u; - *p_curr_energy += _bqm.get_linear(cu); - for(auto vi = 0; vi < _adj[u].size(); vi++) { - auto v = _adj[u][vi]; + auto cu = case_starts_[u] + case_u; + *p_curr_energy += bqm_.get_linear(cu); + for(auto vi = 0; vi < adj_[u].size(); vi++) { + auto v = adj_[u][vi]; // We only care about lower triangle. if(v > u) { break; } auto case_v = p_cur_sample[v]; - auto cv = _case_starts[v] + case_v; - auto out = _bqm.get_quadratic(cu,cv); + auto cv = case_starts_[v] + case_v; + auto out = bqm_.get_quadratic(cu,cv); if(out.second) { p_energies[si]+= out.first; } From 743bcac6fb0cf16152f63a30c6071231c86eb1c6 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Thu, 14 Jan 2021 14:54:20 -0800 Subject: [PATCH 07/36] NFC --- dimod/include/dimod/adjvectordqm.h | 346 +++++++++++++++-------------- 1 file changed, 177 insertions(+), 169 deletions(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index e78a24614..9bdddb427 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -20,186 +20,194 @@ #include #include -#include "dimod/utils.h" #include "dimod/adjvectorbqm.h" +#include "dimod/utils.h" namespace dimod { template class AdjVectorDQM { -public: + public: using bias_type = B; using variable_type = V; using size_type = std::size_t; - AdjVectorBQM bqm_; - std::vector case_starts_; - std::vector> adj_; - - AdjVectorBQM() { case_starts_.push_back(0); } - - variable_type num_variables() { - return adj_.size(); - } - - variable_type num_cases(variable_type v = -1) { - assert(v < this->num_variables()); - if(v < 0) { - return bqm_.num_variables(); - } else { - return (case_starts_[v+1] - case_starts_[v]); - } - } - - variable_type add_variable(size_t num_cases) { - assert(num_cases > 0); - auto v = adj_.size(); - adj_.resize(v+1); - for(auto n = 0; n < num_cases; n++){ - bqm_.add_variable(); - } - case_starts_.push_back(bqm_.num_variables()); - return v; - } - - void get_linear(variable_type v, bias_type* biases) { - assert(v >= 0 && v < this->num_variables()); - for(auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { - biases[case_v] = bqm_.get_linear(case_starts_[v] + case_v); - } - } - - bias_type get_linear_case(variable_type v, variable_type case_v) { - assert(v >= 0 && v < this->num_variables()); - assert(case_v >= 0 && case_v < num_cases(v)); - return bqm_.get_linear(case_starts_[v] + case_v); - } - - // Returns false if there is no interaction among the variables. 
- bool get_quadratic(variable_type u, variable_type v, bias_type* quadratic_biases) { - assert(u >=0 && u < this->num_variables()); - assert(v >=0 && v < this->num_variables()); - auto it = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); - if( it == adj_[u].end() || *it != v) { - return false; - } - auto num_cases_u = num_cases(u); - auto num_cases_v = num_cases(v); - for(auto case_u = 0; case_u < num_cases_u; case_u++) { - auto span = bqm_.neighborhood(case_starts_[u] + case_u, case_starts_[v]); - while(span.first != span.second && *(span.first) < case_starts_[v+1]) { - case_v = *(span.first) - case_starts_[v]; - quadratic_biases[case_u][case_v] = *(span.first).second; - span.first++; - } - } - return true; - } - - bias_type get_quadratic_case(variable_type u, variable_type case_u, variable_type v, variable_type case_v) { - assert(u >= 0 && u < this->num_variables()); - assert(case_u >= 0 && case_u < num_cases(v)); - assert(v >= 0 && v < this->num_variables()); - assert(case_v >= 0 && case_v < num_cases(v)); - // should add assert for u != v ? - auto cu = case_starts_[u] + case_u; - auto cv = case_starts_[v] + case_v; - return bqm_.get_quadratic(cu , cv).first; - } - - size_type num_case_interactions() { - return bqm_.num_interactions(); - } - - size_type num_variaables_interactions() { - size_type num = 0; - for(auto v = 0, vend = this->num_variables(); v < vend; v++) { - num+= adj_[v].size(); - } - return (num/2); -} - - void set_linear(variable_type v, bias_type* p_biases) { - for(auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { - bqm_.set_linear(case_starts_[v] + case_v, p_biases[case_v]); + AdjVectorBQM bqm_; + std::vector case_starts_; + std::vector> adj_; + + AdjVectorBQM() { case_starts_.push_back(0); } + + variable_type num_variables() { return adj_.size(); } + + size_type num_variaables_interactions() { + size_type num = 0; + for (auto v = 0, vend = this->num_variables(); v < vend; v++) { + num += adj_[v].size(); + } + return (num / 2); + } + + variable_type num_cases(variable_type v = -1) { + assert(v < this->num_variables()); + if (v < 0) { + return bqm_.num_variables(); + } else { + return (case_starts_[v + 1] - case_starts_[v]); + } } - } - - void set_linear_case(variable_type v, variable_type case_v, bias_type b) { - assert(case_v >= 0 && case_v < this->num_cases(v)); - bqm_.set_linear(case_starts_[v] + case_v, b); - } - - bool set_quadratic(variable_type u, variable_type v, bias_type* p_biases) { - assert(u >=0 && u < this->num_variables()); - assert(v >=0 && v < this->num_variables()); - assert(u != v); - num_cases_u = num_cases(u); - num_cases_v = num_cases(v); - auto num_cases_u = num_cases(u); - auto num_cases_v = num_cases(v); - for(auto case_u = 0; case_u < num_cases_u; case_u++) { - cu = case_starts_[u] + case_u; - for(auto case_v = 0; case_v < num_cases_v; case_v++) { - cv = case_starts_[v] + case_v; - auto bias = p_biases[cu * num_cases_v + case_v]; - bqm_.set_quadratic(cu, cv, bias); - } - } - auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); - if( low == adj_[u].end() || *low != v) { - adj_[u].insert(low, v); - adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), u); - } - return true; - } - - bool set_quadratic_case(variable_type u, variable_type case_u, variable_type v, variable_type case_v, bias_type bias) { - assert(u >= 0 && u < this->num_variables()); - assert(case_u >= 0 && case_u < num_cases(v)); - assert(v >= 0 && v < this->num_variables()); - assert(case_v >= 0 && case_v < 
num_cases(v)); - auto cu = case_starts_[u] + case_u; - auto cv = case_starts_[v] + case_v; - bqm_.set_quadratic(cu, cv, bias); - auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); - if( low == adj_[u].end() || *low != v) { - adj_[u].insert(low, v); - adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), u); - } - return true; - } - - void energies(variable_type* p_samples, int num_samples, variable_type num_variables, bias_type* p_energies) { - assert(num_variables == this->num_variables()); - memset(p_energies, 0, num_samples * sizeof(bias_type)); - #pragma omp parallel for - for(auto si = 0; si < num_samples; si++) { - variable_type* p_curr_sample = samples + (si * num_variables); - bias_type* p_curr_energy = p_energies + si; - for(auto u = 0; u < num_variables; u++) { - auto case_u = p_curr_sample[u]; - assert(case_u < num_cases(u)); - auto cu = case_starts_[u] + case_u; - *p_curr_energy += bqm_.get_linear(cu); - for(auto vi = 0; vi < adj_[u].size(); vi++) { - auto v = adj_[u][vi]; - // We only care about lower triangle. - if(v > u) { - break; - } - auto case_v = p_cur_sample[v]; - auto cv = case_starts_[v] + case_v; - auto out = bqm_.get_quadratic(cu,cv); - if(out.second) { - p_energies[si]+= out.first; - } - } - } - } - } - - } + + size_type num_case_interactions() { return bqm_.num_interactions(); } + + variable_type add_variable(size_t num_cases) { + assert(num_cases > 0); + auto v = adj_.size(); + adj_.resize(v + 1); + for (auto n = 0; n < num_cases; n++) { + bqm_.add_variable(); + } + case_starts_.push_back(bqm_.num_variables()); + return v; + } + + void get_linear(variable_type v, bias_type* biases) { + assert(v >= 0 && v < this->num_variables()); + for (auto case_v = 0, num_cases_v = this->num_cases(v); + case_v < num_cases_v; case_v++) { + biases[case_v] = bqm_.get_linear(case_starts_[v] + case_v); + } + } + + bias_type get_linear_case(variable_type v, variable_type case_v) { + assert(v >= 0 && v < this->num_variables()); + assert(case_v >= 0 && case_v < num_cases(v)); + return bqm_.get_linear(case_starts_[v] + case_v); + } + + void set_linear(variable_type v, bias_type* p_biases) { + for (auto case_v = 0, num_cases_v = this->num_cases(v); + case_v < num_cases_v; case_v++) { + bqm_.set_linear(case_starts_[v] + case_v, p_biases[case_v]); + } + } + + void set_linear_case(variable_type v, variable_type case_v, bias_type b) { + assert(case_v >= 0 && case_v < this->num_cases(v)); + bqm_.set_linear(case_starts_[v] + case_v, b); + } + + // Returns false if there is no interaction among the variables. 
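// (Reader note: quadratic_biases is a caller-allocated dense buffer holding
// num_cases(u) * num_cases(v) entries, filled in row-major order as
// quadratic_biases[case_u * num_cases(v) + case_v]; entries whose case pair has
// no stored interaction are left untouched, so callers will typically zero the
// buffer before calling.)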
+ bool get_quadratic(variable_type u, variable_type v, + bias_type* quadratic_biases) { + assert(u >= 0 && u < this->num_variables()); + assert(v >= 0 && v < this->num_variables()); + auto it = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); + if (it == adj_[u].end() || *it != v) { + return false; + } + auto num_cases_u = num_cases(u); + auto num_cases_v = num_cases(v); + for (auto case_u = 0; case_u < num_cases_u; case_u++) { + auto span = bqm_.neighborhood(case_starts_[u] + case_u, + case_starts_[v]); + while (span.first != span.second && + *(span.first) < case_starts_[v + 1]) { + case_v = *(span.first) - case_starts_[v]; + quadratic_biases[case_u * num_cases_v + case_v] = + *(span.first).second; + span.first++; + } + } + return true; + } + + bias_type get_quadratic_case(variable_type u, variable_type case_u, + variable_type v, variable_type case_v) { + assert(u >= 0 && u < this->num_variables()); + assert(case_u >= 0 && case_u < num_cases(v)); + assert(v >= 0 && v < this->num_variables()); + assert(case_v >= 0 && case_v < num_cases(v)); + // should add assert for u != v ? + auto cu = case_starts_[u] + case_u; + auto cv = case_starts_[v] + case_v; + return bqm_.get_quadratic(cu, cv).first; + } + + bool set_quadratic(variable_type u, variable_type v, bias_type* p_biases) { + assert(u >= 0 && u < this->num_variables()); + assert(v >= 0 && v < this->num_variables()); + assert(u != v); + num_cases_u = num_cases(u); + num_cases_v = num_cases(v); + auto num_cases_u = num_cases(u); + auto num_cases_v = num_cases(v); + for (auto case_u = 0; case_u < num_cases_u; case_u++) { + cu = case_starts_[u] + case_u; + for (auto case_v = 0; case_v < num_cases_v; case_v++) { + cv = case_starts_[v] + case_v; + auto bias = p_biases[cu * num_cases_v + case_v]; + bqm_.set_quadratic(cu, cv, bias); + } + } + auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); + if (low == adj_[u].end() || *low != v) { + adj_[u].insert(low, v); + adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), + u); + } + return true; + } + + // Check if boolean type is still okay + bool set_quadratic_case(variable_type u, variable_type case_u, + variable_type v, variable_type case_v, + bias_type bias) { + assert(u >= 0 && u < this->num_variables()); + assert(case_u >= 0 && case_u < num_cases(v)); + assert(v >= 0 && v < this->num_variables()); + assert(case_v >= 0 && case_v < num_cases(v)); + auto cu = case_starts_[u] + case_u; + auto cv = case_starts_[v] + case_v; + bqm_.set_quadratic(cu, cv, bias); + auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); + if (low == adj_[u].end() || *low != v) { + adj_[u].insert(low, v); + adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), + u); + } + return true; + } + + void energies(variable_type* p_samples, int num_samples, + variable_type num_variables, bias_type* p_energies) { + assert(num_variables == this->num_variables()); + memset(p_energies, 0, num_samples * sizeof(bias_type)); +#pragma omp parallel for + for (auto si = 0; si < num_samples; si++) { + variable_type* p_curr_sample = samples + (si * num_variables); + bias_type* p_curr_energy = p_energies + si; + for (auto u = 0; u < num_variables; u++) { + auto case_u = p_curr_sample[u]; + assert(case_u < num_cases(u)); + auto cu = case_starts_[u] + case_u; + *p_curr_energy += bqm_.get_linear(cu); + for (auto vi = 0; vi < adj_[u].size(); vi++) { + auto v = adj_[u][vi]; + // We only care about lower triangle. 
+ if (v > u) { + break; + } + auto case_v = p_cur_sample[v]; + auto cv = case_starts_[v] + case_v; + auto out = bqm_.get_quadratic(cu, cv); + if (out.second) { + p_energies[si] += out.first; + } + } + } + } + } +} } // namespace dimod #endif // DIMOD_ADJVECTORDQM_H_ From e87373f6d4f86c7b1bd2e3362d5dac02e5098c59 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Thu, 14 Jan 2021 17:03:40 -0800 Subject: [PATCH 08/36] fixes --- dimod/include/dimod/adjvectordqm.h | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 9bdddb427..22c95ab11 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -59,7 +59,7 @@ class AdjVectorDQM { size_type num_case_interactions() { return bqm_.num_interactions(); } - variable_type add_variable(size_t num_cases) { + variable_type add_variable(variable_type num_cases) { assert(num_cases > 0); auto v = adj_.size(); adj_.resize(v + 1); @@ -179,18 +179,18 @@ class AdjVectorDQM { } void energies(variable_type* p_samples, int num_samples, - variable_type num_variables, bias_type* p_energies) { - assert(num_variables == this->num_variables()); - memset(p_energies, 0, num_samples * sizeof(bias_type)); + variable_type sample_length, bias_type* p_energies) { + assert(sample_length == this->num_variables()); + auto num_variables = sample_length; #pragma omp parallel for for (auto si = 0; si < num_samples; si++) { - variable_type* p_curr_sample = samples + (si * num_variables); - bias_type* p_curr_energy = p_energies + si; + variable_type* p_curr_sample = p_samples + (si * num_variables); + double current_sample_energy = 0; for (auto u = 0; u < num_variables; u++) { auto case_u = p_curr_sample[u]; assert(case_u < num_cases(u)); auto cu = case_starts_[u] + case_u; - *p_curr_energy += bqm_.get_linear(cu); + current_sample_energy+= bqm_.get_linear(cu); for (auto vi = 0; vi < adj_[u].size(); vi++) { auto v = adj_[u][vi]; // We only care about lower triangle. 
@@ -201,10 +201,11 @@ class AdjVectorDQM { auto cv = case_starts_[v] + case_v; auto out = bqm_.get_quadratic(cu, cv); if (out.second) { - p_energies[si] += out.first; + current_sample_energy+= out.first; } } } + p_energies[si] = current_sample_energy; } } } From 4cf6687f599320cd4780397ea10e9741fce56f19 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Fri, 15 Jan 2021 11:56:19 -0800 Subject: [PATCH 09/36] Fix asserts & parallelize quadratic operations --- dimod/include/dimod/adjvectordqm.h | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 22c95ab11..86537d514 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -80,7 +80,7 @@ class AdjVectorDQM { bias_type get_linear_case(variable_type v, variable_type case_v) { assert(v >= 0 && v < this->num_variables()); - assert(case_v >= 0 && case_v < num_cases(v)); + assert(case_v >= 0 && case_v < this->num_cases(v)); return bqm_.get_linear(case_starts_[v] + case_v); } @@ -107,6 +107,7 @@ class AdjVectorDQM { } auto num_cases_u = num_cases(u); auto num_cases_v = num_cases(v); +#pragma omp parallel for for (auto case_u = 0; case_u < num_cases_u; case_u++) { auto span = bqm_.neighborhood(case_starts_[u] + case_u, case_starts_[v]); @@ -124,9 +125,9 @@ class AdjVectorDQM { bias_type get_quadratic_case(variable_type u, variable_type case_u, variable_type v, variable_type case_v) { assert(u >= 0 && u < this->num_variables()); - assert(case_u >= 0 && case_u < num_cases(v)); + assert(case_u >= 0 && case_u < this->num_cases(u)); assert(v >= 0 && v < this->num_variables()); - assert(case_v >= 0 && case_v < num_cases(v)); + assert(case_v >= 0 && case_v < this->num_cases(v)); // should add assert for u != v ? auto cu = case_starts_[u] + case_u; auto cv = case_starts_[v] + case_v; @@ -137,10 +138,9 @@ class AdjVectorDQM { assert(u >= 0 && u < this->num_variables()); assert(v >= 0 && v < this->num_variables()); assert(u != v); - num_cases_u = num_cases(u); - num_cases_v = num_cases(v); auto num_cases_u = num_cases(u); auto num_cases_v = num_cases(v); +#pragma omp parallel for for (auto case_u = 0; case_u < num_cases_u; case_u++) { cu = case_starts_[u] + case_u; for (auto case_v = 0; case_v < num_cases_v; case_v++) { @@ -163,9 +163,9 @@ class AdjVectorDQM { variable_type v, variable_type case_v, bias_type bias) { assert(u >= 0 && u < this->num_variables()); - assert(case_u >= 0 && case_u < num_cases(v)); + assert(case_u >= 0 && case_u < this->num_cases(u)); assert(v >= 0 && v < this->num_variables()); - assert(case_v >= 0 && case_v < num_cases(v)); + assert(case_v >= 0 && case_v < this->num_cases(v)); auto cu = case_starts_[u] + case_u; auto cv = case_starts_[v] + case_v; bqm_.set_quadratic(cu, cv, bias); @@ -190,7 +190,7 @@ class AdjVectorDQM { auto case_u = p_curr_sample[u]; assert(case_u < num_cases(u)); auto cu = case_starts_[u] + case_u; - current_sample_energy+= bqm_.get_linear(cu); + current_sample_energy += bqm_.get_linear(cu); for (auto vi = 0; vi < adj_[u].size(); vi++) { auto v = adj_[u][vi]; // We only care about lower triangle. 
@@ -201,11 +201,11 @@ class AdjVectorDQM { auto cv = case_starts_[v] + case_v; auto out = bqm_.get_quadratic(cu, cv); if (out.second) { - current_sample_energy+= out.first; + current_sample_energy += out.first; } } } - p_energies[si] = current_sample_energy; + p_energies[si] = current_sample_energy; } } } From fcebf054c59de4836d6fac990821a209b1e1538e Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Fri, 15 Jan 2021 12:25:29 -0800 Subject: [PATCH 10/36] Fix parallelization bug --- dimod/include/dimod/adjvectordqm.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 86537d514..55d9506d7 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -140,7 +140,8 @@ class AdjVectorDQM { assert(u != v); auto num_cases_u = num_cases(u); auto num_cases_v = num_cases(v); -#pragma omp parallel for + // This cannot be parallelized since the vectors cannot be reshaped in + // parallel. for (auto case_u = 0; case_u < num_cases_u; case_u++) { cu = case_starts_[u] + case_u; for (auto case_v = 0; case_v < num_cases_v; case_v++) { From b4a983e99843f354e612626a93b34fc95af6e2ed Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Sat, 16 Jan 2021 13:55:28 -0800 Subject: [PATCH 11/36] Added functions for converting from numpy vectors --- dimod/include/dimod/adjvectordqm.h | 81 ++++++++++++++++++++++++++++-- 1 file changed, 76 insertions(+), 5 deletions(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 55d9506d7..bb5460e15 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -17,6 +17,7 @@ #include #include +#include #include #include @@ -32,12 +33,82 @@ class AdjVectorDQM { using variable_type = V; using size_type = std::size_t; - AdjVectorBQM bqm_; + AdjVectorBQM bqm_; std::vector case_starts_; std::vector> adj_; AdjVectorBQM() { case_starts_.push_back(0); } + AdjVectorDQM(variable_type* case_starts, bias_type* linear_biases, + size_type num_variables, size_type num_cases, + variable_type* irow, variable_type* icol, + bias_type* quadratic_biases, size_type num_interactions) { + // Set the BQM, linear biases will be added separately. + if (num_interactions) { + bqm_ = AdjVectorBQM( + irow, icol, num_interactions, true); + } + + // Accounting for the cases/variables at the end without interaction. + while (bqm_.num_variables() < num_cases) { + bqm_.add_variable(); + } + assert(bqm_.num_variables() == num_cases); + + // Add the linear biases. + for (auto ci = 0; ci < num_cases; ci++) { + bqm_.set_linear(ci, linear_biases[ci]); + } + + // Set the case starts. + case_starts_.resize(num_variables + 1); + for (auto v = 0; v < num_variables; v++) { + case_starts_[v] = case_starts[v]; + } + case_starts[num_variables] = num_cases; + + // Fill the adjacency list for variables. 
+ std::vector> adjset; + adjset.resize(num_variables); + auto u = 0; + for (auto ci = 0, ci_end = bqm_.num_variables(); ci++) { + while (ci >= case_starts_[u + 1]) { + u++; + } + auto span = bqm_.neighborhood(ci); + auto v = 0; + while (span.first != span.second) { + auto cj = *(span.first).first; + while (cj >= case_starts_[v + 1]) { + v++; + } + adjset[u].insert(v); + span.first++; + } + } + + adj_.resize(num_variables); + for (auto v = 0; v < num_variables; v++) { + adj_[v].insert(adj_[v].begin(), adjset[v].begin(), adjset[v].end()); + std::sort(adj_[v].begin(), adj_[v].end()); + } + } + + bool is_self_loop_present() { + for (auto v = 0, num_variables = this->num_variables(); + v < num_variables; v++) { + for (auto ci = case_starts_[v], ci_end = case_starts_[v + 1]; + ci < ci_end; ci++) { + auto span = bqm_.neighborhood(ci, case_starts_[v]); + if ((span.first != span.second) && + (*(span.first).first < case_starts_[v + 1])) { + return true; + } + } + } + return false; + } + variable_type num_variables() { return adj_.size(); } size_type num_variaables_interactions() { @@ -179,13 +250,13 @@ class AdjVectorDQM { return true; } - void energies(variable_type* p_samples, int num_samples, - variable_type sample_length, bias_type* p_energies) { + void get_energies(variable_type* samples, int num_samples, + variable_type sample_length, bias_type* energies) { assert(sample_length == this->num_variables()); auto num_variables = sample_length; #pragma omp parallel for for (auto si = 0; si < num_samples; si++) { - variable_type* p_curr_sample = p_samples + (si * num_variables); + variable_type* p_curr_sample = samples + (si * num_variables); double current_sample_energy = 0; for (auto u = 0; u < num_variables; u++) { auto case_u = p_curr_sample[u]; @@ -206,7 +277,7 @@ class AdjVectorDQM { } } } - p_energies[si] = current_sample_energy; + energies[si] = current_sample_energy; } } } From bfea932314cf08cdde42a9521d76615af467972e Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Mon, 18 Jan 2021 15:25:08 -0800 Subject: [PATCH 12/36] Added declaration for adjvectordqm.h in cython --- dimod/discrete/cppdqm.pxd | 96 +++++++++++++++++++++++++++++ dimod/include/dimod/adjvectordqm.h | 98 ++++++++++++++++-------------- 2 files changed, 149 insertions(+), 45 deletions(-) create mode 100644 dimod/discrete/cppdqm.pxd diff --git a/dimod/discrete/cppdqm.pxd b/dimod/discrete/cppdqm.pxd new file mode 100644 index 000000000..373b91388 --- /dev/null +++ b/dimod/discrete/cppdqm.pxd @@ -0,0 +1,96 @@ +# distutils: language = c++ +# cython: language_level=3 +# +# Copyright 2019 D-Wave Systems Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# ============================================================================= + +from libcpp cimport bool +from libcpp.pair cimport pair +from libcpp.vector cimport vector + +cimport numpy as np + +from dimod.bqm.common cimport VarIndex, Bias +from dimod.bqm.cppbqm cimport AdjVectorBQM as cppAdjVectorBQM + +cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: + + cdef cppclass AdjVectorDQM[V, B]: + ctypedef V variable_type + ctypedef B bias_type + ctypedef size_t size_type + + cdef cppAdjVectorBQM[variable_type, bias_type] bqm_ + vector[variable_type] case_starts_ + vector[vector[variable_type]] adj_ + + # constructors + # cython cannot handle templated constructors, so we call out the types + # explicitly + + AdjVectorDQM() except + + + # the actual signature is more general, but we already have a large + # number of these so we'll add them as needed + # AdjVectorDQM(variable_type* case_starts, size_type num_variables, bias_type* linear_biases, + # size_type num_cases, variable_type* irow, variable_type* icol, + # bias_type* quadratic_biases, size_type num_interactions) + AdjVectorDQM(np.uint32_t*, size_type, np.uint32_t*, size_type, np.uint32_t*, np.uint32_t*, np.uint32_t*, size_type) + AdjVectorDQM(np.uint32_t*, size_type, np.uint64_t*, size_type, np.uint32_t*, np.uint32_t*, np.uint64_t*, size_type) + AdjVectorDQM(np.uint32_t*, size_type, np.int32_t*, size_type, np.uint32_t*, np.uint32_t*, np.int32_t*, size_type) + AdjVectorDQM(np.uint32_t*, size_type, np.int64_t*, size_type, np.uint32_t*, np.uint32_t*, np.int64_t*, size_type) + AdjVectorDQM(np.uint32_t*, size_type, np.float32_t*, size_type, np.uint32_t*, np.uint32_t*, np.float32_t*, size_type) + AdjVectorDQM(np.uint32_t*, size_type, np.float64_t*, size_type, np.uint32_t*, np.uint32_t*, np.float64_t*, size_type) + AdjVectorDQM(np.uint64_t*, size_type, np.uint32_t*, size_type, np.uint64_t*, np.uint64_t*, np.uint32_t*, size_type) + AdjVectorDQM(np.uint64_t*, size_type, np.uint64_t*, size_type, np.uint64_t*, np.uint64_t*, np.uint64_t*, size_type) + AdjVectorDQM(np.uint64_t*, size_type, np.int32_t*, size_type, np.uint64_t*, np.uint64_t*, np.int32_t*, size_type) + AdjVectorDQM(np.uint64_t*, size_type, np.int64_t*, size_type, np.uint64_t*, np.uint64_t*, np.int64_t*, size_type) + AdjVectorDQM(np.uint64_t*, size_type, np.float32_t*, size_type, np.uint64_t*, np.uint64_t*, np.float32_t*, size_type) + AdjVectorDQM(np.uint64_t*, size_type, np.float64_t*, size_type, np.uint64_t*, np.uint64_t*, np.float64_t*, size_type) + AdjVectorDQM(np.int32_t*, size_type, np.uint32_t*, size_type, np.int32_t*, np.int32_t*, np.uint32_t*, size_type) + AdjVectorDQM(np.int32_t*, size_type, np.uint64_t*, size_type, np.int32_t*, np.int32_t*, np.uint64_t*, size_type) + AdjVectorDQM(np.int32_t*, size_type, np.int32_t*, size_type, np.int32_t*, np.int32_t*, np.int32_t*, size_type) + AdjVectorDQM(np.int32_t*, size_type, np.int64_t*, size_type, np.int32_t*, np.int32_t*, np.int64_t*, size_type) + AdjVectorDQM(np.int32_t*, size_type, np.float32_t*, size_type, np.int32_t*, np.int32_t*, np.float32_t*, size_type) + AdjVectorDQM(np.int32_t*, size_type, np.float64_t*, size_type, np.int32_t*, np.int32_t*, np.float64_t*, size_type) + AdjVectorDQM(np.int64_t*, size_type, np.uint32_t*, size_type, np.int64_t*, np.int64_t*, np.uint32_t*, size_type) + AdjVectorDQM(np.int64_t*, size_type, np.uint64_t*, size_type, np.int64_t*, np.int64_t*, np.uint64_t*, size_type) + AdjVectorDQM(np.int64_t*, size_type, np.int32_t*, size_type, np.int64_t*, np.int64_t*, np.int32_t*, size_type) 
+ AdjVectorDQM(np.int64_t*, size_type, np.int64_t*, size_type, np.int64_t*, np.int64_t*, np.int64_t*, size_type) + AdjVectorDQM(np.int64_t*, size_type, np.float32_t*, size_type, np.int64_t*, np.int64_t*, np.float32_t*, size_type) + AdjVectorDQM(np.int64_t*, size_type, np.float64_t*, size_type, np.int64_t*, np.int64_t*, np.float64_t*, size_type) + + + # methods + + bool is_self_loop_present() except + + size_type num_variables() except + + size_type num_variable_interactions() except + + size_type num_cases() except + + size_type num_case_interactions() except + + bias_type get_linear_case(variable_type, variable_type) except + + void set_linear_case(variable_type, variable_type, bias_type) except + + void get_linear(variable_type, bias_type*) except + + void set_linear(variable_type, bias_type*) except + + pair[bias_type, bool] get_quadratic_case(variable_type, variable_type, variable_type, variable_type) except + + bool set_quadratic_case(variable_type, variable_type, variable_type, variable_type, bias_type) except + + bool get_quadratic(variable_type, variable_type, bias_type*) except + + bool set_quadratic(variable_type, variable_type, bias_type*) except + + void get_energies(variable_type*, int, variable_type, bias_type*) except + + + # shapeable methods + + variable_type add_variable(variable_type) except + diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index bb5460e15..da7d83495 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -39,8 +39,8 @@ class AdjVectorDQM { AdjVectorBQM() { case_starts_.push_back(0); } - AdjVectorDQM(variable_type* case_starts, bias_type* linear_biases, - size_type num_variables, size_type num_cases, + AdjVectorDQM(variable_type* case_starts, size_type num_variables, + bias_type* linear_biases, size_type num_cases, variable_type* irow, variable_type* icol, bias_type* quadratic_biases, size_type num_interactions) { // Set the BQM, linear biases will be added separately. 
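// (Construction sketch with illustrative values only, using the argument order
// shown above: two variables with 2 and 3 cases, and a single interaction of
// bias -1.5 between case 1 of variable 0 and case 0 of variable 1, i.e. between
// flat case indices 1 and 2.
//
//     std::int64_t case_starts[] = {0, 2};
//     double       linear[]      = {0.0, 1.0, 0.5, 0.0, -0.5};  // 5 cases total
//     std::int64_t irow[]        = {1};
//     std::int64_t icol[]        = {2};
//     double       quad[]        = {-1.5};
//     AdjVectorDQM<std::int64_t, double> dqm(case_starts, 2, linear, 5,
//                                            irow, icol, quad, 1);
//
// The concrete template arguments here mirror the np.int64_t / np.float64_t
// typedefs used by the Cython layer later in this series.)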
@@ -109,7 +109,7 @@ class AdjVectorDQM { return false; } - variable_type num_variables() { return adj_.size(); } + size_type num_variables() { return adj_.size(); } size_type num_variaables_interactions() { size_type num = 0; @@ -119,7 +119,7 @@ class AdjVectorDQM { return (num / 2); } - variable_type num_cases(variable_type v = -1) { + size_type num_cases(variable_type v = -1) { assert(v < this->num_variables()); if (v < 0) { return bqm_.num_variables(); @@ -141,6 +141,18 @@ class AdjVectorDQM { return v; } + bias_type get_linear_case(variable_type v, variable_type case_v) { + assert(v >= 0 && v < this->num_variables()); + assert(case_v >= 0 && case_v < this->num_cases(v)); + return bqm_.get_linear(case_starts_[v] + case_v); + } + + void set_linear_case(variable_type v, variable_type case_v, bias_type b) { + assert(v >= 0 && v < this->num_variables()); + assert(case_v >= 0 && case_v < this->num_cases(v)); + bqm_.set_linear(case_starts_[v] + case_v, b); + } + void get_linear(variable_type v, bias_type* biases) { assert(v >= 0 && v < this->num_variables()); for (auto case_v = 0, num_cases_v = this->num_cases(v); @@ -149,22 +161,48 @@ class AdjVectorDQM { } } - bias_type get_linear_case(variable_type v, variable_type case_v) { - assert(v >= 0 && v < this->num_variables()); - assert(case_v >= 0 && case_v < this->num_cases(v)); - return bqm_.get_linear(case_starts_[v] + case_v); - } - void set_linear(variable_type v, bias_type* p_biases) { + assert(v >= 0 && v < this->num_variables()); for (auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { bqm_.set_linear(case_starts_[v] + case_v, p_biases[case_v]); } } - void set_linear_case(variable_type v, variable_type case_v, bias_type b) { + std::pair get_quadratic_case(variable_type u, + variable_type case_u, + variable_type v, + variable_type case_v) { + assert(u >= 0 && u < this->num_variables()); + assert(case_u >= 0 && case_u < this->num_cases(u)); + assert(v >= 0 && v < this->num_variables()); assert(case_v >= 0 && case_v < this->num_cases(v)); - bqm_.set_linear(case_starts_[v] + case_v, b); + auto cu = case_starts_[u] + case_u; + auto cv = case_starts_[v] + case_v; + return bqm_.get_quadratic(cu, cv); + } + + // Check if boolean type is still okay + bool set_quadratic_case(variable_type u, variable_type case_u, + variable_type v, variable_type case_v, + bias_type bias) { + assert(u >= 0 && u < this->num_variables()); + assert(case_u >= 0 && case_u < this->num_cases(u)); + assert(v >= 0 && v < this->num_variables()); + assert(case_v >= 0 && case_v < this->num_cases(v)); + if (u == v) { + return false; + } + auto cu = case_starts_[u] + case_u; + auto cv = case_starts_[v] + case_v; + bqm_.set_quadratic(cu, cv, bias); + auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); + if (low == adj_[u].end() || *low != v) { + adj_[u].insert(low, v); + adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), + u); + } + return true; } // Returns false if there is no interaction among the variables. @@ -193,22 +231,12 @@ class AdjVectorDQM { return true; } - bias_type get_quadratic_case(variable_type u, variable_type case_u, - variable_type v, variable_type case_v) { - assert(u >= 0 && u < this->num_variables()); - assert(case_u >= 0 && case_u < this->num_cases(u)); - assert(v >= 0 && v < this->num_variables()); - assert(case_v >= 0 && case_v < this->num_cases(v)); - // should add assert for u != v ? 
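// (Usage sketch, illustrative only: with the pair-returning accessor declared
// above, a caller checks the bool before trusting the bias, e.g.
//
//     AdjVectorDQM<std::int64_t, double> dqm;
//     auto u = dqm.add_variable(2);
//     auto v = dqm.add_variable(3);
//     dqm.set_quadratic_case(u, 0, v, 2, -1.0);
//     auto out = dqm.get_quadratic_case(u, 0, v, 2);
//     if (out.second) { /* interaction exists, bias is out.first == -1.0 */ }
//
// This assumes default construction behaves as the header's default constructor
// intends and that fresh cases start with a zero linear bias.)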
- auto cu = case_starts_[u] + case_u; - auto cv = case_starts_[v] + case_v; - return bqm_.get_quadratic(cu, cv).first; - } - bool set_quadratic(variable_type u, variable_type v, bias_type* p_biases) { assert(u >= 0 && u < this->num_variables()); assert(v >= 0 && v < this->num_variables()); - assert(u != v); + if (u == v) { + return false; + } auto num_cases_u = num_cases(u); auto num_cases_v = num_cases(v); // This cannot be parallelized since the vectors cannot be reshaped in @@ -230,26 +258,6 @@ class AdjVectorDQM { return true; } - // Check if boolean type is still okay - bool set_quadratic_case(variable_type u, variable_type case_u, - variable_type v, variable_type case_v, - bias_type bias) { - assert(u >= 0 && u < this->num_variables()); - assert(case_u >= 0 && case_u < this->num_cases(u)); - assert(v >= 0 && v < this->num_variables()); - assert(case_v >= 0 && case_v < this->num_cases(v)); - auto cu = case_starts_[u] + case_u; - auto cv = case_starts_[v] + case_v; - bqm_.set_quadratic(cu, cv, bias); - auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); - if (low == adj_[u].end() || *low != v) { - adj_[u].insert(low, v); - adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), - u); - } - return true; - } - void get_energies(variable_type* samples, int num_samples, variable_type sample_length, bias_type* energies) { assert(sample_length == this->num_variables()); From ce9ed8b4562353e4d648b81f41d224d0eae80674 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Mon, 18 Jan 2021 19:21:35 -0800 Subject: [PATCH 13/36] Update the pxd file for dqm for type names --- dimod/discrete/cydiscrete_quadratic_model.pxd | 32 ++++++++----------- dimod/include/dimod/adjvectordqm.h | 2 +- 2 files changed, 15 insertions(+), 19 deletions(-) diff --git a/dimod/discrete/cydiscrete_quadratic_model.pxd b/dimod/discrete/cydiscrete_quadratic_model.pxd index 76f842fdc..9da671757 100644 --- a/dimod/discrete/cydiscrete_quadratic_model.pxd +++ b/dimod/discrete/cydiscrete_quadratic_model.pxd @@ -20,13 +20,11 @@ from libcpp.vector cimport vector cimport numpy as np -from dimod.bqm.cppbqm cimport AdjVectorBQM as cppAdjVectorBQM +from dimod.discrete.cppdqm cimport AdjVectorDQM as cppAdjVectorDQM from dimod.bqm.common cimport Integral32plus, Numeric, Numeric32plus - -ctypedef np.float64_t Bias -ctypedef np.int64_t CaseIndex -ctypedef np.int64_t VarIndex +ctypedef np.float64_t Bias_t +ctypedef np.int64_t VarIndex_t ctypedef fused Unsigned: np.uint8_t @@ -36,26 +34,24 @@ ctypedef fused Unsigned: cdef class cyDiscreteQuadraticModel: - cdef cppAdjVectorBQM[CaseIndex, Bias] bqm_ - cdef vector[CaseIndex] case_starts_ # len(adj_) + 1 - cdef vector[vector[VarIndex]] adj_ + cdef cppAdjVectorDQM[VarIndex_t, Bias_t] dqm_ cdef readonly object dtype - cdef readonly object case_dtype + cdef readonly object variable_dtype cpdef Py_ssize_t add_variable(self, Py_ssize_t) except -1 - cpdef Bias[:] energies(self, CaseIndex[:, :]) - cpdef Bias get_linear_case(self, VarIndex, CaseIndex) except? -45.3 + cpdef Bias_t[:] energies(self, VarIndex_t[:, :]) + cpdef Bias_t get_linear_case(self, VarIndex_t, VarIndex_t) except? 
-45.3 cpdef Py_ssize_t num_cases(self, Py_ssize_t v=*) except -1 cpdef Py_ssize_t num_case_interactions(self) cpdef Py_ssize_t num_variable_interactions(self) except -1 cpdef Py_ssize_t num_variables(self) - cpdef Py_ssize_t set_linear(self, VarIndex v, Numeric[:] biases) except -1 - cpdef Py_ssize_t set_linear_case(self, VarIndex, CaseIndex, Bias) except -1 + cpdef Py_ssize_t set_linear(self, VarIndex_t v, Numeric[:] biases) except -1 + cpdef Py_ssize_t set_linear_case(self, VarIndex_t, VarIndex_t, Bias_t) except -1 cpdef Py_ssize_t set_quadratic_case( - self, VarIndex, CaseIndex, VarIndex, CaseIndex, Bias) except -1 - cpdef Bias get_quadratic_case( - self, VarIndex, CaseIndex, VarIndex, CaseIndex) except? -45.3 + self, VarIndex_t, VarIndex_t, VarIndex_t, VarIndex_t, Bias_t) except -1 + cpdef Bias_t get_quadratic_case( + self, VarIndex_t, VarIndex_t, VarIndex_t, VarIndex_t) except? -45.3 - cdef void _into_numpy_vectors(self, Unsigned[:] starts, Bias[:] ldata, - Unsigned[:] irow, Unsigned[:] icol, Bias[:] qdata) + cdef void _into_numpy_vectors(self, Unsigned[:] starts, Bias_t[:] ldata, + Unsigned[:] irow, Unsigned[:] icol, Bias_t[:] qdata) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index da7d83495..a07657e73 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -34,7 +34,7 @@ class AdjVectorDQM { using size_type = std::size_t; AdjVectorBQM bqm_; - std::vector case_starts_; + std::vector case_starts_; //len(adj_) + 1 std::vector> adj_; AdjVectorBQM() { case_starts_.push_back(0); } From a51c270568025f80a9229479f130a879703440a5 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Mon, 18 Jan 2021 21:14:56 -0800 Subject: [PATCH 14/36] Initial cython changes --- dimod/discrete/cydiscrete_quadratic_model.pxd | 24 +- dimod/discrete/cydiscrete_quadratic_model.pyx | 207 +++--------------- dimod/include/dimod/adjvectordqm.h | 16 +- 3 files changed, 50 insertions(+), 197 deletions(-) diff --git a/dimod/discrete/cydiscrete_quadratic_model.pxd b/dimod/discrete/cydiscrete_quadratic_model.pxd index 9da671757..12d0fc1e2 100644 --- a/dimod/discrete/cydiscrete_quadratic_model.pxd +++ b/dimod/discrete/cydiscrete_quadratic_model.pxd @@ -23,8 +23,8 @@ cimport numpy as np from dimod.discrete.cppdqm cimport AdjVectorDQM as cppAdjVectorDQM from dimod.bqm.common cimport Integral32plus, Numeric, Numeric32plus -ctypedef np.float64_t Bias_t -ctypedef np.int64_t VarIndex_t +ctypedef np.float64_t Bias +ctypedef np.int64_t VarIndex ctypedef fused Unsigned: np.uint8_t @@ -34,24 +34,24 @@ ctypedef fused Unsigned: cdef class cyDiscreteQuadraticModel: - cdef cppAdjVectorDQM[VarIndex_t, Bias_t] dqm_ + cdef cppAdjVectorDQM[VarIndex, Bias] dqm_ cdef readonly object dtype cdef readonly object variable_dtype cpdef Py_ssize_t add_variable(self, Py_ssize_t) except -1 - cpdef Bias_t[:] energies(self, VarIndex_t[:, :]) - cpdef Bias_t get_linear_case(self, VarIndex_t, VarIndex_t) except? -45.3 + cpdef Bias[:] energies(self, VarIndex[:, :]) + cpdef Bias get_linear_case(self, VarIndex, VarIndex) except? 
-45.3 cpdef Py_ssize_t num_cases(self, Py_ssize_t v=*) except -1 cpdef Py_ssize_t num_case_interactions(self) cpdef Py_ssize_t num_variable_interactions(self) except -1 cpdef Py_ssize_t num_variables(self) - cpdef Py_ssize_t set_linear(self, VarIndex_t v, Numeric[:] biases) except -1 - cpdef Py_ssize_t set_linear_case(self, VarIndex_t, VarIndex_t, Bias_t) except -1 + cpdef Py_ssize_t set_linear(self, VarIndex v, Numeric[:] biases) except -1 + cpdef Py_ssize_t set_linear_case(self, VarIndex, VarIndex, Bias) except -1 cpdef Py_ssize_t set_quadratic_case( - self, VarIndex_t, VarIndex_t, VarIndex_t, VarIndex_t, Bias_t) except -1 - cpdef Bias_t get_quadratic_case( - self, VarIndex_t, VarIndex_t, VarIndex_t, VarIndex_t) except? -45.3 + self, VarIndex, VarIndex, VarIndex, VarIndex, Bias) except -1 + cpdef Bias get_quadratic_case( + self, VarIndex, VarIndex, VarIndex, VarIndex) except? -45.3 - cdef void _into_numpy_vectors(self, Unsigned[:] starts, Bias_t[:] ldata, - Unsigned[:] irow, Unsigned[:] icol, Bias_t[:] qdata) + cdef void _into_numpy_vectors(self, Unsigned[:] starts, Bias[:] ldata, + Unsigned[:] irow, Unsigned[:] icol, Bias[:] qdata) diff --git a/dimod/discrete/cydiscrete_quadratic_model.pyx b/dimod/discrete/cydiscrete_quadratic_model.pyx index 41687879b..361927e28 100644 --- a/dimod/discrete/cydiscrete_quadratic_model.pyx +++ b/dimod/discrete/cydiscrete_quadratic_model.pyx @@ -28,10 +28,8 @@ from dimod.utilities import asintegerarrays, asnumericarrays cdef class cyDiscreteQuadraticModel: def __init__(self): - self.case_starts_.push_back(0) - - self.dtype = np.float64 - self.case_dtype = np.int64 + self.bias_dtype = np.float64 + self.variable_dtype = np.int64 @property def adj(self): @@ -51,27 +49,15 @@ cdef class cyDiscreteQuadraticModel: if num_cases <= 0: raise ValueError("num_cases must be a positive integer") - cdef VarIndex v = self.adj_.size() # index of new variable - - self.adj_.resize(v+1) - - cdef Py_ssize_t i - for i in range(num_cases): - self.bqm_.add_variable() - - self.case_starts_.push_back(self.bqm_.num_variables()) - - return v + return self.dqm_.add_variable(num_cases) def copy(self): cdef cyDiscreteQuadraticModel dqm = type(self)() - dqm.bqm_ = self.bqm_ - dqm.case_starts_ = self.case_starts_ - dqm.adj_ = self.adj_ + dqm.dqm_ = self.dqm_ - dqm.dtype = self.dtype - dqm.case_dtype = self.dtype + dqm.bias_dtype = self.bias_dtype + dqm.variable_dtype = self.bias_dtype @cython.boundscheck(False) @cython.wraparound(False) @@ -83,7 +69,7 @@ cdef class cyDiscreteQuadraticModel: cdef Py_ssize_t num_samples = samples.shape[0] cdef VarIndex num_variables = samples.shape[1] - cdef Bias[:] energies = np.zeros(num_samples, dtype=self.dtype) + cdef Bias[:] energies = np.empty(num_samples, dtype=self.bias_dtype) cdef Py_ssize_t si, vi cdef CaseIndex cu, case_u, cv, case_v @@ -94,27 +80,7 @@ cdef class cyDiscreteQuadraticModel: if case_u >= self.num_cases(u): raise ValueError("invalid case") - - cu = self.case_starts_[u] + case_u - - energies[si] += self.bqm_.get_linear(cu) - - for vi in range(self.adj_[u].size()): - v = self.adj_[u][vi] - - # we only care about the lower triangle - if v > u: - break - - case_v = samples[si, v] - - cv = self.case_starts_[v] + case_v - - out = self.bqm_.get_quadratic(cu, cv) - - if out.second: - energies[si] += out.first - + self.dqm_.get_energies(samples.data, num_samples, num_variables, energies.data) return energies @classmethod @@ -160,66 +126,9 @@ cdef class cyDiscreteQuadraticModel: if irow[qi] == icol[qi]: raise ValueError("quadratic data 
contains a self-loop") - cdef cyDiscreteQuadraticModel dqm = cls() - - # set the BQM - if num_interactions: - dqm.bqm_ = cppAdjVectorBQM[CaseIndex, Bias]( - &irow[0], &icol[0], &quadratic_biases[0], - num_interactions, True) - - # add the linear biases - while dqm.bqm_.num_variables() < num_cases: - dqm.bqm_.add_variable() - for ci in range(num_cases): - dqm.bqm_.set_linear(ci, linear_biases[ci]) - - # set the case starts - dqm.case_starts_.resize(case_starts.shape[0] + 1) - for v in range(case_starts.shape[0]): - dqm.case_starts_[v] = case_starts[v] - dqm.case_starts_[case_starts.shape[0]] = dqm.bqm_.num_variables() - - # and finally the adj. This is not really the memory bottleneck so - # we can build an intermediate (unordered) set version - cdef vector[unordered_set[VarIndex]] adjset - adjset.resize(num_variables) - u = 0 - for ci in range(dqm.bqm_.num_variables()): - - # we've been careful so don't need ui < case_starts.size() - 1 - while ci >= dqm.case_starts_[u+1]: - u += 1 - - span = dqm.bqm_.neighborhood(ci) - - v = 0 - while span.first != span.second: - cj = deref(span.first).first - - # see above note - while cj >= dqm.case_starts_[v+1]: - v += 1 - - adjset[u].insert(v) - - inc(span.first) - - # now put adjset into adj - dqm.adj_.resize(num_variables) - for v in range(num_variables): - dqm.adj_[v].insert(dqm.adj_[v].begin(), - adjset[v].begin(), adjset[v].end()) - sort(dqm.adj_[v].begin(), dqm.adj_[v].end()) - - # do one last final check for self-loops within a variable - for v in range(num_variables): - for ci in range(dqm.case_starts_[v], dqm.case_starts_[v+1]): - span2 = dqm.bqm_.neighborhood(ci, dqm.case_starts_[v]) - if span2.first == span2.second: - continue - if deref(span2.first).first < dqm.case_starts_[v+1]: - raise ValueError("A variable has a self-loop") + cdef cyDiscreteQuadraticModel dqm(case_starts.data, num_variables, linear_biases.data, num_cases, irow.data, icol.data, quadratic_biases.data, num_interactions) + if dqm_.is_self_loop_present(): + raise ValueError("A variable has a self-loop") return dqm @@ -252,13 +161,9 @@ cdef class cyDiscreteQuadraticModel: cdef Py_ssize_t num_cases = self.num_cases(v) - biases = np.empty(num_cases, dtype=np.float64) + biases = np.empty(num_cases, dtype=self.bias_dtype) cdef Bias[:] biases_view = biases - - cdef Py_ssize_t c - for c in range(num_cases): - biases_view[c] = self.bqm_.get_linear(self.case_starts_[v] + c) - + self.dqm_.get_linear(v, biases.data) return biases @cython.boundscheck(False) @@ -271,7 +176,7 @@ cdef class cyDiscreteQuadraticModel: raise ValueError("case {} is invalid, variable only supports {} " "cases".format(case, self.num_cases(v))) - return self.bqm_.get_linear(self.case_starts_[v] + case) + return self.dqm_.get_linear(v, case) def get_quadratic(self, VarIndex u, VarIndex v, bint array=False): @@ -292,20 +197,10 @@ cdef class cyDiscreteQuadraticModel: if array: # build a numpy array quadratic = np.zeros((self.num_cases(u), self.num_cases(v)), - dtype=self.dtype) + dtype=self.bias_dtype) quadratic_view = quadratic - for ci in range(self.case_starts_[u], self.case_starts_[u+1]): - - span = self.bqm_.neighborhood(ci, self.case_starts_[v]) - - while (span.first != span.second and deref(span.first).first < self.case_starts_[v+1]): - case_u = ci - self.case_starts_[u] - case_v = deref(span.first).first - self.case_starts_[v] - quadratic_view[case_u, case_v] = deref(span.first).second - - inc(span.first) - + # TODO : REWRITE else: # store in a dict quadratic = {} @@ -339,11 +234,7 @@ cdef class 
cyDiscreteQuadraticModel: raise ValueError("case {} is invalid, variable only supports {} " "cases".format(case_v, self.num_cases(v))) - - cdef CaseIndex cu = self.case_starts_[u] + case_u - cdef CaseIndex cv = self.case_starts_[v] + case_v - - return self.bqm_.get_quadratic(cu, cv).first + return self.dqm_.get_quadratic(u, case_u, v, case_v).first @cython.boundscheck(False) @cython.wraparound(False) @@ -357,25 +248,21 @@ cdef class cyDiscreteQuadraticModel: if v >= self.num_variables(): raise ValueError("unknown variable {}".format(v)) - return self.case_starts_[v+1] - self.case_starts_[v] + return self.dqm_.num_cases(v) cpdef Py_ssize_t num_case_interactions(self): """The total number of case interactions.""" - return self.bqm_.num_interactions() + return self.dqm_.num_interactions() @cython.boundscheck(False) @cython.wraparound(False) cpdef Py_ssize_t num_variable_interactions(self) except -1: """The total number of case interactions.""" - cdef Py_ssize_t num = 0 - cdef Py_ssize_t v - for v in range(self.num_variables()): - num += self.adj_[v].size() - return num // 2 + return self.dqm_.num_variable_interactions() cpdef Py_ssize_t num_variables(self): """The number of discrete variables in the DQM.""" - return self.adj_.size() + return self.dqm_.num_variables() @cython.boundscheck(False) @cython.wraparound(False) @@ -389,7 +276,7 @@ cdef class cyDiscreteQuadraticModel: cdef Py_ssize_t c for c in range(biases.shape[0]): - self.bqm_.set_linear(self.case_starts_[v] + c, biases[c]) + self.dqm_.set_linear_case(v, c, biases[c]) @cython.boundscheck(False) @cython.wraparound(False) @@ -403,7 +290,7 @@ cdef class cyDiscreteQuadraticModel: raise ValueError("case {} is invalid, variable only supports {} " "cases".format(case, self.num_cases(v))) - self.bqm_.set_linear(self.case_starts_[v] + case, b) + self.dqm_.set_linear_case(v, case, b) def set_quadratic(self, VarIndex u, VarIndex v, biases): @@ -435,32 +322,11 @@ cdef class cyDiscreteQuadraticModel: raise ValueError("case {} is invalid, variable only supports {} " "cases".format(case_v, self.num_cases(v))) - cu = self.case_starts_[u] + case_u - cv = self.case_starts_[v] + case_v - - self.bqm_.set_quadratic(cu, cv, bias) + self.dqm_.set_quadratic(u, case_u, v, case_v, bias) else: - biases_view = np.asarray(biases, dtype=self.dtype).reshape(num_cases_u, num_cases_v) - - for case_u in range(biases_view.shape[0]): - cu = self.case_starts_[u] + case_u - for case_v in range(biases_view.shape[1]): - cv = self.case_starts_[v] + case_v - - bias = biases_view[case_u, case_v] - - if bias: - self.bqm_.set_quadratic(cu, cv, bias) - - # track in adjacency - low = lower_bound(self.adj_[u].begin(), self.adj_[u].end(), v) - if low == self.adj_[u].end() or deref(low) != v: - # need to add - self.adj_[u].insert(low, v) - self.adj_[v].insert( - lower_bound(self.adj_[v].begin(), self.adj_[v].end(), u), - u) + biases_view = np.asarray(biases, dtype=self.bias_dtype).reshape(num_cases_u, num_cases_v) + self.dqm_.set_quadratic(u, v, biases_view.data) cpdef Py_ssize_t set_quadratic_case(self, VarIndex u, CaseIndex case_u, @@ -481,21 +347,8 @@ cdef class cyDiscreteQuadraticModel: raise ValueError("there cannot be a quadratic interaction between " "two cases in the same variable") - - cdef CaseIndex cu = self.case_starts_[u] + case_u - cdef CaseIndex cv = self.case_starts_[v] + case_v - - self.bqm_.set_quadratic(cu, cv, bias) - - # track in adjacency - low = lower_bound(self.adj_[u].begin(), self.adj_[u].end(), v) - if low == self.adj_[u].end() or deref(low) != v: - # 
need to add - self.adj_[u].insert(low, v) - self.adj_[v].insert( - lower_bound(self.adj_[v].begin(), self.adj_[v].end(), u), - u) - + self.dqm_.set_quadratic_case(u, case_u, v, case_v, bias) + @cython.boundscheck(False) @cython.wraparound(False) cdef void _into_numpy_vectors(self, Unsigned[:] starts, Bias[:] ldata, @@ -540,10 +393,10 @@ cdef class cyDiscreteQuadraticModel: index_dtype = np.uint64 starts = np.empty(num_variables, dtype=index_dtype) - ldata = np.empty(num_cases, dtype=self.dtype) + ldata = np.empty(num_cases, dtype=self.bias_dtype) irow = np.empty(num_interactions, dtype=index_dtype) icol = np.empty(num_interactions, dtype=index_dtype) - qdata = np.empty(num_interactions, dtype=self.dtype) + qdata = np.empty(num_interactions, dtype=self.bias_dtype) if index_dtype == np.uint16: self._into_numpy_vectors[np.uint16_t](starts, ldata, irow, icol, qdata) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index a07657e73..c348e843e 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -88,6 +88,7 @@ class AdjVectorDQM { } adj_.resize(num_variables); + #pragma omp parallel for for (auto v = 0; v < num_variables; v++) { adj_[v].insert(adj_[v].begin(), adjset[v].begin(), adjset[v].end()); std::sort(adj_[v].begin(), adj_[v].end()); @@ -119,13 +120,9 @@ class AdjVectorDQM { return (num / 2); } - size_type num_cases(variable_type v = -1) { - assert(v < this->num_variables()); - if (v < 0) { - return bqm_.num_variables(); - } else { - return (case_starts_[v + 1] - case_starts_[v]); - } + size_type num_cases(variable_type v) { + assert(v >= 0 && v < this->num_variables()); + return (case_starts_[v + 1] - case_starts_[v]); } size_type num_case_interactions() { return bqm_.num_interactions(); } @@ -246,7 +243,10 @@ class AdjVectorDQM { for (auto case_v = 0; case_v < num_cases_v; case_v++) { cv = case_starts_[v] + case_v; auto bias = p_biases[cu * num_cases_v + case_v]; - bqm_.set_quadratic(cu, cv, bias); + // TODO :Discuss with alexander, since we need to conditionally update adj_ + // if(bias != (bias_type) 0.0) { + bqm_.set_quadratic(cu, cv, bias); + // } } } auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); From 9e160c0277ff86aaa427cc72dd79905f21654bb2 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Tue, 19 Jan 2021 13:48:14 -0800 Subject: [PATCH 15/36] Templated from numpy function --- dimod/include/dimod/adjvectordqm.h | 63 ++++++++++++++++-------------- 1 file changed, 34 insertions(+), 29 deletions(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index c348e843e..05e26f748 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -34,15 +34,16 @@ class AdjVectorDQM { using size_type = std::size_t; AdjVectorBQM bqm_; - std::vector case_starts_; //len(adj_) + 1 + std::vector case_starts_; // len(adj_) + 1 std::vector> adj_; AdjVectorBQM() { case_starts_.push_back(0); } - AdjVectorDQM(variable_type* case_starts, size_type num_variables, - bias_type* linear_biases, size_type num_cases, - variable_type* irow, variable_type* icol, - bias_type* quadratic_biases, size_type num_interactions) { + template + AdjVectorDQM(InputVariable_t *case_starts, size_type num_variables, + InputBias_t *linear_biases, size_type num_cases, + InputVariable_t *irow, InputVariable_t *icol, + InputBias_t *quadratic_biases, size_type num_interactions) { // Set the BQM, linear biases will be added separately. 
if (num_interactions) { bqm_ = AdjVectorBQM( @@ -88,7 +89,7 @@ class AdjVectorDQM { } adj_.resize(num_variables); - #pragma omp parallel for +#pragma omp parallel for for (auto v = 0; v < num_variables; v++) { adj_[v].insert(adj_[v].begin(), adjset[v].begin(), adjset[v].end()); std::sort(adj_[v].begin(), adj_[v].end()); @@ -112,7 +113,7 @@ class AdjVectorDQM { size_type num_variables() { return adj_.size(); } - size_type num_variaables_interactions() { + size_type num_variables_interactions() { size_type num = 0; for (auto v = 0, vend = this->num_variables(); v < vend; v++) { num += adj_[v].size(); @@ -150,7 +151,7 @@ class AdjVectorDQM { bqm_.set_linear(case_starts_[v] + case_v, b); } - void get_linear(variable_type v, bias_type* biases) { + void get_linear(variable_type v, bias_type *biases) { assert(v >= 0 && v < this->num_variables()); for (auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { @@ -158,7 +159,7 @@ class AdjVectorDQM { } } - void set_linear(variable_type v, bias_type* p_biases) { + void set_linear(variable_type v, bias_type *p_biases) { assert(v >= 0 && v < this->num_variables()); for (auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { @@ -193,18 +194,13 @@ class AdjVectorDQM { auto cu = case_starts_[u] + case_u; auto cv = case_starts_[v] + case_v; bqm_.set_quadratic(cu, cv, bias); - auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); - if (low == adj_[u].end() || *low != v) { - adj_[u].insert(low, v); - adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), - u); - } + connect_variables(u, v); return true; } // Returns false if there is no interaction among the variables. bool get_quadratic(variable_type u, variable_type v, - bias_type* quadratic_biases) { + bias_type *quadratic_biases) { assert(u >= 0 && u < this->num_variables()); assert(v >= 0 && v < this->num_variables()); auto it = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); @@ -228,7 +224,7 @@ class AdjVectorDQM { return true; } - bool set_quadratic(variable_type u, variable_type v, bias_type* p_biases) { + bool set_quadratic(variable_type u, variable_type v, bias_type *p_biases) { assert(u >= 0 && u < this->num_variables()); assert(v >= 0 && v < this->num_variables()); if (u == v) { @@ -238,33 +234,32 @@ class AdjVectorDQM { auto num_cases_v = num_cases(v); // This cannot be parallelized since the vectors cannot be reshaped in // parallel. 
+ bool inserted = false; for (auto case_u = 0; case_u < num_cases_u; case_u++) { cu = case_starts_[u] + case_u; for (auto case_v = 0; case_v < num_cases_v; case_v++) { cv = case_starts_[v] + case_v; auto bias = p_biases[cu * num_cases_v + case_v]; - // TODO :Discuss with alexander, since we need to conditionally update adj_ - // if(bias != (bias_type) 0.0) { - bqm_.set_quadratic(cu, cv, bias); - // } + if (bias != (bias_type)0.0) { + bqm_.set_quadratic(cu, cv, bias); + inserted = true; + } } } - auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); - if (low == adj_[u].end() || *low != v) { - adj_[u].insert(low, v); - adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), - u); + + if (inserted) { + connect_variables(u, v); } return true; } - void get_energies(variable_type* samples, int num_samples, - variable_type sample_length, bias_type* energies) { + void get_energies(variable_type *samples, int num_samples, + variable_type sample_length, bias_type *energies) { assert(sample_length == this->num_variables()); auto num_variables = sample_length; #pragma omp parallel for for (auto si = 0; si < num_samples; si++) { - variable_type* p_curr_sample = samples + (si * num_variables); + variable_type *p_curr_sample = samples + (si * num_variables); double current_sample_energy = 0; for (auto u = 0; u < num_variables; u++) { auto case_u = p_curr_sample[u]; @@ -288,6 +283,16 @@ class AdjVectorDQM { energies[si] = current_sample_energy; } } + + private: + void connect_variables(variable_type u, variable_type v) { + auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); + if (low == adj_[u].end() || *low != v) { + adj_[u].insert(low, v); + adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), + u); + } + } } } // namespace dimod From 6d76a938fc49beccb95642f19855d250fe68c2cd Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Tue, 19 Jan 2021 15:29:15 -0800 Subject: [PATCH 16/36] Templated the functions. 
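
Templating the linear, quadratic, and energy accessors over
io_variable_type / io_bias_type is meant to let callers (for example the
Cython layer) hand in buffers whose element types differ from the DQM's
stored bias_type. A rough sketch of the intended C++ usage follows; it is
illustrative only, assumes dimod's include directory is on the include
path, and assumes the small compile fixes that land later in this series
(such as renaming the misspelled AdjVectorBQM() default constructor to
AdjVectorDQM()).

    #include <cstdint>
    #include <iostream>

    #include "dimod/adjvectordqm.h"

    int main() {
        // variable/case indices stored as int64, biases as double
        dimod::AdjVectorDQM<std::int64_t, double> dqm;

        auto u = dqm.add_variable(2);             // variable 0 with 2 cases
        auto v = dqm.add_variable(3);             // variable 1 with 3 cases
        dqm.set_linear_case(u, 1, 0.5);
        dqm.set_quadratic_case(u, 1, v, 2, -1.0);

        // the energy buffer's element type (float here) is deduced as
        // io_bias_type, so no intermediate copy into bias_type is needed
        std::int64_t samples[4] = {0, 0, 1, 2};   // 2 samples x 2 variables
        float energies[2];
        dqm.get_energies(samples, 2, 2, energies);

        // sample {0, 0} has energy 0; sample {1, 2} has 0.5 + (-1.0) = -0.5
        std::cout << energies[0] << " " << energies[1] << std::endl;
        return 0;
    }

Note that the loop in get_energies only walks the lower triangle of adj_,
so each (u, v) interaction contributes once per sample.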
--- dimod/discrete/cppdqm.pxd | 10 ++++---- dimod/include/dimod/adjvectordqm.h | 39 +++++++++++++++++------------- 2 files changed, 27 insertions(+), 22 deletions(-) diff --git a/dimod/discrete/cppdqm.pxd b/dimod/discrete/cppdqm.pxd index 373b91388..722548381 100644 --- a/dimod/discrete/cppdqm.pxd +++ b/dimod/discrete/cppdqm.pxd @@ -83,13 +83,13 @@ cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: size_type num_case_interactions() except + bias_type get_linear_case(variable_type, variable_type) except + void set_linear_case(variable_type, variable_type, bias_type) except + - void get_linear(variable_type, bias_type*) except + - void set_linear(variable_type, bias_type*) except + + void get_linear[io_bias_type](variable_type, io_bias_type*) except + + void set_linear[io_bias_type](variable_type, io_bias_type*) except + pair[bias_type, bool] get_quadratic_case(variable_type, variable_type, variable_type, variable_type) except + bool set_quadratic_case(variable_type, variable_type, variable_type, variable_type, bias_type) except + - bool get_quadratic(variable_type, variable_type, bias_type*) except + - bool set_quadratic(variable_type, variable_type, bias_type*) except + - void get_energies(variable_type*, int, variable_type, bias_type*) except + + bool get_quadratic[io_bias_type](variable_type, variable_type, bias_type*) except + + bool set_quadratic[io_bias_type](variable_type, variable_type, bias_type*) except + + void get_energies[io_variable_type, io_bias_type](variable_type*, int, variable_type, bias_type*) except + # shapeable methods diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 05e26f748..b142ae1aa 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -39,11 +39,11 @@ class AdjVectorDQM { AdjVectorBQM() { case_starts_.push_back(0); } - template - AdjVectorDQM(InputVariable_t *case_starts, size_type num_variables, - InputBias_t *linear_biases, size_type num_cases, - InputVariable_t *irow, InputVariable_t *icol, - InputBias_t *quadratic_biases, size_type num_interactions) { + template + AdjVectorDQM(io_variable_type *case_starts, size_type num_variables, + io_bias_type *linear_biases, size_type num_cases, + io_variable_type *irow, io_variable_type *icol, + io_bias_type *quadratic_biases, size_type num_interactions) { // Set the BQM, linear biases will be added separately. if (num_interactions) { bqm_ = AdjVectorBQM( @@ -151,7 +151,8 @@ class AdjVectorDQM { bqm_.set_linear(case_starts_[v] + case_v, b); } - void get_linear(variable_type v, bias_type *biases) { + template + void get_linear(variable_type v, io_bias_type *biases) { assert(v >= 0 && v < this->num_variables()); for (auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { @@ -159,11 +160,12 @@ class AdjVectorDQM { } } - void set_linear(variable_type v, bias_type *p_biases) { + template + void set_linear(variable_type v, io_bias_type *biases) { assert(v >= 0 && v < this->num_variables()); for (auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { - bqm_.set_linear(case_starts_[v] + case_v, p_biases[case_v]); + bqm_.set_linear(case_starts_[v] + case_v, biases[case_v]); } } @@ -199,8 +201,9 @@ class AdjVectorDQM { } // Returns false if there is no interaction among the variables. 
+ template bool get_quadratic(variable_type u, variable_type v, - bias_type *quadratic_biases) { + io_bias_type *quadratic_biases) { assert(u >= 0 && u < this->num_variables()); assert(v >= 0 && v < this->num_variables()); auto it = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); @@ -224,7 +227,8 @@ class AdjVectorDQM { return true; } - bool set_quadratic(variable_type u, variable_type v, bias_type *p_biases) { + template + bool set_quadratic(variable_type u, variable_type v, io_bias_type *biases) { assert(u >= 0 && u < this->num_variables()); assert(v >= 0 && v < this->num_variables()); if (u == v) { @@ -239,8 +243,8 @@ class AdjVectorDQM { cu = case_starts_[u] + case_u; for (auto case_v = 0; case_v < num_cases_v; case_v++) { cv = case_starts_[v] + case_v; - auto bias = p_biases[cu * num_cases_v + case_v]; - if (bias != (bias_type)0.0) { + auto bias = biases[cu * num_cases_v + case_v]; + if (bias != (io_bias_type)0) { bqm_.set_quadratic(cu, cv, bias); inserted = true; } @@ -253,16 +257,17 @@ class AdjVectorDQM { return true; } - void get_energies(variable_type *samples, int num_samples, - variable_type sample_length, bias_type *energies) { + template + void get_energies(io_variable_type *samples, int num_samples, + variable_type sample_length, io_bias_type *energies) { assert(sample_length == this->num_variables()); auto num_variables = sample_length; #pragma omp parallel for for (auto si = 0; si < num_samples; si++) { - variable_type *p_curr_sample = samples + (si * num_variables); + variable_type *current_sample = samples + (si * num_variables); double current_sample_energy = 0; for (auto u = 0; u < num_variables; u++) { - auto case_u = p_curr_sample[u]; + auto case_u = current_sample[u]; assert(case_u < num_cases(u)); auto cu = case_starts_[u] + case_u; current_sample_energy += bqm_.get_linear(cu); @@ -272,7 +277,7 @@ class AdjVectorDQM { if (v > u) { break; } - auto case_v = p_cur_sample[v]; + auto case_v = current_sample[v]; auto cv = case_starts_[v] + case_v; auto out = bqm_.get_quadratic(cu, cv); if (out.second) { From 89fef863245b660b5b6077d09599f483e9b8b0eb Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Tue, 19 Jan 2021 15:53:42 -0800 Subject: [PATCH 17/36] Manual rebase to master --- dimod/discrete/cydiscrete_quadratic_model.pxd | 29 ++++++++++++++----- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/dimod/discrete/cydiscrete_quadratic_model.pxd b/dimod/discrete/cydiscrete_quadratic_model.pxd index 12d0fc1e2..b2a8140ac 100644 --- a/dimod/discrete/cydiscrete_quadratic_model.pxd +++ b/dimod/discrete/cydiscrete_quadratic_model.pxd @@ -21,9 +21,10 @@ from libcpp.vector cimport vector cimport numpy as np from dimod.discrete.cppdqm cimport AdjVectorDQM as cppAdjVectorDQM -from dimod.bqm.common cimport Integral32plus, Numeric, Numeric32plus + ctypedef np.float64_t Bias +ctypedef np.int64_t CaseIndex ctypedef np.int64_t VarIndex ctypedef fused Unsigned: @@ -32,26 +33,40 @@ ctypedef fused Unsigned: np.uint32_t np.uint64_t +ctypedef fused Integral: + Unsigned + np.int8_t + np.int16_t + np.int32_t + np.int64_t + +ctypedef fused Numeric: + Integral + np.float32_t + np.float64_t + cdef class cyDiscreteQuadraticModel: cdef cppAdjVectorDQM[VarIndex, Bias] dqm_ cdef readonly object dtype - cdef readonly object variable_dtype + cdef readonly object case_dtype cpdef Py_ssize_t add_variable(self, Py_ssize_t) except -1 - cpdef Bias[:] energies(self, VarIndex[:, :]) - cpdef Bias get_linear_case(self, VarIndex, VarIndex) except? 
-45.3 + cpdef Bias[:] energies(self, CaseIndex[:, :]) + cpdef Bias get_linear_case(self, VarIndex, CaseIndex) except? -45.3 cpdef Py_ssize_t num_cases(self, Py_ssize_t v=*) except -1 cpdef Py_ssize_t num_case_interactions(self) cpdef Py_ssize_t num_variable_interactions(self) except -1 cpdef Py_ssize_t num_variables(self) cpdef Py_ssize_t set_linear(self, VarIndex v, Numeric[:] biases) except -1 - cpdef Py_ssize_t set_linear_case(self, VarIndex, VarIndex, Bias) except -1 + cpdef Py_ssize_t set_linear_case(self, VarIndex, CaseIndex, Bias) except -1 cpdef Py_ssize_t set_quadratic_case( - self, VarIndex, VarIndex, VarIndex, VarIndex, Bias) except -1 + self, VarIndex, CaseIndex, VarIndex, CaseIndex, Bias) except -1 cpdef Bias get_quadratic_case( - self, VarIndex, VarIndex, VarIndex, VarIndex) except? -45.3 + self, VarIndex, CaseIndex, VarIndex, CaseIndex) except? -45.3 cdef void _into_numpy_vectors(self, Unsigned[:] starts, Bias[:] ldata, Unsigned[:] irow, Unsigned[:] icol, Bias[:] qdata) + cdef void _from_numpy_vectors(self, Integral[:] starts, Bias[:] ldata, + Integral[:] irow, Integral[:] icol, Bias[:] qdata) except * From 55809b06f5114e8623d6f7f0f586b9488ba11dde Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Tue, 19 Jan 2021 17:14:22 -0800 Subject: [PATCH 18/36] Updated cydiscrete pyx file --- dimod/discrete/cydiscrete_quadratic_model.pyx | 45 +++++++++---------- 1 file changed, 20 insertions(+), 25 deletions(-) diff --git a/dimod/discrete/cydiscrete_quadratic_model.pyx b/dimod/discrete/cydiscrete_quadratic_model.pyx index 361927e28..9939aadfa 100644 --- a/dimod/discrete/cydiscrete_quadratic_model.pyx +++ b/dimod/discrete/cydiscrete_quadratic_model.pyx @@ -22,14 +22,12 @@ from libcpp.unordered_set cimport unordered_set import numpy as np -from dimod.utilities import asintegerarrays, asnumericarrays - cdef class cyDiscreteQuadraticModel: def __init__(self): - self.bias_dtype = np.float64 - self.variable_dtype = np.int64 + self.dtype = np.float64 + self.case_dtype = np.int64 @property def adj(self): @@ -56,8 +54,8 @@ cdef class cyDiscreteQuadraticModel: dqm.dqm_ = self.dqm_ - dqm.bias_dtype = self.bias_dtype - dqm.variable_dtype = self.bias_dtype + dqm.dtype = self.dtype + dqm.case_dtype = self.dtype @cython.boundscheck(False) @cython.wraparound(False) @@ -69,18 +67,19 @@ cdef class cyDiscreteQuadraticModel: cdef Py_ssize_t num_samples = samples.shape[0] cdef VarIndex num_variables = samples.shape[1] - cdef Bias[:] energies = np.empty(num_samples, dtype=self.bias_dtype) + cdef Bias[:] energies = np.empty(num_samples, dtype=self.dtype) cdef Py_ssize_t si, vi cdef CaseIndex cu, case_u, cv, case_v cdef VarIndex u, v - for si in range(num_samples): # this could be parallelized + for si in range(num_samples): for u in range(num_variables): case_u = samples[si, u] if case_u >= self.num_cases(u): raise ValueError("invalid case") - self.dqm_.get_energies(samples.data, num_samples, num_variables, energies.data) + + self.dqm_.get_energies(&samples[0,0], num_samples, num_variables, &energies[0]) return energies @classmethod @@ -161,9 +160,8 @@ cdef class cyDiscreteQuadraticModel: cdef Py_ssize_t num_cases = self.num_cases(v) - biases = np.empty(num_cases, dtype=self.bias_dtype) - cdef Bias[:] biases_view = biases - self.dqm_.get_linear(v, biases.data) + biases = np.empty(num_cases, dtype=self.dtype) + self.dqm_.get_linear(v, &biases[0]) return biases @cython.boundscheck(False) @@ -176,7 +174,7 @@ cdef class cyDiscreteQuadraticModel: raise ValueError("case {} is invalid, variable only supports {} " 
"cases".format(case, self.num_cases(v))) - return self.dqm_.get_linear(v, case) + return self.dqm_.get_linear_case(v, case) def get_quadratic(self, VarIndex u, VarIndex v, bint array=False): @@ -196,11 +194,10 @@ cdef class cyDiscreteQuadraticModel: if array: # build a numpy array - quadratic = np.zeros((self.num_cases(u), self.num_cases(v)), - dtype=self.bias_dtype) - quadratic_view = quadratic + quadratic = np.empty((self.num_cases(u), self.num_cases(v)), + dtype=self.dtype) + self.dqm_.get_quadratic(u, v, &quadratic[0,0]) - # TODO : REWRITE else: # store in a dict quadratic = {} @@ -274,9 +271,7 @@ cdef class cyDiscreteQuadraticModel: raise ValueError('Recieved {} bias(es) for a variable of degree {}' ''.format(biases.shape[0], self.num_cases(v))) - cdef Py_ssize_t c - for c in range(biases.shape[0]): - self.dqm_.set_linear_case(v, c, biases[c]) + self.dqm_.set_linear(v, &biases[0]) @cython.boundscheck(False) @cython.wraparound(False) @@ -322,11 +317,11 @@ cdef class cyDiscreteQuadraticModel: raise ValueError("case {} is invalid, variable only supports {} " "cases".format(case_v, self.num_cases(v))) - self.dqm_.set_quadratic(u, case_u, v, case_v, bias) + self.dqm_.set_quadratic_case(u, case_u, v, case_v, bias) else: - biases_view = np.asarray(biases, dtype=self.bias_dtype).reshape(num_cases_u, num_cases_v) - self.dqm_.set_quadratic(u, v, biases_view.data) + biases_view = np.asarray(biases, dtype=self.dtype).reshape(num_cases_u, num_cases_v) + self.dqm_.set_quadratic(u, v, &biases_view[0,0]) cpdef Py_ssize_t set_quadratic_case(self, VarIndex u, CaseIndex case_u, @@ -393,10 +388,10 @@ cdef class cyDiscreteQuadraticModel: index_dtype = np.uint64 starts = np.empty(num_variables, dtype=index_dtype) - ldata = np.empty(num_cases, dtype=self.bias_dtype) + ldata = np.empty(num_cases, dtype=self.dtype) irow = np.empty(num_interactions, dtype=index_dtype) icol = np.empty(num_interactions, dtype=index_dtype) - qdata = np.empty(num_interactions, dtype=self.bias_dtype) + qdata = np.empty(num_interactions, dtype=self.dtype) if index_dtype == np.uint16: self._into_numpy_vectors[np.uint16_t](starts, ldata, irow, icol, qdata) From aa12593ef73b9ff8e64aea113e118ac9c4b38aa3 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Tue, 19 Jan 2021 17:32:01 -0800 Subject: [PATCH 19/36] Fix pxd file --- dimod/discrete/cydiscrete_quadratic_model.pxd | 15 +-------------- 1 file changed, 1 insertion(+), 14 deletions(-) diff --git a/dimod/discrete/cydiscrete_quadratic_model.pxd b/dimod/discrete/cydiscrete_quadratic_model.pxd index b2a8140ac..2a4d8e127 100644 --- a/dimod/discrete/cydiscrete_quadratic_model.pxd +++ b/dimod/discrete/cydiscrete_quadratic_model.pxd @@ -21,6 +21,7 @@ from libcpp.vector cimport vector cimport numpy as np from dimod.discrete.cppdqm cimport AdjVectorDQM as cppAdjVectorDQM +from dimod.bqm.common cimport Integral32plus, Numeric, Numeric32plus ctypedef np.float64_t Bias @@ -33,18 +34,6 @@ ctypedef fused Unsigned: np.uint32_t np.uint64_t -ctypedef fused Integral: - Unsigned - np.int8_t - np.int16_t - np.int32_t - np.int64_t - -ctypedef fused Numeric: - Integral - np.float32_t - np.float64_t - cdef class cyDiscreteQuadraticModel: cdef cppAdjVectorDQM[VarIndex, Bias] dqm_ @@ -68,5 +57,3 @@ cdef class cyDiscreteQuadraticModel: cdef void _into_numpy_vectors(self, Unsigned[:] starts, Bias[:] ldata, Unsigned[:] irow, Unsigned[:] icol, Bias[:] qdata) - cdef void _from_numpy_vectors(self, Integral[:] starts, Bias[:] ldata, - Integral[:] irow, Integral[:] icol, Bias[:] qdata) except * From 
42d03fedad77db252942de790bece76590f2a050 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Tue, 19 Jan 2021 18:29:09 -0800 Subject: [PATCH 20/36] Completed initial to_numpy_vectors --- dimod/discrete/cppdqm.pxd | 7 +- dimod/discrete/cydiscrete_quadratic_model.pyx | 42 +++------ dimod/include/dimod/adjvectordqm.h | 86 ++++++++++--------- 3 files changed, 64 insertions(+), 71 deletions(-) diff --git a/dimod/discrete/cppdqm.pxd b/dimod/discrete/cppdqm.pxd index 722548381..35d3fdc5a 100644 --- a/dimod/discrete/cppdqm.pxd +++ b/dimod/discrete/cppdqm.pxd @@ -87,9 +87,10 @@ cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: void set_linear[io_bias_type](variable_type, io_bias_type*) except + pair[bias_type, bool] get_quadratic_case(variable_type, variable_type, variable_type, variable_type) except + bool set_quadratic_case(variable_type, variable_type, variable_type, variable_type, bias_type) except + - bool get_quadratic[io_bias_type](variable_type, variable_type, bias_type*) except + - bool set_quadratic[io_bias_type](variable_type, variable_type, bias_type*) except + - void get_energies[io_variable_type, io_bias_type](variable_type*, int, variable_type, bias_type*) except + + bool get_quadratic[io_bias_type](variable_type, variable_type, io_bias_type*) except + + bool set_quadratic[io_bias_type](variable_type, variable_type, io_bias_type*) except + + void get_energies[io_variable_type, io_bias_type](io_variable_type*, int, variable_type, io_bias_type*) except + + void extract_data[io_variable_type, io_bias_type](variable_type*, io_bias_type*, io_variable_type*, io_variable_type*, io_bias_type*) except + # shapeable methods diff --git a/dimod/discrete/cydiscrete_quadratic_model.pyx b/dimod/discrete/cydiscrete_quadratic_model.pyx index 9939aadfa..ba381110c 100644 --- a/dimod/discrete/cydiscrete_quadratic_model.pyx +++ b/dimod/discrete/cydiscrete_quadratic_model.pyx @@ -125,11 +125,14 @@ cdef class cyDiscreteQuadraticModel: if irow[qi] == icol[qi]: raise ValueError("quadratic data contains a self-loop") - cdef cyDiscreteQuadraticModel dqm(case_starts.data, num_variables, linear_biases.data, num_cases, irow.data, icol.data, quadratic_biases.data, num_interactions) - if dqm_.is_self_loop_present(): + cdef cyDiscreteQuadraticModel cyDQM = cls() + + cyDQM.dqm_ = cppAdjVectorDQM(case_starts.data, num_variables, linear_biases.data, num_cases, irow.data, icol.data, quadratic_biases.data, num_interactions) + + if cyDQM.dqm_.is_self_loop_present(): raise ValueError("A variable has a self-loop") - return dqm + return cyDQM @classmethod def from_numpy_vectors(cls, case_starts, linear_biases, quadratic): @@ -202,13 +205,13 @@ cdef class cyDiscreteQuadraticModel: # store in a dict quadratic = {} - for ci in range(self.case_starts_[u], self.case_starts_[u+1]): + for ci in range(self.dqm_.case_starts_[u], self.dqm_.case_starts_[u+1]): - span = self.bqm_.neighborhood(ci, self.case_starts_[v]) + span = self.dqm_.bqm_.neighborhood(ci, self.dqm_.case_starts_[v]) - while (span.first != span.second and deref(span.first).first < self.case_starts_[v+1]): - case_u = ci - self.case_starts_[u] - case_v = deref(span.first).first - self.case_starts_[v] + while (span.first != span.second and deref(span.first).first < self.dqm_.case_starts_[v+1]): + case_u = ci - self.dqm_.case_starts_[u] + case_v = deref(span.first).first - self.dqm_.case_starts_[v] quadratic[case_u, case_v] = deref(span.first).second inc(span.first) @@ -231,7 +234,7 @@ cdef class cyDiscreteQuadraticModel: raise ValueError("case {} is invalid, 
variable only supports {} " "cases".format(case_v, self.num_cases(v))) - return self.dqm_.get_quadratic(u, case_u, v, case_v).first + return self.dqm_.get_quadratic_case(u, case_u, v, case_v).first @cython.boundscheck(False) @cython.wraparound(False) @@ -350,26 +353,7 @@ cdef class cyDiscreteQuadraticModel: Unsigned[:] irow, Unsigned[:] icol, Bias[:] qdata): # we don't do array length checking so be careful! This can segfault # if the given arrays are incorrectly sized - - cdef Py_ssize_t vi - for vi in range(self.num_variables()): - starts[vi] = self.case_starts_[vi] - - cdef Py_ssize_t ci = 0 - cdef Py_ssize_t qi = 0 - for ci in range(self.bqm_.num_variables()): - ldata[ci] = self.bqm_.linear(ci) - - span = self.bqm_.neighborhood(ci) - while span.first != span.second and deref(span.first).first < ci: - - irow[qi] = ci - icol[qi] = deref(span.first).first - qdata[qi] = deref(span.first).second - - inc(span.first) - qi += 1 - + self.dqm_.extract_data(&starts[0], &ldata[0], &irow[0], &icol[0], &qdata[0]) def to_numpy_vectors(self): diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index b142ae1aa..10cd3186d 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -28,7 +28,7 @@ namespace dimod { template class AdjVectorDQM { - public: + public: using bias_type = B; using variable_type = V; using size_type = std::size_t; @@ -40,14 +40,12 @@ class AdjVectorDQM { AdjVectorBQM() { case_starts_.push_back(0); } template - AdjVectorDQM(io_variable_type *case_starts, size_type num_variables, - io_bias_type *linear_biases, size_type num_cases, - io_variable_type *irow, io_variable_type *icol, - io_bias_type *quadratic_biases, size_type num_interactions) { + AdjVectorDQM(io_variable_type *case_starts, size_type num_variables, io_bias_type *linear_biases, + size_type num_cases, io_variable_type *irow, io_variable_type *icol, io_bias_type *quadratic_biases, + size_type num_interactions) { // Set the BQM, linear biases will be added separately. if (num_interactions) { - bqm_ = AdjVectorBQM( - irow, icol, num_interactions, true); + bqm_ = AdjVectorBQM(irow, icol, num_interactions, true); } // Accounting for the cases/variables at the end without interaction. 
@@ -96,14 +94,35 @@ class AdjVectorDQM { } } + template + void extract_data(io_variable_type *case_starts, io_bias_type *linear_biases, io_variable_type *irow, + io_variable_type *icol, io_bias_type *quadratic_biases) { + auto num_variables = this->num_variables(); + auto num_total_cases = bqm_.num_variables(); + + for (auto v = 0; v < num_variables; v++) { + case_starts[v] = case_starts_[v]; + } + + size_type qi = 0; + for (auto ci = 0; ci < num_total_cases; ci++) { + linear_biases[ci] = bqm_.get_linear(ci); + auto span = bqm_.neighborhood(ci); + while ((span.first != span.second) && (*(span.first).first < ci)) { + irow[qi] = ci; + icol[qi] = *(span.first).first; + quadratic_biases[qi] = *(span.first).second; + span.first++; + qi++; + } + } + } + bool is_self_loop_present() { - for (auto v = 0, num_variables = this->num_variables(); - v < num_variables; v++) { - for (auto ci = case_starts_[v], ci_end = case_starts_[v + 1]; - ci < ci_end; ci++) { + for (auto v = 0, num_variables = this->num_variables(); v < num_variables; v++) { + for (auto ci = case_starts_[v], ci_end = case_starts_[v + 1]; ci < ci_end; ci++) { auto span = bqm_.neighborhood(ci, case_starts_[v]); - if ((span.first != span.second) && - (*(span.first).first < case_starts_[v + 1])) { + if ((span.first != span.second) && (*(span.first).first < case_starts_[v + 1])) { return true; } } @@ -151,27 +170,23 @@ class AdjVectorDQM { bqm_.set_linear(case_starts_[v] + case_v, b); } - template + template void get_linear(variable_type v, io_bias_type *biases) { assert(v >= 0 && v < this->num_variables()); - for (auto case_v = 0, num_cases_v = this->num_cases(v); - case_v < num_cases_v; case_v++) { + for (auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { biases[case_v] = bqm_.get_linear(case_starts_[v] + case_v); } } - template + template void set_linear(variable_type v, io_bias_type *biases) { assert(v >= 0 && v < this->num_variables()); - for (auto case_v = 0, num_cases_v = this->num_cases(v); - case_v < num_cases_v; case_v++) { + for (auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { bqm_.set_linear(case_starts_[v] + case_v, biases[case_v]); } } - std::pair get_quadratic_case(variable_type u, - variable_type case_u, - variable_type v, + std::pair get_quadratic_case(variable_type u, variable_type case_u, variable_type v, variable_type case_v) { assert(u >= 0 && u < this->num_variables()); assert(case_u >= 0 && case_u < this->num_cases(u)); @@ -183,8 +198,7 @@ class AdjVectorDQM { } // Check if boolean type is still okay - bool set_quadratic_case(variable_type u, variable_type case_u, - variable_type v, variable_type case_v, + bool set_quadratic_case(variable_type u, variable_type case_u, variable_type v, variable_type case_v, bias_type bias) { assert(u >= 0 && u < this->num_variables()); assert(case_u >= 0 && case_u < this->num_cases(u)); @@ -201,9 +215,8 @@ class AdjVectorDQM { } // Returns false if there is no interaction among the variables. 
- template - bool get_quadratic(variable_type u, variable_type v, - io_bias_type *quadratic_biases) { + template + bool get_quadratic(variable_type u, variable_type v, io_bias_type *quadratic_biases) { assert(u >= 0 && u < this->num_variables()); assert(v >= 0 && v < this->num_variables()); auto it = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); @@ -214,20 +227,17 @@ class AdjVectorDQM { auto num_cases_v = num_cases(v); #pragma omp parallel for for (auto case_u = 0; case_u < num_cases_u; case_u++) { - auto span = bqm_.neighborhood(case_starts_[u] + case_u, - case_starts_[v]); - while (span.first != span.second && - *(span.first) < case_starts_[v + 1]) { + auto span = bqm_.neighborhood(case_starts_[u] + case_u, case_starts_[v]); + while (span.first != span.second && *(span.first) < case_starts_[v + 1]) { case_v = *(span.first) - case_starts_[v]; - quadratic_biases[case_u * num_cases_v + case_v] = - *(span.first).second; + quadratic_biases[case_u * num_cases_v + case_v] = *(span.first).second; span.first++; } } return true; } - template + template bool set_quadratic(variable_type u, variable_type v, io_bias_type *biases) { assert(u >= 0 && u < this->num_variables()); assert(v >= 0 && v < this->num_variables()); @@ -258,8 +268,7 @@ class AdjVectorDQM { } template - void get_energies(io_variable_type *samples, int num_samples, - variable_type sample_length, io_bias_type *energies) { + void get_energies(io_variable_type *samples, int num_samples, variable_type sample_length, io_bias_type *energies) { assert(sample_length == this->num_variables()); auto num_variables = sample_length; #pragma omp parallel for @@ -289,13 +298,12 @@ class AdjVectorDQM { } } - private: + private: void connect_variables(variable_type u, variable_type v) { auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); if (low == adj_[u].end() || *low != v) { adj_[u].insert(low, v); - adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), - u); + adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), u); } } } From 3ceafa9ff0ac00026c31fb8793c5a71096c0ffa4 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Wed, 20 Jan 2021 13:27:26 -0800 Subject: [PATCH 21/36] NFC & type fix --- dimod/discrete/cppdqm.pxd | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/dimod/discrete/cppdqm.pxd b/dimod/discrete/cppdqm.pxd index 35d3fdc5a..62a50a8f0 100644 --- a/dimod/discrete/cppdqm.pxd +++ b/dimod/discrete/cppdqm.pxd @@ -17,14 +17,14 @@ # # ============================================================================= +from dimod.bqm.cppbqm cimport AdjVectorBQM as cppAdjVectorBQM +from dimod.bqm.common cimport VarIndex, Bias from libcpp cimport bool from libcpp.pair cimport pair from libcpp.vector cimport vector cimport numpy as np -from dimod.bqm.common cimport VarIndex, Bias -from dimod.bqm.cppbqm cimport AdjVectorBQM as cppAdjVectorBQM cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: @@ -33,7 +33,7 @@ cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: ctypedef B bias_type ctypedef size_t size_type - cdef cppAdjVectorBQM[variable_type, bias_type] bqm_ + cppAdjVectorBQM[variable_type, bias_type] bqm_ vector[variable_type] case_starts_ vector[vector[variable_type]] adj_ @@ -47,7 +47,7 @@ cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: # number of these so we'll add them as needed # AdjVectorDQM(variable_type* case_starts, size_type num_variables, bias_type* linear_biases, # size_type num_cases, variable_type* irow, 
variable_type* icol, - # bias_type* quadratic_biases, size_type num_interactions) + # bias_type* quadratic_biases, size_type num_interactions) AdjVectorDQM(np.uint32_t*, size_type, np.uint32_t*, size_type, np.uint32_t*, np.uint32_t*, np.uint32_t*, size_type) AdjVectorDQM(np.uint32_t*, size_type, np.uint64_t*, size_type, np.uint32_t*, np.uint32_t*, np.uint64_t*, size_type) AdjVectorDQM(np.uint32_t*, size_type, np.int32_t*, size_type, np.uint32_t*, np.uint32_t*, np.int32_t*, size_type) @@ -73,14 +73,13 @@ cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: AdjVectorDQM(np.int64_t*, size_type, np.float32_t*, size_type, np.int64_t*, np.int64_t*, np.float32_t*, size_type) AdjVectorDQM(np.int64_t*, size_type, np.float64_t*, size_type, np.int64_t*, np.int64_t*, np.float64_t*, size_type) - # methods bool is_self_loop_present() except + size_type num_variables() except + - size_type num_variable_interactions() except + - size_type num_cases() except + - size_type num_case_interactions() except + + size_type num_variable_interactions() except + + size_type num_cases() except + + size_type num_case_interactions() except + bias_type get_linear_case(variable_type, variable_type) except + void set_linear_case(variable_type, variable_type, bias_type) except + void get_linear[io_bias_type](variable_type, io_bias_type*) except + @@ -89,8 +88,8 @@ cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: bool set_quadratic_case(variable_type, variable_type, variable_type, variable_type, bias_type) except + bool get_quadratic[io_bias_type](variable_type, variable_type, io_bias_type*) except + bool set_quadratic[io_bias_type](variable_type, variable_type, io_bias_type*) except + - void get_energies[io_variable_type, io_bias_type](io_variable_type*, int, variable_type, io_bias_type*) except + - void extract_data[io_variable_type, io_bias_type](variable_type*, io_bias_type*, io_variable_type*, io_variable_type*, io_bias_type*) except + + void get_energies[io_variable_type, io_bias_type](io_variable_type*, int, variable_type, io_bias_type*) except + + void extract_data[io_variable_type, io_bias_type](io_variable_type*, io_bias_type*, io_variable_type*, io_variable_type*, io_bias_type*) except + # shapeable methods From 6fb1510ab4bf1ab1ed43620d84e4f4bd10ad2ac0 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Wed, 20 Jan 2021 13:41:25 -0800 Subject: [PATCH 22/36] Fix bug missing arg --- dimod/discrete/cppdqm.pxd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dimod/discrete/cppdqm.pxd b/dimod/discrete/cppdqm.pxd index 62a50a8f0..80875e141 100644 --- a/dimod/discrete/cppdqm.pxd +++ b/dimod/discrete/cppdqm.pxd @@ -78,7 +78,7 @@ cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: bool is_self_loop_present() except + size_type num_variables() except + size_type num_variable_interactions() except + - size_type num_cases() except + + size_type num_cases(variable_type) except + size_type num_case_interactions() except + bias_type get_linear_case(variable_type, variable_type) except + void set_linear_case(variable_type, variable_type, bias_type) except + From 946d5083900f125194d6442ece51af5a764e9d90 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Wed, 20 Jan 2021 17:21:17 -0800 Subject: [PATCH 23/36] NFC and add new function to test connection --- dimod/include/dimod/adjvectordqm.h | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 10cd3186d..81a3ecc05 100644 
--- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -118,7 +118,7 @@ class AdjVectorDQM { } } - bool is_self_loop_present() { + bool self_loop_present() { for (auto v = 0, num_variables = this->num_variables(); v < num_variables; v++) { for (auto ci = case_starts_[v], ci_end = case_starts_[v + 1]; ci < ci_end; ci++) { auto span = bqm_.neighborhood(ci, case_starts_[v]); @@ -130,6 +130,15 @@ class AdjVectorDQM { return false; } + bool connection_present(variable_type u, variable_type v) { + bool connected = true; + auto it = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); + if (it == adj_[u].end() || it != v) { + connected = false; + } + return connected; + } + size_type num_variables() { return adj_.size(); } size_type num_variables_interactions() { @@ -219,8 +228,7 @@ class AdjVectorDQM { bool get_quadratic(variable_type u, variable_type v, io_bias_type *quadratic_biases) { assert(u >= 0 && u < this->num_variables()); assert(v >= 0 && v < this->num_variables()); - auto it = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); - if (it == adj_[u].end() || *it != v) { + if (!connection_present(u, v)) { return false; } auto num_cases_u = num_cases(u); @@ -300,8 +308,7 @@ class AdjVectorDQM { private: void connect_variables(variable_type u, variable_type v) { - auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); - if (low == adj_[u].end() || *low != v) { + if (!connection_present(u, v)) { adj_[u].insert(low, v); adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), u); } From 622117a837a3a95b958fa16bb9f6213d6f943329 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Wed, 20 Jan 2021 17:26:17 -0800 Subject: [PATCH 24/36] Reflect changes in cpp file in pxd file --- dimod/discrete/cppdqm.pxd | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dimod/discrete/cppdqm.pxd b/dimod/discrete/cppdqm.pxd index 80875e141..001a34e23 100644 --- a/dimod/discrete/cppdqm.pxd +++ b/dimod/discrete/cppdqm.pxd @@ -75,7 +75,8 @@ cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: # methods - bool is_self_loop_present() except + + bool self_loop_present() except + + bool connection_present(variable_type, variable_type) except + size_type num_variables() except + size_type num_variable_interactions() except + size_type num_cases(variable_type) except + From 194d4e8ec5129db6f4f7a7f792569c4b894baf19 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Wed, 20 Jan 2021 18:09:42 -0800 Subject: [PATCH 25/36] Fixed most cython compilation errors --- dimod/discrete/cydiscrete_quadratic_model.pyx | 66 ++++++++++--------- 1 file changed, 36 insertions(+), 30 deletions(-) diff --git a/dimod/discrete/cydiscrete_quadratic_model.pyx b/dimod/discrete/cydiscrete_quadratic_model.pyx index ba381110c..6c16cb01b 100644 --- a/dimod/discrete/cydiscrete_quadratic_model.pyx +++ b/dimod/discrete/cydiscrete_quadratic_model.pyx @@ -22,6 +22,8 @@ from libcpp.unordered_set cimport unordered_set import numpy as np +from dimod.utilities import asintegerarrays, asnumericarrays + cdef class cyDiscreteQuadraticModel: @@ -31,7 +33,7 @@ cdef class cyDiscreteQuadraticModel: @property def adj(self): - return self.adj_ + return self.dqm_.adj_ cpdef Py_ssize_t add_variable(self, Py_ssize_t num_cases) except -1: """Add a discrete variable. 
@@ -47,7 +49,7 @@ cdef class cyDiscreteQuadraticModel: if num_cases <= 0: raise ValueError("num_cases must be a positive integer") - return self.dqm_.add_variable(num_cases) + return self.dqm_.add_variable(num_cases) def copy(self): cdef cyDiscreteQuadraticModel dqm = type(self)() @@ -60,7 +62,7 @@ cdef class cyDiscreteQuadraticModel: @cython.boundscheck(False) @cython.wraparound(False) cpdef Bias[:] energies(self, CaseIndex[:, :] samples): - + if samples.shape[1] != self.num_variables(): raise ValueError("Given sample(s) have incorrect number of variables") @@ -79,7 +81,7 @@ cdef class cyDiscreteQuadraticModel: if case_u >= self.num_cases(u): raise ValueError("invalid case") - self.dqm_.get_energies(&samples[0,0], num_samples, num_variables, &energies[0]) + self.dqm_.get_energies( & samples[0, 0], num_samples, num_variables, & energies[0]) return energies @classmethod @@ -127,9 +129,10 @@ cdef class cyDiscreteQuadraticModel: cdef cyDiscreteQuadraticModel cyDQM = cls() - cyDQM.dqm_ = cppAdjVectorDQM(case_starts.data, num_variables, linear_biases.data, num_cases, irow.data, icol.data, quadratic_biases.data, num_interactions) + cyDQM.dqm_ = cppAdjVectorDQM(case_starts.data, num_variables, linear_biases.data, + num_cases, irow.data, icol.data, quadratic_biases.data, num_interactions) - if cyDQM.dqm_.is_self_loop_present(): + if cyDQM.dqm_.self_loop_present(): raise ValueError("A variable has a self-loop") return cyDQM @@ -164,7 +167,8 @@ cdef class cyDiscreteQuadraticModel: cdef Py_ssize_t num_cases = self.num_cases(v) biases = np.empty(num_cases, dtype=self.dtype) - self.dqm_.get_linear(v, &biases[0]) + cdef Bias[:] biases_view = biases + self.dqm_.get_linear(v, & biases_view[0]) return biases @cython.boundscheck(False) @@ -182,13 +186,12 @@ cdef class cyDiscreteQuadraticModel: def get_quadratic(self, VarIndex u, VarIndex v, bint array=False): # check that the interaction does in fact exist - if u < 0 or u >= self.adj_.size(): + if u < 0 or u >= self.num_variables(): raise ValueError("unknown variable") - if v < 0 or v >= self.adj_.size(): + if v < 0 or v >= self.num_variables(): raise ValueError("unknown variable") - it = lower_bound(self.adj_[u].begin(), self.adj_[u].end(), v) - if it == self.adj_[u].end() or deref(it) != v: + if not self.dqm_.connection_present(u,v): raise ValueError("there is no interaction between given variables") cdef CaseIndex ci @@ -199,7 +202,8 @@ cdef class cyDiscreteQuadraticModel: # build a numpy array quadratic = np.empty((self.num_cases(u), self.num_cases(v)), dtype=self.dtype) - self.dqm_.get_quadratic(u, v, &quadratic[0,0]) + quadratic_view = quadratic + self.dqm_.get_quadratic(u, v, & quadratic_view[0, 0]) else: # store in a dict @@ -207,11 +211,12 @@ cdef class cyDiscreteQuadraticModel: for ci in range(self.dqm_.case_starts_[u], self.dqm_.case_starts_[u+1]): - span = self.dqm_.bqm_.neighborhood(ci, self.dqm_.case_starts_[v]) + span = self.dqm_.bqm_.neighborhood( + ci, self.dqm_.case_starts_[v]) while (span.first != span.second and deref(span.first).first < self.dqm_.case_starts_[v+1]): case_u = ci - self.dqm_.case_starts_[u] - case_v = deref(span.first).first - self.dqm_.case_starts_[v] + case_v = deref(span.first).first - self.case_starts_[v] quadratic[case_u, case_v] = deref(span.first).second inc(span.first) @@ -224,7 +229,7 @@ cdef class cyDiscreteQuadraticModel: @cython.wraparound(False) cpdef Bias get_quadratic_case(self, VarIndex u, CaseIndex case_u, - VarIndex v, CaseIndex case_v) except? -45.3: + VarIndex v, CaseIndex case_v) except? 
-45.3: if case_u < 0 or case_u >= self.num_cases(u): raise ValueError("case {} is invalid, variable only supports {} " @@ -234,7 +239,7 @@ cdef class cyDiscreteQuadraticModel: raise ValueError("case {} is invalid, variable only supports {} " "cases".format(case_v, self.num_cases(v))) - return self.dqm_.get_quadratic_case(u, case_u, v, case_v).first + return self.dqm_.get_quadratic_case(u, case_u, v, case_v).first @cython.boundscheck(False) @cython.wraparound(False) @@ -243,7 +248,7 @@ cdef class cyDiscreteQuadraticModel: the total number of cases in the DQM. """ if v < 0: - return self.bqm_.num_variables() + return self.dqm_.bqm_.num_variables() if v >= self.num_variables(): raise ValueError("unknown variable {}".format(v)) @@ -252,13 +257,13 @@ cdef class cyDiscreteQuadraticModel: cpdef Py_ssize_t num_case_interactions(self): """The total number of case interactions.""" - return self.dqm_.num_interactions() + return self.dqm_.num_case_interactions() @cython.boundscheck(False) @cython.wraparound(False) cpdef Py_ssize_t num_variable_interactions(self) except -1: """The total number of case interactions.""" - return self.dqm_.num_variable_interactions() + return self.dqm_.num_variable_interactions() cpdef Py_ssize_t num_variables(self): """The number of discrete variables in the DQM.""" @@ -274,12 +279,12 @@ cdef class cyDiscreteQuadraticModel: raise ValueError('Recieved {} bias(es) for a variable of degree {}' ''.format(biases.shape[0], self.num_cases(v))) - self.dqm_.set_linear(v, &biases[0]) + self.dqm_.set_linear(v, & biases[0]) @cython.boundscheck(False) @cython.wraparound(False) cpdef Py_ssize_t set_linear_case(self, VarIndex v, CaseIndex case, Bias b) except -1: - + # self.num_cases checks that the variable is valid if case < 0: @@ -293,9 +298,9 @@ cdef class cyDiscreteQuadraticModel: def set_quadratic(self, VarIndex u, VarIndex v, biases): # check that the interaction does in fact exist - if u < 0 or u >= self.adj_.size(): + if u < 0 or u >= self.num_variables(): raise ValueError("unknown variable") - if v < 0 or v >= self.adj_.size(): + if v < 0 or v >= self.num_variables(): raise ValueError("unknown variable") if u == v: raise ValueError("there cannot be a quadratic interaction between " @@ -322,9 +327,10 @@ cdef class cyDiscreteQuadraticModel: self.dqm_.set_quadratic_case(u, case_u, v, case_v, bias) else: - - biases_view = np.asarray(biases, dtype=self.dtype).reshape(num_cases_u, num_cases_v) - self.dqm_.set_quadratic(u, v, &biases_view[0,0]) + + biases_view = np.asarray(biases, dtype=self.dtype).reshape( + num_cases_u, num_cases_v) + self.dqm_.set_quadratic(u, v, & biases_view[0, 0]) cpdef Py_ssize_t set_quadratic_case(self, VarIndex u, CaseIndex case_u, @@ -346,20 +352,20 @@ cdef class cyDiscreteQuadraticModel: "two cases in the same variable") self.dqm_.set_quadratic_case(u, case_u, v, case_v, bias) - + @cython.boundscheck(False) @cython.wraparound(False) cdef void _into_numpy_vectors(self, Unsigned[:] starts, Bias[:] ldata, Unsigned[:] irow, Unsigned[:] icol, Bias[:] qdata): # we don't do array length checking so be careful! 
This can segfault # if the given arrays are incorrectly sized - self.dqm_.extract_data(&starts[0], &ldata[0], &irow[0], &icol[0], &qdata[0]) + self.dqm_.extract_data( & starts[0], & ldata[0], & irow[0], & icol[0], & qdata[0]) def to_numpy_vectors(self): - + cdef Py_ssize_t num_variables = self.num_variables() cdef Py_ssize_t num_cases = self.num_cases() - cdef Py_ssize_t num_interactions = self.bqm_.num_interactions() + cdef Py_ssize_t num_interactions = self.num_case_interactions() # use the minimum sizes of the various index types. We combine for # variables and cases and exclude int8 to keep the total number of From 7cd854a4e7d76b492bb56690a0696d3e21c509f9 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Thu, 21 Jan 2021 12:53:11 -0800 Subject: [PATCH 26/36] Fixed a bunch of errors --- dimod/discrete/cppdqm.pxd | 1 + dimod/discrete/cydiscrete_quadratic_model.pyx | 5 ++- dimod/include/dimod/adjvectordqm.h | 42 ++++++++++++------- 3 files changed, 30 insertions(+), 18 deletions(-) diff --git a/dimod/discrete/cppdqm.pxd b/dimod/discrete/cppdqm.pxd index 001a34e23..5cec34239 100644 --- a/dimod/discrete/cppdqm.pxd +++ b/dimod/discrete/cppdqm.pxd @@ -42,6 +42,7 @@ cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: # explicitly AdjVectorDQM() except + + AdjVectorDQM(const AdjVectorDQM &) except + # the actual signature is more general, but we already have a large # number of these so we'll add them as needed diff --git a/dimod/discrete/cydiscrete_quadratic_model.pyx b/dimod/discrete/cydiscrete_quadratic_model.pyx index 6c16cb01b..d0940536a 100644 --- a/dimod/discrete/cydiscrete_quadratic_model.pyx +++ b/dimod/discrete/cydiscrete_quadratic_model.pyx @@ -58,6 +58,7 @@ cdef class cyDiscreteQuadraticModel: dqm.dtype = self.dtype dqm.case_dtype = self.dtype + return dqm @cython.boundscheck(False) @cython.wraparound(False) @@ -129,8 +130,8 @@ cdef class cyDiscreteQuadraticModel: cdef cyDiscreteQuadraticModel cyDQM = cls() - cyDQM.dqm_ = cppAdjVectorDQM(case_starts.data, num_variables, linear_biases.data, - num_cases, irow.data, icol.data, quadratic_biases.data, num_interactions) + cyDQM.dqm_ = cppAdjVectorDQM[VarIndex, Bias](&case_starts[0], num_variables, &linear_biases[0], + num_cases, &irow[0], &icol[0], &quadratic_biases[0], num_interactions) if cyDQM.dqm_.self_loop_present(): raise ValueError("A variable has a self-loop") diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 81a3ecc05..b42d57bfb 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -37,7 +37,16 @@ class AdjVectorDQM { std::vector case_starts_; // len(adj_) + 1 std::vector> adj_; - AdjVectorBQM() { case_starts_.push_back(0); } + AdjVectorDQM() { case_starts_.push_back(0); } + + explicit AdjVectorDQM(const AdjVectorDQM& dqm) { + bqm_ = dqm.bqm_; + case_starts_.insert(case_starts_.begin(), dqm.case_starts_.begin(), dqm.case_starts_.end()); + adj_.resize(dqm.adj_.size()); + for(auto v = 0; v < dqm.num_variables(); v++) { + adj_[v].insert(adj_[v].begin(), dqm.adj_[v].begin(), dqm.adj_[v].end()); + } + } template AdjVectorDQM(io_variable_type *case_starts, size_type num_variables, io_bias_type *linear_biases, @@ -70,14 +79,14 @@ class AdjVectorDQM { std::vector> adjset; adjset.resize(num_variables); auto u = 0; - for (auto ci = 0, ci_end = bqm_.num_variables(); ci++) { + for (auto ci = 0, ci_end = bqm_.num_variables(); ci < ci_end; ci++) { while (ci >= case_starts_[u + 1]) { u++; } auto span = bqm_.neighborhood(ci); auto v = 0; while (span.first 
!= span.second) { - auto cj = *(span.first).first; + auto cj = (span.first)->first; while (cj >= case_starts_[v + 1]) { v++; } @@ -108,10 +117,10 @@ class AdjVectorDQM { for (auto ci = 0; ci < num_total_cases; ci++) { linear_biases[ci] = bqm_.get_linear(ci); auto span = bqm_.neighborhood(ci); - while ((span.first != span.second) && (*(span.first).first < ci)) { + while ((span.first != span.second) && ((span.first)->first < ci)) { irow[qi] = ci; - icol[qi] = *(span.first).first; - quadratic_biases[qi] = *(span.first).second; + icol[qi] = (span.first)->first; + quadratic_biases[qi] = (span.first)->second; span.first++; qi++; } @@ -122,7 +131,7 @@ class AdjVectorDQM { for (auto v = 0, num_variables = this->num_variables(); v < num_variables; v++) { for (auto ci = case_starts_[v], ci_end = case_starts_[v + 1]; ci < ci_end; ci++) { auto span = bqm_.neighborhood(ci, case_starts_[v]); - if ((span.first != span.second) && (*(span.first).first < case_starts_[v + 1])) { + if ((span.first != span.second) && ((span.first)->first < case_starts_[v + 1])) { return true; } } @@ -133,7 +142,7 @@ class AdjVectorDQM { bool connection_present(variable_type u, variable_type v) { bool connected = true; auto it = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); - if (it == adj_[u].end() || it != v) { + if (it == adj_[u].end() || *it != v) { connected = false; } return connected; @@ -141,7 +150,7 @@ class AdjVectorDQM { size_type num_variables() { return adj_.size(); } - size_type num_variables_interactions() { + size_type num_variable_interactions() { size_type num = 0; for (auto v = 0, vend = this->num_variables(); v < vend; v++) { num += adj_[v].size(); @@ -236,9 +245,9 @@ class AdjVectorDQM { #pragma omp parallel for for (auto case_u = 0; case_u < num_cases_u; case_u++) { auto span = bqm_.neighborhood(case_starts_[u] + case_u, case_starts_[v]); - while (span.first != span.second && *(span.first) < case_starts_[v + 1]) { - case_v = *(span.first) - case_starts_[v]; - quadratic_biases[case_u * num_cases_v + case_v] = *(span.first).second; + while (span.first != span.second && (span.first)->first < case_starts_[v + 1]) { + auto case_v = (span.first)->first - case_starts_[v]; + quadratic_biases[case_u * num_cases_v + case_v] = (span.first)->second; span.first++; } } @@ -258,9 +267,9 @@ class AdjVectorDQM { // parallel. 
bool inserted = false; for (auto case_u = 0; case_u < num_cases_u; case_u++) { - cu = case_starts_[u] + case_u; + auto cu = case_starts_[u] + case_u; for (auto case_v = 0; case_v < num_cases_v; case_v++) { - cv = case_starts_[v] + case_v; + auto cv = case_starts_[v] + case_v; auto bias = biases[cu * num_cases_v + case_v]; if (bias != (io_bias_type)0) { bqm_.set_quadratic(cu, cv, bias); @@ -308,12 +317,13 @@ class AdjVectorDQM { private: void connect_variables(variable_type u, variable_type v) { - if (!connection_present(u, v)) { + auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); + if (low == adj_[u].end() || *low != v) { adj_[u].insert(low, v); adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), u); } } -} +}; } // namespace dimod #endif // DIMOD_ADJVECTORDQM_H_ From 62bbd13c06ed69309b38d648fb9803bdb49e51dc Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Thu, 21 Jan 2021 13:05:50 -0800 Subject: [PATCH 27/36] Fixed bug --- dimod/include/dimod/.clang-format | 14 ++++++++++++++ dimod/include/dimod/adjvectordqm.h | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 dimod/include/dimod/.clang-format diff --git a/dimod/include/dimod/.clang-format b/dimod/include/dimod/.clang-format new file mode 100644 index 000000000..c0c35104f --- /dev/null +++ b/dimod/include/dimod/.clang-format @@ -0,0 +1,14 @@ +--- +Language: Cpp +BasedOnStyle: Google + +ColumnLimit: 120 +NamespaceIndentation: Inner + +# Scaled by a factor of 2 such that the base indent is 4 +AccessModifierOffset: -2 +ConstructorInitializerIndentWidth: 8 +ContinuationIndentWidth: 8 +IndentWidth: 4 +ObjCBlockIndentWidth: 4 +... diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index b42d57bfb..2e34f7e83 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -54,7 +54,7 @@ class AdjVectorDQM { size_type num_interactions) { // Set the BQM, linear biases will be added separately. if (num_interactions) { - bqm_ = AdjVectorBQM(irow, icol, num_interactions, true); + bqm_ = AdjVectorBQM(irow, icol, quadratic_biases, num_interactions, true); } // Accounting for the cases/variables at the end without interaction. 
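
The constructor being fixed above, like the accessors elsewhere in adjvectordqm.h, keeps converting between a (variable, case) pair and a flat case index in the underlying BQM via case_starts_ (cu = case_starts_[u] + case_u, and the reverse scan "while (ci >= case_starts_[u + 1]) u++;"). The following standalone C++ sketch shows only that bookkeeping; the helper names flat and owner are local to this illustration and are not part of dimod.

#include <cassert>
#include <cstddef>
#include <iostream>
#include <vector>

int main() {
    // Three discrete variables with 2, 3 and 4 cases respectively.
    std::vector<std::size_t> case_starts_ = {0, 2, 5, 9};

    // (variable, case) -> flat case index, as in cu = case_starts_[u] + case_u.
    auto flat = [&](std::size_t v, std::size_t case_v) {
        assert(case_v < case_starts_[v + 1] - case_starts_[v]);
        return case_starts_[v] + case_v;
    };

    // flat case index -> owning variable, as in the constructor's
    // "while (ci >= case_starts_[u + 1]) u++;" scan.
    auto owner = [&](std::size_t ci) {
        std::size_t u = 0;
        while (ci >= case_starts_[u + 1]) u++;
        return u;
    };

    std::cout << flat(1, 2) << "\n";  // prints 4: last case of variable 1
    std::cout << owner(7) << "\n";    // prints 2: flat case 7 belongs to variable 2
    return 0;
}
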
From 49e476c654dbc731cbd55f30aaa695c09b331d8f Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Thu, 21 Jan 2021 15:05:21 -0800 Subject: [PATCH 28/36] Fix auto related errors --- dimod/include/dimod/adjvectordqm.h | 34 ++++++++++++++++++------------ 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 2e34f7e83..4364ee956 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -39,14 +39,15 @@ class AdjVectorDQM { AdjVectorDQM() { case_starts_.push_back(0); } - explicit AdjVectorDQM(const AdjVectorDQM& dqm) { - bqm_ = dqm.bqm_; - case_starts_.insert(case_starts_.begin(), dqm.case_starts_.begin(), dqm.case_starts_.end()); - adj_.resize(dqm.adj_.size()); - for(auto v = 0; v < dqm.num_variables(); v++) { - adj_[v].insert(adj_[v].begin(), dqm.adj_[v].begin(), dqm.adj_[v].end()); - } - } + explicit AdjVectorDQM(const AdjVectorDQM &dqm) { + bqm_ = dqm.bqm_; + case_starts_.insert(case_starts_.begin(), dqm.case_starts_.begin(), dqm.case_starts_.end()); + adj_.resize(dqm.adj_.size()); + auto num_variables = dqm.num_variables(); + for (auto v = 0; v < num_variables; v++) { + adj_[v].insert(adj_[v].begin(), dqm.adj_[v].begin(), dqm.adj_[v].end()); + } + } template AdjVectorDQM(io_variable_type *case_starts, size_type num_variables, io_bias_type *linear_biases, @@ -79,7 +80,8 @@ class AdjVectorDQM { std::vector> adjset; adjset.resize(num_variables); auto u = 0; - for (auto ci = 0, ci_end = bqm_.num_variables(); ci < ci_end; ci++) { + auto num_total_cases = bqm_.num_variables(); + for (auto ci = 0; ci < num_total_cases; ci++) { while (ci >= case_starts_[u + 1]) { u++; } @@ -128,7 +130,8 @@ class AdjVectorDQM { } bool self_loop_present() { - for (auto v = 0, num_variables = this->num_variables(); v < num_variables; v++) { + auto num_variables = this->num_variables(); + for (auto v = 0; v < num_variables; v++) { for (auto ci = case_starts_[v], ci_end = case_starts_[v + 1]; ci < ci_end; ci++) { auto span = bqm_.neighborhood(ci, case_starts_[v]); if ((span.first != span.second) && ((span.first)->first < case_starts_[v + 1])) { @@ -152,7 +155,8 @@ class AdjVectorDQM { size_type num_variable_interactions() { size_type num = 0; - for (auto v = 0, vend = this->num_variables(); v < vend; v++) { + auto num_variables = this->num_variables(); + for (auto v = 0; v < num_variables; v++) { num += adj_[v].size(); } return (num / 2); @@ -191,7 +195,8 @@ class AdjVectorDQM { template void get_linear(variable_type v, io_bias_type *biases) { assert(v >= 0 && v < this->num_variables()); - for (auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { + auto num_cases_v = this->num_cases(v); + for (auto case_v = 0; case_v < num_cases_v; case_v++) { biases[case_v] = bqm_.get_linear(case_starts_[v] + case_v); } } @@ -199,7 +204,8 @@ class AdjVectorDQM { template void set_linear(variable_type v, io_bias_type *biases) { assert(v >= 0 && v < this->num_variables()); - for (auto case_v = 0, num_cases_v = this->num_cases(v); case_v < num_cases_v; case_v++) { + auto num_cases_v = this->num_cases(v); + for (auto case_v = 0; case_v < num_cases_v; case_v++) { bqm_.set_linear(case_starts_[v] + case_v, biases[case_v]); } } @@ -317,7 +323,7 @@ class AdjVectorDQM { private: void connect_variables(variable_type u, variable_type v) { - auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); + auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); if (low == adj_[u].end() || 
*low != v) { adj_[u].insert(low, v); adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), u); From 86f8056bd87f9871454b0ef05d6e4b36660f699a Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Thu, 21 Jan 2021 19:24:35 -0800 Subject: [PATCH 29/36] Remove auto as they are not safe for immediate assignments --- dimod/include/dimod/.clang-format | 14 ----- dimod/include/dimod/adjvectordqm.h | 98 +++++++++++++++--------------- 2 files changed, 49 insertions(+), 63 deletions(-) delete mode 100644 dimod/include/dimod/.clang-format diff --git a/dimod/include/dimod/.clang-format b/dimod/include/dimod/.clang-format deleted file mode 100644 index c0c35104f..000000000 --- a/dimod/include/dimod/.clang-format +++ /dev/null @@ -1,14 +0,0 @@ ---- -Language: Cpp -BasedOnStyle: Google - -ColumnLimit: 120 -NamespaceIndentation: Inner - -# Scaled by a factor of 2 such that the base indent is 4 -AccessModifierOffset: -2 -ConstructorInitializerIndentWidth: 8 -ContinuationIndentWidth: 8 -IndentWidth: 4 -ObjCBlockIndentWidth: 4 -... diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 4364ee956..1dc3ae913 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -43,8 +43,8 @@ class AdjVectorDQM { bqm_ = dqm.bqm_; case_starts_.insert(case_starts_.begin(), dqm.case_starts_.begin(), dqm.case_starts_.end()); adj_.resize(dqm.adj_.size()); - auto num_variables = dqm.num_variables(); - for (auto v = 0; v < num_variables; v++) { + variable_type num_variables = dqm.num_variables(); + for (variable_type v = 0; v < num_variables; v++) { adj_[v].insert(adj_[v].begin(), dqm.adj_[v].begin(), dqm.adj_[v].end()); } } @@ -65,13 +65,13 @@ class AdjVectorDQM { assert(bqm_.num_variables() == num_cases); // Add the linear biases. - for (auto ci = 0; ci < num_cases; ci++) { + for (variable_type ci = 0; ci < num_cases; ci++) { bqm_.set_linear(ci, linear_biases[ci]); } // Set the case starts. case_starts_.resize(num_variables + 1); - for (auto v = 0; v < num_variables; v++) { + for (variable_type v = 0; v < num_variables; v++) { case_starts_[v] = case_starts[v]; } case_starts[num_variables] = num_cases; @@ -79,16 +79,16 @@ class AdjVectorDQM { // Fill the adjacency list for variables. 
std::vector> adjset; adjset.resize(num_variables); - auto u = 0; - auto num_total_cases = bqm_.num_variables(); - for (auto ci = 0; ci < num_total_cases; ci++) { + variable_type u = 0; + variable_type num_total_cases = bqm_.num_variables(); + for (variable_type ci = 0; ci < num_total_cases; ci++) { while (ci >= case_starts_[u + 1]) { u++; } auto span = bqm_.neighborhood(ci); - auto v = 0; + variable_type v = 0; while (span.first != span.second) { - auto cj = (span.first)->first; + variable_type cj = (span.first)->first; while (cj >= case_starts_[v + 1]) { v++; } @@ -99,7 +99,7 @@ class AdjVectorDQM { adj_.resize(num_variables); #pragma omp parallel for - for (auto v = 0; v < num_variables; v++) { + for (variable_type v = 0; v < num_variables; v++) { adj_[v].insert(adj_[v].begin(), adjset[v].begin(), adjset[v].end()); std::sort(adj_[v].begin(), adj_[v].end()); } @@ -108,15 +108,15 @@ class AdjVectorDQM { template void extract_data(io_variable_type *case_starts, io_bias_type *linear_biases, io_variable_type *irow, io_variable_type *icol, io_bias_type *quadratic_biases) { - auto num_variables = this->num_variables(); - auto num_total_cases = bqm_.num_variables(); + variable_type num_variables = this->num_variables(); + variable_type num_total_cases = bqm_.num_variables(); - for (auto v = 0; v < num_variables; v++) { + for (variable_type v = 0; v < num_variables; v++) { case_starts[v] = case_starts_[v]; } size_type qi = 0; - for (auto ci = 0; ci < num_total_cases; ci++) { + for (variable_type ci = 0; ci < num_total_cases; ci++) { linear_biases[ci] = bqm_.get_linear(ci); auto span = bqm_.neighborhood(ci); while ((span.first != span.second) && ((span.first)->first < ci)) { @@ -130,9 +130,9 @@ class AdjVectorDQM { } bool self_loop_present() { - auto num_variables = this->num_variables(); - for (auto v = 0; v < num_variables; v++) { - for (auto ci = case_starts_[v], ci_end = case_starts_[v + 1]; ci < ci_end; ci++) { + variable_type num_variables = this->num_variables(); + for (variable_type v = 0; v < num_variables; v++) { + for (variable_type ci = case_starts_[v], ci_end = case_starts_[v + 1]; ci < ci_end; ci++) { auto span = bqm_.neighborhood(ci, case_starts_[v]); if ((span.first != span.second) && ((span.first)->first < case_starts_[v + 1])) { return true; @@ -155,8 +155,8 @@ class AdjVectorDQM { size_type num_variable_interactions() { size_type num = 0; - auto num_variables = this->num_variables(); - for (auto v = 0; v < num_variables; v++) { + variable_type num_variables = this->num_variables(); + for (variable_type v = 0; v < num_variables; v++) { num += adj_[v].size(); } return (num / 2); @@ -171,9 +171,9 @@ class AdjVectorDQM { variable_type add_variable(variable_type num_cases) { assert(num_cases > 0); - auto v = adj_.size(); + variable_type v = adj_.size(); adj_.resize(v + 1); - for (auto n = 0; n < num_cases; n++) { + for (variable_type n = 0; n < num_cases; n++) { bqm_.add_variable(); } case_starts_.push_back(bqm_.num_variables()); @@ -195,8 +195,8 @@ class AdjVectorDQM { template void get_linear(variable_type v, io_bias_type *biases) { assert(v >= 0 && v < this->num_variables()); - auto num_cases_v = this->num_cases(v); - for (auto case_v = 0; case_v < num_cases_v; case_v++) { + variable_type num_cases_v = this->num_cases(v); + for (variable_type case_v = 0; case_v < num_cases_v; case_v++) { biases[case_v] = bqm_.get_linear(case_starts_[v] + case_v); } } @@ -204,8 +204,8 @@ class AdjVectorDQM { template void set_linear(variable_type v, io_bias_type *biases) { assert(v >= 0 && v < 
this->num_variables()); - auto num_cases_v = this->num_cases(v); - for (auto case_v = 0; case_v < num_cases_v; case_v++) { + variable_type num_cases_v = this->num_cases(v); + for (variable_type case_v = 0; case_v < num_cases_v; case_v++) { bqm_.set_linear(case_starts_[v] + case_v, biases[case_v]); } } @@ -216,8 +216,8 @@ class AdjVectorDQM { assert(case_u >= 0 && case_u < this->num_cases(u)); assert(v >= 0 && v < this->num_variables()); assert(case_v >= 0 && case_v < this->num_cases(v)); - auto cu = case_starts_[u] + case_u; - auto cv = case_starts_[v] + case_v; + variable_type cu = case_starts_[u] + case_u; + variable_type cv = case_starts_[v] + case_v; return bqm_.get_quadratic(cu, cv); } @@ -231,8 +231,8 @@ class AdjVectorDQM { if (u == v) { return false; } - auto cu = case_starts_[u] + case_u; - auto cv = case_starts_[v] + case_v; + variable_type cu = case_starts_[u] + case_u; + variable_type cv = case_starts_[v] + case_v; bqm_.set_quadratic(cu, cv, bias); connect_variables(u, v); return true; @@ -246,13 +246,13 @@ class AdjVectorDQM { if (!connection_present(u, v)) { return false; } - auto num_cases_u = num_cases(u); - auto num_cases_v = num_cases(v); + variable_type num_cases_u = num_cases(u); + variable_type num_cases_v = num_cases(v); #pragma omp parallel for - for (auto case_u = 0; case_u < num_cases_u; case_u++) { + for (variable_type case_u = 0; case_u < num_cases_u; case_u++) { auto span = bqm_.neighborhood(case_starts_[u] + case_u, case_starts_[v]); while (span.first != span.second && (span.first)->first < case_starts_[v + 1]) { - auto case_v = (span.first)->first - case_starts_[v]; + variable_type case_v = (span.first)->first - case_starts_[v]; quadratic_biases[case_u * num_cases_v + case_v] = (span.first)->second; span.first++; } @@ -267,17 +267,17 @@ class AdjVectorDQM { if (u == v) { return false; } - auto num_cases_u = num_cases(u); - auto num_cases_v = num_cases(v); + variable_type num_cases_u = num_cases(u); + variable_type num_cases_v = num_cases(v); // This cannot be parallelized since the vectors cannot be reshaped in // parallel. 
bool inserted = false; - for (auto case_u = 0; case_u < num_cases_u; case_u++) { - auto cu = case_starts_[u] + case_u; - for (auto case_v = 0; case_v < num_cases_v; case_v++) { - auto cv = case_starts_[v] + case_v; - auto bias = biases[cu * num_cases_v + case_v]; - if (bias != (io_bias_type)0) { + for (variable_type case_u = 0; case_u < num_cases_u; case_u++) { + variable_type cu = case_starts_[u] + case_u; + for (variable_type case_v = 0; case_v < num_cases_v; case_v++) { + variable_type cv = case_starts_[v] + case_v; + bias_type bias = biases[cu * num_cases_v + case_v]; + if (bias != (bias_type) 0) { bqm_.set_quadratic(cu, cv, bias); inserted = true; } @@ -293,24 +293,24 @@ class AdjVectorDQM { template void get_energies(io_variable_type *samples, int num_samples, variable_type sample_length, io_bias_type *energies) { assert(sample_length == this->num_variables()); - auto num_variables = sample_length; + variable_type num_variables = sample_length; #pragma omp parallel for for (auto si = 0; si < num_samples; si++) { variable_type *current_sample = samples + (si * num_variables); double current_sample_energy = 0; - for (auto u = 0; u < num_variables; u++) { - auto case_u = current_sample[u]; + for (variable_type u = 0; u < num_variables; u++) { + variable_type case_u = current_sample[u]; assert(case_u < num_cases(u)); - auto cu = case_starts_[u] + case_u; + variable_type cu = case_starts_[u] + case_u; current_sample_energy += bqm_.get_linear(cu); - for (auto vi = 0; vi < adj_[u].size(); vi++) { - auto v = adj_[u][vi]; + for (variable_type vi = 0; vi < adj_[u].size(); vi++) { + variable_type v = adj_[u][vi]; // We only care about lower triangle. if (v > u) { break; } - auto case_v = current_sample[v]; - auto cv = case_starts_[v] + case_v; + variable_type case_v = current_sample[v]; + variable_type cv = case_starts_[v] + case_v; auto out = bqm_.get_quadratic(cu, cv); if (out.second) { current_sample_energy += out.first; From 6ad650dac0de0afd030d6c7ece7401e1f58148c7 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Thu, 21 Jan 2021 20:58:43 -0800 Subject: [PATCH 30/36] Fix bugs --- dimod/include/dimod/adjvectordqm.h | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 1dc3ae913..dc16125fc 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -248,6 +248,7 @@ class AdjVectorDQM { } variable_type num_cases_u = num_cases(u); variable_type num_cases_v = num_cases(v); + memset(quadratic_biases, 0, (size_t)num_cases_u * (size_t)num_cases_v * sizeof(io_bias_type)); #pragma omp parallel for for (variable_type case_u = 0; case_u < num_cases_u; case_u++) { auto span = bqm_.neighborhood(case_starts_[u] + case_u, case_starts_[v]); @@ -277,7 +278,7 @@ class AdjVectorDQM { for (variable_type case_v = 0; case_v < num_cases_v; case_v++) { variable_type cv = case_starts_[v] + case_v; bias_type bias = biases[cu * num_cases_v + case_v]; - if (bias != (bias_type) 0) { + if (bias != (bias_type)0) { bqm_.set_quadratic(cu, cv, bias); inserted = true; } @@ -286,8 +287,10 @@ class AdjVectorDQM { if (inserted) { connect_variables(u, v); + return true; + } else { + return false; } - return true; } template From 9372616c098acba3b1f4b419456e9cc646f7d6a8 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Fri, 22 Jan 2021 19:26:59 -0800 Subject: [PATCH 31/36] Fix bugs --- dimod/discrete/cydiscrete_quadratic_model.pyx | 2 +- dimod/include/dimod/adjvectordqm.h | 2 +- 2 files changed, 2 
insertions(+), 2 deletions(-) diff --git a/dimod/discrete/cydiscrete_quadratic_model.pyx b/dimod/discrete/cydiscrete_quadratic_model.pyx index d0940536a..6c6770c76 100644 --- a/dimod/discrete/cydiscrete_quadratic_model.pyx +++ b/dimod/discrete/cydiscrete_quadratic_model.pyx @@ -217,7 +217,7 @@ cdef class cyDiscreteQuadraticModel: while (span.first != span.second and deref(span.first).first < self.dqm_.case_starts_[v+1]): case_u = ci - self.dqm_.case_starts_[u] - case_v = deref(span.first).first - self.case_starts_[v] + case_v = deref(span.first).first - self.dqm_.case_starts_[v] quadratic[case_u, case_v] = deref(span.first).second inc(span.first) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index dc16125fc..0a49a7ce8 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -74,7 +74,7 @@ class AdjVectorDQM { for (variable_type v = 0; v < num_variables; v++) { case_starts_[v] = case_starts[v]; } - case_starts[num_variables] = num_cases; + case_starts_[num_variables] = num_cases; // Fill the adjacency list for variables. std::vector> adjset; From 36ee4cf8932c65efca3fbebf213a799f424e82a9 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Sat, 23 Jan 2021 16:27:38 -0800 Subject: [PATCH 32/36] Fix bug --- dimod/include/dimod/adjvectordqm.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 0a49a7ce8..cb02ba483 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -277,7 +277,7 @@ class AdjVectorDQM { variable_type cu = case_starts_[u] + case_u; for (variable_type case_v = 0; case_v < num_cases_v; case_v++) { variable_type cv = case_starts_[v] + case_v; - bias_type bias = biases[cu * num_cases_v + case_v]; + bias_type bias = biases[case_u * num_cases_v + case_v]; if (bias != (bias_type)0) { bqm_.set_quadratic(cu, cv, bias); inserted = true; From 34769c1313e331e51fb03896efddee9acdcf5115 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Sat, 23 Jan 2021 16:49:29 -0800 Subject: [PATCH 33/36] Fix bugs --- dimod/discrete/cppdqm.pxd | 1 + dimod/discrete/cydiscrete_quadratic_model.pyx | 2 + dimod/include/dimod/adjvectordqm.h | 72 ++++++++++++------- 3 files changed, 48 insertions(+), 27 deletions(-) diff --git a/dimod/discrete/cppdqm.pxd b/dimod/discrete/cppdqm.pxd index 5cec34239..35805e4c8 100644 --- a/dimod/discrete/cppdqm.pxd +++ b/dimod/discrete/cppdqm.pxd @@ -78,6 +78,7 @@ cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: bool self_loop_present() except + bool connection_present(variable_type, variable_type) except + + void connect_variables(variable_type, variable_type) except + size_type num_variables() except + size_type num_variable_interactions() except + size_type num_cases(variable_type) except + diff --git a/dimod/discrete/cydiscrete_quadratic_model.pyx b/dimod/discrete/cydiscrete_quadratic_model.pyx index 6c6770c76..096c802d8 100644 --- a/dimod/discrete/cydiscrete_quadratic_model.pyx +++ b/dimod/discrete/cydiscrete_quadratic_model.pyx @@ -327,6 +327,8 @@ cdef class cyDiscreteQuadraticModel: "cases".format(case_v, self.num_cases(v))) self.dqm_.set_quadratic_case(u, case_u, v, case_v, bias) + + self.dqm_.connect_variables(u,v) else: biases_view = np.asarray(biases, dtype=self.dtype).reshape( diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index cb02ba483..2e7d043bf 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ 
b/dimod/include/dimod/adjvectordqm.h @@ -28,7 +28,7 @@ namespace dimod { template class AdjVectorDQM { - public: + public: using bias_type = B; using variable_type = V; using size_type = std::size_t; @@ -41,21 +41,25 @@ class AdjVectorDQM { explicit AdjVectorDQM(const AdjVectorDQM &dqm) { bqm_ = dqm.bqm_; - case_starts_.insert(case_starts_.begin(), dqm.case_starts_.begin(), dqm.case_starts_.end()); + case_starts_.insert(case_starts_.begin(), dqm.case_starts_.begin(), + dqm.case_starts_.end()); adj_.resize(dqm.adj_.size()); variable_type num_variables = dqm.num_variables(); for (variable_type v = 0; v < num_variables; v++) { - adj_[v].insert(adj_[v].begin(), dqm.adj_[v].begin(), dqm.adj_[v].end()); + adj_[v].insert(adj_[v].begin(), dqm.adj_[v].begin(), + dqm.adj_[v].end()); } } template - AdjVectorDQM(io_variable_type *case_starts, size_type num_variables, io_bias_type *linear_biases, - size_type num_cases, io_variable_type *irow, io_variable_type *icol, io_bias_type *quadratic_biases, - size_type num_interactions) { + AdjVectorDQM(io_variable_type *case_starts, size_type num_variables, + io_bias_type *linear_biases, size_type num_cases, + io_variable_type *irow, io_variable_type *icol, + io_bias_type *quadratic_biases, size_type num_interactions) { // Set the BQM, linear biases will be added separately. if (num_interactions) { - bqm_ = AdjVectorBQM(irow, icol, quadratic_biases, num_interactions, true); + bqm_ = AdjVectorBQM( + irow, icol, quadratic_biases, num_interactions, true); } // Accounting for the cases/variables at the end without interaction. @@ -106,7 +110,8 @@ class AdjVectorDQM { } template - void extract_data(io_variable_type *case_starts, io_bias_type *linear_biases, io_variable_type *irow, + void extract_data(io_variable_type *case_starts, + io_bias_type *linear_biases, io_variable_type *irow, io_variable_type *icol, io_bias_type *quadratic_biases) { variable_type num_variables = this->num_variables(); variable_type num_total_cases = bqm_.num_variables(); @@ -132,9 +137,12 @@ class AdjVectorDQM { bool self_loop_present() { variable_type num_variables = this->num_variables(); for (variable_type v = 0; v < num_variables; v++) { - for (variable_type ci = case_starts_[v], ci_end = case_starts_[v + 1]; ci < ci_end; ci++) { + for (variable_type ci = case_starts_[v], + ci_end = case_starts_[v + 1]; + ci < ci_end; ci++) { auto span = bqm_.neighborhood(ci, case_starts_[v]); - if ((span.first != span.second) && ((span.first)->first < case_starts_[v + 1])) { + if ((span.first != span.second) && + ((span.first)->first < case_starts_[v + 1])) { return true; } } @@ -151,6 +159,15 @@ class AdjVectorDQM { return connected; } + void connect_variables(variable_type u, variable_type v) { + auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); + if (low == adj_[u].end() || *low != v) { + adj_[u].insert(low, v); + adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), + u); + } + } + size_type num_variables() { return adj_.size(); } size_type num_variable_interactions() { @@ -210,7 +227,9 @@ class AdjVectorDQM { } } - std::pair get_quadratic_case(variable_type u, variable_type case_u, variable_type v, + std::pair get_quadratic_case(variable_type u, + variable_type case_u, + variable_type v, variable_type case_v) { assert(u >= 0 && u < this->num_variables()); assert(case_u >= 0 && case_u < this->num_cases(u)); @@ -222,7 +241,8 @@ class AdjVectorDQM { } // Check if boolean type is still okay - bool set_quadratic_case(variable_type u, variable_type case_u, variable_type v, 
variable_type case_v, + bool set_quadratic_case(variable_type u, variable_type case_u, + variable_type v, variable_type case_v, bias_type bias) { assert(u >= 0 && u < this->num_variables()); assert(case_u >= 0 && case_u < this->num_cases(u)); @@ -240,7 +260,8 @@ class AdjVectorDQM { // Returns false if there is no interaction among the variables. template - bool get_quadratic(variable_type u, variable_type v, io_bias_type *quadratic_biases) { + bool get_quadratic(variable_type u, variable_type v, + io_bias_type *quadratic_biases) { assert(u >= 0 && u < this->num_variables()); assert(v >= 0 && v < this->num_variables()); if (!connection_present(u, v)) { @@ -248,13 +269,18 @@ class AdjVectorDQM { } variable_type num_cases_u = num_cases(u); variable_type num_cases_v = num_cases(v); - memset(quadratic_biases, 0, (size_t)num_cases_u * (size_t)num_cases_v * sizeof(io_bias_type)); + memset( + quadratic_biases, 0, + (size_t)num_cases_u * (size_t)num_cases_v * sizeof(io_bias_type)); #pragma omp parallel for for (variable_type case_u = 0; case_u < num_cases_u; case_u++) { - auto span = bqm_.neighborhood(case_starts_[u] + case_u, case_starts_[v]); - while (span.first != span.second && (span.first)->first < case_starts_[v + 1]) { + auto span = + bqm_.neighborhood(case_starts_[u] + case_u, case_starts_[v]); + while (span.first != span.second && + (span.first)->first < case_starts_[v + 1]) { variable_type case_v = (span.first)->first - case_starts_[v]; - quadratic_biases[case_u * num_cases_v + case_v] = (span.first)->second; + quadratic_biases[case_u * num_cases_v + case_v] = + (span.first)->second; span.first++; } } @@ -294,7 +320,8 @@ class AdjVectorDQM { } template - void get_energies(io_variable_type *samples, int num_samples, variable_type sample_length, io_bias_type *energies) { + void get_energies(io_variable_type *samples, int num_samples, + variable_type sample_length, io_bias_type *energies) { assert(sample_length == this->num_variables()); variable_type num_variables = sample_length; #pragma omp parallel for @@ -323,15 +350,6 @@ class AdjVectorDQM { energies[si] = current_sample_energy; } } - - private: - void connect_variables(variable_type u, variable_type v) { - auto low = std::lower_bound(adj_[u].begin(), adj_[u].end(), v); - if (low == adj_[u].end() || *low != v) { - adj_[u].insert(low, v); - adj_[v].insert(std::lower_bound(adj_[v].begin(), adj_[v].end(), u), u); - } - } }; } // namespace dimod From 5e324b32bb44bc25ee0f748fc943e0f5381f1787 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Mon, 25 Jan 2021 13:14:01 -0800 Subject: [PATCH 34/36] Addressed some review comments --- dimod/discrete/cppdqm.pxd | 4 +- dimod/discrete/cydiscrete_quadratic_model.pyx | 6 +- dimod/include/dimod/adjvectordqm.h | 69 +++++++++---------- 3 files changed, 37 insertions(+), 42 deletions(-) diff --git a/dimod/discrete/cppdqm.pxd b/dimod/discrete/cppdqm.pxd index 35805e4c8..e26ef76b5 100644 --- a/dimod/discrete/cppdqm.pxd +++ b/dimod/discrete/cppdqm.pxd @@ -91,8 +91,8 @@ cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: bool set_quadratic_case(variable_type, variable_type, variable_type, variable_type, bias_type) except + bool get_quadratic[io_bias_type](variable_type, variable_type, io_bias_type*) except + bool set_quadratic[io_bias_type](variable_type, variable_type, io_bias_type*) except + - void get_energies[io_variable_type, io_bias_type](io_variable_type*, int, variable_type, io_bias_type*) except + - void extract_data[io_variable_type, io_bias_type](io_variable_type*, io_bias_type*, 
io_variable_type*, io_variable_type*, io_bias_type*) except + + double get_energy[io_variable_type](io_variable_type*) except + + void to_coo[io_variable_type, io_bias_type](io_variable_type*, io_bias_type*, io_variable_type*, io_variable_type*, io_bias_type*) except + # shapeable methods diff --git a/dimod/discrete/cydiscrete_quadratic_model.pyx b/dimod/discrete/cydiscrete_quadratic_model.pyx index 096c802d8..3da8b5dd2 100644 --- a/dimod/discrete/cydiscrete_quadratic_model.pyx +++ b/dimod/discrete/cydiscrete_quadratic_model.pyx @@ -82,7 +82,9 @@ cdef class cyDiscreteQuadraticModel: if case_u >= self.num_cases(u): raise ValueError("invalid case") - self.dqm_.get_energies( & samples[0, 0], num_samples, num_variables, & energies[0]) + for si in range(num_samples): + energies[si] = self.dqm_.get_energy(& samples[si, 0]) + return energies @classmethod @@ -362,7 +364,7 @@ cdef class cyDiscreteQuadraticModel: Unsigned[:] irow, Unsigned[:] icol, Bias[:] qdata): # we don't do array length checking so be careful! This can segfault # if the given arrays are incorrectly sized - self.dqm_.extract_data( & starts[0], & ldata[0], & irow[0], & icol[0], & qdata[0]) + self.dqm_.to_coo( & starts[0], & ldata[0], & irow[0], & icol[0], & qdata[0]) def to_numpy_vectors(self): diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 2e7d043bf..1017b359e 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -15,7 +15,6 @@ #ifndef DIMOD_ADJVECTORDQM_H_ #define DIMOD_ADJVECTORDQM_H_ -#include #include #include #include @@ -59,7 +58,7 @@ class AdjVectorDQM { // Set the BQM, linear biases will be added separately. if (num_interactions) { bqm_ = AdjVectorBQM( - irow, icol, quadratic_biases, num_interactions, true); + irow, icol, quadratic_biases, num_interactions, true); } // Accounting for the cases/variables at the end without interaction. 
@@ -110,9 +109,9 @@ class AdjVectorDQM { } template - void extract_data(io_variable_type *case_starts, - io_bias_type *linear_biases, io_variable_type *irow, - io_variable_type *icol, io_bias_type *quadratic_biases) { + void to_coo(io_variable_type *case_starts, io_bias_type *linear_biases, + io_variable_type *irow, io_variable_type *icol, + io_bias_type *quadratic_biases) { variable_type num_variables = this->num_variables(); variable_type num_total_cases = bqm_.num_variables(); @@ -269,18 +268,18 @@ class AdjVectorDQM { } variable_type num_cases_u = num_cases(u); variable_type num_cases_v = num_cases(v); - memset( - quadratic_biases, 0, - (size_t)num_cases_u * (size_t)num_cases_v * sizeof(io_bias_type)); + memset(quadratic_biases, 0, + (size_t)num_cases_u * (size_t)num_cases_v * + sizeof(io_bias_type)); #pragma omp parallel for for (variable_type case_u = 0; case_u < num_cases_u; case_u++) { - auto span = - bqm_.neighborhood(case_starts_[u] + case_u, case_starts_[v]); + auto span = bqm_.neighborhood(case_starts_[u] + case_u, + case_starts_[v]); while (span.first != span.second && (span.first)->first < case_starts_[v + 1]) { variable_type case_v = (span.first)->first - case_starts_[v]; quadratic_biases[case_u * num_cases_v + case_v] = - (span.first)->second; + (span.first)->second; span.first++; } } @@ -319,36 +318,30 @@ class AdjVectorDQM { } } - template - void get_energies(io_variable_type *samples, int num_samples, - variable_type sample_length, io_bias_type *energies) { - assert(sample_length == this->num_variables()); - variable_type num_variables = sample_length; -#pragma omp parallel for - for (auto si = 0; si < num_samples; si++) { - variable_type *current_sample = samples + (si * num_variables); - double current_sample_energy = 0; - for (variable_type u = 0; u < num_variables; u++) { - variable_type case_u = current_sample[u]; - assert(case_u < num_cases(u)); - variable_type cu = case_starts_[u] + case_u; - current_sample_energy += bqm_.get_linear(cu); - for (variable_type vi = 0; vi < adj_[u].size(); vi++) { - variable_type v = adj_[u][vi]; - // We only care about lower triangle. - if (v > u) { - break; - } - variable_type case_v = current_sample[v]; - variable_type cv = case_starts_[v] + case_v; - auto out = bqm_.get_quadratic(cu, cv); - if (out.second) { - current_sample_energy += out.first; - } + template + double get_energy(io_variable_type *sample) { + variable_type num_variables = this->num_variables(); + double sample_energy = 0; + for (variable_type u = 0; u < num_variables; u++) { + variable_type case_u = sample[u]; + assert(case_u < num_cases(u)); + variable_type cu = case_starts_[u] + case_u; + sample_energy += bqm_.get_linear(cu); + for (variable_type vi = 0; vi < adj_[u].size(); vi++) { + variable_type v = adj_[u][vi]; + // We only care about lower triangle. 
+ if (v > u) { + break; + } + variable_type case_v = sample[v]; + variable_type cv = case_starts_[v] + case_v; + auto out = bqm_.get_quadratic(cu, cv); + if (out.second) { + sample_energy += out.first; } } - energies[si] = current_sample_energy; } + return sample_energy; } }; } // namespace dimod From 2e5e4c8082aeb4c4114edbe1b4c0519812c5ca07 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Mon, 25 Jan 2021 13:44:36 -0800 Subject: [PATCH 35/36] Switched from get/set linear case to linear_case --- dimod/discrete/cppdqm.pxd | 13 +++++++------ dimod/discrete/cydiscrete_quadratic_model.pyx | 5 +++-- dimod/include/dimod/adjvectordqm.h | 14 ++++---------- 3 files changed, 14 insertions(+), 18 deletions(-) diff --git a/dimod/discrete/cppdqm.pxd b/dimod/discrete/cppdqm.pxd index e26ef76b5..4a21f715a 100644 --- a/dimod/discrete/cppdqm.pxd +++ b/dimod/discrete/cppdqm.pxd @@ -42,13 +42,15 @@ cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: # explicitly AdjVectorDQM() except + - AdjVectorDQM(const AdjVectorDQM &) except + + AdjVectorDQM(const AdjVectorDQM & ) except + # the actual signature is more general, but we already have a large # number of these so we'll add them as needed - # AdjVectorDQM(variable_type* case_starts, size_type num_variables, bias_type* linear_biases, - # size_type num_cases, variable_type* irow, variable_type* icol, - # bias_type* quadratic_biases, size_type num_interactions) + # template + # AdjVectorDQM(io_variable_type *case_starts, size_type num_variables, + # io_bias_type *linear_biases, size_type num_cases, + # io_variable_type *irow, io_variable_type *icol, + # io_bias_type *quadratic_biases, size_type num_interactions) AdjVectorDQM(np.uint32_t*, size_type, np.uint32_t*, size_type, np.uint32_t*, np.uint32_t*, np.uint32_t*, size_type) AdjVectorDQM(np.uint32_t*, size_type, np.uint64_t*, size_type, np.uint32_t*, np.uint32_t*, np.uint64_t*, size_type) AdjVectorDQM(np.uint32_t*, size_type, np.int32_t*, size_type, np.uint32_t*, np.uint32_t*, np.int32_t*, size_type) @@ -83,8 +85,7 @@ cdef extern from "dimod/adjvectordqm.h" namespace "dimod" nogil: size_type num_variable_interactions() except + size_type num_cases(variable_type) except + size_type num_case_interactions() except + - bias_type get_linear_case(variable_type, variable_type) except + - void set_linear_case(variable_type, variable_type, bias_type) except + + bias_type & linear_case(variable_type, variable_type) except + void get_linear[io_bias_type](variable_type, io_bias_type*) except + void set_linear[io_bias_type](variable_type, io_bias_type*) except + pair[bias_type, bool] get_quadratic_case(variable_type, variable_type, variable_type, variable_type) except + diff --git a/dimod/discrete/cydiscrete_quadratic_model.pyx b/dimod/discrete/cydiscrete_quadratic_model.pyx index 3da8b5dd2..07840b63a 100644 --- a/dimod/discrete/cydiscrete_quadratic_model.pyx +++ b/dimod/discrete/cydiscrete_quadratic_model.pyx @@ -184,7 +184,7 @@ cdef class cyDiscreteQuadraticModel: raise ValueError("case {} is invalid, variable only supports {} " "cases".format(case, self.num_cases(v))) - return self.dqm_.get_linear_case(v, case) + return self.dqm_.linear_case(v, case) def get_quadratic(self, VarIndex u, VarIndex v, bint array=False): @@ -296,7 +296,8 @@ cdef class cyDiscreteQuadraticModel: raise ValueError("case {} is invalid, variable only supports {} " "cases".format(case, self.num_cases(v))) - self.dqm_.set_linear_case(v, case, b) + cdef Bias *bias_ptr = &(self.dqm_.linear_case(v, case)) + bias_ptr[0] = b def 
set_quadratic(self, VarIndex u, VarIndex v, biases): diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 1017b359e..297d755fa 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -196,16 +196,10 @@ class AdjVectorDQM { return v; } - bias_type get_linear_case(variable_type v, variable_type case_v) { + bias_type &linear_case(variable_type v, variable_type case_v) { assert(v >= 0 && v < this->num_variables()); assert(case_v >= 0 && case_v < this->num_cases(v)); - return bqm_.get_linear(case_starts_[v] + case_v); - } - - void set_linear_case(variable_type v, variable_type case_v, bias_type b) { - assert(v >= 0 && v < this->num_variables()); - assert(case_v >= 0 && case_v < this->num_cases(v)); - bqm_.set_linear(case_starts_[v] + case_v, b); + return bqm_.linear(case_starts_[v] + case_v); } template @@ -213,7 +207,7 @@ class AdjVectorDQM { assert(v >= 0 && v < this->num_variables()); variable_type num_cases_v = this->num_cases(v); for (variable_type case_v = 0; case_v < num_cases_v; case_v++) { - biases[case_v] = bqm_.get_linear(case_starts_[v] + case_v); + biases[case_v] = bqm_.linear(case_starts_[v] + case_v); } } @@ -222,7 +216,7 @@ class AdjVectorDQM { assert(v >= 0 && v < this->num_variables()); variable_type num_cases_v = this->num_cases(v); for (variable_type case_v = 0; case_v < num_cases_v; case_v++) { - bqm_.set_linear(case_starts_[v] + case_v, biases[case_v]); + bqm_.linear(case_starts_[v] + case_v) = biases[case_v]; } } From 0e75ca9bf791cdd8d0f5f40651a48dcbf3b17172 Mon Sep 17 00:00:00 2001 From: Anil Mahmud Date: Mon, 25 Jan 2021 13:55:37 -0800 Subject: [PATCH 36/36] Made copy constructor templated --- dimod/include/dimod/adjvectordqm.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dimod/include/dimod/adjvectordqm.h b/dimod/include/dimod/adjvectordqm.h index 297d755fa..8fe197337 100644 --- a/dimod/include/dimod/adjvectordqm.h +++ b/dimod/include/dimod/adjvectordqm.h @@ -38,7 +38,8 @@ class AdjVectorDQM { AdjVectorDQM() { case_starts_.push_back(0); } - explicit AdjVectorDQM(const AdjVectorDQM &dqm) { + template + explicit AdjVectorDQM(const DQM &dqm) { bqm_ = dqm.bqm_; case_starts_.insert(case_starts_.begin(), dqm.case_starts_.begin(), dqm.case_starts_.end());
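
Taken together, these patches leave energy evaluation as: for each variable, add the linear bias of its chosen case, then add the quadratic bias of every interacting pair, visiting only the lower triangle of the variable adjacency so each pair is counted once (this is what get_energy, introduced in patch 34, does). Below is a self-contained sketch of that rule; the std::map stands in for the AdjVectorBQM neighborhood purely for illustration and is not dimod's implementation.

#include <cstddef>
#include <iostream>
#include <map>
#include <utility>
#include <vector>

int main() {
    // Two variables with 2 and 3 cases; flat indices: var 0 -> {0,1}, var 1 -> {2,3,4}.
    std::vector<std::size_t> case_starts_ = {0, 2, 5};
    std::vector<std::vector<std::size_t>> adj_ = {{1}, {0}};  // the two variables interact
    std::vector<double> linear = {0.0, 1.0, -1.0, 0.5, 2.0};  // one bias per flat case

    // Quadratic biases keyed on (flat case of u, flat case of v), lower triangle only.
    std::map<std::pair<std::size_t, std::size_t>, double> quad = {
        {{2, 0}, -2.0},  // (var 1, case 0) with (var 0, case 0)
        {{4, 1}, 3.0},   // (var 1, case 2) with (var 0, case 1)
    };

    // sample[u] is the case chosen for variable u.
    std::vector<std::size_t> sample = {1, 2};

    double energy = 0;
    for (std::size_t u = 0; u < adj_.size(); u++) {
        std::size_t cu = case_starts_[u] + sample[u];
        energy += linear[cu];                 // linear bias of the chosen case
        for (std::size_t v : adj_[u]) {
            if (v > u) break;                 // lower triangle: count each pair once
            std::size_t cv = case_starts_[v] + sample[v];
            auto it = quad.find({cu, cv});
            if (it != quad.end()) energy += it->second;
        }
    }
    std::cout << energy << "\n";  // prints 6: 1.0 + 2.0 + 3.0
    return 0;
}
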