我正在尝试让 kamada kawai 弹簧布局在椭球体表面上工作。我编写了一个 convex_topology 特化,它使用 GeographicLib 的测地线函数实现各项拓扑操作,然后我加载了一个网络(存储在 json 中)以及各节点的初始位置(同样以 json 存储)。
但是我的代码失败了。我不知道是不是因为拓扑定义得不好,还是我对 Boost 的 kamada kawai 布局的实现缺少了某些东西,或者是我对一切都期望太高了。
当前拓扑将点存储为纬度/经度双精度数,将 point_difference 存储为距离/方位对,最小/最大使用与北极的距离。使用 Geodesic 提供的 xyz 坐标(椭圆体上的表面)可能更好,但当我尝试时也失败了。
这是我正在使用的代码。
#include <algorithm>
#include <cassert>
#include <chrono>
#include <cmath>
#include <fstream>
#include <getopt.h>
#include <iostream>
#include <sstream>
#include <streambuf>
#include <string>
#include <unordered_map>
#include <vector>
#include <random>
#include <numbers>
#include <GeographicLib/Intersect.hpp>
#include <GeographicLib/Geodesic.hpp>
#include <GeographicLib/Constants.hpp>
#include <boost/graph/adjacency_list.hpp>
#include <boost/graph/kamada_kawai_spring_layout.hpp>
#include <boost/graph/random_layout.hpp>
#include "rapidjson/document.h"
#include "rapidjson/istreamwrapper.h"
#include "rapidjson/writer.h"
#include "rapidjson/stringbuffer.h"
#include "rapidjson/ostreamwrapper.h"
using namespace std;
using namespace rapidjson;
using namespace GeographicLib;
// Topology over the WGS84 ellipsoid for Boost's spring layouts.
// Points are {latitude, longitude} in degrees; point_differences are
// {geodesic distance in metres, forward azimuth in degrees}.
template <typename RandomNumberGenerator = minstd_rand> class geo_topology : public boost::convex_topology<2>
{
public:
    typedef typename convex_topology<2>::point_type point_type;
    typedef typename convex_topology<2>::point_difference_type point_difference_type;
    typedef boost::uniform_01<RandomNumberGenerator, double> rand_t;

private:
    shared_ptr<RandomNumberGenerator> gen_ptr;
    shared_ptr<rand_t> rand;
    Geodesic wgs84;

public:
    // Owns its own RNG; copies the supplied geodesic model.
    explicit geo_topology(Geodesic& g) : gen_ptr(new RandomNumberGenerator), rand(new rand_t(*gen_ptr)), wgs84(g) {}
    // Borrows an external RNG; uses the WGS84 ellipsoid.
    geo_topology(RandomNumberGenerator& gen) : gen_ptr(), rand(new rand_t(gen)), wgs84(Geodesic::WGS84()) {}

    // Random point on the ellipsoid (uniform in lat/lon, not in area).
    // BUG FIX: uniform_01 yields samples in [0,1), so the original
    // fmod(sample, 180) - 90 was a no-op that pinned every random point
    // next to (-90,-180); scale the unit sample to the full range instead.
    point_type random_point() const {
        point_type p;
        p[0] = (*rand)() * 180.0 - 90.0;  // latitude in [-90, 90)
        p[1] = (*rand)() * 360.0 - 180.0; // longitude in [-180, 180)
        if (isnan(p[0]) or isnan(p[1])) {
            std::cerr << "nan in random_point" << std::endl;
        }
        return p;
    }

    // Wrap a point back into legal lat/lon ranges.
    // BUG FIX: fmod keeps the sign of its first argument, so the original
    // single-fmod form returned out-of-range values for negative inputs;
    // the double-fmod wraps correctly for either sign.
    point_type bound(point_type a) const {
        a[0] = fmod(fmod(a[0] + 90.0, 180.0) + 180.0, 180.0) - 90.0;   // latitude.
        a[1] = fmod(fmod(a[1] + 180.0, 360.0) + 360.0, 360.0) - 180.0; // longitude.
        if (isnan(a[0]) or isnan(a[1])) {
            std::cerr << "nan in bound" << std::endl;
        }
        return a;
    }

    // A closed surface has no boundary; return a harmless constant.
    double distance_from_boundary(point_type a) const {
        return 1000.0; // really don't know if this means anything.
    }

    // North pole, used by the pointwise min/max ordering below.
    point_type center() const {
        point_type result;
        result[0] = 90.0; // latitude.
        result[1] = 0.0;
        return result;
    }

    point_type origin() const {
        point_type result;
        result[0] = 0.0; // latitude.
        result[1] = 0.0;
        return result;
    }

    // Extent expressed as {max distance component, full azimuth range}.
    point_difference_type extent() const {
        point_difference_type result;
        result[0] = wgs84.EquatorialRadius(); // dist.
        result[1] = 360.0;                    // r_fwd_azi
        return result;
    }

    // Geodesic distance (metres) between two lat/lon points.
    double distance(point a, point b) const {
        double dist = 0.0;
        double r_fwd_azi, r_rev_azi;
        wgs84.Inverse(a[0], a[1], b[0], b[1], dist, r_fwd_azi, r_rev_azi);
        if (isnan(dist)) {
            std::cerr << "dist shows nan distance" << std::endl;
        }
        return dist;
    }

    // Walk `fraction` of the geodesic from a toward b.
    point move_position_toward(point a, double fraction, point b) const {
        point c;
        double dist = 0.0;
        double r_fwd_azi, r_rev_azi;
        wgs84.Inverse(a[0], a[1], b[0], b[1], dist, r_fwd_azi, r_rev_azi);
        GeodesicLine gl = wgs84.Line(a[0], a[1], r_fwd_azi);
        gl.Position(fraction * dist, c[0], c[1]);
        if (isnan(c[0]) or isnan(c[1])) {
            std::cerr << "nan in move_position_toward" << std::endl;
        }
        return c;
    }

    // a-b as a {distance, forward azimuth} pair.
    point_difference difference(point a, point b) const {
        point_difference result;
        double r_rev_azi;
        wgs84.Inverse(a[0], a[1], b[0], b[1], result[0], result[1], r_rev_azi);
        if (isnan(result[0]) or isnan(result[1])) {
            std::cerr << "nan in point_difference" << std::endl;
        }
        return result;
    }

    // Displace a by delta = {distance, azimuth}; on nan deltas take a
    // vanishingly small step toward a random point to stay on-surface.
    point adjust(point a, point_difference delta) const {
        point result; // point_difference = dist/fwd_azi
        if (isnan(delta[0]) or isnan(delta[1])) {
            // std::cerr << "nan in adjust.delta for point at " << a[0] << "," << a[1] << std::endl;
            point b = random_point();
            result = move_position_toward(a, 0.000001, b);
        } else {
            GeodesicLine gl = wgs84.Line(a[0], a[1], delta[1]);
            gl.Position(delta[0], result[0], result[1]);
            if (isnan(result[0]) or isnan(result[1])) {
                std::cerr << "nan in adjust" << std::endl;
            }
        }
        return result;
    }

    // Order points by their geodesic distance from the north pole.
    point pointwise_min(point a, point b) const {
        point result;
        double a_dist, b_dist = 0.0;
        double r_fwd_azi, r_rev_azi;
        wgs84.Inverse(90.0, 0.0, a[0], a[1], a_dist, r_fwd_azi, r_rev_azi);
        wgs84.Inverse(90.0, 0.0, b[0], b[1], b_dist, r_fwd_azi, r_rev_azi);
        result = a_dist <= b_dist ? a : b;
        if (isnan(result[0]) or isnan(result[1])) {
            std::cerr << "nan in pointwise_min" << std::endl;
        }
        return result;
    }

    point pointwise_max(point a, point b) const {
        point result;
        double a_dist, b_dist = 0.0;
        double r_fwd_azi, r_rev_azi;
        wgs84.Inverse(90.0, 0.0, a[0], a[1], a_dist, r_fwd_azi, r_rev_azi);
        wgs84.Inverse(90.0, 0.0, b[0], b[1], b_dist, r_fwd_azi, r_rev_azi);
        result = a_dist >= b_dist ? a : b;
        if (isnan(result[0]) or isnan(result[1])) {
            std::cerr << "nan in pointwise_max" << std::endl;
        }
        return result;
    }

    // Magnitude of a difference. BUG FIX (per the resolution documented in
    // this post): kamada_kawai can hand over negative distance deltas, so
    // the norm must be the absolute distance component, not the distance
    // scaled by the equatorial radius.
    double norm(point_difference delta) const {
        return fabs(delta[0]);
    }

    double volume(point_difference delta) const {
        return delta[1];
    }
};
struct ExampleSpherical {
static constexpr double r_mux = numbers::pi / 180.0;
static constexpr double d_mux = 180.0 / numbers::pi;
static constexpr double radius = 6371008.7714;
static constexpr double q_circ = radius * numbers::pi * 0.5;
using Topology = geo_topology<>;
using Point = Topology::point_type;
using EdgeWeightProperty = boost::property<boost::edge_weight_t, double> ;
using Graph = boost::adjacency_list<boost::vecS, boost::vecS, boost::undirectedS, //
boost::property<boost::vertex_index_t, int>, //
boost::property<boost::edge_index_t, int, EdgeWeightProperty> //
>;
size_t depth;
double line_length;
std::vector<unsigned long> vertices;
using PosVector = std::vector<Point>;
using vertex_id_t = unsigned long; //std::size_t;
map<vertex_id_t,set<vertex_id_t>> net;
map<vertex_id_t,Point> pos;
Geodesic geo;
Topology topo;
ExampleSpherical(size_t d) : depth(d),geo(Geodesic::WGS84()),topo(geo) {
line_length = q_circ / pow(3.0, double(depth+1));
}
void load_positions(const std::string filename) {
// Load latitude/longitude positions.
auto start = std::chrono::system_clock::now();
Document jpos;
ifstream pos_file(filename); // dict of node: pos
string pos_str((istreambuf_iterator<char>(pos_file)),istreambuf_iterator<char>());
jpos.Parse(pos_str.c_str());
if (!jpos.HasParseError()) {
auto n_end = jpos.MemberEnd();
for (auto p_it = jpos.MemberBegin(); p_it != n_end; ++p_it) {
unsigned long node = std::stoul (p_it->name.GetString());
Point p;
p[0] = p_it->value[0].GetDouble();
p[1] = p_it->value[1].GetDouble();
pos.insert({node,p});
}
}
for (auto& a : pos) {
for (auto & b : pos) {
if (a.first != b.first) {
auto dx = topo.difference(a.second, b.second);
if (dx[0] < line_length) {
line_length = dx[0];
}
if (dx[0] < 0.000001) {
cout << "found overlapping nodes";
exit(0);
}
}
}
}
cout << "Minimum Node Distance:" << line_length;
auto end = std::chrono::system_clock::now();
auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
cout << "position load took: " << ms << "ms\n";
}
void load_network(const std::string filename) {
auto start = std::chrono::system_clock::now();
Document jnet;
ifstream net_file(filename); // dict with list "nodes" = each= eg {"id"=0}, and list links=eg {"source": 0, "target":2}
string net_str((istreambuf_iterator<char>(net_file)),istreambuf_iterator<char>());
jnet.Parse(net_str.c_str());
if (!jnet.HasParseError()) {
rapidjson::Value::ConstValueIterator n_ite = jnet["nodes"].Begin();
rapidjson::Value::ConstValueIterator n_end = jnet["nodes"].End();
do {
net.insert({n_ite->GetObject()["id"].GetUint64(),{{},{}}});
n_ite++;
} while (n_ite != n_end);
n_ite = jnet["links"].Begin();
n_end = jnet["links"].End();
do {
unsigned long ia = n_ite->GetObject()["source"].GetUint64();
unsigned long ib = n_ite->GetObject()["target"].GetUint64();
net[ia].insert(ib);
net[ib].insert(ia);
n_ite++;
} while (n_ite != n_end);
}
auto end = std::chrono::system_clock::now();
auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
cout << "network load took: " << ms << "ms\n";
}
void validate_graph() {
deque<vertex_id_t> dx;
for (auto& node : net) {
if (pos.find(node.first) == pos.end()) {
dx.push_back(node.first);
}
}
for (auto& node : net) {
if (node.second.empty()) {
dx.push_back(node.first);
}
for (auto& q : node.second ) {
if (net.find(q) == net.end()) {
dx.push_back(node.first);
}
}
}
if (!dx.empty()) {
cerr << "files did not reconcile correctly";
}
}
void load_and_validate() {
ostringstream graph_file_str;
ostringstream graph_pos_str;
graph_file_str << "graph_" << depth << ".json";
graph_pos_str << "graph_ll_pos_" << depth << ".json";
load_network(graph_file_str.str());
load_positions(graph_pos_str.str());
validate_graph();
}
void save_positions(PosVector& final_pos) {
ostringstream result_pos_str;
result_pos_str << "result_ll_pos_" << depth << ".json";
auto start = std::chrono::system_clock::now();
Document jpos;
jpos.SetObject();
auto& alloc = jpos.GetAllocator();
for (auto& node: pos) {
ostringstream os;
os << node.first;
string id = os.str();
Value array(kArrayType);
array.PushBack(final_pos[node.first][0],alloc);
array.PushBack(final_pos[node.first][1],alloc);
auto j_id = Value(id.c_str(), alloc);
jpos.AddMember(j_id, array, alloc);
}
std::ofstream ofs { result_pos_str.str() };
if ( !ofs.is_open() ) {
std::cerr << "Could not open file for writing!\n";
}
OStreamWrapper osw { ofs };
Writer<OStreamWrapper> writer2 { osw };
jpos.Accept(writer2);
auto end = std::chrono::system_clock::now();
auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
cout << "position save took: " << ms << "ms\n";
}
struct kamada_kawai_done {
kamada_kawai_done() : last_delta() {}
template<typename Graph>
bool operator()(double delta_p,
typename boost::graph_traits<Graph>::vertex_descriptor /*p*/,
const Graph& /*g*/,
bool global)
{
if (global) {
double diff = last_delta - delta_p;
if (diff < 0) diff = -diff;
std::cout << "delta_p: " << delta_p << std::endl;
last_delta = delta_p;
return diff < 0.01;
} else {
return delta_p < 0.01;
}
}
double last_delta;
};
void evaluate() {
load_and_validate();
vertices.resize(net.size());
size_t i=0;
for (auto n: net) {
vertices[i] = n.first;
i++;
}
Graph g(this->vertices.size());
PosVector position(num_vertices(g));
for (auto n: net) {
auto& s = n.first;
for (auto d: n.second) {
if (d != s) {
add_edge(s, d, 1.0, g);
} else {
cout << "Weird. source and destinations " << s << " and " << d << " are the same. Skipping that." << endl;
}
}
position[s] = pos[s];
}
auto position_map = boost::make_iterator_property_map(position.begin(), get(boost::vertex_index, g));
boost::kamada_kawai_spring_layout(g, position_map, get(boost::edge_weight, g), topo, boost::edge_length(10.0), kamada_kawai_done());
save_positions(position);
}
};
int main() {
ExampleSpherical p(1);
p.evaluate();
}
graph_0.json
{"directed": false, "multigraph": false, "graph": {}, "nodes": [{"id": 0}, {"id": 1}, {"id": 2}, {"id": 3}, {"id": 4}, {"id": 5}, {"id": 6}, {"id": 7}, {"id": 8}, {"id": 9}, {"id": 10}, {"id": 11}, {"id": 12}, {"id": 13}, {"id": 14}, {"id": 15}, {"id": 16}, {"id": 17}, {"id": 18}, {"id": 19}, {"id": 20}, {"id": 21}, {"id": 22}, {"id": 23}, {"id": 24}, {"id": 25}, {"id": 26}, {"id": 27}, {"id": 28}, {"id": 29}, {"id": 30}, {"id": 31}, {"id": 32}, {"id": 33}, {"id": 34}, {"id": 35}, {"id": 36}, {"id": 37}], "links": [{"source": 0, "target": 2}, {"source": 0, "target": 1}, {"source": 0, "target": 4}, {"source": 0, "target": 3}, {"source": 1, "target": 7}, {"source": 1, "target": 3}, {"source": 1, "target": 5}, {"source": 1, "target": 2}, {"source": 1, "target": 6}, {"source": 2, "target": 4}, {"source": 2, "target": 8}, {"source": 2, "target": 5}, {"source": 2, "target": 10}, {"source": 3, "target": 9}, {"source": 3, "target": 11}, {"source": 3, "target": 6}, {"source": 3, "target": 4}, {"source": 4, "target": 9}, {"source": 4, "target": 8}, {"source": 4, "target": 12}, {"source": 5, "target": 7}, {"source": 5, "target": 14}, {"source": 5, "target": 16}, {"source": 5, "target": 10}, {"source": 6, "target": 15}, {"source": 6, "target": 17}, {"source": 6, "target": 11}, {"source": 6, "target": 7}, {"source": 7, "target": 15}, {"source": 7, "target": 13}, {"source": 7, "target": 14}, {"source": 8, "target": 10}, {"source": 8, "target": 20}, {"source": 8, "target": 12}, {"source": 8, "target": 22}, {"source": 9, "target": 21}, {"source": 9, "target": 12}, {"source": 9, "target": 11}, {"source": 9, "target": 23}, {"source": 10, "target": 20}, {"source": 10, "target": 16}, {"source": 10, "target": 18}, {"source": 11, "target": 17}, {"source": 11, "target": 19}, {"source": 11, "target": 21}, {"source": 12, "target": 22}, {"source": 12, "target": 24}, {"source": 12, "target": 23}, {"source": 13, "target": 14}, {"source": 13, "target": 25}, {"source": 13, "target": 15}, 
{"source": 14, "target": 28}, {"source": 14, "target": 16}, {"source": 14, "target": 25}, {"source": 15, "target": 25}, {"source": 15, "target": 29}, {"source": 15, "target": 17}, {"source": 16, "target": 26}, {"source": 16, "target": 28}, {"source": 16, "target": 18}, {"source": 17, "target": 27}, {"source": 17, "target": 29}, {"source": 17, "target": 19}, {"source": 18, "target": 26}, {"source": 18, "target": 20}, {"source": 19, "target": 27}, {"source": 19, "target": 21}, {"source": 20, "target": 26}, {"source": 20, "target": 22}, {"source": 20, "target": 31}, {"source": 21, "target": 32}, {"source": 21, "target": 23}, {"source": 21, "target": 27}, {"source": 22, "target": 30}, {"source": 22, "target": 31}, {"source": 22, "target": 24}, {"source": 23, "target": 24}, {"source": 23, "target": 30}, {"source": 23, "target": 32}, {"source": 24, "target": 30}, {"source": 25, "target": 28}, {"source": 25, "target": 29}, {"source": 25, "target": 33}, {"source": 26, "target": 34}, {"source": 26, "target": 31}, {"source": 26, "target": 28}, {"source": 27, "target": 29}, {"source": 27, "target": 35}, {"source": 27, "target": 32}, {"source": 28, "target": 33}, {"source": 28, "target": 34}, {"source": 29, "target": 33}, {"source": 29, "target": 35}, {"source": 30, "target": 32}, {"source": 30, "target": 36}, {"source": 30, "target": 31}, {"source": 31, "target": 36}, {"source": 31, "target": 34}, {"source": 32, "target": 35}, {"source": 32, "target": 36}, {"source": 33, "target": 37}, {"source": 33, "target": 35}, {"source": 33, "target": 34}, {"source": 34, "target": 37}, {"source": 34, "target": 36}, {"source": 35, "target": 37}, {"source": 35, "target": 36}, {"source": 36, "target": 37}]}
和 graph_ll_pos_0.json(代码按 "graph_ll_pos_" << depth << ".json" 的模式读取初始位置文件)
{"0": [0.0, 180.0], "1": [0.0, -150.0], "2": [-30.000000000000004, 180.0], "3": [29.999999999999996, 180.0], "4": [0.0, 150.0], "5": [-35.264389682754654, -135.0], "6": [35.264389682754654, -135.0], "7": [0.0, -120.00000000000001], "8": [-35.264389682754654, 135.0], "9": [35.264389682754654, 135.0], "10": [-59.99999999999999, 180.0], "11": [59.99999999999999, 180.0], "12": [0.0, 120.00000000000001], "13": [0.0, -90.0], "14": [-29.999999999999996, -90.0], "15": [29.999999999999996, -90.0], "16": [-59.99999999999999, -90.0], "17": [59.99999999999999, -90.0], "18": [-90.0, 0.0], "19": [90.0, 0.0], "20": [-60.00000000000001, 90.0], "21": [60.00000000000001, 90.0], "22": [-30.000000000000004, 90.0], "23": [29.999999999999996, 90.0], "24": [0.0, 90.0], "25": [0.0, -60.00000000000001], "26": [-60.00000000000001, 0.0], "27": [60.00000000000001, 0.0], "28": [-35.264389682754654, -45.0], "29": [35.264389682754654, -45.0], "30": [0.0, 60.00000000000001], "31": [-35.264389682754654, 45.0], "32": [35.264389682754654, 45.0], "33": [0.0, -29.999999999999996], "34": [-30.000000000000004, 0.0], "35": [29.999999999999996, 0.0], "36": [0.0, 29.999999999999996], "37": [0.0, 0.0]}
更新
Sehe 的以下贡献解决了我代码中的许多错误 - 特别是与我对 boost 的 kamada_kawai_spring_layout 的理解不佳有关
拓扑“范数”方法应返回 point_difference 的绝对值(即 {distance, bearing} 对偶)。point_difference 增量不应以负值到达,但它发生在 kamada_kawai 内部的某个地方。
double norm(point_difference delta) const {
return abs(delta[0]);
}
我现在看到弹簧值比初始值更能扭曲位置(这在网络的更高分辨率上显示,但对问题没有影响。请注意,其余顶点与原始顶点相比没有变化:
kamada_kawai 看起来应该是这样的(虽然这是通过使用 python 上的 networkx 绘制的,适合单位球体,而不是椭圆体 - 虽然 networkx 很好,但它太慢、占用大量内存,并且不能支持任意拓扑)。
我正在使用 Python 脚本将输出转换为 KMZ 以实现可视化。
import json
import simplekml
from utility.spherical import Spherical
def add_line(kz, pts, idx, name=None, col=(154, 154, 154), alpha=0.5, width=4):
    """Append a KML linestring joining pts[i] for each i in idx.

    kz    -- a simplekml.Kml (or container) exposing newlinestring().
    pts   -- mapping of node id -> (lat, lon) pair or xyz triple.
    idx   -- sequence of node ids to connect, in order.
    col   -- RGB line colour; alpha -- opacity in [0, 1]; width -- px.

    Relies on a module-level `sp` (Spherical) for xyz -> lat/lon conversion.
    """
    tpt = [pts[i] for i in idx]
    if len(tpt[0]) == 3:  # xyz coordinates: convert to (lat, lon)
        tpt = [sp.xyz_ll(tf) for tf in tpt]
    lo = [i[1] for i in tpt]
    mim = max(lo) - min(lo)
    if mim > 179.9999:
        # Antimeridian crossing: shift negative longitudes so the line
        # doesn't wrap the long way round the globe.
        path = [(o if o >= 0 else o + 360.0000, a) for (a, o) in tpt]
    else:
        path = [(o, a) for (a, o) in tpt]
    lstr = kz.newlinestring(name=name)
    lstr.coords = path
    lstr.style.linestyle.width = width
    # BUG FIX: col and alpha were accepted but never applied to the style.
    # NOTE(review): simplekml Color.rgb takes an optional alpha component —
    # confirm against the installed simplekml version.
    lstr.style.linestyle.color = simplekml.Color.rgb(col[0], col[1], col[2], int(alpha * 255))
if __name__ == '__main__':
    sp = Spherical()
    for depth in [2]:
        # Read the laid-out lat/lon positions, keyed by integer node id.
        with open(f'result_ll_pos_{depth}.json', 'r') as pos_f:
            pos_n = json.load(pos_f)
        pos = {int(k): tuple(v) for k, v in pos_n.items()}
        # Read the network definition and take its edge list.
        with open(f'graph_{depth}.json', 'r') as net_f:
            net = json.load(net_f)
        edges = net['links']
        # Emit one linestring per edge.
        kml = simplekml.Kml(open=1)
        for edge in edges:
            add_line(kml, pos, [edge['source'], edge['target']])
        kml.save(f'kkp_l_{depth}.kml')
        print(f'kml file generated {depth}')
因此经过审查后,我想出了一个完整的版本。
存在一个 bug:初始化器 {{}, {}} 会把 0 作为邻居插入到每个节点中——两个 {} 都被值初始化为 0,因此每个邻接集合创建时就带有单个元素 0。此外,网络在两个方向上都存储了邻居,而图本身已经是 undirected 的,这导致所有边都被重复添加;如果这不是本意,只需把边容器从 vecS 换成 setS,重复项就会被自动过滤掉。
其次,边索引(edge index)属性是必需的,但没有被正确填充:它们全部被设置成了 1(准确地说是 1.0,看起来这本应是边的权重)。权重本身也没有被设置,全部默认为 0。
另外,我认为 last_delta 最好不要从 0 开始初始化。
最后——这一点或许本来没有问题,但我还是通过引用传递了"完成"判定条件,以确保它的状态确实被保留。我还添加了一个 write_graphviz 步骤,把解析后的输入渲染出来以供检查:可在 Coliru 上现场运行。
这将打印例如:
点图:
结果是
result_ll_pos_0.json
:如果没有 @sehe 的帮助,我不可能到达这里。正如评论中提到的,我需要编写一个合适的 point_difference 结构,然后我还注意到我没有正确地在弧度(boost,C++ 数学)和度(GeographicLib)之间切换。之后,我怀疑,出于某种原因,kamada_kawai_spring_layout 传递给调整()位置的增量需要反转(或者我不明白调用的意图(!)所以我将 pi/180° 添加到它发送的值中。
虽然这里仍有一些可以优化的地方(以及几个多余的功能),但工作代码如下。
初始网络如下所示:
处理后的/kamada_kawai_spring_layouted网络如下:
这是一项出色的改进。当然,可能还需要进行一些调整、进一步改进和提高效率,我很乐意听听你们的意见,但问题已经得到解答。不过,荣誉归于 sehe。