
Usage Example

The following example uses the sift-128-euclidean.hdf5 dataset with 80 threads. The dataset can be obtained as follows:

wget http://ann-benchmarks.com/sift-128-euclidean.hdf5 --no-check-certificate
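
If the download succeeds, the file is an HDF5 file that should contain the "train", "test" and "neighbors" datasets read by main.cpp below. An optional quick check, assuming the HDF5 command-line tools (h5dump) are installed, is:

h5dump -n sift-128-euclidean.hdf5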

Assuming the program runs in the directory "/path/to/kbest_test", the complete directory structure should look like this:

├── build                                                   // Build directory; generated automatically when build.sh runs
├── build.sh                                                // Run build.sh to build the executable "run"
├── CMakeLists.txt
├── datasets                                                // Holds the dataset
│     └── sift-128-euclidean.hdf5
├── graph_indices                                           // Holds the built graph index; must be created manually
│     └── sift-128-euclidean_KGN-RNN_R_50_L_100.kgn         // Built graph index; generated after running "run" with "index_save_or_load" set to "save" and "save_types" set to "save_graph" in the dataset configuration file
├── searcher_indices                                        // Holds the built searcher; must be created manually
│     └── sift-128-euclidean_KGN-RNN_R_50_L_100.ksn         // Built searcher; generated after running "run" with "index_save_or_load" set to "save" and "save_types" set to "save_searcher" in the dataset configuration file
├── main.cpp                                                // Source file containing the main function
├── run                                                     // Executable; generated after running build.sh
└── sift.config                                             // Configuration file for the dataset
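
The "graph_indices" and "searcher_indices" directories (and "datasets", if it does not already exist) must be created manually before the first run. A minimal sketch, assuming the dataset was downloaded into the current directory:

mkdir -p /path/to/kbest_test/datasets /path/to/kbest_test/graph_indices /path/to/kbest_test/searcher_indices
mv sift-128-euclidean.hdf5 /path/to/kbest_test/datasets/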

The procedure is as follows:

  1. Assuming the program runs in the directory "/path/to/kbest_test", check that the files build.sh, CMakeLists.txt, datasets/sift-128-euclidean.hdf5, main.cpp, and sift.config exist in this directory. The contents of build.sh, CMakeLists.txt, main.cpp, and sift.config are provided below.
  2. Make sure that "num_numa_nodes" in sift.config matches the actual number of NUMA nodes at run time (a quick way to check is shown after the sift.config listing below).
  3. If this is the first run, make sure that "index_save_or_load" in sift.config is set to "save"; for subsequent runs it can be changed to "load" so that queries use the previously built graph index or searcher.
  4. Install the hdf5-devel and other required dependencies.
    yum install hdf5-devel openssl-devel libcurl-devel

  5. Run build.sh.
    sh build.sh

  6. Run the executable run.
    ./run 80 2 -1 sift.config


    The test command arguments are explained below.

    ./run <threads> <query_mode> <batch_size> <config_name>
    • "threads" is the number of threads used at run time.
    • "query_mode" selects the test mode. "1" selects batch query mode, in which each call issues "batch_size" queries; "2" selects concurrent single-query mode, in which each thread issues one query at a time under concurrency and the "batch_size" argument is ignored.
    • "batch_size" is the number of queries issued per call in batch query mode; "-1" issues all queries in the dataset in a single call.
    • "config_name" is the name of the configuration file for the dataset under test.

    When the run completes, the program prints the recall and QPS for each configured efs value.
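
    For reference, the other modes can be exercised with invocations such as the following (80 threads as in this example; the batch size of 1000 is only an illustrative value):

    ./run 80 1 -1 sift.config      # batch mode, all queries in a single call
    ./run 80 1 1000 sift.config    # batch mode, 1000 queries per call
    ./run 80 2 -1 sift.config      # concurrent single-query mode; batch_size is ignored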

The content of build.sh is as follows:
mkdir build
cd build
cmake .. -DCMAKE_INCLUDE_PATH=/usr/local/sra_recall/include -DCMAKE_LIBRARY_PATH=/usr/local/sra_recall/lib
make -j
cp run ..
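
Both build.sh and the CMakeLists.txt below assume that the KBest headers and shared library are installed under /usr/local/sra_recall. If the build fails to find them, a quick sanity check of the assumed paths is:

ls /usr/local/sra_recall/include/kbest.h
ls /usr/local/sra_recall/lib/libkbest.so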
The content of CMakeLists.txt is as follows:
EXECUTE_PROCESS(COMMAND uname -m COMMAND tr -d '\n' OUTPUT_VARIABLE ARCHITECTURE)
message(STATUS "Architecture: ${ARCHITECTURE}")

if(${ARCHITECTURE} STREQUAL "aarch64")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -ldl -lz -gdwarf-2")
    include_directories(/usr/include/hdf5)
else()
    set(CMAKE_C_COMPILER gcc)
    set(CMAKE_CXX_COMPILER g++)
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -lhdf5")
endif()

cmake_minimum_required(VERSION 3.12.0)
project(best LANGUAGES C CXX)
set(CMAKE_CXX_STANDARD 20)

message(PROJECT_SOURCE_DIR="${PROJECT_SOURCE_DIR}")

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++2a -falign-jumps=64 -fopenmp -fPIC -Ofast -march=armv8.2-a+dotprod")
set(KBEST_INCLUDE_DIRS "/usr/local/sra_recall/include")

find_package(HDF5 REQUIRED COMPONENTS C HL)
include_directories(${HDF5_INCLUDE_DIRS})
include_directories(${KBEST_INCLUDE_DIRS})

message(HDF5_INCLUDE_DIRS="${HDF5_INCLUDE_DIRS}")
message(HDF5_LIBRARIES="${HDF5_LIBRARIES}")

set(KBEST_SHARED_LIB_PATH "/usr/local/sra_recall/lib")
message(KBEST_SHARED_LIB_PATH="${KBEST_SHARED_LIB_PATH}")
link_directories(${KBEST_SHARED_LIB_PATH})

add_executable(run main.cpp)

if(${ARCHITECTURE} STREQUAL "aarch64")
    target_link_libraries(run ${KBEST_SHARED_LIB_PATH}/libkbest.so ${HDF5_LIBRARIES} -lcrypto -lcurl -lpthread -lz -ldl -lm -lnuma)
else()
    target_link_libraries(run ${KBEST_SHARED_LIB_PATH}/libkbest.so ${HDF5_LIBRARIES} dl z)
endif()
The content of main.cpp is as follows:
#include <bits/stdc++.h>
#if defined(__aarch64__)
#include "hdf5.h"
#else
#include <hdf5.h>
#endif
#include "kbest.h"
#include <numa.h>
#include <pthread.h>
#include <unistd.h>
#include <array>
#include <filesystem>
#include <fstream>
#include <iostream>
#include <memory>
#include <stdexcept>
#include <string>
#include <vector>
using namespace std;

static const char* HDF5_DATASET_TRAIN = "train";
static const char* HDF5_DATASET_TEST = "test";
static const char* HDF5_DATASET_NEIGHBORS = "neighbors";
static const char* HDF5_DATASET_DISTANCES = "distances";

extern bool NUMA_ENABLED;
extern int num_numa_nodes;

void* hdf5_read(const std::string& file_name, const std::string& dataset_name, H5T_class_t dataset_class,
                int32_t& d_out, int32_t& n_out);

double distance(const float* x, const float* y, int d, const std::string metric) {                                      // distance computation
    if (metric == "L2") {
        double sum = 0.0f;
        for (int i = 0; i < d; ++i) {
            sum += (double)(x[i] - y[i]) * (x[i] - y[i]);
        }
        return sum;
    } else if (metric == "IP") {
        double sum = 0.0f;
        for (int i = 0; i < d; ++i) {
            sum -= (double)x[i] * y[i];
        }
        return sum;
    } else {
        assert(false);
    }
    return 0;
}

int intersect(int64_t* a1, int64_t* a2, int l1, int l2, int index) {                                                    // count ids shared by ground truth and results
    int res = 0;
    for (int i = 0; i < l1; i++) {
        if (a1[i] < 0) { continue; }
        bool duplicate = false;                                                                                         // skip ids already counted earlier in a1
        for (int j = 0; j < i; j++) {
            if (a1[i] == a1[j]) {
                duplicate = true;
                break;
            }
        }
        if (duplicate) { continue; }
        for (int j = 0; j < l2; j++) {
            if (a1[i] == a2[j]) {
                res++;
                break;
            }
        }
    }
    return res;
}

void loadHDF(const std::string& ann_file_name, int32_t& nb, int32_t& nq, int32_t& dim, int32_t& gt_closest,             // load the dataset
             float*& data, float*& queries, int64_t*& gt_ids, int consecutiveLog, int& numAllocParts) {
    float* data_one = (float*)hdf5_read(ann_file_name, HDF5_DATASET_TRAIN, H5T_FLOAT, dim, nb);
    queries = (float*)hdf5_read(ann_file_name, HDF5_DATASET_TEST, H5T_FLOAT, dim, nq);
    int32_t* gt_ids_short = (int32_t*)hdf5_read(ann_file_name, HDF5_DATASET_NEIGHBORS, H5T_INTEGER, gt_closest, nq);
    gt_ids = new int64_t[gt_closest * nq];
    for (int i = 0; i < gt_closest * nq; i++) {
        gt_ids[i] = gt_ids_short[i];
    }

    data = data_one;
    numAllocParts = 1;
    delete[] gt_ids_short;
}

static void normalize(float* data, int length) {                                                                        // normalize a vector to unit length
    double norm = 0;
    for (int i = 0; i < length; i++) {
        norm += data[i] * data[i];
    }
    norm = sqrt(norm);
    if (norm != 0.0f) {
        for (int i = 0; i < length; i++) {
            data[i] /= norm;
            assert(!isnan(data[i]));
        }
    }
}

struct Config {
    int iter_num;
    int topK;
    std::string index_save_or_load;
    std::string save_types;
    std::string index_path;
    std::string searcher_path;
    std::string dataset_path;
    std::string dataset_type;
    std::string metric;
    int L;
    int R;
    int A;
    std::string index_type;
    bool optimize;
    bool batch;
    int kmeans_ep;
    int kmeans_type;
    std::vector<int> level;
    std::vector<int> efs;
    bool numa_enabled;
    int num_numa_nodes;
};

std::vector<int> parse_list(const std::string& str) {
    std::vector<int> result;
    std::stringstream ss(str);
    std::string item;
    while (std::getline(ss, item, ',')) {
        result.push_back(std::stoi(item));
    }
    return result;
}

Config read_config(const std::string& file_path) {
    std::ifstream file(file_path);
    if (!file.is_open()) { std::cout << "[ERROR] config file not found" << std::endl; }

    Config config;
    std::unordered_map<std::string, std::string> config_map;
    std::string line;
    while (std::getline(file, line)) {
        std::istringstream is_line(line);
        std::string key;
        if (std::getline(is_line, key, '=')) {
            std::string value;
            if (std::getline(is_line, value)) { config_map[key] = value; }
        }
    }

    config.iter_num = std::stoi(config_map["iter_num"]);
    std::cout << "iter_num: " << config.iter_num << std::endl;
    config.topK = std::stoi(config_map["topK"]);
    std::cout << "topK: " << config.topK << std::endl;
    config.index_save_or_load = config_map["index_save_or_load"];
    config.save_types = config_map["save_types"];
    config.index_path = config_map["index_path"];
    config.searcher_path = config_map["searcher_path"];
    config.dataset_path = config_map["dataset_path"];
    config.dataset_type = config_map["dataset_type"];
    config.metric = config_map["metric"];
    config.L = std::stoi(config_map["L"]);
    config.R = std::stoi(config_map["R"]);
    config.A = std::stoi(config_map["A"]);
    std::cout << "L: " << config.L << std::endl;
    std::cout << "R: " << config.R << std::endl;
    std::cout << "A: " << config.A << std::endl;
    config.index_type = config_map["index_type"];
    config.optimize = config_map["optimize"] == "true";
    config.batch = config_map["batch"] == "true";
    config.level = parse_list(config_map["level"]);
    config.efs = parse_list(config_map["efs"]);
    config.numa_enabled = config_map["numa_enabled"] == "true";
    config.num_numa_nodes = std::stoi(config_map["num_numa_nodes"]);

    std::cout << std::endl;
    std::cout << "topK: " << config.topK << std::endl;
    std::cout << "index_save_or_load: " << config.index_save_or_load << std::endl;
    std::cout << "save_types: " << config.save_types << std::endl;
    std::cout << "index_path: " << config.index_path << std::endl;
    std::cout << "searcher_path: " << config.searcher_path << std::endl;
    std::cout << "dataset_path: " << config.dataset_path << std::endl;
    std::cout << "dataset_type: " << config.dataset_type << std::endl;
    std::cout << "metric: " << config.metric << std::endl;
    std::cout << "index_type: " << config.index_type << std::endl;
    std::cout << "optimize: " << config_map["optimize"] << std::endl;
    std::cout << "batch: " << config_map["batch"] << std::endl;
    std::cout << "level: " << config_map["level"] << std::endl;
    std::cout << "efs: " << config_map["efs"] << std::endl;
    std::cout << "numa_enabled: " << config_map["numa_enabled"] << std::endl;
    std::cout << "num_numa_nodes: " << config_map["num_numa_nodes"] << std::endl;
    std::cout << std::endl;

    return config;
}

bool checkFileExist(const char* path) {
    std::ifstream file(path);
    if (!file.is_open()) {
        std::cout << "[ERROR] file: " << std::string(path) << " not found" << std::endl;
        return false;
    }
    file.close();
    return true;
}

std::unique_ptr<KBest> best = nullptr;

pthread_mutex_t mtx;
pthread_cond_t cond;
int ready = 0;  

struct KBestSearchParams {
    int n;
    const float* x;
    int k;
    float* distance;
    int64_t* labels;
    int dim;
};

void* ThreadSearch(void* arg) {
    KBestSearchParams* params = static_cast<KBestSearchParams*>(arg);
    pthread_mutex_lock(&mtx);
    while (ready == 0) {  
        pthread_cond_wait(&cond, &mtx);
    }
    pthread_mutex_unlock(&mtx);
    for (int i = 0; i < params->n; i++) {
        best->Search(1, params->x + i * params->dim, params->k, params->distance + i * params->k,
                     params->labels + i * params->k, 1);
    }
    return nullptr;
}

int main(int argc, char** argv) {
    if (argc < 5) {
        cerr << "Usage: " << argv[0] << " <thread_num> <query_mode> <query_batch_size> <config_path>\n";
        exit(1);
    }

    int num_thread_for_use = stoi(argv[1]);
    int query_mode = stoi(argv[2]);
    int query_batch_size = stoi(argv[3]);
    std::string configFile = argv[4];

    if (query_mode != 1 && query_mode != 2) {
        std::cout << "[ERROR] Currently we only support query_mode [1,2], input query_mode: " << query_mode << "\n";
        return -1;
    }

    std::ifstream file(configFile);
    if (!file.is_open()) {
        std::cout << "[ERROR] config file not found" << std::endl;
        return -1;
    }

    Config config = read_config(configFile);

    const int consecutiveLog = 20;
    int numAllocParts = 0;
    float* xb_;
    float* xq_;
    int64_t* gt_ids_;
    int32_t nb_, nq_, dim_, gt_closest;
    printf("start: \n");
    if (!checkFileExist(config.dataset_path.c_str())) { return -1; }                                                    // read the dataset; this example uses the .hdf5 format
    if (config.dataset_type == "hdf5") {
        loadHDF(config.dataset_path, nb_, nq_, dim_, gt_closest, xb_, xq_, gt_ids_, consecutiveLog, numAllocParts);
    } else {
        cerr << "error, not recognized dataset type: " << config.dataset_type << ", possible are numpy and hdf5.\n";
        exit(1);
    }

    if (config.metric == "IP") {                                                                                        // check the dataset metric; this example uses L2
        for (int i = 0; i < nb_; i++) {
            normalize(xb_ + i * dim_, dim_);
        }
        for (int i = 0; i < nq_; i++) {
            normalize(xq_ + i * dim_, dim_);
        }
    } else if (config.metric != "L2") {
        cerr << "error, not recognized metric: " << config.metric << ", possible are L2 and IP.\n";
        exit(1);
    }
    printf("After loading data: \n");

    int R = 50;
    int numIters = config.iter_num;
    int closestNum = config.topK;

    NUMA_ENABLED = config.numa_enabled;
    num_numa_nodes = config.num_numa_nodes;

    uint8_t *dataPtr = nullptr;
    size_t dataLength = 0;

    if (config.index_save_or_load == "save") {
        for (auto level : config.level) {
            NUMA_ENABLED = false;
            for (int i = 0; i < 1; i++) {
                auto best_build = std::make_unique<KBest>(dim_, config.R, config.L, config.A, config.metric.c_str(), config.index_type);
                std::cout << "index building ..." << std::endl;
                int saveResult = best_build->Add(nb_, xb_, consecutiveLog, level);                                      // build the graph index
                if (config.save_types == "save_searcher") {
                    printf("Searcher is Saving. \n");
                    saveResult = best_build->BuildSearcher();                                                           // build the searcher
                    saveResult = best_build->Save(config.searcher_path.c_str());                                        // save the searcher
                }
                else if (config.save_types == "save_graph"){
                    //save graph
                    printf("Graph is Saving. \n");
                    saveResult = best_build->SaveGraph(config.index_path.c_str());                                      // save the graph index
                }
                else {
                    printf("Index serialize. \n");
                    saveResult = best_build->BuildSearcher();  
                    saveResult = best_build->Serialize(dataPtr, dataLength);                                            // serialize the index
                }

                if (saveResult == -1) {
                    return -1;
                }
            }
            NUMA_ENABLED = config.numa_enabled;

        }
    }

    for (auto level : config.level) {
        std::cout << std::endl;

        best = std::make_unique<KBest>(dim_, config.R, config.L, config.A, config.metric.c_str(), config.index_type);

        uint64_t timeTaken = 0;
        chrono::steady_clock::time_point startTime, endTime;
        startTime = chrono::steady_clock::now();

        int loadResult = 0;

        if (config.save_types == "save_searcher") {
            loadResult = best->Load(config.searcher_path.c_str());                                                      // load the searcher
        }
        else if (config.save_types == "save_graph") {
            loadResult = best->LoadGraph(config.index_path.c_str());                                                    // load the graph index
        }
        else {
            loadResult = best->Deserialize(dataPtr, dataLength);                                                        // deserialize the index
        }
        if (loadResult == -1) {
            return -1;
        }

        endTime = chrono::steady_clock::now();
        timeTaken = chrono::duration_cast<chrono::nanoseconds>(endTime - startTime).count();
        std::cout << "index built or read, time: " << (double)timeTaken / 1000 / 1000 / 1000 << "s\n";

        printf("After loading searcher: \n");

        float* distances = new float[nq_ * closestNum]();
        int64_t* labels = new int64_t[nq_ * closestNum]();

        int32_t num_batch = (query_batch_size == -1 ? 1 : (nq_ + query_batch_size - 1) / query_batch_size);

        int32_t base_num_queries = nq_ / num_thread_for_use;
        int32_t left = nq_ % num_thread_for_use;
        std::vector<int> thread_offset(num_thread_for_use + 1, 0);
        for (int i = 0; i < num_thread_for_use; ++i) {
            if (i < left) {
                thread_offset[i + 1] = base_num_queries + 1;
            } else {
                thread_offset[i + 1] = base_num_queries;
            }
        }

        for (int i = 0; i < num_thread_for_use; ++i) {
            thread_offset[i + 1] += thread_offset[i];
        }

        printf("start search: \n");
        for (auto ef : config.efs) {
            best->SetEf(ef);                                                                                            // set the candidate list size used during search
            double totalTime = 0;
            vector<double> Times;
            double all_qps = 0.0;

            int used_numIters = numIters + 1;

            for (int iter = 0; iter < used_numIters; iter++) {
                std::vector<uint64_t> query_batch_time;
                if (query_mode == 1) {
                    if (query_batch_size == -1) {
                        startTime = chrono::steady_clock::now();
                        best->Search(nq_, xq_, closestNum, distances, labels, num_thread_for_use);                      // search
                        endTime = chrono::steady_clock::now();
                        timeTaken = chrono::duration_cast<chrono::nanoseconds>(endTime - startTime).count();
                        query_batch_time.push_back(timeTaken);
                    } else {
                        int32_t st = 0, en = 0, this_batch_size = 0;
                        for (int batch_id = 0; batch_id < num_batch; batch_id++) {
                            st = batch_id * query_batch_size;
                            en = std::min(st + query_batch_size, nq_);
                            this_batch_size = en - st;
                            startTime = chrono::steady_clock::now();
                            best->Search(this_batch_size, xq_ + st * dim_, closestNum, distances + st * closestNum,
                                         labels + st * closestNum, num_thread_for_use);
                            endTime = chrono::steady_clock::now();
                            timeTaken = chrono::duration_cast<chrono::nanoseconds>(endTime - startTime).count();
                            query_batch_time.push_back(timeTaken);
                        }
                    }
                } else {
                    pthread_mutex_init(&mtx, NULL);
                    pthread_cond_init(&cond, NULL);
                    ready = false;
                    std::vector<pthread_t> threads(num_thread_for_use);
                    std::vector<KBestSearchParams> params(num_thread_for_use);
                    for (int i = 0; i < num_thread_for_use; ++i) {
                        params[i].n = nq_;
                        params[i].x = xq_;
                        params[i].k = closestNum;
                        params[i].dim = dim_;
                        params[i].distance = distances;
                        params[i].labels = labels;
                        pthread_create(&threads[i], nullptr, ThreadSearch, &params[i]);
                    }
                    sleep(2);
                    pthread_mutex_lock(&mtx);

                    ready = 1;  
                    pthread_cond_broadcast(&cond);  
                    startTime = chrono::steady_clock::now();
                    pthread_mutex_unlock(&mtx);

                    for (int i = 0; i < num_thread_for_use; i++) {
                        pthread_join(threads[i], NULL);
                    }
                    endTime = chrono::steady_clock::now();
                    timeTaken = chrono::duration_cast<chrono::nanoseconds>(endTime - startTime).count();

                    pthread_mutex_destroy(&mtx);
                    pthread_cond_destroy(&cond);
                    query_batch_time.push_back(timeTaken);
                }
                uint64_t single_iter_time = 0;
                for (auto bt : query_batch_time) {
                    single_iter_time += bt;
                }
                double timeSeconds = (double)single_iter_time / 1000 / 1000 / 1000;

                if (iter != 0) {
                    totalTime += timeSeconds;
                    std::cout << "  runs [" << iter << "/" << numIters << "], qps: " << (double)nq_ / timeSeconds
                              << std::endl;
                    all_qps += (double)nq_ / timeSeconds;
                }
            }
            int found = 0;
            int gtWanted = 10;
            for (int i = 0; i < nq_; i++) {
                found += intersect(gt_ids_ + gt_closest * i, labels + closestNum * i, gtWanted, closestNum, i);
            }

            double avgTime = totalTime / (double)numIters;
            double qps = (double)nq_ / avgTime;                                                                         // compute QPS
            double recall = (double)found / nq_ / gtWanted;                                                             // compute recall

            std::cout << "level: " << level << " candListSize: " << ef << std::endl;
            std::cout << "recall: " << recall << " qps: " << all_qps / numIters << std::endl;
        }
        printf("finished: \n");

        delete[] distances;
        delete[] labels;
    }

    delete[] xq_;
    delete[] xb_;
    delete[] gt_ids_;
    return 0;
}

void* hdf5_read(const std::string& file_name, const std::string& dataset_name, H5T_class_t dataset_class,
                int32_t& d_out, int32_t& n_out) {
    hid_t file, dataset, datatype, dataspace, memspace;
    H5T_class_t t_class;
    hsize_t dimsm[3];
    hsize_t dims_out[2];
    hsize_t count[2];
    hsize_t offset[2];
    hsize_t count_out[3];
    hsize_t offset_out[3];
    void* data_out = nullptr;

    file = H5Fopen(file_name.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);
    dataset = H5Dopen2(file, dataset_name.c_str(), H5P_DEFAULT);
    datatype = H5Dget_type(dataset);
    t_class = H5Tget_class(datatype);
    dataspace = H5Dget_space(dataset);
    H5Sget_simple_extent_dims(dataspace, dims_out, nullptr);

    n_out = dims_out[0];
    d_out = dims_out[1];
    offset[0] = offset[1] = 0;
    count[0] = dims_out[0];
    count[1] = dims_out[1];
    H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, offset, nullptr, count, nullptr);

    dimsm[0] = dims_out[0];
    dimsm[1] = dims_out[1];
    dimsm[2] = 1;
    memspace = H5Screate_simple(3, dimsm, nullptr);

    offset_out[0] = offset_out[1] = offset_out[2] = 0;
    count_out[0] = dims_out[0];
    count_out[1] = dims_out[1];
    count_out[2] = 1;
    H5Sselect_hyperslab(memspace, H5S_SELECT_SET, offset_out, nullptr, count_out, nullptr);

    switch (t_class) {
        case H5T_INTEGER:
            data_out = new int32_t[dims_out[0] * dims_out[1]];
            H5Dread(dataset, H5T_NATIVE_INT32, memspace, dataspace, H5P_DEFAULT, data_out);

            break;
        case H5T_FLOAT:
            data_out = new float[dims_out[0] * dims_out[1]];
            H5Dread(dataset, H5T_NATIVE_FLOAT, memspace, dataspace, H5P_DEFAULT, data_out);
            break;
        default:
            printf("Illegal dataset class type\n");
            break;
    }

    H5Tclose(datatype);
    H5Dclose(dataset);
    H5Sclose(dataspace);
    H5Sclose(memspace);
    H5Fclose(file);

    return data_out;
}
The content of sift.config is as follows:
iter_num=10
topK=10
L=100
R=50
A=60
index_save_or_load=save
index_path=./graph_indices/sift-128-euclidean_KGN-RNN_R_50_L_100.kgn
searcher_path=./searcher_indices/sift-128-euclidean_KGN-RNN_R_50_L_100.ksn
dataset_path=./datasets/sift-128-euclidean.hdf5
dataset_type=hdf5
metric=L2
index_type=RNNDescent
optimize=true
batch=true
numa_enabled=false
num_numa_nodes=4
level=2
efs=72
save_types=save_graph
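
As noted in step 2, "num_numa_nodes" should match the number of NUMA nodes of the machine running the test (the provided sift.config sets it to 4 with "numa_enabled" disabled). A quick way to check the actual value, assuming numactl or lscpu is available:

numactl --hardware | grep available
lscpu | grep -i "NUMA node(s)"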