2021-07-21


1. Model loading

net.cpp defines two entry points for model reading: int Net::load_param(const DataReader& dr) and int Net::load_model(const DataReader& dr). First, load_param:

int Net::load_param(const DataReader& dr)
{
// as in ncnn's net.cpp: read one value from the DataReader or bail out of load_param
#define SCAN_VALUE(fmt, v)                \
    if (dr.scan(fmt, &v) != 1)            \
    {                                     \
        NCNN_LOGE("parse " #v " failed"); \
        return -1;                        \
    }

    // read the magic number (first line of the param file)
    int magic = 0;
    SCAN_VALUE("%d", magic)
    if (magic != 7767517)
    {
        NCNN_LOGE("param is too old, please regenerate");
        return -1;
    }

    // read the layer count and blob count (second line)
    int layer_count = 0;
    int blob_count = 0;
    SCAN_VALUE("%d", layer_count)
    SCAN_VALUE("%d", blob_count)
    if (layer_count <= 0 || blob_count <= 0)
    {
        NCNN_LOGE("invalid layer_count or blob_count");
        return -1;
    }

    d->layers.resize((size_t)layer_count);
    d->blobs.resize((size_t)blob_count);
    
    // per-layer parameters are parsed into a ParamDict (from the third line to the end)
    ParamDict pd;
    // iterate over all layers, parsing each layer's type (layer_type), name (layer_name),
    // number of inputs (bottom_count) and number of outputs (top_count)
    int blob_index = 0;
    for (int i = 0; i < layer_count; i++)
    {
        char layer_type[256];
        char layer_name[256];
        int bottom_count = 0;
        int top_count = 0;
        SCAN_VALUE("%255s", layer_type)
        SCAN_VALUE("%255s", layer_name)
        SCAN_VALUE("%d", bottom_count)
        SCAN_VALUE("%d", top_count)
        // create the layer according to layer_type
        Layer* layer = create_layer(layer_type);
        if (!layer)
        {
            layer = create_custom_layer(layer_type);
        }
        if (!layer)
        {
            NCNN_LOGE("layer %s not exists or registered", layer_type);
            clear();
            return -1;
        }
        // set the layer's type and name
        layer->type = std::string(layer_type);
        layer->name = std::string(layer_name);
        
        // the layer's inputs (bottom blobs)
        layer->bottoms.resize(bottom_count);
        // iterate over the bottom blobs
        for (int j = 0; j < bottom_count; j++)
        {
            char bottom_name[256];
            SCAN_VALUE("%255s", bottom_name)

            // look up the bottom blob's index by bottom_name
            int bottom_blob_index = find_blob_index_by_name(bottom_name);
            // if no blob with this name exists yet,
            // insert a new blob named bottom_name into the blobs array
            if (bottom_blob_index == -1)
            {
                // take the blob at position blob_index
                Blob& blob = d->blobs[blob_index];
                // record its index
                bottom_blob_index = blob_index;
                // record its name
                blob.name = std::string(bottom_name);
                // advance the index
                blob_index++;
            }
            // the blob that feeds this layer
            Blob& blob = d->blobs[bottom_blob_index];

            // record the data-flow relationship on the blob:
            // layer i consumes this blob as input
            blob.consumer = i;
            // and this blob is the j-th input of layer i
            layer->bottoms[j] = bottom_blob_index;
        }

        // parse the layer's output (top) blobs, similar to the inputs
        layer->tops.resize(top_count);
        for (int j = 0; j < top_count; j++)
        {
            Blob& blob = d->blobs[blob_index];

            char blob_name[256];
            SCAN_VALUE("%255s", blob_name)

            blob.name = std::string(blob_name);

            blob.producer = i;
            
            layer->tops[j] = blob_index;

            blob_index++;
        }

        // parse the parameter dictionary that follows the blob names
        int pdlr = pd.load_param(dr);
        if (pdlr != 0)
        {
            NCNN_LOGE("ParamDict load_param %d %s failed", i, layer->name.c_str());
            continue;
        }

        // set bottom and top shape hints
        layer->bottom_shapes.resize(bottom_count);
        for (int j = 0; j < bottom_count; j++)
        {
            layer->bottom_shapes[j] = d->blobs[layer->bottoms[j]].shape;
        }

        layer->top_shapes.resize(top_count);
        for (int j = 0; j < top_count; j++)
        {
            layer->top_shapes[j] = d->blobs[layer->tops[j]].shape;
        }

        // let the layer load its own parameters from the dict
        int lr = layer->load_param(pd);
        if (lr != 0)
        {
            NCNN_LOGE("layer load_param %d %s failed", i, layer->name.c_str());
            continue;
        }

        d->layers[i] = layer;
    }

    d->update_input_output_indexes();
    d->update_input_output_names();

    #undef SCAN_VALUE
    return 0;
}
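
To make the parsing order concrete, here is a minimal, purely illustrative param file of the kind this function consumes (the layer names, blob names and key=value entries are made up): line 1 is the magic number, line 2 is layer_count and blob_count, and every following line is layer_type, layer_name, bottom_count, top_count, the bottom blob names, the top blob names, and the key=value pairs that ParamDict::load_param then reads.

7767517
3 3
Input            data   0 1 data  0=227 1=227 2=3
Convolution      conv1  1 1 data  conv1 0=64 1=3 3=2 5=1 6=1728
ReLU             relu1  1 1 conv1 relu1

Tracing the loop with this file: "data" is created when it appears as Input's top (blob_index 0, producer = 0), found again by name when Convolution lists it as a bottom (consumer = 1), and after "conv1" and "relu1" are created as tops, blob_index ends at 3, matching blob_count.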

Next, load_model, which reads each layer's weights from the .bin file:

int Net::load_model(const DataReader& dr)
{
    if (d->layers.empty())
    {
        NCNN_LOGE("network graph not ready");
        return -1;
    }

    int layer_count = (int)d->layers.size();

    // load file
    int ret = 0;

    ModelBinFromDataReader mb(dr);

    for (int i = 0; i < layer_count; i++)
    {
        Layer* layer = d->layers[i];
        //Here we found inconsistent content in the parameter file.
        if (!layer)
        {
            NCNN_LOGE("load_model error at layer %d, parameter file has inconsistent content.", i);
            ret = -1;
            break;
        }

        int lret = layer->load_model(mb);
        if (lret != 0)
        {
            NCNN_LOGE("layer load_model %d failed", i);
            ret = -1;
            break;
        }

        // opt is the Net's Option member; some ncnn versions derive a per-layer
        // copy (opt1) here before creating the pipeline
        Option opt1 = opt;
        int cret = layer->create_pipeline(opt1);
        if (cret != 0)
        {
            NCNN_LOGE("layer create_pipeline %d failed", i);
            ret = -1;
            break;
        }
    }

    fuse_network();
    return ret;
}
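
The dr argument in both functions abstracts the data source. As a simplified sketch of how the path-based overloads (such as load_param(parampath) used in section 2 below) end up here (condensed from ncnn's net.cpp and datareader.h; the real code splits this across more overloads and details differ between versions):

int Net::load_param(const char* protopath)
{
    FILE* fp = fopen(protopath, "rb");
    if (!fp)
    {
        NCNN_LOGE("fopen %s failed", protopath);
        return -1;
    }

    // wrap the FILE* in a DataReader and forward to load_param(const DataReader&)
    DataReaderFromStdio dr(fp);
    int ret = load_param(dr);
    fclose(fp);
    return ret;
}

load_model(const char*) follows the same pattern, and the same mechanism allows loading directly from memory through DataReaderFromMemory.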

2. Inference flow

net.cpp defines the two functions load_param and load_model. The former reads the network structure into the net layer by layer, storing each layer's parameters in a ParamDict and wiring the layers together through the names of their bottom and top blobs.

layer.h declares load_param(pd) and load_model(mb) as virtual functions of Layer, which every operator overrides. When testing a model, the call xxxnet.load_param(path) first runs Net::load_param to read the network structure; based on layer_type it creates the corresponding operator under src/layer and calls that operator's load_param(pd) to read its layer-specific parameters. The operators' load_model(mb) overrides are called later, from Net::load_model, to read the actual weights.
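
A minimal sketch of that override mechanism, using a made-up operator MyLayer (the parameter key 0 and the 16-element weight are purely illustrative; the virtual function signatures follow layer.h):

#include "layer.h"

class MyLayer : public ncnn::Layer
{
public:
    // called from Net::load_param through the ParamDict filled for this layer
    virtual int load_param(const ncnn::ParamDict& pd)
    {
        scale = pd.get(0, 1.f); // key 0, default 1.0
        return 0;
    }

    // called from Net::load_model to read this layer's weights from the .bin file
    virtual int load_model(const ncnn::ModelBin& mb)
    {
        weight = mb.load(16, 1); // 16 values, type 1 = raw float32
        if (weight.empty())
            return -100;
        return 0;
    }

    float scale;
    ncnn::Mat weight;
};

Built-in operators under src/layer are found by create_layer(layer_type); a layer like this one would be registered with net.register_custom_layer and picked up by create_custom_layer in the code above.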

ncnn::Net xxxnet;
xxxnet.load_param(parampath);
xxxnet.load_model(binpath);
ncnn::Extractor ex = xxxnet.create_extractor();
ex.input("data", in);       // the name must match the input blob of the first layer in the param file
ex.extract(blob_name, out);

The network is defined as an ncnn::Net object: its layers vector stores the information of every layer, and its blobs vector (the data-exchange structure) stores the network's intermediate data. At inference time, the param and bin files are first used to instantiate an ncnn::Net; from that net an ncnn::Extractor is created. Inside the Extractor the net is held as const, which guarantees that important state such as blobs and layers cannot be modified during computation.
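
Putting the pieces together, a minimal end-to-end sketch (the file names, the "data"/"prob" blob names, the 224x224 input size and the mean values are assumptions that depend on the actual model):

#include "net.h"

int run_xxxnet(const unsigned char* bgr_pixels, int w, int h)
{
    ncnn::Net xxxnet;
    // parse the graph (layers/blobs), as in section 1
    if (xxxnet.load_param("xxxnet.param"))
        return -1;
    // read the weights via each operator's load_model(mb)
    if (xxxnet.load_model("xxxnet.bin"))
        return -1;

    // wrap and resize the raw BGR pixels into an ncnn::Mat
    ncnn::Mat in = ncnn::Mat::from_pixels_resize(bgr_pixels, ncnn::Mat::PIXEL_BGR, w, h, 224, 224);
    const float mean_vals[3] = {104.f, 117.f, 123.f}; // assumed preprocessing
    in.substract_mean_normalize(mean_vals, 0);

    // the extractor holds the net as const and keeps its own intermediate blob mats
    ncnn::Extractor ex = xxxnet.create_extractor();
    ex.input("data", in);    // must match the input blob name in the param file

    ncnn::Mat out;
    ex.extract("prob", out); // runs only the layers needed to produce "prob"
    return 0;
}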

3. Instruction set quick-reference table

over~


I had not fully worked out the second half of load_param, where the bottom and top blobs are read and written.
OK, after going through it again, it is clear now.

Tasks
Debug ResNet
Learn how to make scripts more efficient
