Using SmartRedis Clients

2.1 Python Client

In this section, examples are presented using the SmartRedis Python API to interact with the RedisAI tensor, model, and script data types. An example of using the SmartRedis DataSet API is also provided.

Note

The Python API examples are written to connect to a database at 127.0.0.1:6379. When running these examples, use the address and port of your Redis instance.

Note

The Python API examples are written to connect to a non-cluster Redis database. To connect to a Redis cluster, set cluster=True in the Client constructor call.
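
As a minimal sketch, assuming the same example address used below, connecting to a cluster changes only the cluster flag passed to the Client constructor:

from smartredis import Client

# Connect a SmartRedis client to a Redis cluster instead of a
# single (non-cluster) Redis instance
db_address = "127.0.0.1:6379"
client = Client(address=db_address, cluster=True)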

Tensors

The Python client can send tensors to and receive tensors from the Redis database, where they are stored as RedisAI data structures. Python client API functions that operate on tensor data accept and return NumPy arrays; no other data types are required.

import numpy as np
from smartredis import Client

# Connect a SmartRedis client to Redis database
db_address = "127.0.0.1:6379"
client = Client(address=db_address, cluster=False)

# Send a 2D tensor to the database
key = "2D_array"
array = np.random.randint(-10, 10, size=(10, 10))
client.put_tensor(key, array)

# Retrieve the tensor
returned_array = client.get_tensor("2D_array")

Datasets

The Python client can store and retrieve tensors and metadata in datasets. For further information about datasets, please refer to the Dataset section of the Data Structures documentation page.

The code below shows how to store and retrieve tensors which belong to a DataSet.

import numpy as np

from smartredis import Client, Dataset

# Create two arrays to store in the DataSet
data_1 = np.random.randint(-10, 10, size=(10,10))
data_2 = np.random.randint(-10, 10, size=(20, 8, 2))

# Create a DataSet object and add the two sample tensors
dataset = Dataset("test-dataset")
dataset.add_tensor("tensor_1", data_1)
dataset.add_tensor("tensor_2", data_2)

# Connect SmartRedis client to Redis database
db_address = "127.0.0.1:6379"
client = Client(address=db_address, cluster=False)

# Place the DataSet into the database
client.put_dataset(dataset)

# Retrieve the DataSet from the database
rdataset = client.get_dataset("test-dataset")

# Retrieve a tensor from inside of the fetched
# DataSet
rdata_1 = rdataset.get_tensor("tensor_1")
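
DataSets can also carry metadata alongside tensors. The following is a minimal sketch of attaching and reading back a metadata scalar field; the field name "step" is illustrative, and the snippet assumes the DataSet metadata methods add_meta_scalar and get_meta_scalars described in the Data Structures documentation:

import numpy as np
from smartredis import Client, Dataset

# Build a DataSet with one tensor and a metadata field
dataset = Dataset("metadata-dataset")
dataset.add_tensor("tensor_1", np.random.randint(-10, 10, size=(10, 10)))
dataset.add_meta_scalar("step", 1)
dataset.add_meta_scalar("step", 2)

# Store the DataSet and read the metadata field back
client = Client(address="127.0.0.1:6379", cluster=False)
client.put_dataset(dataset)
rdataset = client.get_dataset("metadata-dataset")
steps = rdataset.get_meta_scalars("step")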

Models

The SmartRedis clients allow users to set and use a PyTorch, ONNX, TensorFlow, or TensorFlow Lite model in the database. Models can be sent to the database directly from memory or from a file. The code below illustrates how a jit-traced PyTorch model can be used with the Python client library.

import io

import torch
import torch.nn as nn

from smartredis import Client

# Taken from https://pytorch.org/docs/master/generated/torch.jit.trace.html
class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.conv = nn.Conv2d(1, 1, 3)

    def forward(self, x):
        return self.conv(x)

net = Net()
example_forward_input = torch.rand(1, 1, 3, 3)

# Trace a module (implicitly traces `forward`) and construct a
# `ScriptModule` with a single `forward` method
module = torch.jit.trace(net, example_forward_input)

# Create a buffer of the traced model
buffer = io.BytesIO()
torch.jit.save(module, buffer)
model = buffer.getvalue()

# Connect a SmartRedis client and set the model in the database
db_address = "127.0.0.1:6379"
client = Client(address=db_address, cluster=False)
client.set_model("torch_cnn", model, "TORCH", "CPU")

# Retrieve the model and verify that the retrieved
# model matches the original model.
returned_model = client.get_model("torch_cnn")
assert model == returned_model

# Setup input tensor
data = torch.rand(1, 1, 3, 3).numpy()
client.put_tensor("torch_cnn_input", data)

# Run model and get output
client.run_model("torch_cnn", inputs=["torch_cnn_input"], outputs=["torch_cnn_output"])
out_data = client.get_tensor("torch_cnn_output")

Models can also be set from a file, as in the code below.

import os

import torch
import torch.nn as nn

from smartredis import Client


# taken from https://pytorch.org/docs/master/generated/torch.jit.trace.html
class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.conv = nn.Conv2d(1, 1, 3)

    def forward(self, x):
        return self.conv(x)

# Connect a SmartRedis client
db_address = "127.0.0.1:6379"
client = Client(address=db_address, cluster=False)

try:
    net = Net()
    example_forward_input = torch.rand(1, 1, 3, 3)
    # Trace a module (implicitly traces `forward`) and construct a
    # `ScriptModule` with a single `forward` method
    module = torch.jit.trace(net, example_forward_input)

    # Save the traced model to a file
    torch.jit.save(module, "./torch_cnn.pt")

    # Set the model in the Redis database from the file
    client.set_model_from_file("file_cnn", "./torch_cnn.pt", "TORCH", "CPU")

    # Put a tensor in the database as a test input
    data = torch.rand(1, 1, 3, 3).numpy()
    client.put_tensor("torch_cnn_input", data)

    # Run model and retrieve the output
    client.run_model("file_cnn", inputs=["torch_cnn_input"], outputs=["torch_cnn_output"])
    out_data = client.get_tensor("torch_cnn_output")
finally:
    os.remove("torch_cnn.pt")

Scripts

Scripts are a way to store Python-executable code in the database. The Python client can send scripts to the database from a file or directly from memory.

As an example, the code below illustrates how a function can be defined and sent to the database on the fly, without storing it in an intermediate file.

import numpy as np
import torch

from smartredis import Client

def two_to_one(data, data_2):
    """Sample torchscript script that returns the
    highest elements in both arguments

    Two inputs to one output
    """
    # return the highest element
    merged = torch.cat((data, data_2))
    return merged.max(1)[0]

# Connect a SmartRedis client to the Redis database
db_address = "127.0.0.1:6379"
client = Client(address=db_address, cluster=False)

# Generate some test data to feed to the two_to_one function
data = np.array([[1, 2, 3, 4]])
data_2 = np.array([[5, 6, 7, 8]])

# Put the test data into the Redis database
client.put_tensor("script-data-1", data)
client.put_tensor("script-data-2", data_2)

# Put the function into the Redis database
client.set_function("two-to-one", two_to_one)

# Run the script using the test data
client.run_script(
    "two-to-one",
    "two_to_one",
    ["script-data-1", "script-data-2"],
    ["script-multi-out-output"],
)

# Retrieve the output of the test function
out = client.get_tensor("script-multi-out-output")

The code below shows how to set a script from a file. Running the script set from file uses the same API calls as the example shown above.

import os.path as osp
from smartredis import Client

# Construct a string holding the script file location
file_path = osp.dirname(osp.abspath(__file__))
script_path = osp.join(file_path, "./data_processing_script.txt")

# Connect to the Redis database
db_address = "127.0.0.1:6379"
client = Client(address=db_address, cluster=False)

# Place the script in the database
client.set_script_from_file("test-script-file", script_path)

The content of the script file has to be written in Python. For the example above, the file data_processing_script.txt looks like this:

def pre_process(inp):
    mean = torch.zeros(1).float().to(inp.device)
    mean[0] = 2.0
    temp = inp.float() * mean
    return temp
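
Running a script that was set from a file uses the same run_script call as the in-memory example above. A minimal sketch, reusing the client and the test-script-file key from the previous listing with illustrative tensor keys, might look like:

import numpy as np

# Store an input tensor, run the pre_process function from the
# script stored under "test-script-file", and fetch the result
client.put_tensor("processing-input", np.random.rand(1, 1, 3, 3))
client.run_script(
    "test-script-file",
    "pre_process",
    ["processing-input"],
    ["processing-output"],
)
processed = client.get_tensor("processing-output")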

2.2 C++ Client

In this section, examples are presented using the SmartRedis C++ API to interact with the RedisAI tensor, model, and script data types. An example of using the SmartRedis DataSet API is also provided.

Note

The C++ API examples rely on the SSDB environment variable being set to the address and port of the Redis database.

Note

The C++ API examples are written to connect to a non-cluster Redis database. To connect to a Redis cluster, pass true to the Client constructor instead of false.

Tensors

The following example shows how to send and receive a tensor using the SmartRedis C++ client API.

#include "client.h"
#include <vector>
#include <string>

int main(int argc, char* argv[]) {

    // Initialize tensor dimensions
    size_t dim1 = 3;
    size_t dim2 = 2;
    size_t dim3 = 5;
    std::vector<size_t> dims = {3, 2, 5};

    // Initialize a tensor to random values.  Note that a dynamically
    // allocated tensor via malloc is also useable with the client
    // API.  The std::vector is used here for brevity.
    size_t n_values = dim1 * dim2 * dim3;
    std::vector<double> input_tensor(n_values, 0);
    for(size_t i=0; i<n_values; i++)
        input_tensor[i] = 2.0*rand()/RAND_MAX - 1.0;

    // Initialize a SmartRedis client
    SmartRedis::Client client(false);

    // Put the tensor in the database
    std::string key = "3d_tensor";
    client.put_tensor(key, input_tensor.data(), dims,
                      SmartRedis::TensorType::dbl,
                      SmartRedis::MemoryLayout::contiguous);

    // Retrieve the tensor from the database using the unpack feature.
    std::vector<double> unpack_tensor(n_values, 0);
    client.unpack_tensor(key, unpack_tensor.data(), {n_values},
                        SmartRedis::TensorType::dbl,
                        SmartRedis::MemoryLayout::contiguous);

    // Print the values retrieved with the unpack feature
    std::cout<<"Comparison of the sent and "\
                "retrieved (via unpack) values: "<<std::endl;
    for(size_t i=0; i<n_values; i++)
        std::cout<<"Sent: "<<input_tensor[i]<<" "
                 <<"Received: "<<unpack_tensor[i]<<std::endl;


    // Retrieve the tensor from the database using the get feature.
    SmartRedis::TensorType get_type;
    std::vector<size_t> get_dims;
    void* get_tensor;
    client.get_tensor(key, get_tensor, get_dims, get_type,
                      SmartRedis::MemoryLayout::nested);

    // Print the values retrieved with the get feature
    std::cout<<"Comparison of the sent and "\
                "retrieved (via get) values: "<<std::endl;
    for(size_t i=0, c=0; i<dims[0]; i++)
        for(size_t j=0; j<dims[1]; j++)
            for(size_t k=0; k<dims[2]; k++, c++) {
                std::cout<<"Sent: "<<input_tensor[c]<<" "
                         <<"Received: "
                         <<((double***)get_tensor)[i][j][k]<<std::endl;
            }

    return 0;
}

DataSets

The C++ client can store and retrieve tensors and metadata in datasets. For further information about datasets, please refer to the Dataset section of the Data Structures documentation page.

The code below shows how to store and retrieve tensors and metadata which belong to a DataSet.

#include "client.h"
#include <vector>
#include <string>

int main(int argc, char* argv[]) {

    // Initialize tensor dimensions
    size_t dim1 = 3;
    size_t dim2 = 2;
    size_t dim3 = 5;
    size_t n_values = dim1 * dim2 * dim3;
    std::vector<size_t> dims = {3, 2, 5};

    // Initialize two tensors to random values
    std::vector<double> tensor_1(n_values, 0);
    std::vector<int64_t> tensor_2(n_values, 0);

    for(size_t i=0; i<n_values; i++) {
        tensor_1[i] = 2.0*rand()/RAND_MAX - 1.0;
        tensor_2[i] = rand();
    }

    // Initialize three metadata values we will add
    // to the DataSet
    uint32_t meta_scalar_1 = 1;
    uint32_t meta_scalar_2 = 2;
    int64_t meta_scalar_3 = 3;

    // Initialize a SmartRedis client
    SmartRedis::Client client(false);

    // Create a DataSet
    SmartRedis::DataSet dataset("example_dataset");

    // Add tensors to the DataSet
    dataset.add_tensor("tensor_1", tensor_1.data(), dims,
                       SmartRedis::TensorType::dbl,
                       SmartRedis::MemoryLayout::contiguous);

    dataset.add_tensor("tensor_2", tensor_2.data(), dims,
                       SmartRedis::TensorType::int64,
                       SmartRedis::MemoryLayout::contiguous);

    // Add metadata scalar values to the DataSet
    dataset.add_meta_scalar("meta_field_1", &meta_scalar_1,
                            SmartRedis::MetaDataType::uint32);
    dataset.add_meta_scalar("meta_field_1", &meta_scalar_2,
                            SmartRedis::MetaDataType::uint32);
    dataset.add_meta_scalar("meta_field_2", &meta_scalar_3,
                            SmartRedis::MetaDataType::int64);


    // Put the DataSet in the database
    client.put_dataset(dataset);

    // Retrieve the DataSet from the database
    SmartRedis::DataSet retrieved_dataset =
        client.get_dataset("example_dataset");

    // Retrieve one of the tensors
    std::vector<int64_t> unpack_dataset_tensor(n_values, 0);
    retrieved_dataset.unpack_tensor("tensor_2",
                                    unpack_dataset_tensor.data(),
                                    {n_values},
                                    SmartRedis::TensorType::int64,
                                    SmartRedis::MemoryLayout::contiguous);

    // Print out the retrieved values
    std::cout<<"Comparing sent and received "\
               "values for tensor_2: "<<std::endl;

    for(size_t i=0; i<n_values; i++)
        std::cout<<"Sent: "<<tensor_2[i]<<" "
                 <<"Received: "
                 <<unpack_dataset_tensor[i]<<std::endl;

    //Retrieve a metadata field
    size_t get_n_meta_values;
    void* get_meta_values;
    SmartRedis::MetaDataType get_type;
    dataset.get_meta_scalars("meta_field_1",
                             get_meta_values,
                             get_n_meta_values,
                             get_type);

    // Print out the metadata field values
    for(size_t i=0; i<get_n_meta_values; i++)
        std::cout<<"meta_field_1 value "<<i<<" = "
                 <<((uint32_t*)get_meta_values)[i]<<std::endl;

    return 0;
}

Models

The following example shows how to store and use a DL model in the database with the C++ Client. The model is stored as a file in the ../../../common/mnist_data/ path relative to the compiled executable. Note that this example also sets and executes a preprocessing script.

#include "client.h"
#include <vector>
#include <fstream>

int main(int argc, char* argv[]) {

    // Initialize a vector that will hold input image tensor
    size_t n_values = 1*1*28*28;
    std::vector<float> img(n_values, 0);

    // Load the mnist image from a file
    std::string image_file = "../../../common/mnist_data/one.raw";
    std::ifstream fin(image_file, std::ios::binary);
    std::ostringstream ostream;
    ostream << fin.rdbuf();
    fin.close();

    const std::string tmp = ostream.str();
    std::memcpy(img.data(), tmp.data(), img.size()*sizeof(float));

    // Initialize a SmartRedis client to connect to the Redis database
    SmartRedis::Client client(false);

    // Use the client to set a model in the database from a file
    std::string model_key = "mnist_model";
    std::string model_file = "../../../common/mnist_data/mnist_cnn.pt";
    client.set_model_from_file(model_key, model_file, "TORCH", "CPU", 20);

    // Use the client to set a script in the database from a file
    std::string script_key = "mnist_script";
    std::string script_file = "../../../common/mnist_data/data_processing_script.txt";
    client.set_script_from_file(script_key, "CPU", script_file);

    // Declare keys that we will use in forthcoming client commands
    std::string in_key = "mnist_input";
    std::string script_out_key = "mnist_processed_input";
    std::string out_key = "mnist_output";

    // Put the tensor into the database that was loaded from file
    client.put_tensor(in_key, img.data(), {1,1,28,28},
                        SmartRedis::TensorType::flt,
                        SmartRedis::MemoryLayout::contiguous);

    // Run the preprocessing script on the input tensor
    client.run_script("mnist_script", "pre_process", {in_key}, {script_out_key});

    // Run the model using the output of the preprocessing script
    client.run_model("mnist_model", {script_out_key}, {out_key});

    // Retrieve the output of the model
    std::vector<float> result(10, 0);
    client.unpack_tensor(out_key, result.data(), {10},
                        SmartRedis::TensorType::flt,
                        SmartRedis::MemoryLayout::contiguous);

    // Print out the results of the model evaluation
    for(size_t i=0; i<result.size(); i++) {
        std::cout<<"Result["<<i<<"] = "<<result[i]<<std::endl;
    }

    return 0;
}

Scripts

The example in Models shows how to store and use a PyTorch script in the database with the C++ Client. The script is stored as a file in the ../../../common/mnist_data/ path relative to the compiled executable. Note that this example also sets and executes a PyTorch model.

Parallel (MPI) execution

In this example, the program shown in Models and Scripts is adapted to run in parallel using MPI. The functionality is the same, but this version shows how keys can be prefixed to prevent key collisions across MPI ranks. Note that only one model and one script are set; they are shared across all ranks.

For completeness, the pre-processing script source code is also shown.

C++ program

#include "client.h"
#include <mpi.h>

void run_mnist(const std::string& model_name,
               const std::string& script_name)
{
    // Get the MPI rank
    int rank;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    // Initialize a vector that will hold input image tensor
    size_t n_values = 1*1*28*28;
    std::vector<float> img(n_values, 0);

    // Load the mnist image from a file using MPI rank 0
    if(rank==0) {
        std::string image_file = "../../../common/mnist_data/one.raw";
        std::ifstream fin(image_file, std::ios::binary);
        std::ostringstream ostream;
        ostream << fin.rdbuf();
        fin.close();

        const std::string tmp = ostream.str();
        std::memcpy(img.data(), tmp.data(), img.size()*sizeof(float));
    }

    // Broadcast the image to all MPI ranks.  This is more efficient
    // than having all ranks load the same file.  This is specific
    // to this example.
    MPI_Bcast(img.data(), 28*28, MPI_FLOAT, 0, MPI_COMM_WORLD);
    MPI_Barrier(MPI_COMM_WORLD);

    if(rank==0)
        std::cout<<"All ranks have MNIST image"<<std::endl;

    // Declare keys that we will use in forthcoming client commands
    std::string in_key = "mnist_input_rank_" + std::to_string(rank);
    std::string script_out_key = "mnist_processed_input_rank_" +
                                 std::to_string(rank);
    std::string out_key = "mnist_output_rank_" + std::to_string(rank);

    // Initialize a Client object
    SmartRedis::Client client(false);

    // Put the image tensor on the database
    client.put_tensor(in_key, img.data(), {1,1,28,28},
                      SmartRedis::TensorType::flt,
                      SmartRedis::MemoryLayout::contiguous);

    // Run the preprocessing script
    client.run_script(script_name, "pre_process",
                      {in_key}, {script_out_key});

    // Run the model
    client.run_model(model_name, {script_out_key}, {out_key});

    // Get the result of the model
    std::vector<float> result(1*10);
    client.unpack_tensor(out_key, result.data(), {10},
                         SmartRedis::TensorType::flt,
                         SmartRedis::MemoryLayout::contiguous);

    // Print out the results of the model for Rank 0
    if(rank==0)
        for(size_t i=0; i<result.size(); i++)
            std::cout<<"Rank 0: Result["<<i<<"] = "<<result[i]<<std::endl;

    return;
}

int main(int argc, char* argv[]) {

    // Initialize the MPI comm world
    MPI_Init(&argc, &argv);

    // Retrieve the MPI rank
    int rank;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    // Set the model and script that will be used by all ranks
    // from MPI rank 0.
    if(rank==0) {
        SmartRedis::Client client(false);

        // Build model key, file name, and then set model
        // from file using client API
        std::string model_key = "mnist_model";
        std::string model_file = "../../../"\
                                 "common/mnist_data/mnist_cnn.pt";
        client.set_model_from_file(model_key, model_file,
                                "TORCH", "CPU", 20);

        // Build script key, file name, and then set script
        // from file using client API
        std::string script_key = "mnist_script";
        std::string script_file = "../../../common/mnist_data/"
                                "data_processing_script.txt";
        client.set_script_from_file(script_key, "CPU", script_file);

        // Get model and script to illustrate client API
        // functionality, but this is not necessary for this example.
        std::string_view model = client.get_model(model_key);
        std::string_view script = client.get_script(script_key);
    }

    // Run the MNIST model
    MPI_Barrier(MPI_COMM_WORLD);
    run_mnist("mnist_model", "mnist_script");

    if(rank==0)
        std::cout<<"Finished SmartRedis MNIST example."<<std::endl;

    // Finalize MPI Comm World
    MPI_Finalize();

    return 0;
}

Python Pre-Processing

def pre_process(inp):
    mean = torch.zeros(1).float().to(inp.device)
    mean[0] = 2.0
    temp = inp.float() * mean
    return temp

2.3 Fortran Client

In this section, examples are presented using the SmartRedis Fortran API to interact with the RedisAI tensor, model, and script data types. An example of using the SmartRedis DataSet API is also provided.

Note

The Fortran API examples rely on the SSDB environment variable being set to the address and port of the Redis database.

Note

The Fortran API examples are written to connect to a non-cluster Redis database. To connect to a Redis cluster, pass .true. instead of .false. to the client initialize method.

Tensors

The SmartRedis Fortran client is used to communicate between a Fortran application and the Redis database. In this example, the client will be used to send an array to the database and then unpack the data into another Fortran array.

This example will go step-by-step through the program and then present the entirety of the example code at the end.

Importing and declaring the SmartRedis client

The SmartRedis client must be declared as the derived type client_type imported from the smartredis_client module.

program example
  use smartredis_client, only : client_type

  type(client_type) :: client
end program example

Initializing the SmartRedis client

The SmartRedis client needs to be initialized before it can be used to interact with the database. Within Fortran this is done by calling the type-bound procedure initialize with the input argument .true. if using a clustered database or .false. otherwise.

program example
  use smartredis_client, only : client_type

  type(client_type) :: client

  call client%initialize(.false.) ! Change .false. to .true. if using a clustered database
end program example

Putting a Fortran array into the database

After the SmartRedis client has been initialized, a Fortran array of any dimension and shape, with a type of 8, 16, 32, or 64-bit integer or 32 or 64-bit real, can be put into the database using the type-bound procedure put_tensor. In this example, as a proxy for model-generated data, the array send_array_real_64 is filled with random numbers and stored in the database using put_tensor. This subroutine requires the user to specify a string used as the 'key' (here: send_array) identifying the tensor in the database, the array to be stored, and the shape of the array.

program main

  use iso_c_binding
  use smartredis_client, only : client_type

  implicit none

  integer, parameter :: dim1 = 10
  integer, parameter :: dim2 = 20
  integer, parameter :: dim3 = 30

  real(kind=c_double),    dimension(dim1, dim2, dim3) :: send_array_real_64

  integer :: i, j, k
  type(client_type) :: client

  integer :: err_code, pe_id

  call random_number(send_array_real_64)

  call client%initialize(.false.) ! Change .false. to .true. if using a clustered database

  call client%put_tensor("send_array", send_array_real_64, shape(send_array_real_64))

end program main

Unpacking an array stored in the database

‘Unpacking’ an array in SmartRedis refers to filling a Fortran array with the values of a tensor stored in the database. The dimensions and type of data of the incoming array and the pre-declared array are checked within the client to ensure that they match. Unpacking requires declaring an array and using the unpack_tensor procedure. This example generates an array of random numbers, puts that into the database, and retrieves the values from the database into a different array.

program main

  use iso_c_binding
  use smartredis_client, only : client_type

  implicit none

  integer, parameter :: dim1 = 10
  integer, parameter :: dim2 = 20
  integer, parameter :: dim3 = 30

  real(kind=c_double),    dimension(dim1, dim2, dim3) :: recv_array_real_64
  real(kind=c_double),    dimension(dim1, dim2, dim3) :: send_array_real_64

  integer :: i, j, k
  type(client_type) :: client

  integer :: err_code, pe_id

  call random_number(send_array_real_64)

  call client%initialize(.false.) ! Change .false. to .true. if using a clustered database

  call client%put_tensor("send_array", send_array_real_64, shape(send_array_real_64))
  call client%unpack_tensor("send_array", recv_array_real_64, shape(recv_array_real_64))

end program main

Datasets

The following code snippet shows how to use the Fortran Client to store and retrieve dataset tensors and dataset metadata scalars.

program main

  use iso_c_binding
  use smartredis_dataset, only : dataset_type

  implicit none

  integer, parameter :: dim1 = 10
  integer, parameter :: dim2 = 20
  integer, parameter :: dim3 = 30

  real(kind=c_float),      dimension(dim1, dim2, dim3) :: recv_array_real_32
 
  real(kind=c_float),      dimension(dim1, dim2, dim3) :: true_array_real_32

  character(len=16) :: meta_flt = 'meta_flt'

  real(kind=c_float),  dimension(dim1) :: meta_flt_vec
  real(kind=c_float), dimension(:), pointer :: meta_flt_recv

  integer :: i, j, k
  type(dataset_type) :: dataset

  integer :: err_code

  ! Fill array
  call random_number(true_array_real_32)

  call dataset%initialize("example_fortran_dataset")

  call dataset%add_tensor("true_array_real_32", true_array_real_32, shape(true_array_real_32))
  call dataset%unpack_dataset_tensor("true_array_real_32", recv_array_real_32, shape(recv_array_real_32))

  call random_number(meta_flt_vec)

  do i=1,dim1
    call dataset%add_meta_scalar(meta_flt, meta_flt_vec(i))
  enddo

  call dataset%get_meta_scalars(meta_flt, meta_flt_recv)

end program main

Models

For an example of placing a model in the database and executing the model using a stored tensor, see the Parallel (MPI) execution example. The aforementioned example is customized to show how key collisions can be avoided in parallel applications, but the Client API calls pertaining to model actions are identical to non-parallel applications.

Scripts

For an example of placing a PyTorch script in the database and executing the script using a stored tensor, see the Parallel (MPI) execution example. The aforementioned example is customized to show how key collisions can be avoided in parallel applications, but the Client API calls pertaining to script actions are identical to non-parallel applications.

Parallel (MPI) execution

This example shows an MPI program that sets a model, sets a script, sends a tensor, executes the script, executes the model, and receives a tensor. It illustrates how keys can be prefixed to prevent key collisions across MPI ranks. Note that only one model and one script are set; they are shared across all ranks. The Client API calls made in this program are equally applicable to non-MPI programs.

This example will go step-by-step through the program and then present the entirety of the example code at the end.

The MNIST dataset and model deal with images of digits: the model quantifies how likely an image is to represent 0, 1, 2, and so on. For simplicity, this example instead generates random numbers to represent an image.

Initialization

At the top of the program, the SmartRedis Fortran client (which is coded as a Fortran module) is imported using

use smartredis_client, only : client_type

where client_type is a Fortran derived-type containing the methods used to communicate with the RedisAI database. A particular instance is declared via

type(client_type) :: client

An initializer routine, implemented as a type-bound procedure, must be called before any of the other methods are used:

call client%initialize(.true.)

The only argument to the initialize routine determines whether the RedisAI database is clustered (i.e. spread over a number of nodes, .true.) or exists as a single instance (.false.).

If an individual rank is expected to send only its local data, a separate client must be initialized on every MPI task. Furthermore, to avoid key name collisions when running on multiple MPI tasks, we store the rank of the MPI process and use it as a suffix for all keys in this example.

On the root MPI task, two additional client methods (set_model_from_file and set_script_from_file) are called. set_model_from_file loads a saved PyTorch model and stores it in the database using the key mnist_model. Similarly, set_script_from_file loads a script that can be used to process data on the database cluster.

if (pe_id == 0) then
  call client%set_model_from_file(model_key, model_file, "TORCH", "CPU")
  call client%set_script_from_file(script_key, "CPU", script_file)
endif

This only needs to be done on the root MPI task because this example assumes that every rank is using the same model. If the model is intended to be rank-specific, a unique identifier (like the MPI rank) must be used.

At this point the initialization of the program is complete: each rank has initialized its own SmartRedis client, a PyTorch model has been loaded and stored in the database with its own identifying key, and a preprocessing script has also been loaded and stored in the database.

Performing inference on Fortran data

The run_mnist subroutine coordinates the inference cycle: the application generates data (the synthetic MNIST image), then the client runs a preprocessing script on that data within the database and performs inference with the AI model. The local variables declared at the top of the subroutine illustrate the expected shapes of the inputs to the various client methods.

integer, parameter :: mnist_dim1 = 28
integer, parameter :: mnist_dim2 = 28
integer, parameter :: result_dim1 = 10

The first two integers, mnist_dim1 and mnist_dim2, specify the shape of the input data. The MNIST model expects a 4D tensor with dimensions [1,1,28,28], representing a batch size of one and a three-dimensional array describing a 'picture' of a digit. result_dim1 specifies the size of the resulting inference; in this case it is a vector of length 10, where each element represents the probability that the data represent a digit from 0 to 9.

The next declarations define the strings that will be used as keys for the inputs and outputs of the scripts and inference models.

character(len=255) :: in_key
character(len=255) :: script_out_key
character(len=255) :: out_key

Note that these are standard Fortran strings. However, because the model and scripts may require the use of multiple inputs/outputs, these will need to be converted into a vector of strings.

character(len=255), dimension(1) :: inputs
character(len=255), dimension(1) :: outputs

In this case, only one input and one output are expected, so the vectors of strings only need to be one element long. For multiple inputs/outputs, change the dimension attribute of inputs and outputs accordingly; e.g. for two inputs, the declaration would be character(len=255), dimension(2) :: inputs.

Next, the input and output keys for the model and script are constructed:

in_key = "mnist_input_rank"//trim(key_suffix)
script_out_key = "mnist_processed_input_rank"//trim(key_suffix)
out_key = "mnist_processed_input_rank"//trim(key_suffix)

As mentioned previously, unique identifying keys are constructed by including a suffix based on MPI tasks.

The subroutine, in place of an actual simulation, next generates an array of random numbers and puts this array into the Redis database.

call random_number(array)
call client%put_tensor(in_key, array, shape(array))

The Redis database can now be called to run preprocessing scripts on these data.

inputs(1) = in_key
outputs(1) = script_out_key
call client%run_script(script_name, "pre_process", inputs, outputs)

The call to client%run_script specifies the key used to identify the script loaded during initialization; pre_process is the name of the function to run that is defined in that script, and the inputs/outputs are the vectors of keys described previously. In this case, the call to run_script triggers the RedisAI database to execute pre_process on the generated data (stored using the key mnist_input_rank_XX, where XX represents the MPI rank) and to store the result of pre_process in the database as mnist_processed_input_rank_XX. One key aspect to emphasize is that the calculations are done within the database, not on the application side, and the results are not immediately available to the application. The retrieval of data from the database is demonstrated next.

The data have been processed and now we can run the inference model. The setup of the inputs/outputs is the same as before, except that the input to the inference model is stored using the key mnist_processed_input_rank_XX and the output will be stored using the same key.

inputs(1) = script_out_key
outputs(1) = out_key
call client%run_model(model_name, inputs, outputs)

As before, the results of running the inference are stored within the database and are not immediately available to the application. However, we can 'retrieve' the tensor from the database using the unpack_tensor method.

call client%unpack_tensor(out_key, result, shape(result))

The result array now contains the outcome of the inference. It is a 10-element array representing the likelihood that the ‘image’ (generated using the random numbers) is one of the numbers [0-9].

Key points

The scripts, models, and data used here represent the coordination of different software stacks (PyTorch, RedisAI, and Fortran); however, the application code is written entirely in standard Fortran. Any operations needed to communicate with the database and exchange data are opaque to the application.

Source Code

Fortran program:

program mnist_example

  use mpi
  use smartredis_client, only : client_type

  implicit none

  character(len=*), parameter :: model_key = "mnist_model"
  character(len=*), parameter :: model_file = "../../../common/mnist_data/mnist_cnn.pt"
  character(len=*), parameter :: script_key = "mnist_script"
  character(len=*), parameter :: script_file = "../../../common/mnist_data/data_processing_script.txt"

  type(client_type) :: client
  integer :: err_code, pe_id
  character(len=2) :: key_suffix

  ! Initialize MPI and get the rank of the processor
  call MPI_init(err_code)
  call MPI_comm_rank( MPI_COMM_WORLD, pe_id, err_code)

  ! Format the suffix for a key as a zero-padded version of the rank
  write(key_suffix, "(A,I1.1)") "_",pe_id
  call client%initialize(.true.)


  if (pe_id == 0) then
    call client%set_model_from_file(model_key, model_file, "TORCH", "CPU")
    call client%set_script_from_file(script_key, "CPU", script_file)
  endif

  call MPI_barrier(MPI_COMM_WORLD, err_code)

  call run_mnist(client, key_suffix, model_key, script_key)

  call MPI_finalize(err_code)

  if (pe_id == 0) then
    print *, "SmartRedis Fortran MPI MNIST example finished without errors."
  endif

contains

subroutine run_mnist( client, key_suffix, model_name, script_name )
  type(client_type), intent(in) :: client
  character(len=*),  intent(in) :: key_suffix
  character(len=*),  intent(in) :: model_name
  character(len=*),  intent(in) :: script_name

  integer, parameter :: mnist_dim1 = 28
  integer, parameter :: mnist_dim2 = 28
  integer, parameter :: result_dim1 = 10

  real, dimension(1,1,mnist_dim1,mnist_dim2) :: array
  real, dimension(1,result_dim1) :: result

  character(len=255) :: in_key
  character(len=255) :: script_out_key
  character(len=255) :: out_key

  character(len=255), dimension(1) :: inputs
  character(len=255), dimension(1) :: outputs

  ! Construct the keys used for specifying inputs and outputs
  in_key = "mnist_input_rank"//trim(key_suffix)
  script_out_key = "mnist_processed_input_rank"//trim(key_suffix)
  out_key = "mnist_processed_input_rank"//trim(key_suffix)

  ! Generate some fake data for inference
  call random_number(array)
  call client%put_tensor(in_key, array, shape(array))

  ! Prepare the script inputs and outputs
  inputs(1) = in_key
  outputs(1) = script_out_key
  call client%run_script(script_name, "pre_process", inputs, outputs)
  inputs(1) = script_out_key
  outputs(1) = out_key
  call client%run_model(model_name, inputs, outputs)
  result(:,:) = 0.
  call client%unpack_tensor(out_key, result, shape(result))

end subroutine run_mnist

end program mnist_example

Python Pre-Processing:

def pre_process(inp):
    mean = torch.zeros(1).float().to(inp.device)
    mean[0] = 2.0
    temp = inp.float() * mean
    return temp