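// Protocol definitions for the text generation service (package generate.v1).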
syntax = "proto3";
package generate.v1;
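/// Text generation service.
/// As the messages below suggest, Generate processes a new batch and may
/// return a cached `Batch` to continue from, GenerateWithCache continues
/// previously cached batches, and ClearCache empties the batch cache.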
service TextGenerationService {
    /// Service discovery
    rpc ServiceDiscovery (ServiceDiscoveryRequest) returns (ServiceDiscoveryResponse) {}

    /// Empties batch cache
    rpc ClearCache (ClearCacheRequest) returns (ClearCacheResponse);

    /// Generate tokens for a batch
    rpc Generate (GenerateRequest) returns (GenerateResponse);

    /// Generate tokens for a list of cached batches
    rpc GenerateWithCache (GenerateWithCacheRequest) returns (GenerateWithCacheResponse);
}
/// Empty request
message ServiceDiscoveryRequest {}
message ServiceDiscoveryResponse {
    /// URLs of the other shards
    repeated string urls = 1;
}
/// Empty request
message ClearCacheRequest {}
/// Empty response
message ClearCacheResponse {}
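/// Parameters used to warp the logits before sampling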
message LogitsWarperParameters {
    /// Sampling temperature
    float temperature = 1;
    /// Top-k sampling cutoff
    uint32 top_k = 2;
    /// Top-p (nucleus) sampling cutoff
    float top_p = 3;
    /// Whether to sample or pick the most likely token (greedy)
    bool do_sample = 4;
}
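/// A single generation request within a batch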
message Request {
    /// Request ID
    uint64 id = 1;
    /// The generation context
    string inputs = 2;
    /// The number of tokens inside inputs
    uint32 input_length = 3;
    /// Logits Warper Parameters
    LogitsWarperParameters parameters = 4;
    /// Stopping criterion: maximum number of new tokens to generate
    uint32 max_new_tokens = 5;
}
message Batch {
    /// Batch ID
    uint64 id = 1;
    /// Individual requests
    repeated Request requests = 2;
    /// Batch size (== len(requests))
    uint32 size = 3;
}
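/// A finished generation for a single request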
message GeneratedText {
    /// The request that produced this output
    Request request = 1;
    /// Output text
    string output = 2;
    /// Number of generated tokens
    uint32 tokens = 3;
}
message GenerateRequest {
    /// Batch to generate tokens for
    Batch batch = 1;
}
message GenerateResponse {
    /// Finished requests
    repeated GeneratedText generated_texts = 1;
    /// Next batch (cached)
    optional Batch batch = 2;
}
message GenerateWithCacheRequest {
    /// Cached batches
    repeated Batch batches = 1;
}
message GenerateWithCacheResponse {
    /// Finished requests
    repeated GeneratedText generated_texts = 1;
    /// Next batch (cached)
    optional Batch batch = 2;
}