Skip to content

Commit

Permalink
feat: tokenization
Browse files Browse the repository at this point in the history
Signed-off-by: shraddhazpy <shraddha@shraddhafive.in>
  • Loading branch information
shraddhazpy committed Jan 31, 2025
1 parent 60ec2cf commit 4db4eb7
Show file tree
Hide file tree
Showing 2 changed files with 13 additions and 0 deletions.
12 changes: 12 additions & 0 deletions backend/cpp/llama/grpc-server.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2522,6 +2522,18 @@ class BackendServiceImpl final : public backend::Backend::Service {
return grpc::Status::OK;
}

// Tokenizes the prompt carried in the PredictOptions request and writes the
// resulting token ids into the TokenizationResponse.
//
// @param context   gRPC server context (unused here).
// @param request   Prediction options; only the "prompt" field is consumed,
//                  extracted via parse_options().
// @param response  Output message; one entry is appended per token.
// @return          grpc::Status::OK on completion.
grpc::Status TokenizeString(ServerContext* context, const backend::PredictOptions* request, backend::TokenizationResponse* response){
    json data = parse_options(false, request, llama);

    // `false` = do not add the BOS token — NOTE(review): confirm this matches
    // the tokenization used by the prediction path.
    std::vector<llama_token> tokens = llama.tokenize(data["prompt"], false);

    // Range-for avoids the signed/unsigned mismatch of the previous
    // `int i < tokens.size()` index loop.
    for (const llama_token token : tokens) {
        response->add_tokens(token);
    }

    return grpc::Status::OK;
}

grpc::Status GetMetrics(ServerContext* context, const backend::MetricsRequest* request, backend::MetricsResponse* response) {
llama_client_slot* active_slot = llama.get_active_slot();

Expand Down
1 change: 1 addition & 0 deletions core/http/endpoints/localai/tokenize.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ import (

// TokenizeEndpoint exposes a REST API to tokenize the content
// @Summary Tokenize the input.
// @Param request body schema.TokenizeRequest true "Request"
// @Success 200 {object} schema.TokenizeResponse "Response"
// @Router /v1/tokenize [post]
func TokenizeEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error {
Expand Down

0 comments on commit 4db4eb7

Please sign in to comment.