add additional llm cost #963

Merged (2 commits) on Mar 21, 2025
Changes from all commits
@@ -6,6 +6,7 @@ public class TokenStatsModel
     public string Model { get; set; }
     public string Prompt { get; set; }
     public int PromptCount { get; set; }
+    public int CachedPromptCount { get; set; }
     public int CompletionCount { get; set; }
     public AgentLlmConfig LlmConfig { get; set; }
 }
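Note that CachedPromptCount is a subset of PromptCount rather than a separate bucket: the TokenStatistics change further down subtracts the cached portion from the prompt total before applying the uncached rate. A hypothetical sanity check (not part of this PR) would therefore be:

    // Hypothetical invariant, not in this PR: cached prompt tokens are
    // counted inside PromptCount, never on top of it.
    System.Diagnostics.Debug.Assert(
        stats.CachedPromptCount >= 0 && stats.CachedPromptCount <= stats.PromptCount);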
@@ -62,12 +62,22 @@ public class LlmModelSetting
     /// </summary>
     public int Dimension { get; set; }

+    public LlmCost AdditionalCost { get; set; } = new();
+
     public override string ToString()
     {
         return $"[{Type}] {Name} {Endpoint}";
     }
 }

+public class LlmCost
+{
+    public float CachedPromptCost { get; set; } = 0f;
+    public float AudioPromptCost { get; set; } = 0f;
+    public float ReasoningCompletionCost { get; set; } = 0f;
+    public float AudioCompletionCost { get; set; } = 0f;
+}

 public enum LlmModelType
 {
     Text = 1,
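For context, a model entry could opt into the new pricing like this. The sketch below is illustrative only: the model name and prices are invented, and how LlmModelSetting gets bound (e.g. from the host's configuration file) depends on the application.

    // Illustrative sketch: model name and prices are made up.
    var setting = new LlmModelSetting
    {
        Name = "gpt-4o-mini",
        PromptCost = 0.00015f,       // per 1K uncached prompt tokens
        CompletionCost = 0.0006f,    // per 1K completion tokens
        AdditionalCost = new LlmCost
        {
            CachedPromptCost = 0.000075f  // per 1K cached prompt tokens
        }
    };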
@@ -41,9 +41,11 @@ public void AddToken(TokenStatsModel stats, RoleDialogModel message)
         var settingsService = _services.GetRequiredService<ILlmProviderService>();
         var settings = settingsService.GetSetting(stats.Provider, _model);

-        var deltaPromptCost = stats.PromptCount / 1000f * settings.PromptCost;
+        var deltaPromptCost = (stats.PromptCount - stats.CachedPromptCount) / 1000f * settings.PromptCost;
+        var deltaCachedPromptCost = stats.CachedPromptCount / 1000f * (settings.AdditionalCost?.CachedPromptCost ?? 0f);
         var deltaCompletionCost = stats.CompletionCount / 1000f * settings.CompletionCost;
-        var deltaTotal = deltaPromptCost + deltaCompletionCost;
+
+        var deltaTotal = deltaPromptCost + deltaCachedPromptCost + deltaCompletionCost;
         _promptCost += deltaPromptCost;
         _completionCost += deltaCompletionCost;
@@ -53,6 +55,8 @@ public void AddToken(TokenStatsModel stats, RoleDialogModel message)
         stat.SetState("prompt_total", stats.PromptCount + inputCount, isNeedVersion: false, source: StateSource.Application);
         var outputCount = int.Parse(stat.GetState("completion_total", "0"));
         stat.SetState("completion_total", stats.CompletionCount + outputCount, isNeedVersion: false, source: StateSource.Application);
+        var cachedCount = int.Parse(stat.GetState("cached_prompt_total", "0"));
+        stat.SetState("cached_prompt_total", stats.CachedPromptCount + cachedCount, isNeedVersion: false, source: StateSource.Application);

         // Total cost
         var total_cost = float.Parse(stat.GetState("llm_total_cost", "0"));
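To make the arithmetic concrete, here is a worked example using the invented prices from the sketch above: a call with 1,200 prompt tokens, 400 of them served from the provider's prompt cache, and 300 completion tokens.

    // Worked example, illustrative numbers only.
    // deltaPromptCost       = (1200 - 400) / 1000 * 0.00015  = 0.00012
    // deltaCachedPromptCost =         400  / 1000 * 0.000075 = 0.00003
    // deltaCompletionCost   =         300  / 1000 * 0.0006   = 0.00018
    // deltaTotal            = 0.00012 + 0.00003 + 0.00018    = 0.00033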
@@ -84,6 +84,7 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDialogModel>
             Provider = Provider,
             Model = _model,
             PromptCount = response.Value?.Usage?.InputTokenCount ?? 0,
+            CachedPromptCount = response.Value?.Usage?.InputTokenDetails?.CachedTokenCount ?? 0,
             CompletionCount = response.Value?.Usage?.OutputTokenCount ?? 0
         });
     }
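Because both InputTokenDetails and CachedTokenCount are null-propagated with a fallback of 0, responses that do not report cache hits simply produce CachedPromptCount = 0, and the cost calculation above reduces to the previous uncached formula.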