Лавка с огурцами (Cucumber Shop)

GPT-4o Mini / 3.5 Turbo: no wait / GPT-4: no wait / GPT-4 32k: no wait / GPT-4 Turbo: no wait / GPT-4o: no wait / Claude (Sonnet): no wait / Claude (Opus): no wait / AWS Claude (Sonnet): no wait / OpenAI o1: no wait


Service Info

{
  "uptime": 482786,
  "endpoints": {
    "openai": "https://cucumber-shop.ru/proxy/openai",
    "anthropic": "https://cucumber-shop.ru/proxy/anthropic",
    "aws-claude": "https://cucumber-shop.ru/proxy/aws/claude",
    "aws-mistral": "https://cucumber-shop.ru/proxy/aws/mistral"
  },
  "proompts": 3237,
  "tookens": "110.31m",
  "proomptersNow": 1,
  "openaiKeys": 76,
  "openaiOrgs": 1,
  "anthropicKeys": 2,
  "awsKeys": 1,
  "turbo": {
    "usage": "0 tokens",
    "activeKeys": 8,
    "revokedKeys": 44,
    "overQuotaKeys": 24,
    "trialKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4-turbo": {
    "usage": "0 tokens",
    "activeKeys": 8,
    "overQuotaKeys": 23,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4": {
    "usage": "0 tokens",
    "activeKeys": 8,
    "overQuotaKeys": 23,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4o": {
    "usage": "110.31m tokens",
    "activeKeys": 8,
    "overQuotaKeys": 24,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "o1": {
    "usage": "0 tokens",
    "activeKeys": 8,
    "overQuotaKeys": 24,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4-32k": {
    "usage": "0 tokens",
    "activeKeys": 2,
    "overQuotaKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "claude": {
    "usage": "0 tokens",
    "activeKeys": 0,
    "revokedKeys": 2,
    "overQuotaKeys": 0,
    "trialKeys": 0,
    "prefilledKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "claude-opus": {
    "usage": "0 tokens",
    "activeKeys": 0,
    "revokedKeys": 2,
    "overQuotaKeys": 0,
    "trialKeys": 0,
    "prefilledKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "aws-claude": {
    "usage": "0 tokens",
    "activeKeys": 0,
    "revokedKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "config": {
    "gatekeeper": "proxy_key",
    "maxIpsAutoBan": "false",
    "captchaMode": "none",
    "powTokenHours": "24",
    "powTokenMaxIps": "2",
    "powDifficultyLevel": "low",
    "powChallengeTimeout": "30",
    "textModelRateLimit": "4",
    "imageModelRateLimit": "4",
    "maxContextTokensOpenAI": "200000",
    "maxContextTokensAnthropic": "100000",
    "maxOutputTokensOpenAI": "4000",
    "maxOutputTokensAnthropic": "2000",
    "allowAwsLogging": "false",
    "promptLogging": "false",
    "tokenQuota": {
      "turbo": "0",
      "gpt4": "0",
      "gpt4-32k": "0",
      "gpt4-turbo": "0",
      "gpt4o": "0",
      "o1": "0",
      "o1-mini": "0",
      "dall-e": "0",
      "claude": "0",
      "claude-opus": "0",
      "gemini-flash": "0",
      "gemini-pro": "0",
      "gemini-ultra": "0",
      "mistral-tiny": "0",
      "mistral-small": "0",
      "mistral-medium": "0",
      "mistral-large": "0",
      "aws-claude": "0",
      "aws-claude-opus": "0",
      "aws-mistral-tiny": "0",
      "aws-mistral-small": "0",
      "aws-mistral-medium": "0",
      "aws-mistral-large": "0",
      "gcp-claude": "0",
      "gcp-claude-opus": "0",
      "azure-turbo": "0",
      "azure-gpt4": "0",
      "azure-gpt4-32k": "0",
      "azure-gpt4-turbo": "0",
      "azure-gpt4o": "0",
      "azure-dall-e": "0",
      "azure-o1": "0",
      "azure-o1-mini": "0"
    },
    "allowOpenAIToolUsage": "false",
    "allowedVisionServices": "openai,anthropic,aws",
    "tokensPunishmentFactor": "0"
  },
  "build": "[ci] 36e2430 (main@khanon/oai-reverse-proxy)"
}
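
Below is a minimal sketch, in Python, of how a client might call the OpenAI endpoint listed above. The page only shows the base URLs and that the gatekeeper is "proxy_key"; the request path (/v1/chat/completions), the Bearer-token placement of the proxy key, and the response shape are assumptions based on typical OpenAI-compatible proxies, and YOUR_PROXY_KEY is a placeholder. Treat it as illustrative rather than official usage instructions.

import requests

# Base URL taken from the "endpoints" block above.
ENDPOINT = "https://cucumber-shop.ru/proxy/openai"
PROXY_KEY = "YOUR_PROXY_KEY"  # placeholder; a key issued by the proxy operator

resp = requests.post(
    f"{ENDPOINT}/v1/chat/completions",           # assumed OpenAI-compatible path
    headers={
        "Authorization": f"Bearer {PROXY_KEY}",  # assumed header for the proxy_key gatekeeper
        "Content-Type": "application/json",
    },
    json={
        "model": "gpt-4o",                       # one of the model families listed above
        "max_tokens": 1000,                      # must stay at or below maxOutputTokensOpenAI (4000)
        "messages": [{"role": "user", "content": "Hello!"}],
    },
    timeout=60,
)
resp.raise_for_status()
print(resp.json()["choices"][0]["message"]["content"])

Requests are also bounded by the config block above: maxContextTokensOpenAI is 200000 and textModelRateLimit is 4, so keep prompts within the context cap and expect throttling once the rate limit is exceeded.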