Pomidor

Moonshot: no wait / Qwen: no wait / GLM: no wait / Cohere: no wait / Grok: no wait / Deepseek: no wait / GPT-4o Mini / 3.5 Turbo: no wait / GPT-4: no wait / GPT-4 Turbo: no wait / GPT-4o: no wait / GPT-4.1: no wait / GPT-4.1 Mini: no wait / GPT-4.1 Nano: no wait / GPT-5: no wait / GPT-5 Mini: no wait / GPT-5 Nano: no wait / GPT-5 Chat Latest: no wait / OpenAI o1: no wait / OpenAI o1 mini: no wait / OpenAI o3 mini: no wait / OpenAI o3: no wait / OpenAI o4 mini: no wait / OpenAI Codex Mini: no wait / DALL-E: no wait / GPT Image: no wait / Gemini Flash: no wait / Gemini Pro: 2sec / Mistral 7B: no wait / Mistral Nemo: no wait / Mistral Medium: no wait / Mistral Large: no wait

05.11.2025
Merged Reanon's new commit: Gemini keys that come back with the leak error are now marked as revoked.
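
For context, a minimal sketch of what that kind of check might look like. This is an illustration only, not the actual commit: the error strings, field names, and function are assumptions made up for this example.

# Illustrative sketch: flag leaked Gemini keys as revoked instead of retrying them.
# The leak_markers strings and the key dict fields are assumptions, not the
# proxy's real code or Google's exact error text.
def update_key_status(key: dict, error_message: str) -> None:
    message = error_message.lower()
    leak_markers = ("has been exposed", "publicly leaked", "reported as leaked")
    if any(marker in message for marker in leak_markers):
        key["isRevoked"] = True    # permanent: drop the key from rotation
    elif "quota" in message:
        key["isOverQuota"] = True  # temporary: quota resets later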

08.11.2025
Added more Gemini keys.


Service Info

{
  "uptime": 178572,
  "endpoints": {
    "openai": "https://oai-proxy-rw1j.onrender.com/proxy/openai",
    "openai-image": "https://oai-proxy-rw1j.onrender.com/proxy/openai-image",
    "google-ai": "https://oai-proxy-rw1j.onrender.com/proxy/google-ai",
    "mistral-ai": "https://oai-proxy-rw1j.onrender.com/proxy/mistral-ai",
    "deepseek": "https://oai-proxy-rw1j.onrender.com/proxy/deepseek",
    "xai": "https://oai-proxy-rw1j.onrender.com/proxy/xai",
    "cohere": "https://oai-proxy-rw1j.onrender.com/proxy/cohere",
    "qwen": "https://oai-proxy-rw1j.onrender.com/proxy/qwen",
    "glm": "https://oai-proxy-rw1j.onrender.com/proxy/glm",
    "moonshot": "https://oai-proxy-rw1j.onrender.com/proxy/moonshot"
  },
  "proompts": 23681,
  "tookens": "1.111b",
  "proomptersNow": 3,
  "openaiKeys": 8,
  "openaiOrgs": 8,
  "google-aiKeys": 803,
  "mistral-aiKeys": 31,
  "deepseekKeys": 114,
  "xaiKeys": 1,
  "cohereKeys": 13,
  "qwenKeys": 99,
  "glmKeys": 18,
  "moonshotKeys": 25,
  "turbo": {
    "usage": "392 (In: 364, Out: 28)",
    "activeKeys": 2,
    "revokedKeys": 2,
    "Requests": 16,
    "overQuotaKeys": 4,
    "trialKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4": {
    "usage": "0 tokens",
    "activeKeys": 2,
    "Requests": 1,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4-turbo": {
    "usage": "220.0k (In: 211.3k, Out: 8.7k)",
    "activeKeys": 2,
    "Requests": 12,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4o": {
    "usage": "9.38m (In: 9.27m, Out: 111.0k)",
    "activeKeys": 2,
    "Requests": 288,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt41": {
    "usage": "7.97m (In: 7.84m, Out: 123.2k)",
    "activeKeys": 2,
    "Requests": 152,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt41-mini": {
    "usage": "0 tokens",
    "activeKeys": 2,
    "Requests": 0,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt41-nano": {
    "usage": "0 tokens",
    "activeKeys": 2,
    "Requests": 0,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt5": {
    "usage": "519.4k (In: 446.6k, Out: 72.7k)",
    "activeKeys": 2,
    "Requests": 52,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt5-mini": {
    "usage": "31.9k (In: 13.9k, Out: 18.0k)",
    "activeKeys": 2,
    "Requests": 12,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt5-nano": {
    "usage": "0 tokens",
    "activeKeys": 2,
    "Requests": 0,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt5-chat-latest": {
    "usage": "15.79m (In: 15.55m, Out: 245.8k)",
    "activeKeys": 2,
    "Requests": 428,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "o1": {
    "usage": "13.0k (In: 4.2k, Out: 8.8k)",
    "activeKeys": 2,
    "Requests": 156,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "o1-mini": {
    "usage": "0 tokens",
    "activeKeys": 2,
    "Requests": 0,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "o3": {
    "usage": "0 tokens",
    "activeKeys": 2,
    "Requests": 0,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "o3-mini": {
    "usage": "0 tokens",
    "activeKeys": 2,
    "Requests": 0,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "o4-mini": {
    "usage": "0 tokens",
    "activeKeys": 2,
    "Requests": 0,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "codex-mini": {
    "usage": "0 tokens",
    "activeKeys": 2,
    "Requests": 0,
    "overQuotaKeys": 3,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "dall-e": {
    "usage": "0 tokens",
    "activeKeys": 2,
    "Requests": 0,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt-image": {
    "usage": "0 tokens",
    "activeKeys": 1,
    "Requests": 0,
    "overQuotaKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gemini-flash": {
    "usage": "8.65m (In: 7.26m, Out: 1.39m)",
    "activeKeys": 506,
    "revokedKeys": 181,
    "Requests": 798,
    "overQuotaKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gemini-pro": {
    "usage": "1.050b (In: 1.014b, Out: 36.32m)",
    "activeKeys": 67,
    "revokedKeys": 295,
    "Requests": 20684,
    "overQuotaKeys": 27,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "2sec"
  },
  "mistral-tiny": {
    "usage": "0 tokens",
    "activeKeys": 30,
    "revokedKeys": 1,
    "Requests": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "mistral-small": {
    "usage": "70.5k (In: 70.0k, Out: 470)",
    "activeKeys": 30,
    "revokedKeys": 1,
    "Requests": 2,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "mistral-medium": {
    "usage": "1.19m (In: 1.16m, Out: 25.1k)",
    "activeKeys": 30,
    "revokedKeys": 1,
    "Requests": 32,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "mistral-large": {
    "usage": "2.8k (In: 2.6k, Out: 273)",
    "activeKeys": 30,
    "revokedKeys": 1,
    "Requests": 1,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "deepseek": {
    "usage": "9.57m (In: 9.31m, Out: 263.3k)",
    "activeKeys": 96,
    "revokedKeys": 9,
    "Requests": 338,
    "overQuotaKeys": 9,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "xai": {
    "usage": "0 tokens",
    "activeKeys": 1,
    "revokedKeys": 0,
    "Requests": 0,
    "overQuotaKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "cohere": {
    "usage": "0 tokens",
    "activeKeys": 13,
    "revokedKeys": 0,
    "Requests": 0,
    "overQuotaKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "qwen": {
    "usage": "100.5k (In: 67.4k, Out: 33.1k)",
    "activeKeys": 88,
    "revokedKeys": 11,
    "Requests": 21,
    "overQuotaKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "glm": {
    "usage": "3.54m (In: 3.48m, Out: 61.9k)",
    "activeKeys": 5,
    "revokedKeys": 0,
    "Requests": 150,
    "overQuotaKeys": 13,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "moonshot": {
    "usage": "4.05m (In: 3.47m, Out: 583.6k)",
    "activeKeys": 23,
    "revokedKeys": 2,
    "Requests": 538,
    "overQuotaKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "config": {
    "gatekeeper": "proxy_key",
    "maxIpsAutoBan": "false",
    "captchaMode": "none",
    "powTokenHours": "1488",
    "powTokenMaxIps": "0",
    "powDifficultyLevel": "low",
    "powChallengeTimeout": "30",
    "textModelRateLimit": "12",
    "imageModelRateLimit": "6",
    "maxContextTokensOpenAI": "1488000",
    "maxContextTokensAnthropic": "1488000",
    "maxOutputTokensOpenAI": "1488000",
    "maxOutputTokensAnthropic": "1488000",
    "useRemoteTokenCounting": "true",
    "allowAwsLogging": "false",
    "promptLogging": "false",
    "tokenQuota": {
      "moonshot": "0",
      "qwen": "0",
      "glm": "0",
      "cohere": "0",
      "xai": "0",
      "deepseek": "0",
      "turbo": "0",
      "gpt4": "0",
      "gpt4-32k": "0",
      "gpt4-turbo": "0",
      "gpt4o": "0",
      "gpt45": "0",
      "gpt41": "0",
      "gpt41-mini": "0",
      "gpt41-nano": "0",
      "gpt5": "0",
      "gpt5-mini": "0",
      "gpt5-nano": "0",
      "gpt5-chat-latest": "0",
      "gpt5-pro": "0",
      "o1": "0",
      "o1-mini": "0",
      "o1-pro": "0",
      "o3-pro": "0",
      "o3-mini": "0",
      "o3": "0",
      "o4-mini": "0",
      "codex-mini": "0",
      "dall-e": "0",
      "gpt-image": "0",
      "claude": "0",
      "claude-opus": "0",
      "gemini-flash": "0",
      "gemini-pro": "0",
      "gemini-ultra": "0",
      "mistral-tiny": "0",
      "mistral-small": "0",
      "mistral-medium": "0",
      "mistral-large": "0",
      "aws-claude": "0",
      "aws-claude-opus": "0",
      "aws-mistral-tiny": "0",
      "aws-mistral-small": "0",
      "aws-mistral-medium": "0",
      "aws-mistral-large": "0",
      "gcp-claude": "0",
      "gcp-claude-opus": "0",
      "azure-turbo": "0",
      "azure-gpt4": "0",
      "azure-gpt4-32k": "0",
      "azure-gpt4-turbo": "0",
      "azure-gpt4o": "0",
      "azure-gpt45": "0",
      "azure-gpt41": "0",
      "azure-gpt41-mini": "0",
      "azure-gpt41-nano": "0",
      "azure-gpt5": "0",
      "azure-gpt5-mini": "0",
      "azure-gpt5-nano": "0",
      "azure-gpt5-chat-latest": "0",
      "azure-gpt5-pro": "0",
      "azure-dall-e": "0",
      "azure-o1": "0",
      "azure-o1-mini": "0",
      "azure-o1-pro": "0",
      "azure-o3-pro": "0",
      "azure-o3-mini": "0",
      "azure-o3": "0",
      "azure-o4-mini": "0",
      "azure-codex-mini": "0",
      "azure-gpt-image": "0",
      "openrouter-paid": "0",
      "openrouter-free": "0"
    },
    "allowOpenAIToolUsage": "false",
    "tokensPunishmentFactor": "0",
    "serviceInfoAuthMode": "token"
  }
}
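
The URLs under "endpoints" presumably mirror each upstream provider's own API shape, and since "gatekeeper" is "proxy_key", requests have to carry a proxy key issued by the operator. A minimal sketch of a chat request against the OpenAI endpoint, assuming the usual Bearer-token header and the standard /v1/chat/completions path (both are assumptions, not confirmed by this page):

# Minimal sketch of calling the proxy's OpenAI-compatible endpoint.
# Assumptions: standard OpenAI wire format, proxy key passed as a Bearer
# token, and the /v1/chat/completions path. PROXY_KEY is a placeholder.
import requests

BASE = "https://oai-proxy-rw1j.onrender.com/proxy/openai"
PROXY_KEY = "YOUR_PROXY_KEY"  # placeholder, issued by the operator

resp = requests.post(
    f"{BASE}/v1/chat/completions",
    headers={"Authorization": f"Bearer {PROXY_KEY}"},
    json={
        "model": "gpt-4o",  # any model shown above as "no wait"
        "messages": [{"role": "user", "content": "ping"}],
        "max_tokens": 32,
    },
    timeout=60,
)
resp.raise_for_status()
print(resp.json()["choices"][0]["message"]["content"])

Per the config block, textModelRateLimit is 12 and imageModelRateLimit is 6 (presumably prompts per minute per user), so bursty clients should expect throttling; the 1488000-token context and output caps are effectively no limit.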