GPT-4o Mini / 3.5 Turbo: no wait / Gemini Flash: no wait / Gemini Pro: no wait / Gemini Ultra: no wait / Mistral 7B: no wait / Mistral Nemo: no wait / Mistral Medium: no wait / Mistral Large: no wait / DeepSeek: no wait / Grok 3: no wait / Grok 4: no wait / Moonshot: no wait
This will be my final announcement. This proxy will remain up until 23/10. After this date this proxy will close forever. I will no longer be refilling keys between now and this date either, so if any models die before then, that will be it.
I'm done with the AI community and will not be helping anyone else or providing any more content etc. I no longer find it fun or productive and, to be honest, I'm just done with all the drama. I don't want it. All I wanted was to help people. For all who found my stuff useful and helpful, I say thanks. That is what all this was for. All it was ever for was to help people. It made me happy helping others.
If anyone wants to keep in touch then my dms are open on discord.
Anyway, that is it. Again, thank you, and see you down the road maybe.
{ "uptime": 311939, "endpoints": { "openai": "https://sir-pro3-proxy.org/proxy/openai", "google-ai": "https://sir-pro3-proxy.org/proxy/google-ai", "mistral-ai": "https://sir-pro3-proxy.org/proxy/mistral-ai", "deepseek": "https://sir-pro3-proxy.org/proxy/deepseek", "xai": "https://sir-pro3-proxy.org/proxy/xai", "moonshotai": "https://sir-pro3-proxy.org/proxy/moonshotai" }, "proompts": 3094, "tookens": "71.45m", "proomptsTotal": 141428, "proomptersNow": 0, "tookensTotal": "3.467b", "openaiKeys": {}, "openaiOrgs": 1, "google-aiKeys": 227, "mistral-aiKeys": 7, "deepseekKeys": 83, "xaiKeys": 9, "moonshotaiKeys": 22, "turbo": { "usage": "0 tokens", "activeKeys": 0, "revokedKeys": 1, "overQuotaKeys": 0, "trialKeys": 0, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" }, "gemini-pro": { "usage": "68.24m tokens", "activeKeys": 225, "overQuotaKeys": 0, "revokedKeys": 2, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" }, "gemini-flash": { "usage": "0 tokens", "activeKeys": 225, "overQuotaKeys": 0, "revokedKeys": 1, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" }, "gemini-ultra": { "usage": "0 tokens", "activeKeys": 225, "overQuotaKeys": 0, "revokedKeys": 1, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" }, "mistral-medium": { "usage": "0 tokens", "activeKeys": 7, "revokedKeys": 0, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" }, "mistral-large": { "usage": "0 tokens", "activeKeys": 7, "revokedKeys": 0, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" }, "mistral-small": { "usage": "0 tokens", "activeKeys": 7, "revokedKeys": 0, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" }, "mistral-tiny": { "usage": "0 tokens", "activeKeys": 7, "revokedKeys": 0, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" }, "deepseek": { "usage": "3.20m tokens", "activeKeys": 3, "revokedKeys": 0, "overQuotaKeys": 80, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" }, "grok-3": { "usage": "0 tokens", "activeKeys": 0, 
"revokedKeys": 2, "overQuotaKeys": 7, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" }, "grok-4": { "usage": "13.4k tokens", "activeKeys": 0, "revokedKeys": 0, "overQuotaKeys": 7, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" }, "moonshot": { "usage": "0 tokens", "activeKeys": 21, "revokedKeys": 1, "overQuotaKeys": 0, "proomptersInQueue": 0, "estimatedQueueTime": "no wait" }, "config": { "gatekeeper": "user_token", "captchaMode": "none", "powTokenPrompt": "100", "powTokenMaxIps": "2", "powDifficultyLevel": "low", "powChallengeTimeout": "30", "textModelRateLimit": "4", "imageModelRateLimit": "4", "maxContextTokensOpenAI": "50000", "maxContextTokensAnthropic": "32768", "maxOutputTokensOpenAI": "4000", "maxOutputTokensAnthropic": "1024", "maxOutputTokensGoogleAI": "30000", "universalEndpoint": "false", "rejectMessage": "This content violates /aicg/'s acceptable use policy.", "hashIp": "false", "allowAwsLogging": "false", "promptLogging": "false", "tokenQuota": { "turbo": "0", "gpt4": "0", "gpt4-32k": "0", "gpt4-turbo": "0", "gpt4o": "0", "gpt45": "1", "gpt41": "0", "gpt5": "0", "gpt5-chat": "0", "chatgpt": "0", "o1": "0", "o1-mini": "0", "dall-e": "0", "claude": "0", "claude-opus": "1", "gemini-flash": "0", "gemini-pro": "0", "gemini-ultra": "0", "mistral-tiny": "0", "mistral-small": "0", "mistral-medium": "0", "mistral-large": "0", "aws-claude": "0", "aws-claude-opus": "0", "aws-mistral-tiny": "0", "aws-mistral-small": "0", "aws-mistral-medium": "0", "aws-mistral-large": "0", "gcp-claude": "0", "gcp-claude-opus": "0", "azure-turbo": "0", "azure-gpt4": "0", "azure-gpt4-32k": "0", "azure-gpt4-turbo": "0", "azure-gpt4o": "0", "azure-gpt5": "0", "azure-gpt5-chat": "0", "azure-dall-e": "0", "azure-o1": "0", "azure-o1-mini": "0", "azure-gpt41": "0", "azure-chatgpt": "0", "azure-gpt45": "0", "deepseek": "0", "grok-3": "0", "grok-4": "0", "moonshot": "0" }, "allowOpenAIToolUsage": "false", "tokensPunishmentFactor": "0" }, "build": "4d95b80 (modified) 
(main@SirPro3/oai-reverse-proxy)" }