ba96-0-packard.hf.space
34.192.171.49
Public Scan
Submitted URL: http://ba96-0-packard.hf.space/
Effective URL: https://ba96-0-packard.hf.space/
Submission: On January 09 via api from US — Scanned from DE
Form analysis
0 forms found in the DOM

Text Content
BA96-0 / MOEMOE

GPT-3.5 Turbo: no wait / GPT-4: no wait / GPT-4 32k: no wait / GPT-4 Turbo: no wait / Claude: no wait

--------------------------------------------------------------------------------

SERVICE INFO

{
  "uptime": 10396,
  "endpoints": {
    "openai": "https://ba96-0-packard.hf.space/proxy/openai",
    "openai2": "https://ba96-0-packard.hf.space/proxy/openai/turbo-instruct",
    "anthropic": "https://ba96-0-packard.hf.space/proxy/anthropic",
    "google-ai": "https://ba96-0-packard.hf.space/proxy/google-ai",
    "mistral-ai": "https://ba96-0-packard.hf.space/proxy/mistral-ai",
    "aws": "https://ba96-0-packard.hf.space/proxy/aws/claude",
    "azure": "https://ba96-0-packard.hf.space/proxy/azure/openai"
  },
  "proompts": 76,
  "tookens": "1.22m",
  "proomptersNow": 1,
  "openaiKeys": 27,
  "openaiOrgs": 21,
  "anthropicKeys": 1,
  "gpt4": {
    "usage": "0 tokens",
    "activeKeys": 4,
    "overQuotaKeys": 5,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "turbo": {
    "usage": "0 tokens",
    "activeKeys": 9,
    "revokedKeys": 7,
    "overQuotaKeys": 11,
    "trialKeys": 4,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4-turbo": {
    "usage": "1.22m tokens",
    "activeKeys": 4,
    "overQuotaKeys": 5,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4-32k": {
    "usage": "0 tokens",
    "activeKeys": 1,
    "overQuotaKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "claude": {
    "usage": "0 tokens",
    "activeKeys": 1,
    "revokedKeys": 0,
    "pozzedKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "config": {
    "gatekeeper": "proxy_key",
    "maxIpsAutoBan": "true",
    "textModelRateLimit": "4",
    "imageModelRateLimit": "4",
    "maxContextTokensOpenAI": "128000",
    "maxContextTokensAnthropic": "0",
    "maxOutputTokensOpenAI": "1024",
    "maxOutputTokensAnthropic": "2048",
    "allowAwsLogging": "false",
    "promptLogging": "false",
    "tokenQuota": {
      "turbo": "0",
      "gpt4": "0",
      "gpt4-32k": "0",
      "gpt4-turbo": "0",
      "dall-e": "0",
      "claude": "0",
      "gemini-pro": "0",
      "mistral-tiny": "0",
      "mistral-small": "0",
      "mistral-medium": "0",
      "aws-claude": "0",
      "azure-turbo": "0",
      "azure-gpt4": "0",
      "azure-gpt4-32k": "0",
      "azure-gpt4-turbo": "0"
    }
  },
  "build": "2239bea (main@khanon/oai-reverse-proxy)"
}
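
The endpoints listed above are OpenAI- and Anthropic-compatible reverse-proxy routes, and the gatekeeper value of "proxy_key" suggests clients authenticate with a key issued by the proxy operator rather than a real upstream API key. Below is a minimal sketch of how a client might call the OpenAI route with the standard openai Python SDK; the proxy key, the model name, and the exact path suffix under /proxy/openai are assumptions for illustration, and max_tokens is kept within the advertised maxOutputTokensOpenAI limit of 1024.

# Minimal sketch, assuming the listed endpoint exposes an OpenAI-compatible
# /v1 path and accepts a proxy-issued key (path suffix and key are assumptions).
from openai import OpenAI

client = OpenAI(
    base_url="https://ba96-0-packard.hf.space/proxy/openai/v1",  # assumed path suffix
    api_key="PROXY_KEY_HERE",  # hypothetical proxy-issued key, not an OpenAI key
)

response = client.chat.completions.create(
    model="gpt-4-turbo",  # assumed model name; availability depends on the pooled keys
    max_tokens=512,       # stays under the advertised maxOutputTokensOpenAI of 1024
    messages=[{"role": "user", "content": "Hello"}],
)
print(response.choices[0].message.content)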