# Quick Start

## Install SDK

### Python

```bash
pip install omniedge
```

### JavaScript

```bash
npm install omniedge
```

## Initialize Client

### Python

```python
from omniedge import OmniEdge

# Initialize with API key
client = OmniEdge(api_key="sk-your-api-key-here")
```

### JavaScript

```javascript
import { OmniEdge } from 'omniedge';

// Initialize with API key
const client = new OmniEdge({ apiKey: 'sk-your-api-key-here' });
```

## Make First Request
### Python Example

```python
from omniedge import OmniEdge

with OmniEdge(api_key="sk-your-api-key-here") as client:
    # Make chat request
    response = client.chat.create(
        model="openai/gpt-4o",
        messages=[
            {"role": "user", "content": "Hello, world!"}
        ]
    )

    # Print response
    print(response.choices[0].message.content)
```

### JavaScript Example

```javascript
import { OmniEdge } from 'omniedge';

const client = new OmniEdge({ apiKey: 'sk-your-api-key-here' });

async function main() {
  // Make chat request
  const response = await client.chat.create({
    model: "openai/gpt-4o",
    messages: [
      { role: "user", content: "Hello, world!" }
    ]
  });

  // Print response
  console.log(response.choices[0].message.content);
}

main();
```

## Streaming Response
### Python Streaming Example

```python
from omniedge import OmniEdge

with OmniEdge(api_key="sk-your-api-key-here") as client:
    # Make streaming request
    stream = client.chat.create(
        model="openai/gpt-4o",
        messages=[
            {"role": "user", "content": "Tell me a story"}
        ],
        stream=True
    )

    # Process the response chunk by chunk
    for chunk in stream:
        if chunk.choices:
            content = chunk.choices[0].delta.content
            if content:
                print(content, end='', flush=True)
```

### JavaScript Streaming Example

```javascript
import { OmniEdge } from 'omniedge';

const client = new OmniEdge({ apiKey: 'sk-your-api-key-here' });

async function main() {
  // Make streaming request
  const stream = await client.chat.create({
    model: "openai/gpt-4o",
    messages: [
      { role: "user", content: "Tell me a story" }
    ],
    stream: true
  });

  // Process the response chunk by chunk
  // (guard against chunks whose choices array is empty)
  for await (const chunk of stream) {
    if (chunk.choices?.length) {
      const content = chunk.choices[0].delta.content;
      if (content) {
        process.stdout.write(content);
      }
    }
  }
}

main();
```

## Error Handling
### Python Error Handling

```python
from omniedge import OmniEdge
from omniedge.exceptions import APIError, AuthenticationError

try:
    with OmniEdge(api_key="sk-your-api-key-here") as client:
        response = client.chat.create(
            model="openai/gpt-4o",
            messages=[{"role": "user", "content": "Hello"}]
        )
        print(response.choices[0].message.content)
except AuthenticationError:
    print("Authentication failed, please check your API key")
except APIError as e:
    print(f"API error: {e.message}")
except Exception as e:
    print(f"Unknown error: {e}")
```

### JavaScript Error Handling

```javascript
import { OmniEdge } from 'omniedge';

const client = new OmniEdge({ apiKey: 'sk-your-api-key-here' });

async function main() {
  try {
    const response = await client.chat.create({
      model: "openai/gpt-4o",
      messages: [{ role: "user", content: "Hello" }]
    });
    console.log(response.choices[0].message.content);
  } catch (error) {
    if (error.status === 401) {
      console.log("Authentication failed, please check your API key");
    } else if (error.status >= 400 && error.status < 500) {
      console.log(`Client error: ${error.message}`);
    } else if (error.status >= 500) {
      console.log(`Server error: ${error.message}`);
    } else {
      console.log(`Unknown error: ${error.message}`);
    }
  }
}

main();
```

## Best Practices
Section titled “Best Practices”- Use environment variables to store API keys
- Enable streaming responses for long text
- Implement retry mechanisms for temporary errors
- Set reasonable request frequency to avoid rate limiting
- Use async methods to improve performance
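For the first point, here is a minimal sketch of loading the key from an environment variable instead of hard-coding it. The variable name `OMNIEDGE_API_KEY` is an assumption chosen for this example, not a name the SDK requires.

```python
import os

from omniedge import OmniEdge

# OMNIEDGE_API_KEY is a conventional name used for this sketch,
# not one the SDK mandates; set it in your shell or .env tooling.
api_key = os.environ.get("OMNIEDGE_API_KEY")
if not api_key:
    raise RuntimeError("OMNIEDGE_API_KEY is not set")

client = OmniEdge(api_key=api_key)
```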
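For retries and rate limiting, one possible approach is a small helper (here called `chat_with_retry`, defined in this sketch rather than provided by the SDK) that backs off exponentially. It assumes transient failures and rate-limit responses surface as the `APIError` shown in the error-handling section; if the SDK exposes a more specific rate-limit exception, catch that instead.

```python
import time

from omniedge import OmniEdge
from omniedge.exceptions import APIError


def chat_with_retry(client, messages, max_retries=3, base_delay=1.0):
    """Retry a chat request with exponential backoff on APIError."""
    for attempt in range(max_retries + 1):
        try:
            return client.chat.create(
                model="openai/gpt-4o",
                messages=messages,
            )
        except APIError:
            if attempt == max_retries:
                raise  # give up after the final attempt
            # Wait 1s, 2s, 4s, ... before retrying
            time.sleep(base_delay * (2 ** attempt))


with OmniEdge(api_key="sk-your-api-key-here") as client:
    response = chat_with_retry(client, [{"role": "user", "content": "Hello"}])
    print(response.choices[0].message.content)
```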
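For asynchronous use, the JavaScript examples above are already async. In Python, if the SDK does not ship a native async client, a workable sketch is to fan out the documented synchronous calls with `asyncio.to_thread`; this assumes the client is safe to call from multiple threads, which the quick start does not confirm.

```python
import asyncio

from omniedge import OmniEdge


async def main():
    prompts = ["Hello", "Tell me a joke", "Name three primes"]
    with OmniEdge(api_key="sk-your-api-key-here") as client:
        # Run the documented synchronous calls concurrently in worker threads.
        responses = await asyncio.gather(*(
            asyncio.to_thread(
                client.chat.create,
                model="openai/gpt-4o",
                messages=[{"role": "user", "content": prompt}],
            )
            for prompt in prompts
        ))
    for response in responses:
        print(response.choices[0].message.content)


asyncio.run(main())
```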