"""Flask service that renders category-based prompt templates and forwards
them to a selected chat model, returning both the prompt and the output."""

import os

from flask import Flask, jsonify, request
import openai

app = Flask(__name__)

# Read the key from the environment; never hard-code secrets in source.
# The original placeholder is kept as a fallback for backward compatibility.
openai.api_key = os.environ.get("OPENAI_API_KEY", "your_openai_api_key")

# Logical model names accepted by the API -> provider model identifiers.
# NOTE(review): only "chatgpt" maps to an OpenAI model; the other identifiers
# will be rejected by the OpenAI endpoint unless a compatible gateway is used.
AI_MODELS = {
    "chatgpt": "gpt-4",
    "deepseek": "deepseek-chat",
    "claude": "claude-2",
    "meta_ai": "meta-llama-3",
}

# Prompt templates keyed by use-case category; "{placeholders}" are filled
# from the request's "params" object.
PROMPT_TEMPLATES = {
    "blogging": "Generate a compelling blog post introduction about {topic}.",
    "research": "Provide an in-depth analysis on {topic} with references.",
    "coding": "Write an efficient {language} function for {task}.",
    "social_media": "Create an engaging social media caption for {topic}.",
    "copywriting": "Write a high-converting ad copy for {product}.",
    "qna": "Explain {topic} in a simple and clear manner.",
}


@app.route('/generate_prompt', methods=['POST'])
def generate_prompt():
    """Build a prompt from a template and send it to the selected model.

    Expects a JSON body: {"model": str, "category": str, "params": dict}.
    Returns 200 with {"prompt", "output"}, 400 on invalid input, or 500 on
    a provider failure.
    """
    # silent=True returns None instead of raising on a missing/invalid JSON
    # body; the original `request.json` could crash with an unhandled 500.
    data = request.get_json(silent=True)
    if not data:
        return jsonify({"error": "Request body must be JSON"}), 400

    model = data.get("model")
    category = data.get("category")
    params = data.get("params", {})

    if model not in AI_MODELS:
        return jsonify({"error": "Invalid AI model"}), 400
    if category not in PROMPT_TEMPLATES:
        return jsonify({"error": "Invalid category"}), 400

    prompt_template = PROMPT_TEMPLATES[category]
    try:
        prompt = prompt_template.format(**params)
    except (KeyError, IndexError) as e:
        # A missing placeholder previously escaped as an unhandled KeyError
        # (HTTP 500); report it as a client error instead.
        return jsonify({"error": f"Missing template parameter: {e}"}), 400

    try:
        response = openai.ChatCompletion.create(
            model=AI_MODELS[model],
            messages=[{"role": "user", "content": prompt}],
        )
        output = response["choices"][0]["message"]["content"]
    except Exception as e:  # boundary handler: surface provider failures as 500
        return jsonify({"error": str(e)}), 500

    return jsonify({"prompt": prompt, "output": output})


if __name__ == '__main__':
    # debug=True is for local development only — do not enable in production.
    app.run(debug=True)

Post a Comment

0 Comments