Prompt in external file
parent 43708d2e31
commit e1483e0a29
my-app/utils/prompt.txt (new file, +1 line)
@@ -0,0 +1 @@
+Provide a concise summary of the resume, highlighting key skills and potential areas for improvement, in at least 5 sentences.
@@ -2,24 +2,25 @@
 import sys
 import os
 import argparse
+import io
 from dotenv import load_dotenv
 from openai import OpenAI
 from pdfminer.high_level import extract_text

 # Load environment variables from .env file
-load_dotenv()
+load_dotenv(dotenv_path=os.path.join(os.path.dirname(__file__), '.env'))

 client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

 def analyze_resume(text):
     response = client.chat.completions.create(
-        model="gpt-4o-mini",
+        model=os.getenv("MODEL_NAME"),
         messages=[{
             "role": "system",
-            "content": "Provide a concise summary of the resume, highlighting key skills and potential areas for improvement, in a few sentences."
+            "content": open(os.path.join(os.path.dirname(__file__), "prompt.txt"), "r").read()
         },
         {"role": "user", "content": text}],
-        max_tokens=200  # Add a max_tokens parameter to limit the output length
+        max_tokens=int(os.getenv("MAX_TOKENS"))
     )
     return response

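The updated load_dotenv call looks for a .env file sitting next to the script (presumably my-app/utils/, alongside the new prompt.txt), and the previously hard-coded model and token limit now come from MODEL_NAME and MAX_TOKENS. An illustrative .env for that layout follows; the values are placeholders, with MODEL_NAME and MAX_TOKENS mirroring the old hard-coded gpt-4o-mini and 200:

# Example .env, placed next to the script. Placeholder values only.
OPENAI_API_KEY=sk-...
MODEL_NAME=gpt-4o-mini
MAX_TOKENS=200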
@@ -1 +0,0 @@
-Subproject commit be751b77fd71ac830d81090ad792091493040729
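Not part of the commit: the new code reads prompt.txt with a bare open(...).read() and casts MAX_TOKENS with int() directly, so a missing file or an unset variable raises at call time. Below is a minimal sketch of a more defensive way to load the same settings, assuming the same file layout and variable names as the diff above, with the old hard-coded values as fallbacks.

# Sketch only, not part of the commit: defensive loading of the same settings.
import os

HERE = os.path.dirname(os.path.abspath(__file__))

def load_prompt(filename="prompt.txt"):
    # Read the system prompt from a file next to the script; the context
    # manager closes the handle, unlike a bare open(...).read().
    with open(os.path.join(HERE, filename), "r", encoding="utf-8") as f:
        return f.read().strip()

def load_settings():
    # Fall back to the values that were hard-coded before this commit
    # instead of failing on int(None) when MAX_TOKENS is unset.
    return {
        "model": os.getenv("MODEL_NAME", "gpt-4o-mini"),
        "max_tokens": int(os.getenv("MAX_TOKENS", "200")),
    }

With helpers like these, analyze_resume could pass settings["model"] and settings["max_tokens"] to client.chat.completions.create unchanged.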