@BenLiyanage
Created September 29, 2023 18:27
lever candidate evaluation

import json

import requests
from requests.auth import HTTPBasicAuth
from environs import Env
import openai

# Initialize environs and OpenAI
env = Env()
env.read_env()
openai.api_key = env("OPENAI_API_KEY")

# Constants
LEVER_API_KEY = env("LEVER_API_KEY")
POSTING_ID = '56db654f-aa40-43f8-a55c-86852bacd064'

# Endpoints and auth (Lever takes the API key as the basic-auth username, blank password)
URL_OPPORTUNITIES = "https://api.lever.co/v1/opportunities"
URL_POSTINGS = f"https://api.lever.co/v1/postings/{POSTING_ID}"
URL_RESUMES = "https://api.lever.co/v1/opportunities/{}/resumes"
basic_auth = HTTPBasicAuth(LEVER_API_KEY, '')
def fetch_job_description_from_lever():
    """Combine the posting's description, list sections, and closing into one text blob."""
    response = requests.get(URL_POSTINGS, auth=basic_auth)
    posting_data = response.json()["data"]
    job_description = posting_data["content"]["description"]
    job_requirements = ' '.join([item["text"] for item in posting_data["content"]["lists"]])
    job_closing = posting_data["content"]["closing"]
    return job_description + ' ' + job_requirements + ' ' + job_closing
def fetch_resumes_for_applicant(opportunity_id):
    # Note: currently unused; evaluate_applicant calls get_resume_data below instead.
    url = URL_RESUMES.format(opportunity_id)
    response = requests.get(url, auth=basic_auth)
    if response.status_code == 200:
        return response.json()["data"]
    else:
        print(f"Error {response.status_code} fetching resumes for applicant {opportunity_id}")
        return []
def get_resume_data(opportunity_id):
    url = f"https://api.lever.co/v1/opportunities/{opportunity_id}/resumes"
    response = requests.get(url, auth=basic_auth)
    if response.status_code == 200:
        resumes = response.json()["data"]
        # For simplicity, we'll consider the latest (first) resume in the list.
        if resumes and 'parsedData' in resumes[0]:
            return resumes[0]['parsedData']
    return None
def evaluate_applicant(applicant, job_description):
    # Fetch resume data for the applicant
    resume_data = get_resume_data(applicant["id"])
    evaluation_data = f"Applicant Profile: {applicant['headline']} "
    if resume_data:
        for position in resume_data["positions"]:
            evaluation_data += f"Position at {position['org']}: {position['title']} {position['summary']} "
        for school in resume_data["schools"]:
            evaluation_data += f"Education at {school['org']}: {school['degree']} in {school['field']} {school['summary']} "
    # gets hangry
    # if 'linkedin.com' in applicant['links'][0]:
    #     evaluation_data += f"LinkedIn: {applicant['links'][0]} "
    # Direct Lever lookup URL for the candidate
    lookup_url = f"https://hire.lever.co/candidates/{applicant['contact']}"
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": f"Given the job description: {job_description}, evaluate the following profile: {evaluation_data}. Respond only with a JSON object, for example: `{{\"classification\": \"Strong Match\", \"justification\": \"The candidate fits the job requirements well.\", \"email_content\": \"We are impressed with your profile and would like to proceed further.\"}}`"}
    ]
    # Debug print before making the OpenAI call
    print(f"\n\nDebug Info: {messages}")
    response = openai.ChatCompletion.create(model="gpt-4-0613", messages=messages)
    response_text = response.choices[0].message['content'].strip()
    print(f"\n\nResponse: {response_text}")
    try:
        # Parse the model's JSON reply; json.loads is safer than eval() on model output.
        structured_response = json.loads(response_text)
        classification = structured_response["classification"]
        justification = structured_response["justification"]
        email_content = structured_response["email_content"]
    except (ValueError, KeyError):
        # Default fallback in case the LLM does not return the expected structure
        classification = "Unknown"
        justification = "The LLM did not provide a valid structured response."
        email_content = ""
    result = {
        "url": lookup_url,
        "classification": classification,
        "justification": justification,
        "email_content": email_content
    }
    print(f"\n\nResult: {result}")
    # import pdb; pdb.set_trace()  # uncomment to pause and inspect each result
    return result
def main():
    job_description = fetch_job_description_from_lever()
    params = {"posting_id": POSTING_ID, "stage_id": "applicant-new", "pipeline": "applicant"}
    response = requests.get(URL_OPPORTUNITIES, auth=basic_auth, params=params)
    if response.status_code == 200:
        applicants = response.json()["data"]
        for applicant in applicants:
            evaluation = evaluate_applicant(applicant, job_description)
            print(evaluation)  # You can expand this to save to CSV or any other format
    else:
        print(f"Error {response.status_code} fetching opportunities")


if __name__ == "__main__":
    main()
@BenLiyanage (Author)
https://gist.github.com/BenLiyanage/079b3ce7df467879388306962fac734c#file-main-py-L71

Wondering what this does. Should I be posting the JD as a system message? Also, should I be reusing the same conversation for all applicants?
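
One possible arrangement, sketched here rather than taken from the gist itself: put the job description in the system message once, and build a fresh messages list for each applicant so one applicant's evaluation can't bleed into the next. This assumes the same openai<1.0 ChatCompletion API and gpt-4-0613 model used above; build_profile_text is a hypothetical stand-in for the headline/resume text that evaluate_applicant currently assembles inline.

def evaluate_with_system_prompt(applicants, job_description):
    # The job description is constant across applicants, so it lives in the system message.
    system_prompt = (
        "You are a recruiting assistant. Evaluate each candidate against this job "
        f"description and respond only with a JSON object: {job_description}"
    )
    results = []
    for applicant in applicants:
        profile_text = build_profile_text(applicant)  # hypothetical helper, not in the gist
        # Fresh conversation per applicant: just the system prompt plus this one profile.
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": f"Evaluate the following profile: {profile_text}"},
        ]
        response = openai.ChatCompletion.create(model="gpt-4-0613", messages=messages)
        results.append(response.choices[0].message["content"].strip())
    return results

Reusing one long conversation for every applicant would grow the token count with each call and let earlier profiles influence later classifications, so a separate conversation per applicant is the simpler default.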
