@@ -204,9 +204,9 @@ def run_extractor_prompt(req_data):
 
 def run_chat_prompt(req_data):
     prompt = req_data['prompt']
-    staff_profile = req_data['staff_profile']
-    user_profile = req_data['user_profile']
-    dialogue_history = req_data['dialogue_history']
+    staff_profile = req_data.get('staff_profile', {})
+    user_profile = req_data.get('user_profile', {})
+    dialogue_history = req_data.get('dialogue_history', [])
     model_name = req_data['model_name']
     current_timestamp = req_data['current_timestamp'] / 1000
     prompt_context = {**staff_profile, **user_profile}
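
A minimal sketch (not part of the diff) of the behavioral change: with bracket access, a request that omits 'staff_profile', 'user_profile', or 'dialogue_history' raises KeyError before any prompt is built; with .get() and empty defaults, run_chat_prompt can proceed and prompt_context simply ends up empty. The req_data values below are illustrative, not taken from the PR.

    # Hypothetical request that omits the now-optional fields.
    req_data = {
        'prompt': 'Hi',
        'model_name': 'some-model',          # placeholder value
        'current_timestamp': 1700000000000,  # milliseconds, as in the handler
    }

    staff_profile = req_data.get('staff_profile', {})        # {} instead of KeyError
    user_profile = req_data.get('user_profile', {})          # {}
    dialogue_history = req_data.get('dialogue_history', [])  # []

    prompt_context = {**staff_profile, **user_profile}       # {} -- no crash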