Merged
application/pages/1_🌍_Natural_Language_Querying.py (4 changes: 2 additions & 2 deletions)
@@ -361,8 +361,8 @@ def main():
         st.session_state.messages[selected_profile].append(
             {"role": "assistant", "content": current_nlq_chain.get_generated_sql_explain()})
 
-        st.markdown('The generated SQL statement is:')
-        st.code(current_nlq_chain.get_generated_sql(), language="sql")
+        with st.expander("The generated SQL"):
+            st.code(current_nlq_chain.get_generated_sql(), language="sql")
 
         st.markdown('Generation process explanations:')
         st.markdown(current_nlq_chain.get_generated_sql_explain())
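Note on the change above: Streamlit's st.expander renders a collapsible container, so the generated SQL is now hidden behind a toggle instead of always being printed above the explanation. A minimal standalone sketch of the before/after pattern (the generated_sql value is a placeholder, not the app's real output):

import streamlit as st

generated_sql = "SELECT id, name FROM users LIMIT 10"  # placeholder SQL for illustration

# Before: the SQL statement was always rendered.
st.markdown('The generated SQL statement is:')
st.code(generated_sql, language="sql")

# After: the SQL statement is tucked into a collapsible expander.
with st.expander("The generated SQL"):
    st.code(generated_sql, language="sql")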
application/utils/llm.py (6 changes: 3 additions & 3 deletions)
@@ -61,7 +61,7 @@ def invoke_model_claude3(model_id, system_prompt, messages, max_tokens, with_res
     return response_body
 
 
-@logger.catch
+
 def get_sagemaker_client():
     global sagemaker_client
     if not sagemaker_client:
@@ -191,7 +191,7 @@ def claude3_to_sql(ddl, hints, search_box, sql_examples=None, ner_example=None,
     return final_response
 
 
-@logger.catch
+
 def sagemaker_to_explain(endpoint_name: str, sql: str, with_response_stream=False):
     body = json.dumps({"query": generate_sagemaker_explain_prompt(sql),
                        "stream": with_response_stream,})
@@ -204,7 +204,7 @@ def sagemaker_to_explain(endpoint_name: str, sql: str, with_response_stream=Fals
     return response
 
 
-@logger.catch
+
 def sagemaker_to_sql(ddl, hints, search_box, endpoint_name, sql_examples=None, ner_example=None, dialect='mysql',
                      model_provider=None, with_response_stream=False):
     body = json.dumps({"prompt": generate_sagemaker_sql_prompt(ddl, hints, search_box, sql_examples, ner_example,
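Note on the three decorator changes above: they appear to drop loguru's @logger.catch from the SageMaker helpers. By default @logger.catch logs any exception raised inside the wrapped function and swallows it (the call then returns None), so removing it lets errors propagate to the caller. A small sketch of that behavioral difference, assuming loguru is installed (the divide functions are illustrative, not part of the repo):

from loguru import logger


@logger.catch
def divide_logged(a, b):
    # With the decorator, a ZeroDivisionError is logged and swallowed;
    # the caller simply receives None.
    return a / b


def divide_plain(a, b):
    # Without the decorator (as in this diff), the exception propagates
    # and the caller has to handle it.
    return a / b


print(divide_logged(1, 0))   # logs the traceback, prints None
try:
    divide_plain(1, 0)
except ZeroDivisionError:
    print("caller sees the error directly")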
application/utils/prompts/generate_prompt.py (2 changes: 1 addition & 1 deletion)
@@ -142,7 +142,7 @@ def generate_llm_prompt(ddl, hints, search_box, sql_examples=None, ner_example=N
             example_sql_prompt += "A: ```sql\n" + item['_source']['sql'] + "```\n"
 
     if ner_example:
-        for item in sql_examples:
+        for item in ner_example:
             example_ner_prompt += "ner: " + item['_source']['entity'] + "\n"
             example_ner_prompt += "ner info:" + item['_source']['comment'] + "\n"
 
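Note on the one-line fix above: the NER few-shot block was iterating over sql_examples instead of ner_example, so the prompt's NER section was built from the wrong search hits (and would raise a KeyError if those hits had no 'entity' field). A simplified sketch of the corrected assembly, using made-up OpenSearch-style hits whose field names only mirror the '_source' access in the diff:

# Hypothetical retrieval hits; only the '_source' layout matches the real code.
ner_example = [
    {"_source": {"entity": "users", "comment": "registered customer table"}},
    {"_source": {"entity": "orders", "comment": "one row per purchase"}},
]

example_ner_prompt = ""
if ner_example:
    # The fix: build the NER section from ner_example, not sql_examples.
    for item in ner_example:
        example_ner_prompt += "ner: " + item['_source']['entity'] + "\n"
        example_ner_prompt += "ner info:" + item['_source']['comment'] + "\n"

print(example_ner_prompt)
# ner: users
# ner info:registered customer table
# ner: orders
# ner info:one row per purchase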