
Commit

address display
Signed-off-by: B-Step62 <yuki.watanabe@databricks.com>
B-Step62 committed May 2, 2024
1 parent 90cf929 commit e8e4ed1
Showing 2 changed files with 14 additions and 8 deletions.
15 changes: 8 additions & 7 deletions mlflow/langchain/langchain_tracer.py
@@ -122,13 +122,14 @@ def _end_span(
         status=SpanStatus(SpanStatusCode.OK),
     ):
         """Close MLflow Span (or Trace if it is root component)"""
-        self._mlflow_client.end_span(
-            request_id=span.request_id,
-            span_id=span.span_id,
-            outputs=outputs,
-            attributes=attributes,
-            status=status,
-        )
+        with set_prediction_context(self._prediction_context):
+            self._mlflow_client.end_span(
+                request_id=span.request_id,
+                span_id=span.span_id,
+                outputs=outputs,
+                attributes=attributes,
+                status=status,
+            )
 
     def _reset(self):
         self._run_span_mapping = {}
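The tracer change above re-enters the prediction context captured at tracer construction time before calling end_span, so downstream code such as the trace exporter can tell the span was produced during model evaluation and skip rendering the trace in the notebook. A minimal sketch of how a contextvar-based prediction context of this kind could work; PredictionContext, set_prediction_context, get_prediction_context, and the is_evaluate flag are illustrative assumptions here, not MLflow's actual implementation:

# Illustrative sketch only -- not MLflow's actual code or module layout.
import contextvars
from contextlib import contextmanager
from dataclasses import dataclass
from typing import Optional


@dataclass
class PredictionContext:
    request_id: Optional[str] = None
    is_evaluate: bool = False  # assumed flag: True when running under mlflow.evaluate()


_prediction_context: contextvars.ContextVar = contextvars.ContextVar(
    "prediction_context", default=None
)


@contextmanager
def set_prediction_context(context: Optional[PredictionContext]):
    # Make `context` visible to any code executed inside the `with` block,
    # e.g. the tracer's call to end_span in the hunk above.
    token = _prediction_context.set(context)
    try:
        yield
    finally:
        _prediction_context.reset(token)


def get_prediction_context() -> Optional[PredictionContext]:
    return _prediction_context.get()

With this pattern, whatever self._prediction_context holds is restored for the duration of the end_span call, even if the LangChain callback fires outside the block where the context was originally set.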
7 changes: 6 additions & 1 deletion tests/langchain/test_langchain_autolog.py
@@ -411,7 +411,8 @@ def test_llmchain_autolog_log_inputs_outputs():
     assert new_session_id != session_id
 
 
-def test_loaded_llmchain_autolog_within_model_evaluation(tmp_path):
+@mock.patch("mlflow.tracing.export.mlflow.get_display_handler")
+def test_loaded_llmchain_autolog_within_model_evaluation(mock_get_display, tmp_path):
     model = create_openai_llmchain()
     model_path = tmp_path / "model"
     mlflow.langchain.save_model(model, path=model_path)
@@ -429,6 +430,10 @@ def test_loaded_llmchain_autolog_within_model_evaluation(tmp_path):
     assert trace.info.request_id == request_id
     assert trace.info.request_metadata["mlflow.sourceRun"] == run_id
 
+    # Trace should not be displayed in the notebook cell if it is in evaluation
+    mock_display_handler = mock_get_display.return_value
+    mock_display_handler.display_traces.assert_not_called()
+
 
 def test_agent_autolog(clear_trace_singleton):
     mlflow.langchain.autolog(log_models=True)
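The new assertions make the expected evaluation behavior explicit: the trace is still logged (the request_id and mlflow.sourceRun checks above), but the notebook display handler returned by mlflow.tracing.export.mlflow.get_display_handler must never be asked to render it. A rough sketch of the exporter-side gate this implies, reusing the illustrative get_prediction_context helper from the sketch after the tracer diff; the display_handler parameter and is_evaluate flag are assumptions:

# Rough sketch of the display gate the test asserts; `display_handler` stands
# in for whatever mlflow.tracing.export.mlflow.get_display_handler() returns.
def maybe_display_trace(trace, display_handler):
    ctx = get_prediction_context()  # from the sketch after the tracer diff
    if ctx is not None and ctx.is_evaluate:
        # Trace produced under mlflow.evaluate(): keep it logged but do not
        # render it in the notebook cell, so display_traces is never called.
        return
    display_handler.display_traces([trace])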
