Skip to content

Commit 506fac6

Browse files
Refactor cache, logger paths and clean up imports
Updated cache and log directory paths to use '.mllmcelltype' for consistency. Removed unused LangExtract-related code from __init__.py. Cleaned up imports in functions.py. Improved consensus handling and logging in consensus.py. Added a warning for ignored base_url in Gemini provider.
1 parent 05a29d2 commit 506fac6

File tree

6 files changed

+15
-39
lines changed

6 files changed

+15
-39
lines changed

python/mllmcelltype/__init__.py

Lines changed: 0 additions & 32 deletions
Original file line number | Diff line number | Diff line change
@@ -38,18 +38,6 @@
3838
save_to_cache,
3939
)
4040

41-
# LangExtract components (simplified)
42-
43-
# LangExtract parser module (optional dependency)
44-
try:
45-
# LangExtract parser is available but not exposed at package level
46-
# Individual components can be imported directly if needed
47-
import mllmcelltype.langextract_parser # noqa: F401
48-
49-
LANGEXTRACT_AVAILABLE = True
50-
except ImportError:
51-
LANGEXTRACT_AVAILABLE = False
52-
5341
__version__ = "1.3.3"
5442

5543
__all__ = [
@@ -90,24 +78,4 @@
9078
"resolve_provider_base_url",
9179
"get_default_api_url",
9280
"validate_base_url",
93-
# LangExtract configuration (removed undefined functions)
9481
]
95-
96-
# Add LangExtract functionality if available
97-
if LANGEXTRACT_AVAILABLE:
98-
__all__.extend(
99-
[
100-
# LangExtract Parser
101-
"LangextractParser",
102-
"ParsingConfig",
103-
"CellTypeAnnotation",
104-
"ConsensusMetrics",
105-
"BatchAnnotationResult",
106-
"DiscussionAnalysis",
107-
"ParsingComplexity",
108-
"create_parser",
109-
"parse_cell_types",
110-
"analyze_consensus",
111-
"LANGEXTRACT_AVAILABLE",
112-
]
113-
)

python/mllmcelltype/cache_manager.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -15,7 +15,7 @@
1515

1616
def clear_mllmcelltype_cache():
1717
"""Clear the mLLMCelltype cache directory."""
18-
cache_dir = os.path.join(os.path.expanduser("~"), ".llmcelltype", "cache")
18+
cache_dir = os.path.join(os.path.expanduser("~"), ".mllmcelltype", "cache")
1919

2020
if os.path.exists(cache_dir):
2121
print(f"Found cache directory: {cache_dir}")
@@ -38,7 +38,7 @@ def clear_mllmcelltype_cache():
3838

3939
def get_cache_info():
4040
"""Get information about the current cache state."""
41-
cache_dir = os.path.join(os.path.expanduser("~"), ".llmcelltype", "cache")
41+
cache_dir = os.path.join(os.path.expanduser("~"), ".mllmcelltype", "cache")
4242

4343
if not os.path.exists(cache_dir):
4444
return {"exists": False, "path": cache_dir, "file_count": 0, "total_size": 0}

python/mllmcelltype/consensus.py

Lines changed: 4 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -549,6 +549,7 @@ def process_controversial_clusters(
549549
final_decision = None
550550
rounds_history = []
551551
current_votes = model_votes.copy()
552+
consensus_response = None # Initialize for later use
552553

553554
# Create initial consensus check prompt for LLM to calculate metrics
554555

@@ -728,7 +729,7 @@ def process_controversial_clusters(
728729
# Try to extract majority_prediction from the last consensus check
729730
# Only try to access consensus_response if it was actually created in this iteration
730731
last_consensus_check = None
731-
if rounds_history and len(rounds_history) >= 1 and "consensus_response" in locals():
732+
if rounds_history and consensus_response is not None:
732733
# Get the response from the last consensus check
733734
last_consensus_check = consensus_response
734735

@@ -1121,6 +1122,7 @@ def interactive_consensus_annotation(
11211122

11221123
# If there are controversial clusters, resolve them
11231124
resolved = {}
1125+
discussion_logs = {}
11241126
if controversial:
11251127
# Choose model for discussion
11261128
discussion_model = None
@@ -1232,7 +1234,7 @@ def interactive_consensus_annotation(
12321234
"controversial_clusters": controversial,
12331235
"resolved": resolved,
12341236
"model_annotations": model_results,
1235-
"discussion_logs": discussion_logs if "discussion_logs" in locals() else {},
1237+
"discussion_logs": discussion_logs,
12361238
"metadata": {
12371239
"timestamp": time.strftime("%Y-%m-%d %H:%M:%S"),
12381240
"models": models,

python/mllmcelltype/functions.py

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,5 @@
11
from __future__ import annotations
22

3-
from .logger import write_log
43
from .providers import (
54
process_anthropic,
65
process_deepseek,
@@ -13,7 +12,7 @@
1312
process_stepfun,
1413
process_zhipu,
1514
)
16-
from .utils import clean_annotation, find_agreement
15+
from .utils import find_agreement
1716

1817
# Global provider function mapping for reuse across modules
1918
PROVIDER_FUNCTIONS = {

python/mllmcelltype/logger.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -18,7 +18,7 @@
1818
logger = logging.getLogger("llmcelltype")
1919

2020
# Default log directory
21-
DEFAULT_LOG_DIR = os.path.join(os.path.expanduser("~"), ".llmcelltype", "logs")
21+
DEFAULT_LOG_DIR = os.path.join(os.path.expanduser("~"), ".mllmcelltype", "logs")
2222

2323

2424
def setup_logging(log_dir: Optional[str] = None, log_level: str = "INFO") -> None:

python/mllmcelltype/providers/gemini.py

Lines changed: 7 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -26,6 +26,13 @@ def process_gemini(
2626
"""
2727
write_log(f"Starting Gemini API request with model: {model}")
2828

29+
# Warn if base_url is provided (Gemini SDK doesn't support custom URLs)
30+
if base_url:
31+
write_log(
32+
"base_url parameter is ignored for Gemini (SDK doesn't support custom URLs)",
33+
level="warning",
34+
)
35+
2936
# Check if API key is provided and not empty
3037
if not api_key:
3138
error_msg = "Google API key is missing or empty"

0 commit comments

Comments (0)