Skip to content

Commit a823983

Browse files
Merge pull request #42 from renan-siqueira/develop
Develop -> main
2 parents 2c58bd9 + b2c3a07 commit a823983

File tree

9 files changed

+254
-202
lines changed

9 files changed

+254
-202
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,3 +6,4 @@ dist/
66
.env
77
.samples
88
.session
9+
.rsazure

CHANGELOG.md

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,32 @@
33
All notable changes to this project will be documented in this file.
44
___
55

6+
## [0.6.2] - 2025-04-07
7+
8+
### Changed
9+
- Refactored all template files in `samples/templates/` to align with the modular CLI architecture introduced in v0.6.0
10+
- Replaced all uses of `call_azure_openai_handler()` with the `main()` function
11+
- Replaced manual print formatting with `ChatResult().print()` for consistent output
12+
- Replaced direct logging calls with `get_logger()` and `result.to_log_dict()`
13+
- Replaced hardcoded message structures with `get_context_messages()` for context support
14+
- Added defensive `response.choices` check and fallback token estimation
15+
- Standardized import style: `import rsazure_openai_toolkit as rschat`
16+
- Wrapped logic in `main()` blocks for reuse and testability
17+
18+
### Affected Templates
19+
- `basic_usage.py.j2`
20+
- `chat_loop_usage.py.j2`
21+
- `env_usage.py.j2`
22+
- `env_chat_loop_usage.py.j2`
23+
24+
> These templates are now fully aligned with the official CLI (`rschat`) and offer consistent logging, context persistence, and safety across all use cases.
25+
26+
### Notes
27+
- No breaking changes
28+
- Recommended: regenerate examples using `rschat-tools samples`
29+
- Version bumped to 0.6.2
30+
___
31+
632
## [0.6.1] - 2025-04-05
733

834
### Changed

README.md

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,14 +59,18 @@ A fast, modular, secure, and auditable Python toolkit to integrate with Azure Op
5959
6060
___
6161

62-
## 🚀 What's New in v0.6.0
62+
## 🚀 What's New in v0.6.x
6363

6464
- Object-oriented CLI (`rschat`, `rschat-tools`) — easier to test, extend and reuse
6565
- Persistent session context — with system prompt validation and full/trimmed history tracking
6666
- Reproducible model config — centralized via `get_model_config()` and `ModelConfig`
6767
- Transparent logging — structured logs via `InteractionLogger` (CSV/JSONL)
6868
- Modular architecture — folders like `core/`, `session/`, `logging/`, `model_config/`, etc.
6969

70+
### v0.6.2
71+
- All sample templates have been fully aligned with the core architecture and CLI
72+
- Logging, context, result formatting, and safety checks now follow production-grade standards
73+
7074
> Check the full [CHANGELOG](https://github.com/renan-siqueira/rsazure-openai-toolkit/blob/main/CHANGELOG.md) for details.
7175
___
7276

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
44

55
[project]
66
name = "rsazure-openai-toolkit"
7-
version = "0.6.1"
7+
version = "0.6.2"
88
description = "A fast, modular, secure, and auditable toolkit to integrate with Azure OpenAI — with a friendly CLI and dev-first architecture."
99
authors = [{ name = "Renan Siqueira Antonio", email = "[email protected]" }]
1010
readme = { file = "README.md", content-type = "text/markdown" }

src/rsazure_openai_toolkit/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@
3333
- rschat.ContextInfo(...)
3434
"""
3535

36-
__version__ = "0.6.1"
36+
__version__ = "0.6.2"
3737
__description__ = "A fast, modular, secure, and auditable toolkit to integrate with Azure OpenAI — with a friendly CLI and dev-first architecture."
3838
__author__ = "Renan Siqueira Antonio"
3939
__license__ = "MIT"
Lines changed: 63 additions & 55 deletions
Original file line numberDiff line numberDiff line change
@@ -1,56 +1,64 @@
11
import time
2-
from rsazure_openai_toolkit import call_azure_openai_handler
3-
from rsazure_openai_toolkit.utils import get_model_config
4-
from rsazure_openai_toolkit.logging.interaction_logger import InteractionLogger
5-
6-
7-
messages = [
8-
{"role": "system", "content": "You are a helpful assistant."},
9-
{"role": "user", "content": "Say hello!"}
10-
]
11-
12-
model_config = get_model_config()
13-
start = time.time()
14-
15-
response = call_azure_openai_handler(
16-
api_key="your-api-key",
17-
azure_endpoint="https://your-resource.openai.azure.com/",
18-
api_version="2023-12-01-preview",
19-
deployment_name="your-deployment-name",
20-
messages=messages,
21-
**model_config
22-
)
23-
24-
elapsed = round(time.time() - start, 2)
25-
content = response.choices[0].message.content
26-
usage = response.usage.model_dump() if response.usage else {}
27-
model_used = response.model
28-
seed_used = model_config.get("seed")
29-
30-
input_tokens = usage.get("prompt_tokens", 0)
31-
output_tokens = usage.get("completion_tokens", 0)
32-
total_tokens = usage.get("total_tokens", input_tokens + output_tokens)
33-
34-
print(f"\nAssistant:\n\t{content}")
35-
print("\n----- REQUEST INFO -----")
36-
print(f"📤 Input tokens: {input_tokens}")
37-
print(f"📥 Output tokens: {output_tokens}")
38-
print(f"🧾 Total tokens: {total_tokens}")
39-
print(f"🧠 Model: {model_used}")
40-
print(f"🎲 Seed: {seed_used}")
41-
print(f"⏱️ Time: {elapsed}s\n")
42-
43-
# Optional logging mode: none [default], jsonl, csv
44-
logger = InteractionLogger(mode="none", path="chat_logs.jsonl")
45-
46-
if logger.enabled:
47-
logger.log({
48-
"question": "Say hello!",
49-
"response": content,
50-
"model": model_used,
51-
"usage": usage,
52-
"model_config": model_config,
53-
"raw_response": response.model_dump()
54-
})
55-
else:
56-
print("📭 Logging is disabled (RSCHAT_LOG_MODE is 'none' or not configured)\n")
2+
import rsazure_openai_toolkit as rschat
3+
4+
5+
def main():
6+
user_input = "Say hello!"
7+
rschat.load_env()
8+
9+
context_data = rschat.get_context_messages(user_input=user_input)
10+
messages = context_data["messages"]
11+
12+
model_config = rschat.get_model_config()
13+
config = rschat.get_cli_config()
14+
15+
start = time.time()
16+
response = rschat.main(
17+
api_key=config["api_key"],
18+
azure_endpoint=config["endpoint"],
19+
api_version=config["version"],
20+
deployment_name=config["deployment_name"],
21+
messages=messages,
22+
**model_config
23+
)
24+
elapsed = round(time.time() - start, 2)
25+
26+
if not response.choices:
27+
print("❌ No response received from the model.")
28+
return
29+
30+
content = response.choices[0].message.content
31+
usage = response.usage.model_dump() if response.usage else {}
32+
33+
input_tokens = usage.get("prompt_tokens", 0)
34+
output_tokens = usage.get("completion_tokens") or rschat.estimate_input_tokens(
35+
messages=[{"role": "assistant", "content": content}],
36+
deployment_name=config["deployment_name"]
37+
)
38+
total_tokens = usage.get("total_tokens", input_tokens + output_tokens)
39+
40+
result = rschat.ChatResult(
41+
question=user_input,
42+
response_text=content,
43+
system_prompt=config["system_prompt"],
44+
model=response.model,
45+
seed=model_config.get("seed"),
46+
input_tokens=input_tokens,
47+
output_tokens=output_tokens,
48+
total_tokens=total_tokens,
49+
elapsed_time=elapsed,
50+
model_config=model_config,
51+
raw_response=response.model_dump()
52+
)
53+
54+
result.print()
55+
56+
logger = rschat.get_logger()
57+
if logger.enabled:
58+
logger.log(result.to_log_dict())
59+
else:
60+
print(f"📭 Logging is disabled ({logger})\n")
61+
62+
63+
if __name__ == "__main__":
64+
main()
Lines changed: 51 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -1,72 +1,81 @@
11
import time
2-
from rsazure_openai_toolkit import call_azure_openai_handler
3-
from rsazure_openai_toolkit.utils.utils import get_model_config
4-
from rsazure_openai_toolkit.logging.interaction_logger import InteractionLogger
2+
import rsazure_openai_toolkit as rschat
53

64

7-
def chat():
5+
def chat_loop():
86
print("🔁 Chat loop started")
97
print("💡 Type 'exit' to quit\n")
108

11-
# Optional logging mode: none [default], jsonl, csv
12-
logger = InteractionLogger(mode="none", path="chat_logs.jsonl")
9+
# Load environment variables
10+
rschat.load_env()
11+
config = rschat.get_cli_config()
12+
logger = rschat.get_logger()
1313

1414
while True:
15-
user_input = input("You: ")
16-
if user_input.strip().lower() == "exit":
15+
user_input = input("You: ").strip()
16+
if user_input.lower() == "exit":
1717
print("👋 Goodbye!")
1818
break
19-
if not user_input.strip():
19+
if not user_input:
2020
continue
2121

22-
messages = [
23-
{"role": "system", "content": "You are a helpful assistant."},
24-
{"role": "user", "content": user_input}
25-
]
22+
# Prepare messages with or without persistent context
23+
context_data = rschat.get_context_messages(user_input=user_input)
24+
messages = context_data["messages"]
2625

27-
model_config = get_model_config()
28-
start = time.time()
26+
model_config = rschat.get_model_config()
2927

30-
response = call_azure_openai_handler(
31-
api_key="your-api-key",
32-
azure_endpoint="https://your-resource.openai.azure.com/",
33-
api_version="2023-12-01-preview",
34-
deployment_name="your-deployment-name",
28+
start = time.time()
29+
response = rschat.main(
30+
api_key=config["api_key"],
31+
azure_endpoint=config["endpoint"],
32+
api_version=config["version"],
33+
deployment_name=config["deployment_name"],
3534
messages=messages,
3635
**model_config
3736
)
38-
3937
elapsed = round(time.time() - start, 2)
38+
39+
if not response.choices:
40+
print("❌ No response received from the model.")
41+
continue
42+
4043
content = response.choices[0].message.content
4144
usage = response.usage.model_dump() if response.usage else {}
42-
model_used = response.model
43-
seed_used = model_config.get("seed")
4445

4546
input_tokens = usage.get("prompt_tokens", 0)
46-
output_tokens = usage.get("completion_tokens", 0)
47+
output_tokens = usage.get("completion_tokens") or rschat.estimate_input_tokens(
48+
messages=[{"role": "assistant", "content": content}],
49+
deployment_name=config["deployment_name"]
50+
)
4751
total_tokens = usage.get("total_tokens", input_tokens + output_tokens)
4852

49-
print(f"\nAssistant:\n\t{content}")
50-
print("\n----- REQUEST INFO -----")
51-
print(f"📤 Input tokens: {input_tokens}")
52-
print(f"📥 Output tokens: {output_tokens}")
53-
print(f"🧾 Total tokens: {total_tokens}")
54-
print(f"🧠 Model: {model_used}")
55-
print(f"🎲 Seed: {seed_used}")
56-
print(f"⏱️ Time: {elapsed}s\n")
53+
# Build the result and print it in the standard format
54+
result = rschat.ChatResult(
55+
question=user_input,
56+
response_text=content,
57+
system_prompt=config["system_prompt"],
58+
model=response.model,
59+
seed=model_config.get("seed"),
60+
input_tokens=input_tokens,
61+
output_tokens=output_tokens,
62+
total_tokens=total_tokens,
63+
elapsed_time=elapsed,
64+
model_config=model_config,
65+
raw_response=response.model_dump()
66+
)
67+
68+
result.print()
5769

5870
if logger.enabled:
59-
logger.log({
60-
"question": user_input,
61-
"response": content,
62-
"model": model_used,
63-
"usage": usage,
64-
"model_config": model_config,
65-
"raw_response": response.model_dump()
66-
})
71+
logger.log(result.to_log_dict())
6772
else:
68-
print("📭 Logging is disabled (RSCHAT_LOG_MODE is 'none' or not configured)\n")
73+
print(f"📭 Logging is disabled ({logger})\n")
74+
75+
76+
def main():
77+
chat_loop()
6978

7079

7180
if __name__ == "__main__":
72-
chat()
81+
main()

0 commit comments

Comments
 (0)