-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathapp.py
More file actions
172 lines (138 loc) · 6.56 KB
/
app.py
File metadata and controls
172 lines (138 loc) · 6.56 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
"""Streamlit front end for the geospatial analysis LangGraph agent."""
import streamlit as st
import os
# Disable artifact registry before importing agent
# (the flag must be in the environment before langgraph_agent is imported,
# since the agent module presumably reads it at import time — keep this order).
os.environ['DISABLE_ARTIFACT_REGISTRY'] = '1'
from langgraph_agent import graph

# Page-wide configuration; st.set_page_config must run before any other
# Streamlit command in the script.
st.set_page_config(
    page_title="Geospatial Analysis Agent",
    page_icon="🌍",
    layout="wide"
)

# App header shown above the query input.
st.title("Geospatial Analysis Agent")
st.markdown("Query geospatial data using natural language")
# Ensure the query-history list exists before any widget logic touches it.
if "history" not in st.session_state:
    st.session_state["history"] = []

# Free-form natural-language query box.
query = st.text_area(
    "Enter your query:",
    placeholder="e.g., How did cropping intensity change from 2017 to 2023 at latitude 25.317, longitude 75.097?",
    height=100,
)
# Sidebar: one-click example queries, grouped by analysis type.
_EXAMPLE_QUERIES = {
    "Timeseries Analysis": [
        ("Cropping intensity 2017-2023",
         "How did cropping intensity change from 2017 to 2023 at latitude 25.31698754297551, longitude 75.09702609349773?"),
        ("Precipitation trend",
         "What was the precipitation trend from 2017 to 2023 at latitude 25.31698754297551, longitude 75.09702609349773?"),
    ],
    "Spatial Analysis": [
        ("Water bodies count",
         "How many water bodies are within 1km of coordinates 25.317, 75.097?"),
        ("Vegetation index",
         "What's the average vegetation index around latitude 25.31, longitude 75.09?"),
        ("Land use analysis",
         "Analyze land use distribution around latitude 25.317, longitude 75.097"),
    ],
}

with st.sidebar:
    st.header("Example Queries")
    for section_title, examples in _EXAMPLE_QUERIES.items():
        st.subheader(section_title)
        for button_label, example_text in examples:
            # Clicking stashes the example; it is consumed below on rerun.
            if st.button(button_label, use_container_width=True):
                st.session_state.selected_query = example_text

# If an example was clicked, adopt it as the current query (consume once).
if "selected_query" in st.session_state:
    query = st.session_state.pop("selected_query")
# Action row: primary "Analyze" and a history reset, side by side.
analyze_col, clear_col, _spacer = st.columns([1, 1, 4])
submit = analyze_col.button("Analyze", type="primary", use_container_width=True)
clear = clear_col.button("Clear History", use_container_width=True)

# Wiping the history triggers an immediate rerun so the page redraws empty.
if clear:
    st.session_state.history = []
    st.rerun()
# Process query
if submit and query:
    with st.spinner("Processing your query..."):
        try:
            # Compile the LangGraph workflow once per session and reuse it,
            # instead of recompiling on every submit (the original rebuilt
            # the app for each query, which is pure overhead).
            if "compiled_app" not in st.session_state:
                st.session_state.compiled_app = graph.compile()
            app = st.session_state.compiled_app

            # Run the agent end-to-end on the user's query.
            result_state = app.invoke({"user_query": query})

            # Record the outcome; "error" is None on success and rendered
            # specially by the results section below.
            st.session_state.history.append({
                "query": query,
                "response": result_state.get("response", "No response generated"),
                "error": result_state.get("error"),
            })
        except Exception as e:
            # Surface agent/runtime failures to the user without crashing the app.
            st.error(f"Error processing query: {str(e)}")
# Display results
import re

# Regexes for pulling key metrics out of a structured timeseries response,
# compiled once instead of per history item (the original re-imported `re`
# and re-built the patterns inside the render loop).
_START_RE = re.compile(r'from ([\d.]+) \(([\d-]+)\)')
_PEAK_RE = re.compile(r'peak of ([\d.]+) \(([\d-]+)\)')
_END_RE = re.compile(r'is ([\d.]+) in ([\d-]+)')
_CHANGE_RE = re.compile(r'≈ ([-\d.]+)%')


def _render_timeseries(analysis, response):
    """Render metric cards for a structured timeseries analysis string.

    Falls back to showing *response* in a plain success box when the
    expected start/end metrics cannot be extracted from *analysis*.
    """
    start_match = _START_RE.search(analysis)
    peak_match = _PEAK_RE.search(analysis)
    end_match = _END_RE.search(analysis)
    change_match = _CHANGE_RE.search(analysis)

    if start_match and end_match:
        col1, col2, col3 = st.columns(3)
        with col1:
            st.metric(
                label=f"Start ({start_match.group(2)})",
                value=start_match.group(1)
            )
        with col2:
            # Peak is optional — only shown when the sentence mentions one.
            if peak_match:
                st.metric(
                    label=f"Peak ({peak_match.group(2)})",
                    value=peak_match.group(1)
                )
        with col3:
            change_val = change_match.group(1) if change_match else "N/A"
            st.metric(
                label=f"Current ({end_match.group(2)})",
                value=end_match.group(1),
                delta=f"{change_val}% overall change"
            )
        # Comma-separated data sources, when present, go in a collapsed expander.
        if "Data sources:" in analysis:
            sources = analysis.split("Data sources:")[-1].strip()
            with st.expander("View Data Sources"):
                for src in (s.strip() for s in sources.split(",")):
                    st.caption(f"• {src}")
    else:
        # Parsing failed — show the raw response as-is.
        st.success(response)


if st.session_state.history:
    st.header("Results")
    # Show most recent first
    for idx, item in enumerate(reversed(st.session_state.history)):
        with st.container():
            st.subheader(f"Query {len(st.session_state.history) - idx}")
            st.markdown("**Your Query:**")
            st.info(item["query"])
            st.markdown("**Analysis Result:**")
            if item.get("error"):
                st.error(item["error"])
            else:
                response = item["response"]
                # Structured timeseries responses look like
                # "<location> — <analysis sentence>".
                if "—" in response and "changed from" in response.lower():
                    parts = response.split("—", 1)
                    location = parts[0].strip()
                    analysis = parts[1].strip() if len(parts) > 1 else response
                    st.markdown(f"**Location:** {location}")
                    _render_timeseries(analysis, response)
                else:
                    # Spatial or other analysis types: plain text result.
                    st.success(response)
            st.divider()
else:
    # Show placeholder when no queries yet
    st.info("Enter a query above or select an example from the sidebar to get started")

# Footer
st.markdown("---")
st.caption("Powered by LangGraph, Gemini, and CoreStack API")