Compare commits

...

10 Commits

Author         SHA1        Message                                              Date
nasim          4c064ec50c  feat: frontend ChatUI                                2025-04-03 10:38:48 +02:00
nasim          f96f4af069  feat: ms-bot-framework with dynamic adaptive cards   2025-04-03 10:28:32 +02:00
nasim          3aa78be3d6  feat: house price prediction model integration       2025-04-03 10:14:54 +02:00
jcbwndsr       72375db5cd  Update README.md                                     2025-02-20 14:20:34 +01:00
jcbwndsr       5f48aba9cd  Update Makefile to only install dev requirements     2025-02-20 11:33:08 +01:00
jcbwndsr       ca07b161c7  Update README to be specific about vscode            2025-02-20 11:31:51 +01:00
Jacob Windsor  9880b00fad  Add .env                                             2025-02-19 18:34:08 +01:00
jcbwndsr       eb20b68f03  Merge pull request #1 from Axiomatic-AI/try-excercise (Add some fixes after trying the excercise)  2025-02-19 18:27:03 +01:00
Jacob Windsor  041168ad02  Fix import                                           2025-02-19 17:52:17 +01:00
Jacob Windsor  6b458654a7  Add sorting                                          2025-02-19 17:52:11 +01:00
53 changed files with 9629 additions and 44 deletions

1
.gitignore vendored

@ -128,7 +128,6 @@ celerybeat.pid
*.sage.py
# Environments
.env
.venv
env/
venv/


@ -10,7 +10,7 @@ install:
$(VENV_DIR)/bin/pip install -r $(REQ_FILE)
install-dev:
$(VENV_DIR)/bin/pip install -r $(REQ_FILE) -r requirements-dev.txt
$(VENV_DIR)/bin/pip install -r requirements-dev.txt
start:
docker compose down -v db


@ -4,7 +4,7 @@ Welcome to this pair programming exercise for Axiomatic AI!
This repository is a very minimal application that we have put together to test new candidates. The repository resembles how our codebase is structured but in a very much "cut down" fashion. As you will see, the application takes pieces from "clean architecture" standards and slims it down. It is by no means perfect (and neither is our real code) - if you see areas to improve, please mention them in the interview!
During the pair programming exercise, you will be given some features, and be expected to implement them e2e. We are there to guide and help you! We do not expect you to achieve this goal, answering any questions you may need, or even helping with syntax.
During the pair programming exercise, you will be given some features and be expected to implement them e2e. We are there to guide and help you - answering any questions you may have, or even helping with syntax.
Please make sure to follow the `Dev Setup` section before the day of the interview so that we can help with any problems you may have.
@ -39,7 +39,7 @@ Go to `http://localhost:8080/docs` to see the Swagger UI
There is a Postgres database that powers the application. For simplicity, it is cleaned and re-seeded when you run `make start`. This means you do not have to think about DB migrations. You can run `make clean-db` if you need to clean it manually.
## IDE Setup
## VSCode Setup
If you open the project directly within vscode, the debugger has been set up for you.

6
backend/.env Normal file

@ -0,0 +1,6 @@
ENVIRONMENT=development
PG_USER=developer
PG_PASSWORD=password
PG_DB_NAME=dev
PG_HOST=localhost
PG_PORT=5432


@ -0,0 +1,33 @@
from sklearn.datasets import fetch_california_housing
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
import joblib
import pandas as pd
# Load dataset
data = fetch_california_housing()
df = pd.DataFrame(data.data, columns=data.feature_names)
df['target'] = data.target # in 100k USD
# Engineer features
df['square_feet'] = df['AveRooms'] * 350
df['bedrooms'] = df['AveBedrms']
df['bathrooms'] = df['AveRooms'] * 0.2
# Clean bathrooms
df['bathrooms'] = df['bathrooms'].clip(lower=1)
X = df[['square_feet', 'bedrooms', 'bathrooms']]
y = df['target']
# Train/test split
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
# Train model
model = LinearRegression()
model.fit(X_train, y_train)
# Needs to be tested, of course :)
# Save model
joblib.dump(model, 'price_predictor.pkl')
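The comment above notes the model still needs testing. A minimal hold-out evaluation might look like the sketch below (not part of the commits; it assumes the same feature engineering, the same `random_state=42` split, and `price_predictor.pkl` in the working directory):

```python
# Sanity-check sketch for the trained model (assumptions: same features,
# same random_state split, and price_predictor.pkl in the working directory).
import joblib
import pandas as pd
from sklearn.datasets import fetch_california_housing
from sklearn.metrics import mean_absolute_error, r2_score
from sklearn.model_selection import train_test_split

data = fetch_california_housing()
df = pd.DataFrame(data.data, columns=data.feature_names)
df["target"] = data.target  # in 100k USD

# Recreate the engineered features used at training time
df["square_feet"] = df["AveRooms"] * 350
df["bedrooms"] = df["AveBedrms"]
df["bathrooms"] = (df["AveRooms"] * 0.2).clip(lower=1)

X = df[["square_feet", "bedrooms", "bathrooms"]]
y = df["target"]
_, X_test, _, y_test = train_test_split(X, y, random_state=42)

model = joblib.load("price_predictor.pkl")
preds = model.predict(X_test)
print(f"R^2: {r2_score(y_test, preds):.3f}")
print(f"MAE: {mean_absolute_error(y_test, preds):.3f} (in 100k USD)")
```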

Binary file not shown.


@ -0,0 +1,47 @@
from typing import Dict, Any
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.chains import LLMChain
import asyncio
class AdaptiveCards:
def __init__(self):
self.llm = ChatOpenAI(temperature=0)
self.prompt = ChatPromptTemplate.from_template("""
You are a Microsoft Adaptive Card generator. Given a data schema and known values,
generate an Adaptive Card (v1.3) that asks the user only for missing fields.
Use this schema: https://adaptivecards.io/schemas/adaptive-card.json
Respond only with valid Adaptive Card JSON. Do not include explanations.
Always include isRequired, and errorMessage in the schema.
Always include a submit button at the bottom of the card as defined in the schema.
### Schema:
{schema}
### Known values:
{known_values}
""")
self.chain = LLMChain(llm=self.llm, prompt=self.prompt)
async def generate_card(self, schema: Dict[str, Any], known_values: Dict[str, Any]) -> Dict[str, Any]:
loop = asyncio.get_running_loop()
return await loop.run_in_executor(None, self.chain.run, {
"schema": schema,
"known_values": known_values
})
def create_welcome_card(self):
"""Create a welcome card"""
return {
"type": "AdaptiveCard",
"body": [
{
"type": "TextBlock",
"text": "Welcome to the Housing Bot!",
"size": "large"
}
],
"version": "1.0"
}
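A hypothetical local check of the card generator (not part of the commits): it feeds the `HouseFeatures` schema plus one known value into `generate_card` and prints the result. It assumes `OPENAI_API_KEY` is exported; since the chain returns the raw LLM output as a string, it is parsed the same way `dayta.py` does.

```python
# Hypothetical smoke test for AdaptiveCards.generate_card (not in the commits).
# Assumes OPENAI_API_KEY is set; the chain returns a JSON string.
import asyncio
import json

from backend.app.bots.adaptive_cards import AdaptiveCards
from backend.app.dtos.house.house_features import HouseFeatures


async def main() -> None:
    cards = AdaptiveCards()
    card = await cards.generate_card(
        schema=HouseFeatures.model_json_schema(),
        known_values={"bedrooms": 3},  # example known value
    )
    if isinstance(card, str):
        card = json.loads(card)
    print(json.dumps(card, indent=2))


asyncio.run(main())
```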

115
backend/app/bots/dayta.py Normal file

@ -0,0 +1,115 @@
import json
from typing import Annotated
from fastapi import Depends
from langchain.chat_models import ChatOpenAI
from botbuilder.core import ActivityHandler, TurnContext
from botbuilder.schema import Activity, Attachment, ActivityTypes
import asyncio
from pydantic import ValidationError
from backend.app.bots.adaptive_cards import AdaptiveCards
from backend.app.bots.intent_detector import IntentDetector
from backend.app.bots.slot_filler import SlotFiller
from backend.app.dtos.house.house_features import HouseFeatures
from backend.app.services.house_price_predictor import HousePricePredictor
class Dayta(ActivityHandler):
def __init__(
self,
intent_detector: Annotated[IntentDetector, Depends()],
card_bot: Annotated[AdaptiveCards, Depends()],
slot_filler: Annotated[SlotFiller, Depends()],
price_predictor: Annotated[HousePricePredictor, Depends()],):
self.intent_detector = intent_detector
self.card_bot = card_bot
self.slot_filler = slot_filler
self.price_predictor = price_predictor
self.chat_llm = ChatOpenAI(temperature=0.7)
self.user_sessions = {}
async def on_message_activity(self, turn_context: TurnContext):
user_message = turn_context.activity.text
user_id = turn_context.activity.from_property.id
submitted_values = turn_context.activity.value
known_values = self.user_sessions.get(user_id, {})
schema = HouseFeatures.model_json_schema()
#required_fields = list(HouseFeatures.model_fields.keys())
required_fields = [
name for name, field in HouseFeatures.model_fields.items()
if field.is_required()
]
print(f"required_fields: {required_fields}")
# Update known values
if submitted_values is not None:
known_values.update(submitted_values)
else:
extracted = await self.slot_filler.extract_slots(schema, user_message)
known_values.update(extracted)
self.user_sessions[user_id] = known_values
# Detect intent only if message-based
if not submitted_values:
intent = await self.intent_detector.detect_intent(user_message)
if intent.strip().lower() in ("unknown", ""):
response = await asyncio.get_event_loop().run_in_executor(
None,
lambda: self.chat_llm.predict(f"The user said: '{user_message}'. Respond helpfully.")
)
await turn_context.send_activity(response)
return
# Delegate to common logic
await self._handle_collected_data(turn_context, user_id, known_values, required_fields, schema)
async def _handle_collected_data(
self,
turn_context: TurnContext,
user_id: str,
known_values: dict,
required_fields: list[str],
full_schema: dict
):
missing_fields = [f for f in required_fields if f not in known_values]
print(f"Missing fields: {missing_fields}")
if not missing_fields:
try:
features = HouseFeatures(**known_values)
price = self.price_predictor.predict(features)
await turn_context.send_activity(f"The estimated price of the house is ${price:.2f}")
del self.user_sessions[user_id]
return
except ValidationError as e:
await turn_context.send_activity(f"Validation failed: {e}")
return
# Generate adaptive card for missing fields
filtered_schema = {
**full_schema,
"properties": {
k: v for k, v in full_schema["properties"].items() if k in missing_fields
},
"required": missing_fields
}
card_json = await self.card_bot.generate_card(filtered_schema, known_values)
if isinstance(card_json, str):
card_json = json.loads(card_json)
print(f"card_json: {card_json}")
await turn_context.send_activity(
Activity(
type=ActivityTypes.message,
attachments=[
Attachment(
content_type="application/vnd.microsoft.card.adaptive",
content=card_json
)
]
)
)


@ -0,0 +1,23 @@
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.chains import LLMChain
import asyncio
class IntentDetector:
def __init__(self, temperature: float = 0.0):
self.llm = ChatOpenAI(temperature=temperature)
self.prompt = ChatPromptTemplate.from_template("""
You are an intent detection bot. Classify the user input into one of the following intents:
- Information about house prices
- unknown
If you're unsure, respond with `unknown`.
User: {message}
Intent:""")
self.chain = LLMChain(llm=self.llm, prompt=self.prompt)
async def detect_intent(self, message: str) -> str:
loop = asyncio.get_running_loop()
return await loop.run_in_executor(None, self.chain.run, {"message": message})


@ -0,0 +1,31 @@
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.chains import LLMChain
from typing import Dict, Any
import asyncio
class SlotFiller:
def __init__(self):
self.llm = ChatOpenAI(temperature=0)
self.prompt = ChatPromptTemplate.from_template("""
You are a helpful assistant. Given a message and a schema, extract all known values.
Only return a JSON object containing the extracted values and no extra text.
Schema: {schema}
Message: {message}
""")
self.chain = LLMChain(llm=self.llm, prompt=self.prompt)
async def extract_slots(self, schema: Dict[str, Any], message: str) -> Dict[str, Any]:
loop = asyncio.get_event_loop()
result = await loop.run_in_executor(None, self.chain.run, {
"schema": schema,
"message": message
})
import json
try:
return json.loads(result)
except Exception:
return {}


@ -0,0 +1,8 @@
from pydantic import BaseModel, Field
from typing import Optional
class HouseFeatures(BaseModel):
square_feet: float = Field(..., description="Total square feet of the house")
bedrooms: int = Field(..., description="Number of bedrooms")
bathrooms: float = Field(..., description="Number of bathrooms")
number_of_floors: Optional[int] = Field(default=None, description="Number of floors")


@ -0,0 +1,7 @@
from pydantic import BaseModel
class HousePricePredictionRequest(BaseModel):
square_feet: float
bedrooms: int
bathrooms: float


@ -0,0 +1,5 @@
from pydantic import BaseModel
class HousePricePredictionResponse(BaseModel):
predicted_price: float


@ -7,8 +7,4 @@ class HouseResponse(BaseModel):
address: str
city: str
country: str
price: float
class HousesListResponse(BaseModel):
houses: list[HouseResponse]
price: float


@ -0,0 +1,6 @@
from pydantic import BaseModel
from backend.app.dtos.house.house_response import HouseResponse
class HousesListResponse(BaseModel):
houses: list[HouseResponse]


@ -0,0 +1,6 @@
from pydantic import BaseModel
from backend.app.dtos.user.user_response import UserResponse
class UserListResponse(BaseModel):
users: list[UserResponse]


@ -0,0 +1,6 @@
from pydantic import BaseModel
class UserResponse(BaseModel):
id: str
email: str


@ -0,0 +1,33 @@
from typing import Dict
from backend.app.bots.dayta import Dayta
from backend.app.bots.intent_detector import IntentDetector
from backend.app.bots.slot_filler import SlotFiller
from backend.app.bots.adaptive_cards import AdaptiveCards
from backend.app.services.house_price_predictor import HousePricePredictor
from botbuilder.core import BotFrameworkAdapter, BotFrameworkAdapterSettings
class BotFactory:
def __init__(self):
self._bots: Dict[str, object] = {}
self.adapter_settings = BotFrameworkAdapterSettings(app_id="", app_password="")
self.adapter = BotFrameworkAdapter(self.adapter_settings)
# Shared services
self.intent_detector = IntentDetector()
self.slot_filler = SlotFiller()
self.card_bot = AdaptiveCards()
self.price_predictor = HousePricePredictor()
# Register all bots
self._bots["dayta"] = Dayta(
intent_detector=self.intent_detector,
card_bot=self.card_bot,
slot_filler=self.slot_filler,
price_predictor=self.price_predictor
)
def get_bot(self, name: str):
return self._bots.get(name)
def get_adapter(self):
return self.adapter


@ -7,14 +7,13 @@ from .middleware.authenticate import authenticate
from .providers.db_provider import create_db_and_tables
from .routers.houses import router as houses_router
from .routers.owners import router as owners_router
from .routers.direct_line import router as direct_line_router
from .routers.bot import router as bot_router
@asynccontextmanager
async def lifespan(_app: FastAPI):
create_db_and_tables()
create_db_and_tables()
yield
app = FastAPI(
title="Fair Housing API",
description="Provides access to core functionality for the fair housing platform.",
@ -33,3 +32,5 @@ app.add_middleware(
app.include_router(houses_router, prefix="/houses", tags=["houses"])
app.include_router(owners_router, prefix="/owners", tags=["owners"])
app.include_router(bot_router, tags=["bot"])
app.include_router(direct_line_router, tags=["directline"])


@ -1,6 +1,11 @@
from typing import Optional, TYPE_CHECKING
from uuid import UUID, uuid4
from sqlmodel import Field, SQLModel
from sqlmodel import Field, Relationship, SQLModel
if TYPE_CHECKING:
from backend.app.models.owner import Owner
class House(SQLModel, table=True):
@ -14,3 +19,4 @@ class House(SQLModel, table=True):
square_feet: float = Field()
bedrooms: int = Field()
bathrooms: float = Field()
owner: Optional["Owner"] = Relationship(back_populates="houses")


@ -1,8 +1,17 @@
from typing import Optional, TYPE_CHECKING
from uuid import UUID, uuid4
from sqlmodel import Field, SQLModel
from sqlmodel import Field, Relationship, SQLModel
if TYPE_CHECKING:
from backend.app.models.house import House
from backend.app.models.user import User
class Owner(SQLModel, table=True):
id: UUID = Field(default_factory=uuid4, primary_key=True)
user_id: UUID = Field(foreign_key="user.id", unique=True)
# Relationship
houses: list["House"] = Relationship(back_populates="owner")
user: Optional["User"] = Relationship(back_populates="owner")


@ -1,9 +1,16 @@
from typing import Optional, TYPE_CHECKING
from uuid import UUID, uuid4
from sqlmodel import Field, SQLModel
from sqlmodel import Field, Relationship, SQLModel
if TYPE_CHECKING:
from backend.app.models.owner import Owner
class User(SQLModel, table=True):
id: UUID = Field(default_factory=lambda: uuid4(), primary_key=True)
email: str = Field(unique=True, nullable=False)
password_hash: str = Field(nullable=False)
# Relationships
owner: Optional["Owner"] = Relationship(back_populates="user")


@ -1,20 +1,30 @@
from typing import Annotated
from typing import Annotated, Literal
from uuid import UUID
from fastapi import Depends
from sqlalchemy.ext.asyncio.session import AsyncSession
from sqlmodel import select
from sqlmodel import asc, desc, select
from ..models.house import House
from ..providers.db_provider import get_session
class HouseRepository:
def __init__(self, session: Annotated[AsyncSession, Depends(get_session)]) -> None:
self.session = session
async def get_all(self, limit: int = 100, offset: int = 0) -> list[House]:
statement = select(House).offset(offset).limit(limit)
async def get_all(
self,
limit: int = 100,
offset: int = 0,
order_by: Literal["PRICE"] = "PRICE",
sort_order: Literal["ASC", "DESC"] = "DESC",
) -> list[House]:
sorter = desc if sort_order == "DESC" else asc
statement = (
select(House).offset(offset).limit(limit).order_by(sorter(House.price))
)
result = await self.session.execute(statement)
return result.scalars().all()
@ -23,6 +33,11 @@ class HouseRepository:
result = await self.session.execute(statement)
return result.scalar_one_or_none()
async def get_by_user_id(self, user_id: UUID):
statement = select(House).where(House.owner_user_id == user_id)
result = await self.session.execute(statement)
return result.scalars().all()
async def save(self, house: House) -> None:
"""
Save a house to the database. If a house with that ID already exists, do an upsert.


@ -5,6 +5,9 @@ from fastapi import Depends
from sqlalchemy.ext.asyncio.session import AsyncSession
from sqlmodel import select
from backend.app.models.house import House
from backend.app.models.user import User
from ..models.owner import Owner
from ..providers.db_provider import get_session
@ -27,7 +30,26 @@ class OwnerRepository:
statement = select(Owner).where(Owner.user_id == user_id)
result = await self.session.execute(statement)
return result.scalar_one_or_none()
async def get_details_by_house_id(self, house_id: UUID):
statement = (
select(Owner, User)
.join(User, Owner.user_id == User.id)
.join(House, House.owner_user_id == Owner.user_id)
.where(House.id == house_id)
)
result = await self.session.execute(statement)
row = result.first()
if row:
owner, user = row
return {
"owner": owner,
"user": user
}
return None
async def save(self, owner: Owner) -> None:
"""
Save a owner to the database. If an owner with that ID already exists, do an upsert.


@ -0,0 +1,23 @@
from fastapi import APIRouter, Request, Depends
from botbuilder.schema import Activity
from botbuilder.core import TurnContext
from backend.app.factories.bot_factory import BotFactory
router = APIRouter()
@router.post("/api/messages", response_model=None)
async def messages(
req: Request,
bot = Depends(lambda: BotFactory().get_bot("dayta")),
adapter = Depends(lambda: BotFactory().get_adapter() )
):
body = await req.json()
activity = Activity().deserialize(body)
async def call_bot_logic(turn_context: TurnContext):
await bot.on_turn(turn_context)
auth_header = req.headers.get("Authorization", "")
await adapter.process_activity(activity, auth_header, call_bot_logic)
return {}


@ -0,0 +1,86 @@
from fastapi import APIRouter, HTTPException
from typing import Dict, Any
from uuid import uuid4
from botbuilder.core import TurnContext
from botbuilder.schema import Activity, ActivityTypes
from backend.app.factories.bot_factory import BotFactory
router = APIRouter(prefix="/v3/directline")
# In-memory conversation store
conversations: Dict[str, Dict[str, Any]] = {}
# Each conversation will look like:
# { "activities": [ { id, type, text, from } ], "watermark": int }
@router.post("/conversations")
async def start_conversation():
conversation_id = str(uuid4())
conversations[conversation_id] = {
"activities": [],
"watermark": 0
}
return {
"conversationId": conversation_id,
"token": "mock-token", # Optional for dev use
"streamUrl": f"/v3/directline/conversations/{conversation_id}/stream"
}
@router.get("/conversations/{conversation_id}/activities")
async def get_activities(conversation_id: str, watermark: int = 0):
if conversation_id not in conversations:
raise HTTPException(status_code=404, detail="Conversation not found")
activities = conversations[conversation_id]["activities"]
return {
"activities": activities[watermark:],
"watermark": len(activities)
}
@router.post("/conversations/{conversation_id}/activities")
async def post_activity(conversation_id: str, activity: Dict[str, Any]):
if conversation_id not in conversations:
raise HTTPException(status_code=404, detail="Conversation not found")
# Starting with deserializing the activity
act = Activity().deserialize(activity)
# Store my responses in this list please
bot_responses = []
#Patch TurnContext.send_activity to capture output
async def call_bot_logic(turn_context: TurnContext):
async def capture_response(response):
# If it's a string, wrap it into an Activity
if isinstance(response, str):
bot_activity = Activity(
type=ActivityTypes.message,
text=response,
from_property={"id": "bot"}
)
else:
bot_activity = response
bot_responses.append(bot_activity)
turn_context.send_activity = capture_response
await bot.on_turn(turn_context)
# 4. Call the adapter with the activity
adapter = BotFactory().get_adapter()
bot = BotFactory().get_bot("dayta")
auth_header = ""
await adapter.process_activity(act, auth_header, call_bot_logic)
# 5. Store bot responses into conversation memory
for act in bot_responses:
conversations[conversation_id]["activities"].append({
"id": str(uuid4()),
"type": act.type,
"text": act.text,
"from": {"id": "bot"},
"attachments": [a.serialize() for a in (act.attachments or [])]
})
return { "id": str(uuid4()) }
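A small client sketch (not part of the commits) showing the polling flow this mock Direct Line router expects: start a conversation, post a user activity, then poll `activities` with the returned watermark. It assumes the backend runs locally on port 8080 as in the README.

```python
# Direct Line polling sketch (assumption: API served at localhost:8080).
import time
import requests

BASE = "http://localhost:8080/v3/directline"

# 1. Start a conversation
conversation_id = requests.post(f"{BASE}/conversations").json()["conversationId"]

# 2. Post a user message
requests.post(
    f"{BASE}/conversations/{conversation_id}/activities",
    json={"type": "message", "from": {"id": "user"}, "text": "I want a house price estimate"},
)

# 3. Poll for bot activities, advancing the watermark each time
watermark = 0
for _ in range(5):
    time.sleep(1.5)
    data = requests.get(
        f"{BASE}/conversations/{conversation_id}/activities",
        params={"watermark": watermark},
    ).json()
    for activity in data["activities"]:
        print(activity.get("text") or activity.get("attachments"))
    watermark = data["watermark"]
```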


@ -1,16 +1,20 @@
from typing import Annotated
from typing import Annotated, Literal
from fastapi import APIRouter, Depends
from ..dtos.house_create_request import HouseCreateRequest
from ..dtos.house_create_response import HouseCreateResponse
from ..dtos.houses_list_response import HouseResponse, HousesListResponse
from backend.app.dtos.house.house_create_request import HouseCreateRequest
from backend.app.dtos.house.house_create_response import HouseCreateResponse
from backend.app.dtos.house.house_features import HouseFeatures
from backend.app.dtos.house.house_predict_request import HousePricePredictionRequest
from backend.app.dtos.house.house_predict_response import HousePricePredictionResponse
from backend.app.dtos.house.house_response import HouseResponse
from backend.app.dtos.house.houses_list_response import HousesListResponse
from ..models.house import House
from ..models.owner import Owner
from ..providers.auth_provider import AuthContext
from ..repositories.house_repository import HouseRepository
from ..repositories.owner_repository import OwnerRepository
from ..services.house_price_predictor import HousePricePredictor
router = APIRouter()
@ -48,10 +52,14 @@ async def create_house(
@router.get("")
async def get_all_houses(
house_repository: Annotated[HouseRepository, Depends()],
order_by: Literal["PRICE"] = "PRICE",
sort_order: Literal["ASC", "DESC"] = "DESC",
limit: int = 100,
offset: int = 0,
) -> HousesListResponse:
all_houses = await house_repository.get_all(offset=offset, limit=limit)
all_houses = await house_repository.get_all(
offset=offset, limit=limit, order_by=order_by, sort_order=sort_order
)
house_responses = [
HouseResponse(
@ -64,6 +72,23 @@ async def get_all_houses(
)
for house in all_houses
]
print(house_responses)
return HousesListResponse(houses=house_responses)
@router.post("/predict-price")
async def predict_house_price(
body: HouseFeatures,
price_predictor: Annotated[HousePricePredictor, Depends()],
) -> HousePricePredictionResponse:
"""
Predict the price of a house based on its features.
"""
predicted_price = price_predictor.predict(body)
return HousePricePredictionResponse(predicted_price=predicted_price)
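An example request against the new endpoint (a sketch, not part of the commits; it assumes the API is served on localhost:8080 as in the README and that no extra auth headers are required):

```python
# Example call to POST /houses/predict-price (assumes localhost:8080, no auth).
import requests

resp = requests.post(
    "http://localhost:8080/houses/predict-price",
    json={"square_feet": 1800, "bedrooms": 3, "bathrooms": 2.0},
)
print(resp.json())  # e.g. {"predicted_price": ...}
```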


@ -1,15 +1,14 @@
from typing import Annotated
from fastapi import APIRouter, Depends
from fastapi import APIRouter, Depends, HTTPException
from ..dtos.owner_detail_response import OwnerDetailResponse
from ..dtos.owner_list_response import OwnerListResponse, OwnerResponse
from ..dtos.owner.owner_detail_response import OwnerDetailResponse
from ..dtos.owner.owner_list_response import OwnerListResponse, OwnerResponse
from ..repositories.owner_repository import OwnerRepository
from ..repositories.user_repository import UserRepository
router = APIRouter()
@router.get("")
async def get_owners(
owner_repository: Annotated[OwnerRepository, Depends()],
@ -22,7 +21,6 @@ async def get_owners(
return OwnerListResponse(owners=owners_response)
@router.get("/{id}")
async def get_owner(
id: str,
@ -35,3 +33,22 @@ async def get_owner(
return OwnerDetailResponse(
id=str(owner.id), user_id=str(owner.user_id), email=user.email
)
@router.get("/byhouse/{house_id}")
async def get_owner_by_house_id(
house_id: str,
owner_repository: Annotated[OwnerRepository, Depends()],
) -> OwnerDetailResponse:
result = await owner_repository.get_details_by_house_id(house_id)
if result is None:
raise HTTPException(status_code=404, detail="House or owner not found")
owner = result["owner"]
user = result["user"]
return OwnerDetailResponse(
id=str(owner.id),
user_id=str(owner.user_id),
email=str(user.email)
)


@ -1,16 +1,17 @@
import os
import joblib
import numpy as np
from backend.app.dtos.house.house_features import HouseFeatures
class HousePricePredictor:
"""
Mock ML model that predicts house prices.
In a real scenario, this would load a trained model.
"""
async def predict(
self, square_feet: float, bedrooms: int, bathrooms: float
) -> float:
base_price = square_feet * 200
bedroom_value = bedrooms * 25000
bathroom_value = bathrooms * 15000
predicted_price = base_price + bedroom_value + bathroom_value
return predicted_price
def __init__(self):
self.model = joblib.load("backend/app/ai_models/price_predictor.pkl")
def predict(self, features: HouseFeatures) -> float:
X = np.array([[features.square_feet, features.bedrooms, features.bathrooms]])
return self.model.predict(X)[0] * 100000
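For a quick local check (not part of the commits), the service can be exercised directly; the pickled regressor predicts in units of 100k USD, which is why `predict()` scales by 100000. This sketch assumes it is run from the repository root so the relative model path resolves.

```python
# Local usage sketch for HousePricePredictor (run from the repository root).
from backend.app.dtos.house.house_features import HouseFeatures
from backend.app.services.house_price_predictor import HousePricePredictor

predictor = HousePricePredictor()  # loads backend/app/ai_models/price_predictor.pkl
features = HouseFeatures(square_feet=1800, bedrooms=3, bathrooms=2.0)
print(f"Estimated price: ${predictor.predict(features):,.2f}")
```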


@ -1,5 +1,7 @@
import random
from ..models import User
class InvestorPredictor:
def is_investor(user: User) -> bool:


@ -8,3 +8,5 @@ python-dotenv
pg8000
asyncpg
greenlet
botbuilder-core
openai

24
frontend/.gitignore vendored Normal file

@ -0,0 +1,24 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist-ssr
*.local
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

54
frontend/README.md Normal file

@ -0,0 +1,54 @@
# React + TypeScript + Vite
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
Currently, two official plugins are available:
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react/README.md) uses [Babel](https://babeljs.io/) for Fast Refresh
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
## Expanding the ESLint configuration
If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules:
```js
export default tseslint.config({
extends: [
// Remove ...tseslint.configs.recommended and replace with this
...tseslint.configs.recommendedTypeChecked,
// Alternatively, use this for stricter rules
...tseslint.configs.strictTypeChecked,
// Optionally, add this for stylistic rules
...tseslint.configs.stylisticTypeChecked,
],
languageOptions: {
// other options...
parserOptions: {
project: ['./tsconfig.node.json', './tsconfig.app.json'],
tsconfigRootDir: import.meta.dirname,
},
},
})
```
You can also install [eslint-plugin-react-x](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-x) and [eslint-plugin-react-dom](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-dom) for React-specific lint rules:
```js
// eslint.config.js
import reactX from 'eslint-plugin-react-x'
import reactDom from 'eslint-plugin-react-dom'
export default tseslint.config({
plugins: {
// Add the react-x and react-dom plugins
'react-x': reactX,
'react-dom': reactDom,
},
rules: {
// other rules...
// Enable its recommended typescript rules
...reactX.configs['recommended-typescript'].rules,
...reactDom.configs.recommended.rules,
},
})
```

28
frontend/eslint.config.js Normal file

@ -0,0 +1,28 @@
import js from '@eslint/js'
import globals from 'globals'
import reactHooks from 'eslint-plugin-react-hooks'
import reactRefresh from 'eslint-plugin-react-refresh'
import tseslint from 'typescript-eslint'
export default tseslint.config(
{ ignores: ['dist'] },
{
extends: [js.configs.recommended, ...tseslint.configs.recommended],
files: ['**/*.{ts,tsx}'],
languageOptions: {
ecmaVersion: 2020,
globals: globals.browser,
},
plugins: {
'react-hooks': reactHooks,
'react-refresh': reactRefresh,
},
rules: {
...reactHooks.configs.recommended.rules,
'react-refresh/only-export-components': [
'warn',
{ allowConstantExport: true },
],
},
},
)

13
frontend/index.html Normal file

@ -0,0 +1,13 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Dayta is reality now</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

8628
frontend/package-lock.json generated Normal file

File diff suppressed because it is too large

32
frontend/package.json Normal file

@ -0,0 +1,32 @@
{
"name": "dayta",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc -b && vite build",
"lint": "eslint .",
"preview": "vite preview"
},
"dependencies": {
"adaptivecards": "^3.0.5",
"axios": "^1.8.4",
"botframework-webchat": "^4.18.0",
"react": "^19.0.0",
"react-dom": "^19.0.0"
},
"devDependencies": {
"@eslint/js": "^9.21.0",
"@types/react": "^19.0.10",
"@types/react-dom": "^19.0.4",
"@vitejs/plugin-react": "^4.3.4",
"eslint": "^9.21.0",
"eslint-plugin-react-hooks": "^5.1.0",
"eslint-plugin-react-refresh": "^0.4.19",
"globals": "^15.15.0",
"typescript": "~5.7.2",
"typescript-eslint": "^8.24.1",
"vite": "^6.2.0"
}
}

1
frontend/public/vite.svg Normal file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>


0
frontend/src/App.css Normal file

13
frontend/src/App.tsx Normal file

@ -0,0 +1,13 @@
import './App.css'
import ChatUI from './components/ChatUI'
function App() {
return (
<ChatUI />
)
}
export default App


@ -0,0 +1,179 @@
import React, { useEffect, useRef, useState } from 'react';
import * as AdaptiveCards from 'adaptivecards';
import axios from 'axios';
interface Message {
from: string;
text?: string;
card?: any;
}
const DIRECT_LINE_BASE = 'https://ax.thedigitalbridge.io/v3/directline';
const Chat: React.FC = () => {
const [conversationId, setConversationId] = useState<string | null>(null);
const [watermark, setWatermark] = useState<number>(0);
const [messages, setMessages] = useState<Message[]>([]);
const inputRef = useRef<HTMLInputElement>(null);
const chatRef = useRef<HTMLDivElement>(null);
// Start a new conversation
useEffect(() => {
const startConversation = async () => {
try {
const res = await axios.post(`${DIRECT_LINE_BASE}/conversations`);
setConversationId(res.data.conversationId);
} catch (err) {
console.error('Failed to start conversation:', err);
}
};
startConversation();
}, []);
// Poll bot messages
useEffect(() => {
if (!conversationId) return;
const interval = setInterval(async () => {
try {
const res = await axios.get(`${DIRECT_LINE_BASE}/conversations/${conversationId}/activities`, {
params: { watermark }
});
const newActivities = res.data.activities || [];
if (newActivities.length) {
const newMsgs = newActivities.map((a: any) => ({
from: a.from.id,
text: a.text,
card: a.attachments?.[0]?.content
}));
setMessages((prev) => [...prev, ...newMsgs]);
setWatermark(res.data.watermark);
}
} catch (err) {
console.error('Failed to fetch activities:', err);
}
}, 1500);
return () => clearInterval(interval);
}, [conversationId, watermark]);
// Send message to bot
const sendMessage = async (message: string | Record<string, any>) => {
if (!conversationId) return;
try {
const payload =
typeof message === 'string'
? { type: 'message', from: { id: 'user' }, text: message }
: { type: 'message', from: { id: 'user' }, value: message };
await axios.post(`${DIRECT_LINE_BASE}/conversations/${conversationId}/activities`, payload);
// Show submitted content in chat
if (typeof message === 'string') {
setMessages((prev) => [...prev, { from: 'user', text: message }]);
if (inputRef.current) inputRef.current.value = '';
}
} catch (error) {
console.error('Failed to send message:', error);
setMessages((prev) => [
...prev,
{
from: 'bot',
text: 'Something went wrong sending your message.',
},
]);
}
};
// Render Adaptive Card
const renderCard = (card: any) => {
const container = document.createElement('div');
const adaptiveCard = new AdaptiveCards.AdaptiveCard();
adaptiveCard.onExecuteAction = async (action: AdaptiveCards.Action) => {
if (action instanceof AdaptiveCards.SubmitAction) {
const inputs = adaptiveCard.getAllInputs();
const formData: Record<string, any> = {};
inputs.forEach((input) => {
if ('id' in input && input.id) {
formData[input.id] = input.value;
}
});
// Send the form data as object
await sendMessage(JSON.stringify(formData));
}
};
adaptiveCard.parse(card);
const rendered = adaptiveCard.render();
if (rendered) {
container.appendChild(rendered);
}
return container;
};
useEffect(() => {
if (chatRef.current) {
chatRef.current.scrollTop = chatRef.current.scrollHeight;
}
}, [messages]);
return (
<div style={{ maxWidth: 600, margin: '0 auto', padding: 16 }}>
<h2>Dayta Assistant</h2>
<div
ref={chatRef}
style={{
border: '1px solid #ccc',
height: 400,
overflowY: 'auto',
padding: 12,
background: '#fff'
}}
>
{messages.map((msg, idx) => (
<div key={idx} style={{ marginBottom: 12 }}>
{msg.from === 'user' ? (
<div><strong>You:</strong> {msg.text}</div>
) : msg.card ? (
<div ref={(el) => {
if (el && !el.hasChildNodes()) {
const card = renderCard(msg.card);
el.appendChild(card);
}
}} />
) : (
<div><strong>Bot:</strong> {msg.text}</div>
)}
</div>
))}
</div>
<div style={{ display: 'flex', marginTop: 10 }}>
<input
type="text"
ref={inputRef}
placeholder="Type your message..."
style={{ flexGrow: 1, padding: 8 }}
onKeyDown={(e) => {
if (e.key === 'Enter') {
sendMessage(inputRef.current!.value);
}
}}
/>
<button onClick={() => sendMessage(inputRef.current!.value)}>Send</button>
</div>
</div>
);
};
export default Chat;

0
frontend/src/index.css Normal file

10
frontend/src/main.tsx Normal file

@ -0,0 +1,10 @@
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import './index.css'
import App from './App.tsx'
createRoot(document.getElementById('root')!).render(
<StrictMode>
<App />
</StrictMode>,
)

1
frontend/src/vite-env.d.ts vendored Normal file

@ -0,0 +1 @@
/// <reference types="vite/client" />


@ -0,0 +1,26 @@
{
"compilerOptions": {
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
"target": "ES2020",
"useDefineForClassFields": true,
"lib": ["ES2020", "DOM", "DOM.Iterable"],
"module": "ESNext",
"skipLibCheck": true,
/* Bundler mode */
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"isolatedModules": true,
"moduleDetection": "force",
"noEmit": true,
"jsx": "react-jsx",
/* Linting */
"strict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedSideEffectImports": true
},
"include": ["src"]
}

7
frontend/tsconfig.json Normal file

@ -0,0 +1,7 @@
{
"files": [],
"references": [
{ "path": "./tsconfig.app.json" },
{ "path": "./tsconfig.node.json" }
]
}


@ -0,0 +1,24 @@
{
"compilerOptions": {
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
"target": "ES2022",
"lib": ["ES2023"],
"module": "ESNext",
"skipLibCheck": true,
/* Bundler mode */
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"isolatedModules": true,
"moduleDetection": "force",
"noEmit": true,
/* Linting */
"strict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedSideEffectImports": true
},
"include": ["vite.config.ts"]
}

7
frontend/vite.config.ts Normal file

@ -0,0 +1,7 @@
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'
// https://vite.dev/config/
export default defineConfig({
plugins: [react()],
})

1
frontend/vite.svg Normal file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>
