Skip to content

rm --decentralization.off option #93

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 8 commits into from
Feb 20, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
73 changes: 73 additions & 0 deletions .github/workflows/dev-pipeline.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
# Dev CI/CD pipeline: builds the validator-api Docker image, pushes it to
# Google Artifact Registry, and rolls the new image out to the dev GKE cluster.
name: Dev CI/CD Pipeline

# Runs on pushes/PRs targeting main, plus manual runs from the Actions tab.
on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]
  workflow_dispatch:

env:
  REGION: us-central1                  # GCP region for Artifact Registry and the cluster zone prefix
  REPO_NAME: bittensor                 # Artifact Registry repository name
  IMAGE_NAME: sn24-vali-api            # image name; also the container name inside the deployment
  DEPLOYMENT_NAME: dev-sn24-vali-api   # k8s Deployment updated by the rollout step
  DEV_CLUSTER: dev-sn24-vali-api       # GKE cluster name
  NAMESPACE: dev-sn24-vali-api         # k8s namespace of the deployment

jobs:
  build-and-deploy:
    runs-on: ubuntu-latest
    environment: development           # pulls secrets from the "development" GitHub environment

    steps:
      - uses: actions/checkout@v3
        with:
          # Full history so `git rev-parse HEAD` below resolves the true commit SHA.
          fetch-depth: 0

      # Capture the commit SHA once; it becomes the image tag for build, push, and deploy.
      - name: Get Git SHA
        id: git_sha
        run: echo "sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT

      - name: Authenticate to Google Cloud
        uses: google-github-actions/auth@v1
        with:
          credentials_json: ${{ secrets.GCP_SA_KEY }}

      - name: Set up Cloud SDK
        uses: google-github-actions/setup-gcloud@v1

      # Let docker push to the regional Artifact Registry host.
      - name: Configure Docker
        run: gcloud auth configure-docker ${{ env.REGION }}-docker.pkg.dev --quiet

      # Runtime configuration is injected as a .env file baked into the build context.
      # NOTE(review): secrets written to disk — removed in the always() cleanup step below.
      - name: Create .env file
        run: |
          echo "${{ secrets.ENV_VARIABLES }}" > .env

      - name: Build Docker image
        run: |
          docker build --platform=linux/amd64 \
            --build-arg APP_DIR=/app \
            -t ${{ env.REGION }}-docker.pkg.dev/${{ secrets.PROJECT_ID }}/${{ env.REPO_NAME }}/${{ env.IMAGE_NAME }}:prod-${{ steps.git_sha.outputs.sha }} .

      - name: Push Docker image
        run: |
          docker push ${{ env.REGION }}-docker.pkg.dev/${{ secrets.PROJECT_ID }}/${{ env.REPO_NAME }}/${{ env.IMAGE_NAME }}:prod-${{ steps.git_sha.outputs.sha }}

      - name: Get GKE credentials
        uses: google-github-actions/get-gke-credentials@v1
        with:
          cluster_name: ${{ env.DEV_CLUSTER }}
          # Zonal cluster: zone is derived as "<REGION>-a".
          location: ${{ env.REGION }}-a

      # Point the deployment at the freshly pushed tag and block until rollout completes,
      # so a failed rollout fails the workflow.
      - name: Deploy to Dev
        run: |
          kubectl set image deployment/${{ env.DEPLOYMENT_NAME }} \
            ${{ env.IMAGE_NAME }}=${{ env.REGION }}-docker.pkg.dev/${{ secrets.PROJECT_ID }}/${{ env.REPO_NAME }}/${{ env.IMAGE_NAME }}:prod-${{ steps.git_sha.outputs.sha }} \
            -n ${{ env.NAMESPACE }}
          kubectl rollout status deployment/${{ env.DEPLOYMENT_NAME }} -n ${{ env.NAMESPACE }}

      # Runs even on failure so the secret-bearing .env never lingers on the runner.
      - name: Cleanup sensitive files
        if: always()
        run: |
          rm -f .env
20 changes: 6 additions & 14 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,30 +1,22 @@
FROM --platform=linux/amd64 nvcr.io/nvidia/cuda:12.1.0-devel-ubuntu20.04
FROM --platform=linux/amd64 python@sha256:370c586a6ffc8c619e6d652f81c094b34b14b8f2fb9251f092de23f16e299b78

ENV DEBIAN_FRONTEND=noninteractive

# Install software-properties-common to add repositories
# Install software-properties-common to add repositories.
# Note that mariadb is compatible with mysql which is why we use it
RUN apt-get -y update && apt-get install -y software-properties-common && \
add-apt-repository ppa:deadsnakes/ppa && \
apt-get -y update && apt-get install -y \
python3.10 python3.10-distutils python3.10-venv python3.10-dev \
git libsndfile1 build-essential ffmpeg libpq-dev \
pkg-config libmysqlclient-dev && \
pkg-config libmariadb-dev curl && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*

# Update the symbolic link for python to point to python3.10
RUN rm /usr/bin/python3 && \
ln -s /usr/bin/python3.10 /usr/bin/python3 && \
ln -s /usr/bin/python3.10 /usr/bin/python

WORKDIR /app/

# Install python requirements
COPY ./requirements.txt ./requirements.txt
COPY ./requirements_api.txt ./requirements_api.txt

RUN python -m ensurepip && python -m pip install --upgrade pip setuptools wheel uv
RUN python -m uv pip install -r requirements_api.txt --prerelease=allow --no-cache-dir
RUN python -m pip install --upgrade pip setuptools wheel uv
RUN python -m uv pip install --no-cache-dir -r requirements_api.txt --prerelease=allow

COPY . .
RUN python -m pip install -e . --no-cache-dir
Expand Down
2 changes: 1 addition & 1 deletion neurons/test_miner.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
else:
videos = Videos(query=query, num_videos=num_videos, video_metadata=video_metadata_list)
response = requests.get(
"https://dev-validator.api.omega-labs.ai/api/count_unique",
"https://dev-sn24-api.omegatron.ai/api/count_unique",
json=videos.to_serializable_dict(videos)
)
print(response.json())
29 changes: 4 additions & 25 deletions neurons/validator.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,35 +127,20 @@ def __init__(self, config=None):
self.successfully_started_wandb = False

self.api_root = (
"https://dev-validator.api.omega-labs.ai"
"https://dev-sn24-api.omegatron.ai"
if self.config.subtensor.network == "test" else
"https://validator.api.omega-labs.ai"
"https://sn24-api.omegatron.ai"
)
# load topics from topics URL (CSV) or fallback to local topics file
self.load_topics_start = dt.datetime.now()
self.all_topics = self.load_topics()

self.imagebind = None
self.imagebind = ImageBind(v2=True)
self.load_focus_rewards_start = dt.datetime.now()
self.FOCUS_REWARDS_PERCENT = self.load_focus_rewards_percent()
self.AUDIO_REWARDS_PERCENT = AUDIO_REWARDS_PERCENT
self.YOUTUBE_REWARDS_PERCENT = 1.0 - self.FOCUS_REWARDS_PERCENT - self.AUDIO_REWARDS_PERCENT

if not self.config.neuron.decentralization.off:
if torch.cuda.is_available():
bt.logging.info(
f"Running with decentralization enabled, thank you Bittensor Validator!")
self.decentralization = True
self.imagebind = ImageBind(v2=True)
else:
bt.logging.warning(
f"Attempting to run decentralization, but no GPU found. Please see min_compute.yml for minimum resource requirements.")
self.decentralization = False
else:
bt.logging.warning(
"Running with --decentralization.off. It is strongly recommended to run with decentralization enabled.")
self.decentralization = False

def new_wandb_run(self):
# Shoutout SN13 for the wandb snippet!
"""Creates a new wandb run to save information to."""
Expand Down Expand Up @@ -344,13 +329,7 @@ async def forward(self):

# Adjust the scores based on responses from miners.
try:
# Check if this validator is running decentralization
if not self.decentralization:
# if not, use validator API get_rewards system
rewards_list = await self.get_rewards(input_synapse=input_synapse, responses=finished_responses)
else:
# if so, use decentralization logic with local GPU
rewards_list = await self.handle_checks_and_rewards_youtube(input_synapse=input_synapse, responses=finished_responses)
rewards_list = await self.handle_checks_and_rewards_youtube(input_synapse=input_synapse, responses=finished_responses)
except Exception as e:
bt.logging.error(
f"Error in handle_checks_and_rewards_youtube: {e}")
Expand Down
7 changes: 0 additions & 7 deletions omega/utils/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,13 +107,6 @@ def add_args(cls, parser):
default=False,
)

parser.add_argument(
"--neuron.decentralization.off",
action="store_true",
help="Disable decentralization (not recommended).",
default=False,
)

parser.add_argument(
"--neuron.focus_videos",
action="store_true",
Expand Down
4 changes: 2 additions & 2 deletions purchase_focus_video.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,9 +76,9 @@
SUBTENSOR_NETWORK = None # "test" or None

API_BASE = (
"https://dev-validator.api.omega-labs.ai"
"https://dev-sn24-api.omegatron.ai"
if SUBTENSOR_NETWORK == "test" else
"https://validator.api.omega-labs.ai"
"https://sn24-api.omegatron.ai"
)

CYAN = "\033[96m"
Expand Down
54 changes: 0 additions & 54 deletions validator-api/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,6 @@
from validator_api.communex.client import CommuneClient
from validator_api.communex._common import get_node_url
from omega.protocol import Videos, VideoMetadata, AudioMetadata
from validator_api.imagebind_loader import ImageBindLoader
import aiohttp
from validator_api.config import (
NETWORK, NETUID, PORT,
Expand Down Expand Up @@ -84,7 +83,6 @@ def connect_to_db():
focus_api_key_header = APIKeyHeader(name="FOCUS_API_KEY", auto_error=False)

security = HTTPBasic()
imagebind_loader = ImageBindLoader()

focus_scoring_service = FocusScoringService()

Expand Down Expand Up @@ -826,58 +824,6 @@ async def cache_max_focus_alpha():

################ END OMEGA FOCUS ENDPOINTS ################

""" TO BE DEPRECATED """
@app.post("/api/validate")
async def validate(
videos: Videos,
hotkey: Annotated[str, Depends(get_hotkey)],
) -> float:
if not authenticate_with_bittensor(hotkey, metagraph):
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=f"Valid hotkey required.",
)
uid = metagraph.hotkeys.index(hotkey)

start_time = time.time()

youtube_rewards = await score.score_and_upload_videos(videos, await imagebind_loader.get_imagebind())

if youtube_rewards is None:
print("YouTube rewards are empty, returning None")
return None

total_rewards: float = youtube_rewards

print(f"Total Rewards: {total_rewards}")
print(
f"Returning score={total_rewards} for validator={uid} in {time.time() - start_time:.2f}s")

return total_rewards

# Development-only helper endpoints; not registered in production builds.
if not IS_PROD:
    @app.get("/api/count_unique")
    async def count_unique(
        videos: Videos,
    ) -> str:
        """Report how many of the submitted videos are not already known."""
        nunique = await score.get_num_unique_videos(videos)
        return f"{nunique} out of {len(videos.video_metadata)} submitted videos are unique"

    @app.get("/api/check_score")
    async def check_score(
        videos: Videos,
    ) -> dict:
        """Return a detailed (per-video) score breakdown for debugging."""
        detailed_score = await score.score_videos_for_testing(videos, await imagebind_loader.get_imagebind())
        return detailed_score

    @app.get("/api/topic")
    async def get_topic() -> str:
        """Return one randomly chosen topic from the configured topic list."""
        return random.choice(TOPICS_LIST)

    @app.get("/api/topics")
    async def get_topics() -> List[str]:
        """Return the full configured topic list."""
        return TOPICS_LIST

@app.get("/")
async def healthcheck():
return datetime.utcnow()
Expand Down
31 changes: 27 additions & 4 deletions validator-api/check_vali_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,21 @@
import asyncio
import datetime
import os
import time
import matplotlib.pyplot as plt

API_URL = "https://validator.api.omega-labs.ai"
NUM_REQUESTS = 5
# API_URL = "http://localhost:8001"
API_URL = "https://sn24-api.omegatron.ai"
NUM_REQUESTS = 1000
SAVE_DIR = "api_logs"

async def check_validator_api():
start = time.time()
async with aiohttp.ClientSession() as session:
# async with session.get(f"{API_URL}/api/focus/get_list") as response:
async with session.get(f"{API_URL}") as response:
return await response.text()
return await response.text(), time.time() - start

async def main():
timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
Expand All @@ -21,11 +27,28 @@ async def main():
results = await asyncio.gather(*tasks)
end_time = asyncio.get_event_loop().time()

# Extract durations for histogram
durations = [result[1] for result in results]

# Create histogram
plt.figure(figsize=(10, 6))
plt.hist(durations, bins=50, edgecolor='black')
plt.title('Distribution of API Request Durations')
plt.xlabel('Duration (seconds)')
plt.ylabel('Frequency')
plt.savefig(f"{SAVE_DIR}/duration_histogram_{timestamp}.png")
plt.close()

with open(output_file, 'w') as f:
f.write(f"API URL: {API_URL}\n")
f.write(f"Number of requests: {NUM_REQUESTS}\n")
f.write(f"Total time taken: {end_time - start_time:.2f} seconds\n")
f.write(f"Average time per request: {(end_time - start_time) / NUM_REQUESTS:.2f} seconds\n")
f.write(f"Max response time: {max(result[1] for result in results):.2f} seconds\n\n\n\n")
for i, result in enumerate(results, 1):
f.write(f"Request {i}:\n{result}\n\n")
f.write(f"Total time taken: {end_time - start_time:.2f} seconds")
f.write(f"Request {i}:\n{result[0]}\nTime taken: {result[1]:.2f} seconds\n\n")

print(f"Results saved to {output_file}")
print(f"Histogram saved to {SAVE_DIR}/duration_histogram_{timestamp}.png")

asyncio.run(main())
Loading