Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
Expand Up @@ -19,3 +19,5 @@ htmlcov/
.cache/
Dockerfile
docker-compose.yml
*.charm
*.rock
2 changes: 2 additions & 0 deletions .github/workflows/playwright.yml
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,8 @@ jobs:
docker run \
-p 8104:8104 \
-e SECRET_KEY="$SECRET_KEY" \
-e REDIS_HOST=localhost \
-e REDIS_PORT=6379 \
-e GH_TOKEN="$GH_TOKEN" \
-e REPO_ORG="$REPO_ORG" \
-e DATABASE_URL="$DATABASE_URL" \
Expand Down
Binary file removed bin/act
Binary file not shown.
4 changes: 4 additions & 0 deletions entrypoint
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,10 @@ activate() {
{
activate

# Provision database
# ===
flask --app webapp.app db upgrade

RUN_COMMAND="gunicorn webapp.app:app --name $(hostname) --workers=2 --bind $1"

if [ -z ${FLASK_DEBUG+x} ]; then
Expand Down
6 changes: 3 additions & 3 deletions webapp/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,15 +32,15 @@ def create_app():

app.context_processor(base_context)

# Initialize cache
init_cache(app)

# Initialize database
init_db(app)

# Initialize SSO
init_sso(app)

# Initialize cache
init_cache(app)

# Initialize JIRA
init_jira(app)

Expand Down
2 changes: 1 addition & 1 deletion webapp/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
celery_app = init_celery(app)

# Initialize scheduled tasks
init_scheduled_tasks()
init_scheduled_tasks(app)


# Server-side routes
Expand Down
26 changes: 12 additions & 14 deletions webapp/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,15 +47,12 @@ def connect(self, app):
Return an instance of the redis cache. If not available, throw a
ConnectionError.
"""
self.logger.info("Connecting to Redis cache.")
if url := os.environ.get("REDIS_DB_CONNECT_STRING"):
if app.config.get("REDIS_HOST"):
self.logger.info("Connecting to Redis cache.")
url = app.config.get("REDIS_DB_CONNECT_STRING")
r = redis.from_url(url)
else:
host = app.config["REDIS_HOST"]
port = app.config["REDIS_PORT"]
r = redis.Redis(host=host, port=port, db=0)
r.ping()
return r
r.ping()
return r

def __get_prefixed_key__(self, key: str):
return f"{self.CACHE_PREFIX}_{key}"
Expand Down Expand Up @@ -152,12 +149,13 @@ def load_from_file(self, key: str):
"""
Load the JSON data from a file and return the python object.
"""
file_path = Path(f"{self.cache_path}/{key}")
# Check if the file exists
if not Path(self.cache_path + "/" + key).exists():
if not file_path.exists():
return None
with open(self.cache_path + "/" + key) as f:
with file_path.open("r") as f:
data = f.read()
return json.loads(data)
return json.loads(data)

def __get_prefixed_key__(self, key: str):
return f"{self.CACHE_PREFIX}_{key}"
Expand All @@ -180,11 +178,11 @@ def onerror(*args, **kwargs):


def init_cache(app: Flask) -> Cache:
try:
if app.config.get("REDIS_HOST"):
cache = RedisCache(app)
except Exception as e:
else:
cache = FileCache(app)
msg = f"Error: {e} Redis cache is not available."
msg = "Redis cache is not available."
" Using FileCache instead."
app.logger.info(msg)
app.config["CACHE"] = cache
Expand Down
67 changes: 29 additions & 38 deletions webapp/celery.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@

from celery import Celery, Task
from celery.app import Proxy
from celery.schedules import crontab
from celery.utils.log import get_task_logger
from flask import Flask

Expand All @@ -20,65 +19,57 @@ def register_celery_task(
fn: Callable | None,
celery_app: Proxy,
) -> CeleryTask:
"""
Register a celery task.
"""
"""Register a celery task."""
fn = celery_app.task()(fn)

return fn


def run_celery_task(
fn: Callable | None,
fn: Callable,
delay: int | None,
celery_app: Proxy,
args: tuple,
kwargs: dict,
) -> CeleryTask:
"""
Run a registered celery task.
"""
fn = register_celery_task(fn, celery_app)

def _setup_periodic_tasks(sender: Celery, **snkwargs: dict) -> None:
sender.add_periodic_task(
crontab(minute=str(delay)),
fn.s(*args, **kwargs),
name=f"{fn.__name__} every {delay}",
**snkwargs,
)

) -> CeleryTask | LocalTask:
"""Run a registered celery task."""
if delay:
celery_app.on_after_configure.connect(_setup_periodic_tasks)
# Celery doesn't allow us to add tasks to the beat schedule
# at runtime, so we'll use the non-celery asynchronous
# task decorator to handle periodic tasks
func = LocalTask(
fn=fn,
delay=delay,
)
else:
func = register_celery_task(fn, celery_app)

return fn
return func


def init_celery(app: Flask) -> Celery:
def init_celery(app: Flask) -> Celery | None:
class FlaskTask(Task):
def __call__(self, *args: object, **kwargs: object) -> object:
with app.app_context():
return self.run(*args, **kwargs)

celery_app = Celery(app.name, task_cls=FlaskTask)
# Use redis if available
if os.getenv("REDIS_HOST"):
celery_app = Celery(app.name, task_cls=FlaskTask)
broker_url = app.config.get("REDIS_DB_CONNECT_STRING")
# Otherwise, use default broker
else:
app.logger.error(
"No Redis host found, celery tasks will not be available.",
app.config.from_mapping(
CELERY={
"broker_url": broker_url,
"result_backend": broker_url,
"task_ignore_result": True,
},
)
return None
celery_app.config_from_object(app.config["CELERY"])
celery_app.set_default()
app.extensions["celery"] = celery_app
return celery_app

app.config.from_mapping(
CELERY={
"broker_url": broker_url,
"result_backend": broker_url,
"task_ignore_result": True,
},
app.logger.error(
"No Redis host found, celery tasks will not be available.",
)
celery_app.config_from_object(app.config["CELERY"])
celery_app.set_default()
app.extensions["celery"] = celery_app
return celery_app
return None
46 changes: 39 additions & 7 deletions webapp/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,7 @@


def versioned_static(filename):
"""
Template function for generating URLs to static assets:
"""Template function for generating URLs to static assets:
Given the path for a static file, output a url path
with a hex hash as a query string for versioning
"""
Expand All @@ -40,11 +39,9 @@ def base_context():


def clear_trailing_slash():
"""
Remove trailing slashes from all routes
"""Remove trailing slashes from all routes
We like our URLs without slashes
"""

parsed_url = urlparse(unquote(request.url))
path = parsed_url.path

Expand All @@ -58,9 +55,43 @@ def clear_trailing_slash():


@contextmanager
def database_lock() -> Generator:
def site_cloning_lock(site_name: str) -> Generator:
"""A context manager for acquiring a lock to control access
to site cloning operations.

This function creates a distributed lock using the available Cache to
ensure only one process can clone a specific site at a time. If the
lock is already acquired by another process, this will poll every 2 seconds
until the lock is released.

Args:
site_name: The name of the site to acquire a lock for

Yields:
The current lock status from the cache

Example:
with site_cloning_lock("ubuntu.com"):
# Site cloning operations here
...

"""
A context manager for acquiring a file-based lock to control access
cache = init_cache(current_app)
lock_name = f"{site_name}_lock"
locked = cache.get(lock_name)
while locked:
sleep(2)
locked = cache.get(lock_name)
try:
cache.set(lock_name, 1)
yield cache.get(lock_name)
finally:
cache.set(lock_name, 0)


@contextmanager
def database_lock() -> Generator:
"""A context manager for acquiring a lock to control access
to a shared db.

This function creates a distributed lock using the available Cache to
Expand All @@ -74,6 +105,7 @@ def database_lock() -> Generator:
Example:
with database_lock():
. . .

"""
cache = init_cache(current_app)
locked = cache.get(DB_LOCK_NAME)
Expand Down
15 changes: 15 additions & 0 deletions webapp/migrate.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
from flask_migrate import upgrade

from webapp.app import app
from webapp.context import database_lock


def migrate() -> None:
    """Apply any pending Alembic migrations, bringing the DB to head.

    Intended to be run once at container startup (see the entrypoint's
    ``flask --app webapp.app db upgrade`` step). Requires an application
    context because Flask-Migrate reads its config from ``app``.

    Raises:
        Whatever ``flask_migrate.upgrade`` raises if a migration fails
        (e.g. a database connection or schema error).
    """
    # Use lock to prevent multiple concurrent migrations on startup
    # (several gunicorn workers / replicas may boot at once and race
    # to run the same Alembic revisions against the shared database).
    # Automatically upgrade to head revision
    with app.app_context(), database_lock():
        upgrade()


if __name__ == "__main__":
    migrate()
15 changes: 4 additions & 11 deletions webapp/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

import yaml
from flask import Flask
from flask_migrate import Migrate, upgrade
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import (
Column,
Expand All @@ -24,8 +24,6 @@
)
from sqlalchemy.orm.session import Session

from webapp.context import database_lock

with open("data/data.yaml") as file:
data = yaml.load(file, Loader=yaml.FullLoader)

Expand All @@ -38,9 +36,8 @@ class Base(DeclarativeBase):


def get_or_create(session: Session, model: Base, commit=True, **kwargs):
"""
Return an instance of the specified model if it exists, otherwise create a
new instance.
"""Return an instance of the specified model if it exists, otherwise create
a new instance.

:param session: The database session to use for querying and committing
changes.
Expand Down Expand Up @@ -202,13 +199,9 @@ class WebpageProduct(db.Model, DateTimeMixin):
def init_db(app: Flask):
engine = create_engine(app.config["SQLALCHEMY_DATABASE_URI"])
session_factory = sessionmaker(bind=engine)
Migrate(app, db)

db.init_app(app)
# Use lock to prevent multiple concurrent migrations on startup
# Automatically upgrade to head revision
with app.app_context(), database_lock():
upgrade()
Migrate(app, db)

@app.before_request
def before_request():
Expand Down
Loading
Loading