Skip to content

Commit

Permalink
Solves #1143 (#1154)
Browse files Browse the repository at this point in the history
* Solves #1143

- Check whether SELinux is enabled during the Ansible deployment
- PEP8 (long lines, indents, typing etc.)
- The unused imports are removed; the remaining imports are optimized
- Get the right major version when a full version is not listed in the duplicated versions (the main issue)
- The old field `versions_arches` is removed from the service config. The `arches` field provides full compatibility with it

* - Arch is an optional argument, so we should check its existence

* - Address the review comments
  • Loading branch information
soksanichenko authored Dec 10, 2024
1 parent 76f7a52 commit 487f6bb
Show file tree
Hide file tree
Showing 8 changed files with 118 additions and 106 deletions.
1 change: 1 addition & 0 deletions ci/ansible/roles/deploy/tasks/backend_installation.yml
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@
- "{{ source_path }}/venv/bin/gunicorn"
tags:
- backend_selinux
when: ansible_facts.selinux.status == 'enabled'

- name: Prepare started Py script
template:
Expand Down
1 change: 1 addition & 0 deletions ci/ansible/roles/deploy/tasks/common_setup.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
state: yes
name: httpd_can_network_connect
persistent: yes
when: ansible_facts.selinux.status == 'enabled'

- name: Set config_root
set_fact:
Expand Down
74 changes: 38 additions & 36 deletions src/backend/api/handlers.py
Original file line number Diff line number Diff line change
@@ -1,54 +1,40 @@
# coding=utf-8
import asyncio
import itertools
import os
import random
import time
from collections import defaultdict
from inspect import signature
from pathlib import Path
from dataclasses import asdict
from typing import Optional, Union
from urllib.parse import urljoin

import dateparser
from dataclasses import asdict
from sqlalchemy.orm import joinedload
from sqlalchemy.sql.expression import or_

from api.exceptions import UnknownRepoAttribute
from api.mirror_processor import MirrorProcessor
from db.db_engine import FlaskCacheEngine, FlaskCacheEngineRo
from yaml_snippets.utils import (
get_config,
get_mirrors_info,
)
from api.redis import (
_generate_redis_key_for_the_mirrors_list,
MIRRORS_LIST_EXPIRED_TIME,
CACHE_EXPIRED_TIME
)
from api.utils import (
get_geo_data_by_ip,
get_aws_subnets,
get_azure_subnets,
sort_mirrors_by_distance_and_country,
randomize_mirrors_within_distance,
)
from yaml_snippets.data_models import (
RepoData,
MainConfig,
MirrorData,
)
from common.sentry import get_logger
from db.db_engine import FlaskCacheEngine, FlaskCacheEngineRo
from db.models import (
Url,
mirrors_urls,
Mirror,
get_asn_by_ip,
is_ip_in_any_subnet,
Subnet,
)
from db.utils import session_scope
from sqlalchemy.sql.expression import or_
from common.sentry import get_logger
from yaml_snippets.data_models import (
RepoData,
MainConfig,
MirrorData,
)
from yaml_snippets.utils import (
get_config,
)

logger = get_logger(__name__)
cache = FlaskCacheEngine.get_instance()
Expand Down Expand Up @@ -404,12 +390,17 @@ def _is_vault_repo(


def get_allowed_arch(
arch: str,
version: float,
arches: list,
duplicated_versions: dict[str, str]
arch: str,
version: str,
arches: dict[str, list[str]],
) -> str:
version = next((i for i in duplicated_versions if duplicated_versions[i] == version), version)
version = next(
(
i for i in arches
if version.startswith(i)
),
version,
)
if arch not in arches[version]:
raise UnknownRepoAttribute(
'Unknown architecture "%s". Allowed list of arches "%s"',
Expand All @@ -428,11 +419,15 @@ def get_allowed_version(
) -> str:

if version not in versions and version not in vault_versions:
optional_versions = check_optional_version(version=version, optional_module_versions=optional_module_versions)
optional_versions = check_optional_version(
version=version,
optional_module_versions=optional_module_versions,
)
if version not in optional_versions:
try:
major_version = next(
ver for ver in duplicated_versions if version.startswith(ver)
ver for ver in duplicated_versions
if version.startswith(ver)
)
return duplicated_versions[major_version]
except StopIteration:
Expand Down Expand Up @@ -475,7 +470,7 @@ def get_mirrors_list(
debug_info: bool = False,
redis_key: Optional[str] = None,
module: Optional[str] = None
) -> Union[str, dict]:
) -> Union[list[str], dict]:
mirrors_list = []
config = get_config(
logger=logger,
Expand Down Expand Up @@ -515,13 +510,17 @@ def get_mirrors_list(
vault_versions=vault_versions,
repo=repo
):
if arch is not None:
repo_path = repo_path.replace(
'$basearch',
arch,
)
return [os.path.join(
vault_mirror,
version,
repo_path,
)]


if redis_key:
nearest_mirrors = cache_ro.get(redis_key)
from_cache = True
Expand Down Expand Up @@ -610,7 +609,10 @@ def get_main_isos_table(config: MainConfig) -> dict[str, list[str]]:
result = defaultdict(list)
for version, arches in config.arches.items():
for arch in arches:
if version in config.duplicated_versions and arch in config.versions_arches.get(version, config.arches[version]):
if (
version in config.duplicated_versions and
arch in config.arches[version]
):
if not result.get(arch):
result[arch] = []
result[arch].append(config.duplicated_versions[version])
Expand Down
12 changes: 9 additions & 3 deletions src/backend/api/mirror_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ def __init__(self, logger: Logger):
},
)
self.client_session = ClientSession(
timeout=ClientTimeout(total=15,connect=10),
timeout=ClientTimeout(total=15, connect=10),
connector=self.tcp_connector,
headers=HEADERS,
raise_for_status=True,
Expand Down Expand Up @@ -463,12 +463,18 @@ async def is_mirror_expired(
def get_mirror_iso_uris(
self,
versions: set[str],
arches: list,
arches: dict[str, list[str]],
duplicated_versions
) -> list[str]:
result = []
for version in versions:
base_version = next((i for i in duplicated_versions if duplicated_versions[i] == version), version)
base_version = next(
(
i for i in arches
if version.startswith(i)
),
version,
)
for arch in arches[base_version]:
for iso_file_template in self.iso_files_templates:
iso_file = iso_file_template.format(
Expand Down
14 changes: 6 additions & 8 deletions src/backend/api/utils.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
# coding=utf-8
import inspect
import os
import asyncio

import time
Expand All @@ -15,7 +14,7 @@

from aiohttp import (
ClientSession,
ClientConnectorError,
ClientError,
)
from aiohttp_retry.types import ClientType
from bs4 import BeautifulSoup
Expand All @@ -25,7 +24,6 @@
from yaml_snippets.data_models import MirrorData
from api.exceptions import (
BaseCustomException,
AuthException,
)
from flask import (
Response,
Expand Down Expand Up @@ -130,7 +128,7 @@ async def decorated_function(*args, **kwargs):


def get_geo_data_by_ip(
ip: str
ip: str,
) -> Optional[tuple[str, str, str, str, float, float]]:
"""
The function returns continent, country and locations of IP in English
Expand Down Expand Up @@ -176,7 +174,7 @@ async def get_azure_subnets_json(
raise_for_status=True
) as resp:
response_text = await resp.text()
except (ClientConnectorError, TimeoutError) as err:
except (ClientError, asyncio.exceptions.TimeoutError) as err:
logger.error(
'Cannot get json with Azure subnets by url "%s" because "%s"',
url,
Expand All @@ -187,7 +185,7 @@ async def get_azure_subnets_json(
soup = BeautifulSoup(response_text, features='lxml')
link_tag = soup.find('a', attrs=link_attributes)
link_to_json_url = link_tag.attrs['href']
except (ValueError, KeyError) as err:
except (ValueError, KeyError, AttributeError) as err:
logger.error(
'Cannot get json link with Azure '
'subnets from page content because "%s',
Expand All @@ -203,7 +201,7 @@ async def get_azure_subnets_json(
response_json = await resp.json(
content_type='application/octet-stream',
)
except (ClientConnectorError, asyncio.exceptions.TimeoutError) as err:
except (ClientError, asyncio.exceptions.TimeoutError) as err:
logger.error(
'Cannot get json with Azure subnets by url "%s" because "%s"',
link_to_json_url,
Expand All @@ -222,7 +220,7 @@ async def get_aws_subnets_json(http_session: ClientSession) -> Optional[dict]:
raise_for_status=True
) as resp:
response_json = await resp.json()
except (ClientConnectorError, TimeoutError) as err:
except (ClientError, TimeoutError) as err:
logger.error(
'Cannot get json with AWS subnets by url "%s" because "%s"',
url,
Expand Down
44 changes: 33 additions & 11 deletions src/backend/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,12 @@
get_all_mirrors,
get_isos_list_by_countries,
get_main_isos_table,
get_allowed_arch,
check_optional_version,
get_optional_module_from_version,
SERVICE_CONFIG_JSON_SCHEMA_DIR_PATH,
SERVICE_CONFIG_PATH, get_allowed_version, get_allowed_arch,
SERVICE_CONFIG_PATH,
get_allowed_version,
get_allowed_arch,
)
from werkzeug.exceptions import InternalServerError

Expand Down Expand Up @@ -58,6 +59,7 @@
cache = FlaskCacheEngine.get_instance(app)
cache_ro = FlaskCacheEngineRo.get_instance(app)


@app.context_processor
def inject_now_date():
return {
Expand Down Expand Up @@ -164,22 +166,38 @@ def get_mirror_list(

# protocol get arg
request_protocol = request.args.get('protocol')
if request_protocol and request_protocol not in ["http","https"]:
return "Invalid input for protocol, valid options: http, https"
if request_protocol and request_protocol not in ['http', 'https']:
return 'Invalid input for protocol, valid options: http, https'
# country get arg
request_country = request.args.get('country')
if request_country and len(request_country) != 2:
return "Invalid input for country, valid options are 2 letter country codes"
return (
'Invalid input for country, '
'valid options are 2 letter country codes'
)
# arch get arg
request_arch = request.args.get('arch')
if request_arch:
if not get_allowed_arch(arch=request_arch, version=version, arches=config.arches, duplicated_versions=config.duplicated_versions):
return f"Invalid arch/version combination requested, valid options are {config.arches}"
if not get_allowed_arch(
arch=request_arch,
version=version,
arches=config.arches,
):
return (
'Invalid arch/version combination requested, '
f'valid options are {config.arches}'
)

# check if optional module
module = None
if version in check_optional_version(version=version, optional_module_versions=config.optional_module_versions):
module = get_optional_module_from_version(version=version, optional_module_versions=config.optional_module_versions)
if version in check_optional_version(
version=version,
optional_module_versions=config.optional_module_versions,
):
module = get_optional_module_from_version(
version=version,
optional_module_versions=config.optional_module_versions,
)

ip_address = _get_request_ip()

Expand All @@ -191,7 +209,12 @@ def get_mirror_list(
request_protocol=request_protocol,
request_country=request_country,
debug_info=False,
redis_key=make_redis_key(ip=ip_address, protocol=request_protocol, country=request_country, module=module),
redis_key=make_redis_key(
ip=ip_address,
protocol=request_protocol,
country=request_country,
module=module,
),
module=module
)

Expand Down Expand Up @@ -311,7 +334,6 @@ def isos(
arch=arch,
version=version,
arches=config.arches,
duplicated_versions=config.duplicated_versions
)
data.update({
'arch': arch,
Expand Down
Loading

0 comments on commit 487f6bb

Please sign in to comment.