Skip to content

Commit

Permalink
Solves #1143
Browse files Browse the repository at this point in the history
- Check if SELinux is enabled during Ansible deployment
- PEP8 (long lines, indents, typing etc.)
- The unused imports are removed; the remaining imports are optimized
- Get right major version if a full version is not listed in duplicated versions (the main issue)
- Old field `versions_arches` is removed from the service config. Field `arches` provides full compatibility with it
  • Loading branch information
soksanichenko committed Dec 5, 2024
1 parent 92ca057 commit 49c3491
Show file tree
Hide file tree
Showing 3 changed files with 93 additions and 47 deletions.
11 changes: 5 additions & 6 deletions data_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ class MirrorData:
asn: list[str] = None
monopoly: bool = False
urls: dict[str, str] = field(default_factory=dict)
module_urls: dict[str, list] = field(default_factory=dict)
module_urls: dict[str, dict] = field(default_factory=dict)
has_optional_modules: Optional[str] = None
subnets: list[str] = field(default_factory=list)
subnets_int: list[tuple] = field(default_factory=tuple)
Expand Down Expand Up @@ -152,12 +152,11 @@ class MainConfig:
mirrors_dir: str
vault_mirror: str
versions: list[str] = field(default_factory=list)
optional_module_versions: dict[list] = field(default_factory=dict)
arches: list = field(default_factory=list)
optional_module_versions: dict[str, list[str]] = field(
default_factory=dict
)
arches: dict[str, list] = field(default_factory=dict)
duplicated_versions: dict[str, str] = field(default_factory=dict)
vault_versions: list[str] = field(default_factory=list)
versions_arches: dict[str, list[str]] = field(
default_factory=lambda: defaultdict(list)
)
required_protocols: list[str] = field(default_factory=list)
repos: list[RepoData] = field(default_factory=list)
12 changes: 0 additions & 12 deletions json_schemas/service_config/v3.json
Original file line number Diff line number Diff line change
Expand Up @@ -41,18 +41,6 @@
},
"additionalProperties": false
},
"versions_arches": {
"type": "object",
"patternProperties": {
"^.*$": {
"$ref": "#/definitions/Arches"
}
},
"propertyNames": {
"$ref": "#/definitions/Version"
},
"additionalProperties": false
},
"vault_versions": {
"$ref": "#/definitions/Versions"
},
Expand Down
117 changes: 88 additions & 29 deletions utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -235,13 +235,29 @@ def _process_repo_attributes(
version: str = None,
) -> list[str]:
for repo_arch in repo_attributes:
# rules for major versions listed in duplicates will be used if found
# Rules for major versions listed
# in duplicates will be used if found
if version:
version = next((i for i in yaml_data['duplicated_versions'] if yaml_data['duplicated_versions'][i] == version), version)
if repo_arch not in attributes.get(version, list(set(val for sublist in attributes.values() for val in sublist))) and repo_arch not in yaml_data['arches']:
version = next(
(
i for i in yaml_data['duplicated_versions']
if yaml_data['duplicated_versions'][i] == version
),
version,
)
ver_attrs = attributes.get(
version,
list(set(
val for sublist in attributes.values() for val in sublist
))
)
if (
repo_arch not in ver_attrs and
repo_arch not in yaml_data['arches']
):
raise ValidationError(
f'Attr "{repo_arch}" of repo "{repo_name}" is absent '
f'in the main list of attrs "{", ".join(attributes.get(version, list(set(val for sublist in attributes.values() for val in sublist))))}"'
f'in the main list of attrs "{", ".join(ver_attrs)}"'
)
return repo_attributes

Expand All @@ -259,14 +275,12 @@ def _process_repo_attributes(
mirrors_dir=yaml_data['mirrors_dir'],
vault_mirror=yaml_data.get('vault_mirror'),
versions=[str(version) for version in yaml_data['versions']],
optional_module_versions=yaml_data.get('optional_module_versions', {}),
optional_module_versions=yaml_data.get(
'optional_module_versions', {}
),
duplicated_versions=duplicated_versions,
vault_versions=vault_versions,
arches=yaml_data['arches'],
versions_arches={
arch: versions for arch, versions in
yaml_data.get('versions_arches', {}).items()
},
required_protocols=yaml_data['required_protocols'],
repos=[
RepoData(
Expand All @@ -276,14 +290,18 @@ def _process_repo_attributes(
repo_name=repo['name'],
repo_attributes=repo.get('arches', []),
attributes=yaml_data['arches'],
version=repo.get('versions', [None])[0] # Assuming each repo has at least one version
# Assuming each repo has at least one version
version=repo.get('versions', [None])[0]
),
versions=_process_repo_attributes(
repo_name=repo['name'],
repo_attributes=[
str(ver) for ver in repo.get('versions', [])
],
attributes={str(ver): yaml_data['versions'] for ver in repo.get('versions', [])}
attributes={
str(ver): yaml_data['versions']
for ver in repo.get('versions', [])
}
),
vault=repo.get('vault', False),
) for repo in yaml_data['repos']
Expand Down Expand Up @@ -383,7 +401,7 @@ def _get_mirror_subnets(
urls={
_type: url for _type, url in yaml_data['address'].items()
},
module_urls = {
module_urls={
module: {
_type: url for _type, url in urls.items()
} for module, urls in yaml_data.get('address_optional', {}).items()
Expand Down Expand Up @@ -505,11 +523,11 @@ def _get_arches_for_version(
def _is_permitted_arch_for_this_version_and_repo(
version: str,
arch: str,
versions_arches: dict[str, list[str]]
arches: dict[str, list[str]]
) -> bool:
if version not in versions_arches:
if version not in arches:
return True
elif version in versions_arches and arch in versions_arches[version]:
elif version in arches and arch in arches[version]:
return True
else:
return False
Expand All @@ -525,6 +543,26 @@ def get_mirror_url(
)


def _is_excluded_mirror_by_repo(
    mirror_name: str,
    repo_name: str,
) -> bool:
    """
    Tell whether a whitelisted mirror must be skipped for this repo.

    A mirror listed in ``WHITELIST_MIRRORS_PER_ARCH_REPO`` serves only
    the repos enumerated in its ``repos`` entry; any other repo is
    excluded. Mirrors not present in the whitelist are never excluded.
    """
    if mirror_name not in WHITELIST_MIRRORS_PER_ARCH_REPO:
        return False
    allowed_repos = WHITELIST_MIRRORS_PER_ARCH_REPO[mirror_name]['repos']
    return repo_name not in allowed_repos


def _is_excluded_mirror_by_arch(
    mirror_name: str,
    arch: str,
) -> bool:
    """
    Tell whether a whitelisted mirror must be skipped for this arch.

    A mirror listed in ``WHITELIST_MIRRORS_PER_ARCH_REPO`` serves only
    the arches enumerated in its ``arches`` entry; any other arch is
    excluded. Mirrors not present in the whitelist are never excluded.
    """
    if mirror_name not in WHITELIST_MIRRORS_PER_ARCH_REPO:
        return False
    allowed_arches = WHITELIST_MIRRORS_PER_ARCH_REPO[mirror_name]['arches']
    return arch not in allowed_arches


async def mirror_available(
mirror_info: MirrorData,
http_session: ClientType,
Expand All @@ -546,7 +584,7 @@ async def mirror_available(
'Mirror "%s" is private and won\'t be checked',
mirror_name,
)
return True
return True, None
urls_for_checking = {}
for version in main_config.versions:
# cloud mirrors (Azure/AWS) don't store beta versions
Expand All @@ -556,12 +594,20 @@ async def mirror_available(
if version in main_config.duplicated_versions:
continue
for repo_data in main_config.repos:
if mirror_info.name in WHITELIST_MIRRORS_PER_ARCH_REPO and \
repo_data.name not in WHITELIST_MIRRORS_PER_ARCH_REPO[mirror_info.name]['repos']:
if _is_excluded_mirror_by_repo(
mirror_name=mirror_name,
repo_name=repo_data.name,
):
continue
if repo_data.vault:
continue
base_version = next((i for i in main_config.duplicated_versions if main_config.duplicated_versions[i] == version), version)
base_version = next(
(
i for i in main_config.arches
if version.startswith(i)
),
version
)
arches = _get_arches_for_version(
repo_arches=repo_data.arches,
global_arches=main_config.arches[base_version],
Expand All @@ -570,13 +616,15 @@ async def mirror_available(
if repo_versions and version not in repo_versions:
continue
for arch in arches:
if mirror_info.name in WHITELIST_MIRRORS_PER_ARCH_REPO and \
arch not in WHITELIST_MIRRORS_PER_ARCH_REPO[mirror_info.name]['arches']:
if _is_excluded_mirror_by_arch(
mirror_name=mirror_name,
arch=arch,
):
continue
if not _is_permitted_arch_for_this_version_and_repo(
version=version,
version=base_version,
arch=arch,
versions_arches=main_config.versions_arches,
arches=main_config.arches,
):
continue
repo_path = repo_data.path.replace('$basearch', arch)
Expand Down Expand Up @@ -641,7 +689,10 @@ async def optional_modules_available(
return

mirror_name = mirror_info.name
logger.info('Checking optional module "%s" on mirror "%s"...', module, mirror_name)
logger.info(
'Checking optional module "%s" on mirror "%s"...', module,
mirror_name,
)
if mirror_info.private:
logger.info(
'Mirror "%s" is private and optional modules won\'t be checked',
Expand All @@ -665,14 +716,18 @@ async def optional_modules_available(
if not _is_permitted_arch_for_this_version_and_repo(
version=f'{ver}-{module}',
arch=arch,
versions_arches=main_config.versions_arches,
arches=main_config.arches,
):
continue
repo_path = repo_data.path.replace('$basearch', arch)
module_urls = mirror_info.module_urls[module]
url_for_check = urljoin(
urljoin(
urljoin(
mirror_info.module_urls[module].get('http', mirror_info.module_urls[module].get('https', None)) + '/',
(
module_urls.get('http') or
module_urls.get('https')
) + '/',
f'{ver}-{module}',
) + '/',
repo_path,
Expand All @@ -686,10 +741,12 @@ async def optional_modules_available(
}

success_msg = (
'Mirror "%(name)s" optional module "%(module)s" is available by url "%(url)s"'
'Mirror "%(name)s" optional module "%(module)s" '
'is available by url "%(url)s"'
)
error_msg = (
'Mirror "%(name)s" optional module "%(module)s" is not available for version '
'Mirror "%(name)s" optional module "%(module)s" '
'is not available for version '
'"%(version)s" and repo path "%(repo)s" because "%(err)s"'
)

Expand Down Expand Up @@ -723,5 +780,7 @@ async def optional_modules_available(
if not mirror_info.has_optional_modules:
mirror_info.has_optional_modules = module
else:
mirror_info.has_optional_modules = f'{mirror_info.has_optional_modules},{module}'
mirror_info.has_optional_modules = (
f'{mirror_info.has_optional_modules},{module}'
)
return result, reason

0 comments on commit 49c3491

Please sign in to comment.