use default lavapipe for screenshot #660

Merged: 4 commits, Jan 16, 2025
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -176,7 +176,7 @@ jobs:
pyversion: '3.13'
- name: Test Linux pypy3
os: ubuntu-latest
pyversion: 'pypy3.9'
pyversion: 'pypy3.10'
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.pyversion }}
2 changes: 0 additions & 2 deletions .github/workflows/screenshots.yml
@@ -19,8 +19,6 @@ jobs:
python-version: '3.12'
- name: Install llvmpipe and lavapipe for offscreen canvas
run: |
sudo apt-get update -y -qq
sudo add-apt-repository ppa:oibaf/graphics-drivers -y
sudo apt-get update -y -qq
sudo apt install -y libegl1-mesa-dev libgl1-mesa-dri libxcb-xfixes0-dev mesa-vulkan-drivers
- name: Install dev dependencies
12 changes: 6 additions & 6 deletions docs/conf.py
@@ -35,12 +35,12 @@
wgpu_text = f.read().decode()
wgpu_lines = [line.strip() for line in wgpu_text.splitlines()]
for cls_name in wgpu.classes.__all__:
assert (
f"~{cls_name}" in wgpu_lines
), f"Class '{cls_name}' not listed in class list in wgpu.rst"
assert (
f":class:`{cls_name}`" in wgpu_text
), f"Class '{cls_name}' not referenced in the text in wgpu.rst"
assert f"~{cls_name}" in wgpu_lines, (
f"Class '{cls_name}' not listed in class list in wgpu.rst"
)
assert f":class:`{cls_name}`" in wgpu_text, (
f"Class '{cls_name}' not referenced in the text in wgpu.rst"
)


# -- Hacks to tweak docstrings -----------------------------------------------
4 changes: 2 additions & 2 deletions examples/compute_timestamps.py
@@ -23,7 +23,7 @@
var<storage,read_write> data3: array<i32>;

@compute
@workgroup_size({','.join(map(str, local_size))})
@workgroup_size({",".join(map(str, local_size))})
fn main(@builtin(global_invocation_id) index: vec3<u32>) {{
let i: u32 = index.x;
data3[i] = data1[i] + data2[i];
@@ -152,7 +152,7 @@
Index 1: end timestamp
"""
timestamps = device.queue.read_buffer(query_buf).cast("Q").tolist()
print(f"Adding two {n} sized arrays took {(timestamps[1]-timestamps[0])/1000} us")
print(f"Adding two {n} sized arrays took {(timestamps[1] - timestamps[0]) / 1000} us")

# Read result
out = device.queue.read_buffer(buffer3).cast("i")
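For context on the timing line in this example: both timestamps are read back from the resolved query buffer as unsigned 64-bit integers ("Q"), and the difference is divided by 1000 to report microseconds. A minimal sketch of that readback, assuming the adapter's timestamps tick in nanoseconds (a timestamp period of 1.0); on adapters with a different period the raw delta would need to be scaled first:

# Read (start, end) as uint64 values from the resolved query buffer
timestamps = device.queue.read_buffer(query_buf).cast("Q").tolist()
elapsed_ns = timestamps[1] - timestamps[0]  # assumes a 1 ns tick
print(f"Elapsed: {elapsed_ns / 1000:0.1f} us")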
2 changes: 1 addition & 1 deletion examples/gui_asyncio.py
@@ -38,7 +38,7 @@ async def main_loop():
frame_count += 1
etime = time.perf_counter() - last_frame_time
if etime > 1:
print(f"{frame_count/etime:0.1f} FPS")
print(f"{frame_count / etime:0.1f} FPS")
last_frame_time, frame_count = time.perf_counter(), 0

# dispose resources
2 changes: 1 addition & 1 deletion examples/gui_direct.py
@@ -64,7 +64,7 @@ def main():
frame_count += 1
etime = time.perf_counter() - last_frame_time
if etime > 1:
print(f"{frame_count/etime:0.1f} FPS")
print(f"{frame_count / etime:0.1f} FPS")
last_frame_time, frame_count = time.perf_counter(), 0

# dispose resources
2 changes: 1 addition & 1 deletion examples/gui_trio.py
@@ -45,7 +45,7 @@ async def main_loop():
frame_count += 1
etime = time.perf_counter() - last_frame_time
if etime > 1:
print(f"{frame_count/etime:0.1f} FPS")
print(f"{frame_count / etime:0.1f} FPS")
last_frame_time, frame_count = time.perf_counter(), 0

# dispose resources
6 changes: 3 additions & 3 deletions examples/tests/test_examples.py
@@ -106,9 +106,9 @@ def unload_module():
imageio.imwrite(screenshot_path, img)

# if a reference screenshot exists, assert it is equal
assert (
screenshot_path.exists()
), "found # test_example = true but no reference screenshot available"
assert screenshot_path.exists(), (
"found # test_example = true but no reference screenshot available"
)
stored_img = imageio.imread(screenshot_path)
# assert similarity
atol = 1
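The actual similarity check follows atol = 1 but sits outside this hunk; a hypothetical sketch of an absolute-tolerance comparison in that spirit (numpy is assumed, and img / stored_img / atol are the names from the surrounding test code):

import numpy as np

diff = np.abs(np.asarray(img, np.int16) - np.asarray(stored_img, np.int16))
assert diff.max() <= atol, f"screenshot differs from reference by up to {diff.max()}"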
14 changes: 7 additions & 7 deletions tests/test_wgpu_native_buffer.py
@@ -150,7 +150,7 @@ def test_consecutive_writes1():
# Write in parts
for i in range(4):
buf.map_sync("write")
buf.write_mapped(f"{i+1}".encode() * 8, i * 8)
buf.write_mapped(f"{i + 1}".encode() * 8, i * 8)
buf.unmap()

# Download from buffer to CPU
@@ -160,7 +160,7 @@ def test_consecutive_writes1():
# Also in parts
for i in range(4):
data = device.queue.read_buffer(buf, i * 8, size=8)
assert data == f"{i+1}".encode() * 8
assert data == f"{i + 1}".encode() * 8


@mark.skipif(not can_use_wgpu_lib, reason="Needs wgpu lib")
@@ -177,7 +177,7 @@ def test_consecutive_writes2():
# Write in parts
buf.map_sync("write")
for i in range(4):
buf.write_mapped(f"{i+1}".encode() * 8, i * 8)
buf.write_mapped(f"{i + 1}".encode() * 8, i * 8)
buf.unmap()

# Download from buffer to CPU
@@ -187,7 +187,7 @@ def test_consecutive_writes2():
# Also in parts
for i in range(4):
data = device.queue.read_buffer(buf, i * 8, size=8)
assert data == f"{i+1}".encode() * 8
assert data == f"{i + 1}".encode() * 8


@mark.skipif(not can_use_wgpu_lib, reason="Needs wgpu lib")
@@ -221,20 +221,20 @@ def test_consecutive_reads():

# Write using the queue. Do in parts, to touch those offsets too
for i in range(4):
device.queue.write_buffer(buf, i * 8, f"{i+1}".encode() * 8)
device.queue.write_buffer(buf, i * 8, f"{i + 1}".encode() * 8)

# Read in parts, the inefficient way
for i in range(4):
buf.map_sync("read")
data = buf.read_mapped(i * 8, 8)
assert data == f"{i+1}".encode() * 8
assert data == f"{i + 1}".encode() * 8
buf.unmap()

# Read in parts, the efficient way
buf.map_sync("read")
for i in range(4):
data = buf.read_mapped(i * 8, 8)
assert data == f"{i+1}".encode() * 8
assert data == f"{i + 1}".encode() * 8
buf.unmap()
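For readers unfamiliar with the mapped-buffer API these tests exercise, a minimal self-contained sketch of the write-then-read round trip (the 32-byte size and the usage flags are illustrative assumptions, not taken from the hunks above):

from wgpu.utils import get_default_device
import wgpu

device = get_default_device()
buf = device.create_buffer(
    size=32,
    usage=wgpu.BufferUsage.MAP_WRITE | wgpu.BufferUsage.COPY_SRC,
)

buf.map_sync("write")           # map the whole buffer for writing
buf.write_mapped(b"1" * 32, 0)  # write 32 bytes at offset 0
buf.unmap()

data = device.queue.read_buffer(buf)  # download to CPU via the queue
assert data == b"1" * 32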


6 changes: 3 additions & 3 deletions tests_mem/testutils.py
@@ -237,9 +237,9 @@ def core_test_func():

# Make sure the actual object has increased
assert more2 # not empty
assert (
more2 == options["expected_counts_after_create"]
), f"Expected:\n{options['expected_counts_after_create']}\nGot:\n{more2}"
assert more2 == options["expected_counts_after_create"], (
f"Expected:\n{options['expected_counts_after_create']}\nGot:\n{more2}"
)

# It's ok if other objects are created too ...

36 changes: 18 additions & 18 deletions tools/build_all_wheels.py
@@ -81,19 +81,19 @@
found_files = os.listdir("dist")
found_wheels = [fname for fname in found_files if fname.endswith(".whl")]
found_tags = {fname.split("none-")[1].split(".")[0] for fname in found_wheels}
assert (
found_tags == all_tags
), f"Found tags does not match expected tags: {found_tags}\n{all_tags}"
assert found_tags == all_tags, (
f"Found tags does not match expected tags: {found_tags}\n{all_tags}"
)

found_others = list(set(found_files) - set(found_wheels))
assert len(found_others) == 1 and found_others[0].endswith(
".tar.gz"
), f"Found unexpected files: {found_others}"
assert len(found_others) == 1 and found_others[0].endswith(".tar.gz"), (
f"Found unexpected files: {found_others}"
)

for archive_name in found_wheels:
assert (
"-any-" not in archive_name
), f"There should not be an 'any' wheel: {archive_name}"
assert "-any-" not in archive_name, (
f"There should not be an 'any' wheel: {archive_name}"
)


# --- Report and check content of archives
@@ -104,14 +104,14 @@
# Simple check for sdist archive
for archive_name in found_others:
size = os.stat("dist/" + archive_name).st_size
print(f"{archive_name} ({size/1e6:0.2f} MB)")
print(f"{archive_name} ({size / 1e6:0.2f} MB)")
assert size < 1e6, f"Did not expected {archive_name} to be this large"

# Collect content of each wheel
hash_to_file = {}
for archive_name in found_wheels:
size = os.stat("dist/" + archive_name).st_size
print(f"{archive_name} ({size/1e6:0.2f} MB)")
print(f"{archive_name} ({size / 1e6:0.2f} MB)")
z = zipfile.ZipFile("dist/" + archive_name)
flat_map = {os.path.basename(fi.filename): fi.filename for fi in z.filelist}
lib_hashes = []
@@ -120,14 +120,14 @@
bb = z.read(flat_map[fname])
hash = hashlib.sha256(bb).hexdigest()
lib_hashes.append(hash)
print(f" - {fname} ({len(bb)/1e6:0.2f} MB)\n {hash}")
assert (
len(lib_hashes) == 1
), f"Expected 1 lib per wheel, got {len(lib_hashes)} in {archive_name}"
print(f" - {fname} ({len(bb) / 1e6:0.2f} MB)\n {hash}")
assert len(lib_hashes) == 1, (
f"Expected 1 lib per wheel, got {len(lib_hashes)} in {archive_name}"
)
hash = lib_hashes[0]
assert (
hash not in hash_to_file
), f"Same lib found in {hash_to_file[hash]} and archive_name"
assert hash not in hash_to_file, (
f"Same lib found in {hash_to_file[hash]} and archive_name"
)
hash_to_file[hash] = archive_name

# Meta check
3 changes: 1 addition & 2 deletions wgpu/backends/wgpu_native/_api.py
@@ -445,8 +445,7 @@ def _request_adapter(
backend = enum_str2int["BackendType"][force_backend]
except KeyError:
logger.warning(
f"Invalid value for WGPU_BACKEND_TYPE: '{force_backend}'.\n"
f"Valid values are: {list(enum_str2int['BackendType'].keys())}"
f"Invalid value for WGPU_BACKEND_TYPE: '{force_backend}'.\nValid values are: {list(enum_str2int['BackendType'].keys())}"
)
else:
logger.warning(f"Forcing backend: {force_backend} ({backend})")