Added support for multiple tags on a single model
This commit is contained in:
@@ -0,0 +1,418 @@
|
||||
"""
|
||||
Support for streaming http requests in emscripten.
|
||||
|
||||
A few caveats -
|
||||
|
||||
Firstly, you can't do streaming http in the main UI thread, because atomics.wait isn't allowed.
|
||||
Streaming only works if you're running pyodide in a web worker.
|
||||
|
||||
Secondly, this uses an extra web worker and SharedArrayBuffer to do the asynchronous fetch
|
||||
operation, so it requires that you have crossOriginIsolation enabled, by serving over https
|
||||
(or from localhost) with the two headers below set:
|
||||
|
||||
Cross-Origin-Opener-Policy: same-origin
|
||||
Cross-Origin-Embedder-Policy: require-corp
|
||||
|
||||
You can tell if cross origin isolation is successfully enabled by looking at the global crossOriginIsolated variable in
the javascript console. If it isn't, streaming requests will fall back to XMLHttpRequest, i.e. getting the whole
request into a buffer and then returning it. It shows a warning in the javascript console in this case.
|
||||
|
||||
Finally, the webworker which does the streaming fetch is created on initial import, but will only be started once
|
||||
control is returned to javascript. Call `await wait_for_streaming_ready()` to wait for streaming fetch.
|
||||
|
||||
NB: in this code, there are a lot of javascript objects. They are named js_*
|
||||
to make it clear what type of object they are.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import json
|
||||
from email.parser import Parser
|
||||
from importlib.resources import files
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import js # type: ignore[import-not-found]
|
||||
from pyodide.ffi import ( # type: ignore[import-not-found]
|
||||
JsArray,
|
||||
JsException,
|
||||
JsProxy,
|
||||
to_js,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing_extensions import Buffer
|
||||
|
||||
from .request import EmscriptenRequest
|
||||
from .response import EmscriptenResponse
|
||||
|
||||
"""
|
||||
There are some headers that trigger unintended CORS preflight requests.
|
||||
See also https://github.com/koenvo/pyodide-http/issues/22
|
||||
"""
|
||||
HEADERS_TO_IGNORE = ("user-agent",)
|
||||
|
||||
SUCCESS_HEADER = -1
|
||||
SUCCESS_EOF = -2
|
||||
ERROR_TIMEOUT = -3
|
||||
ERROR_EXCEPTION = -4
|
||||
|
||||
_STREAMING_WORKER_CODE = (
|
||||
files(__package__)
|
||||
.joinpath("emscripten_fetch_worker.js")
|
||||
.read_text(encoding="utf-8")
|
||||
)
|
||||
|
||||
|
||||
class _RequestError(Exception):
|
||||
def __init__(
|
||||
self,
|
||||
message: str | None = None,
|
||||
*,
|
||||
request: EmscriptenRequest | None = None,
|
||||
response: EmscriptenResponse | None = None,
|
||||
):
|
||||
self.request = request
|
||||
self.response = response
|
||||
self.message = message
|
||||
super().__init__(self.message)
|
||||
|
||||
|
||||
class _StreamingError(_RequestError):
|
||||
pass
|
||||
|
||||
|
||||
class _TimeoutError(_RequestError):
|
||||
pass
|
||||
|
||||
|
||||
def _obj_from_dict(dict_val: dict[str, Any]) -> JsProxy:
|
||||
return to_js(dict_val, dict_converter=js.Object.fromEntries)
|
||||
|
||||
|
||||
class _ReadStream(io.RawIOBase):
|
||||
def __init__(
|
||||
self,
|
||||
int_buffer: JsArray,
|
||||
byte_buffer: JsArray,
|
||||
timeout: float,
|
||||
worker: JsProxy,
|
||||
connection_id: int,
|
||||
request: EmscriptenRequest,
|
||||
):
|
||||
self.int_buffer = int_buffer
|
||||
self.byte_buffer = byte_buffer
|
||||
self.read_pos = 0
|
||||
self.read_len = 0
|
||||
self.connection_id = connection_id
|
||||
self.worker = worker
|
||||
self.timeout = int(1000 * timeout) if timeout > 0 else None
|
||||
self.is_live = True
|
||||
self._is_closed = False
|
||||
self.request: EmscriptenRequest | None = request
|
||||
|
||||
def __del__(self) -> None:
|
||||
self.close()
|
||||
|
||||
# this is compatible with _base_connection
|
||||
def is_closed(self) -> bool:
|
||||
return self._is_closed
|
||||
|
||||
# for compatibility with RawIOBase
|
||||
@property
|
||||
def closed(self) -> bool:
|
||||
return self.is_closed()
|
||||
|
||||
def close(self) -> None:
|
||||
if not self.is_closed():
|
||||
self.read_len = 0
|
||||
self.read_pos = 0
|
||||
self.int_buffer = None
|
||||
self.byte_buffer = None
|
||||
self._is_closed = True
|
||||
self.request = None
|
||||
if self.is_live:
|
||||
self.worker.postMessage(_obj_from_dict({"close": self.connection_id}))
|
||||
self.is_live = False
|
||||
super().close()
|
||||
|
||||
def readable(self) -> bool:
|
||||
return True
|
||||
|
||||
def writable(self) -> bool:
|
||||
return False
|
||||
|
||||
def seekable(self) -> bool:
|
||||
return False
|
||||
|
||||
def readinto(self, byte_obj: Buffer) -> int:
|
||||
if not self.int_buffer:
|
||||
raise _StreamingError(
|
||||
"No buffer for stream in _ReadStream.readinto",
|
||||
request=self.request,
|
||||
response=None,
|
||||
)
|
||||
if self.read_len == 0:
|
||||
# wait for the worker to send something
|
||||
js.Atomics.store(self.int_buffer, 0, ERROR_TIMEOUT)
|
||||
self.worker.postMessage(_obj_from_dict({"getMore": self.connection_id}))
|
||||
if (
|
||||
js.Atomics.wait(self.int_buffer, 0, ERROR_TIMEOUT, self.timeout)
|
||||
== "timed-out"
|
||||
):
|
||||
raise _TimeoutError
|
||||
data_len = self.int_buffer[0]
|
||||
if data_len > 0:
|
||||
self.read_len = data_len
|
||||
self.read_pos = 0
|
||||
elif data_len == ERROR_EXCEPTION:
|
||||
string_len = self.int_buffer[1]
|
||||
# decode the error string
|
||||
js_decoder = js.TextDecoder.new()
|
||||
json_str = js_decoder.decode(self.byte_buffer.slice(0, string_len))
|
||||
raise _StreamingError(
|
||||
f"Exception thrown in fetch: {json_str}",
|
||||
request=self.request,
|
||||
response=None,
|
||||
)
|
||||
else:
|
||||
# EOF, free the buffers and return zero
|
||||
# and free the request
|
||||
self.is_live = False
|
||||
self.close()
|
||||
return 0
|
||||
# copy from int32array to python bytes
|
||||
ret_length = min(self.read_len, len(memoryview(byte_obj)))
|
||||
subarray = self.byte_buffer.subarray(
|
||||
self.read_pos, self.read_pos + ret_length
|
||||
).to_py()
|
||||
memoryview(byte_obj)[0:ret_length] = subarray
|
||||
self.read_len -= ret_length
|
||||
self.read_pos += ret_length
|
||||
return ret_length
|
||||
|
||||
|
||||
class _StreamingFetcher:
|
||||
def __init__(self) -> None:
|
||||
# make web-worker and data buffer on startup
|
||||
self.streaming_ready = False
|
||||
|
||||
js_data_blob = js.Blob.new(
|
||||
[_STREAMING_WORKER_CODE], _obj_from_dict({"type": "application/javascript"})
|
||||
)
|
||||
|
||||
def promise_resolver(js_resolve_fn: JsProxy, js_reject_fn: JsProxy) -> None:
|
||||
def onMsg(e: JsProxy) -> None:
|
||||
self.streaming_ready = True
|
||||
js_resolve_fn(e)
|
||||
|
||||
def onErr(e: JsProxy) -> None:
|
||||
js_reject_fn(e) # Defensive: never happens in ci
|
||||
|
||||
self.js_worker.onmessage = onMsg
|
||||
self.js_worker.onerror = onErr
|
||||
|
||||
js_data_url = js.URL.createObjectURL(js_data_blob)
|
||||
self.js_worker = js.globalThis.Worker.new(js_data_url)
|
||||
self.js_worker_ready_promise = js.globalThis.Promise.new(promise_resolver)
|
||||
|
||||
def send(self, request: EmscriptenRequest) -> EmscriptenResponse:
|
||||
headers = {
|
||||
k: v for k, v in request.headers.items() if k not in HEADERS_TO_IGNORE
|
||||
}
|
||||
|
||||
body = request.body
|
||||
fetch_data = {"headers": headers, "body": to_js(body), "method": request.method}
|
||||
# start the request off in the worker
|
||||
timeout = int(1000 * request.timeout) if request.timeout > 0 else None
|
||||
js_shared_buffer = js.SharedArrayBuffer.new(1048576)
|
||||
js_int_buffer = js.Int32Array.new(js_shared_buffer)
|
||||
js_byte_buffer = js.Uint8Array.new(js_shared_buffer, 8)
|
||||
|
||||
js.Atomics.store(js_int_buffer, 0, ERROR_TIMEOUT)
|
||||
js.Atomics.notify(js_int_buffer, 0)
|
||||
js_absolute_url = js.URL.new(request.url, js.location).href
|
||||
self.js_worker.postMessage(
|
||||
_obj_from_dict(
|
||||
{
|
||||
"buffer": js_shared_buffer,
|
||||
"url": js_absolute_url,
|
||||
"fetchParams": fetch_data,
|
||||
}
|
||||
)
|
||||
)
|
||||
# wait for the worker to send something
|
||||
js.Atomics.wait(js_int_buffer, 0, ERROR_TIMEOUT, timeout)
|
||||
if js_int_buffer[0] == ERROR_TIMEOUT:
|
||||
raise _TimeoutError(
|
||||
"Timeout connecting to streaming request",
|
||||
request=request,
|
||||
response=None,
|
||||
)
|
||||
elif js_int_buffer[0] == SUCCESS_HEADER:
|
||||
# got response
|
||||
# header length is in second int of intBuffer
|
||||
string_len = js_int_buffer[1]
|
||||
# decode the rest to a JSON string
|
||||
js_decoder = js.TextDecoder.new()
|
||||
# this does a copy (the slice) because decode can't work on shared array
|
||||
# for some silly reason
|
||||
json_str = js_decoder.decode(js_byte_buffer.slice(0, string_len))
|
||||
# get it as an object
|
||||
response_obj = json.loads(json_str)
|
||||
return EmscriptenResponse(
|
||||
request=request,
|
||||
status_code=response_obj["status"],
|
||||
headers=response_obj["headers"],
|
||||
body=_ReadStream(
|
||||
js_int_buffer,
|
||||
js_byte_buffer,
|
||||
request.timeout,
|
||||
self.js_worker,
|
||||
response_obj["connectionID"],
|
||||
request,
|
||||
),
|
||||
)
|
||||
elif js_int_buffer[0] == ERROR_EXCEPTION:
|
||||
string_len = js_int_buffer[1]
|
||||
# decode the error string
|
||||
js_decoder = js.TextDecoder.new()
|
||||
json_str = js_decoder.decode(js_byte_buffer.slice(0, string_len))
|
||||
raise _StreamingError(
|
||||
f"Exception thrown in fetch: {json_str}", request=request, response=None
|
||||
)
|
||||
else:
|
||||
raise _StreamingError(
|
||||
f"Unknown status from worker in fetch: {js_int_buffer[0]}",
|
||||
request=request,
|
||||
response=None,
|
||||
)
|
||||
|
||||
|
||||
# check if we are in a worker or not
|
||||
def is_in_browser_main_thread() -> bool:
|
||||
return hasattr(js, "window") and hasattr(js, "self") and js.self == js.window
|
||||
|
||||
|
||||
def is_cross_origin_isolated() -> bool:
|
||||
return hasattr(js, "crossOriginIsolated") and js.crossOriginIsolated
|
||||
|
||||
|
||||
def is_in_node() -> bool:
|
||||
return (
|
||||
hasattr(js, "process")
|
||||
and hasattr(js.process, "release")
|
||||
and hasattr(js.process.release, "name")
|
||||
and js.process.release.name == "node"
|
||||
)
|
||||
|
||||
|
||||
def is_worker_available() -> bool:
|
||||
return hasattr(js, "Worker") and hasattr(js, "Blob")
|
||||
|
||||
|
||||
_fetcher: _StreamingFetcher | None = None
|
||||
|
||||
if is_worker_available() and (
|
||||
(is_cross_origin_isolated() and not is_in_browser_main_thread())
|
||||
and (not is_in_node())
|
||||
):
|
||||
_fetcher = _StreamingFetcher()
|
||||
else:
|
||||
_fetcher = None
|
||||
|
||||
|
||||
def send_streaming_request(request: EmscriptenRequest) -> EmscriptenResponse | None:
|
||||
if _fetcher and streaming_ready():
|
||||
return _fetcher.send(request)
|
||||
else:
|
||||
_show_streaming_warning()
|
||||
return None
|
||||
|
||||
|
||||
_SHOWN_TIMEOUT_WARNING = False
|
||||
|
||||
|
||||
def _show_timeout_warning() -> None:
|
||||
global _SHOWN_TIMEOUT_WARNING
|
||||
if not _SHOWN_TIMEOUT_WARNING:
|
||||
_SHOWN_TIMEOUT_WARNING = True
|
||||
message = "Warning: Timeout is not available on main browser thread"
|
||||
js.console.warn(message)
|
||||
|
||||
|
||||
_SHOWN_STREAMING_WARNING = False
|
||||
|
||||
|
||||
def _show_streaming_warning() -> None:
|
||||
global _SHOWN_STREAMING_WARNING
|
||||
if not _SHOWN_STREAMING_WARNING:
|
||||
_SHOWN_STREAMING_WARNING = True
|
||||
message = "Can't stream HTTP requests because: \n"
|
||||
if not is_cross_origin_isolated():
|
||||
message += " Page is not cross-origin isolated\n"
|
||||
if is_in_browser_main_thread():
|
||||
message += " Python is running in main browser thread\n"
|
||||
if not is_worker_available():
|
||||
message += " Worker or Blob classes are not available in this environment." # Defensive: this is always False in browsers that we test in
|
||||
if streaming_ready() is False:
|
||||
message += """ Streaming fetch worker isn't ready. If you want to be sure that streaming fetch
|
||||
is working, you need to call: 'await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready()`"""
|
||||
from js import console
|
||||
|
||||
console.warn(message)
|
||||
|
||||
|
||||
def send_request(request: EmscriptenRequest) -> EmscriptenResponse:
|
||||
try:
|
||||
js_xhr = js.XMLHttpRequest.new()
|
||||
|
||||
if not is_in_browser_main_thread():
|
||||
js_xhr.responseType = "arraybuffer"
|
||||
if request.timeout:
|
||||
js_xhr.timeout = int(request.timeout * 1000)
|
||||
else:
|
||||
js_xhr.overrideMimeType("text/plain; charset=ISO-8859-15")
|
||||
if request.timeout:
|
||||
# timeout isn't available on the main thread - show a warning in console
|
||||
# if it is set
|
||||
_show_timeout_warning()
|
||||
|
||||
js_xhr.open(request.method, request.url, False)
|
||||
for name, value in request.headers.items():
|
||||
if name.lower() not in HEADERS_TO_IGNORE:
|
||||
js_xhr.setRequestHeader(name, value)
|
||||
|
||||
js_xhr.send(to_js(request.body))
|
||||
|
||||
headers = dict(Parser().parsestr(js_xhr.getAllResponseHeaders()))
|
||||
|
||||
if not is_in_browser_main_thread():
|
||||
body = js_xhr.response.to_py().tobytes()
|
||||
else:
|
||||
body = js_xhr.response.encode("ISO-8859-15")
|
||||
return EmscriptenResponse(
|
||||
status_code=js_xhr.status, headers=headers, body=body, request=request
|
||||
)
|
||||
except JsException as err:
|
||||
if err.name == "TimeoutError":
|
||||
raise _TimeoutError(err.message, request=request)
|
||||
elif err.name == "NetworkError":
|
||||
raise _RequestError(err.message, request=request)
|
||||
else:
|
||||
# general http error
|
||||
raise _RequestError(err.message, request=request)
|
||||
|
||||
|
||||
def streaming_ready() -> bool | None:
|
||||
if _fetcher:
|
||||
return _fetcher.streaming_ready
|
||||
else:
|
||||
return None # no fetcher, return None to signify that
|
||||
|
||||
|
||||
async def wait_for_streaming_ready() -> bool:
|
||||
if _fetcher:
|
||||
await _fetcher.js_worker_ready_promise
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 12 KiB |
@@ -0,0 +1,200 @@
|
||||
#
|
||||
# The Python Imaging Library.
|
||||
#
|
||||
# MSP file handling
|
||||
#
|
||||
# This is the format used by the Paint program in Windows 1 and 2.
|
||||
#
|
||||
# History:
|
||||
# 95-09-05 fl Created
|
||||
# 97-01-03 fl Read/write MSP images
|
||||
# 17-02-21 es Fixed RLE interpretation
|
||||
#
|
||||
# Copyright (c) Secret Labs AB 1997.
|
||||
# Copyright (c) Fredrik Lundh 1995-97.
|
||||
# Copyright (c) Eric Soroos 2017.
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
# More info on this format: https://archive.org/details/gg243631
|
||||
# Page 313:
|
||||
# Figure 205. Windows Paint Version 1: "DanM" Format
|
||||
# Figure 206. Windows Paint Version 2: "LinS" Format. Used in Windows V2.03
|
||||
#
|
||||
# See also: https://www.fileformat.info/format/mspaint/egff.htm
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import struct
|
||||
from typing import IO
|
||||
|
||||
from . import Image, ImageFile
|
||||
from ._binary import i16le as i16
|
||||
from ._binary import o16le as o16
|
||||
|
||||
#
|
||||
# read MSP files
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool:
|
||||
return prefix[:4] in [b"DanM", b"LinS"]
|
||||
|
||||
|
||||
##
# Image plugin for Windows MSP images. This plugin supports both
# uncompressed (Windows 1.0) and RLE-compressed (Windows 2.0) images.
|
||||
|
||||
|
||||
class MspImageFile(ImageFile.ImageFile):
|
||||
format = "MSP"
|
||||
format_description = "Windows Paint"
|
||||
|
||||
def _open(self) -> None:
|
||||
# Header
|
||||
assert self.fp is not None
|
||||
|
||||
s = self.fp.read(32)
|
||||
if not _accept(s):
|
||||
msg = "not an MSP file"
|
||||
raise SyntaxError(msg)
|
||||
|
||||
# Header checksum
|
||||
checksum = 0
|
||||
for i in range(0, 32, 2):
|
||||
checksum = checksum ^ i16(s, i)
|
||||
if checksum != 0:
|
||||
msg = "bad MSP checksum"
|
||||
raise SyntaxError(msg)
|
||||
|
||||
self._mode = "1"
|
||||
self._size = i16(s, 4), i16(s, 6)
|
||||
|
||||
if s[:4] == b"DanM":
|
||||
self.tile = [("raw", (0, 0) + self.size, 32, ("1", 0, 1))]
|
||||
else:
|
||||
self.tile = [("MSP", (0, 0) + self.size, 32, None)]
|
||||
|
||||
|
||||
class MspDecoder(ImageFile.PyDecoder):
|
||||
# The algo for the MSP decoder is from
|
||||
# https://www.fileformat.info/format/mspaint/egff.htm
|
||||
# cc-by-attribution -- That page references is taken from the
|
||||
# Encyclopedia of Graphics File Formats and is licensed by
|
||||
# O'Reilly under the Creative Common/Attribution license
|
||||
#
|
||||
# For RLE encoded files, the 32byte header is followed by a scan
|
||||
# line map, encoded as one 16bit word of encoded byte length per
|
||||
# line.
|
||||
#
|
||||
# NOTE: the encoded length of the line can be 0. This was not
|
||||
# handled in the previous version of this encoder, and there's no
|
||||
# mention of how to handle it in the documentation. From the few
|
||||
# examples I've seen, I've assumed that it is a fill of the
|
||||
# background color, in this case, white.
|
||||
#
|
||||
#
|
||||
# Pseudocode of the decoder:
|
||||
# Read a BYTE value as the RunType
|
||||
# If the RunType value is zero
|
||||
# Read next byte as the RunCount
|
||||
# Read the next byte as the RunValue
|
||||
# Write the RunValue byte RunCount times
|
||||
# If the RunType value is non-zero
|
||||
# Use this value as the RunCount
|
||||
# Read and write the next RunCount bytes literally
|
||||
#
|
||||
# e.g.:
|
||||
# 0x00 03 ff 05 00 01 02 03 04
|
||||
# would yield the bytes:
|
||||
# 0xff ff ff 00 01 02 03 04
|
||||
#
|
||||
# which are then interpreted as a bit packed mode '1' image
|
||||
|
||||
_pulls_fd = True
|
||||
|
||||
def decode(self, buffer: bytes) -> tuple[int, int]:
|
||||
assert self.fd is not None
|
||||
|
||||
img = io.BytesIO()
|
||||
blank_line = bytearray((0xFF,) * ((self.state.xsize + 7) // 8))
|
||||
try:
|
||||
self.fd.seek(32)
|
||||
rowmap = struct.unpack_from(
|
||||
f"<{self.state.ysize}H", self.fd.read(self.state.ysize * 2)
|
||||
)
|
||||
except struct.error as e:
|
||||
msg = "Truncated MSP file in row map"
|
||||
raise OSError(msg) from e
|
||||
|
||||
for x, rowlen in enumerate(rowmap):
|
||||
try:
|
||||
if rowlen == 0:
|
||||
img.write(blank_line)
|
||||
continue
|
||||
row = self.fd.read(rowlen)
|
||||
if len(row) != rowlen:
|
||||
msg = f"Truncated MSP file, expected {rowlen} bytes on row {x}"
|
||||
raise OSError(msg)
|
||||
idx = 0
|
||||
while idx < rowlen:
|
||||
runtype = row[idx]
|
||||
idx += 1
|
||||
if runtype == 0:
|
||||
(runcount, runval) = struct.unpack_from("Bc", row, idx)
|
||||
img.write(runval * runcount)
|
||||
idx += 2
|
||||
else:
|
||||
runcount = runtype
|
||||
img.write(row[idx : idx + runcount])
|
||||
idx += runcount
|
||||
|
||||
except struct.error as e:
|
||||
msg = f"Corrupted MSP file in row {x}"
|
||||
raise OSError(msg) from e
|
||||
|
||||
self.set_as_raw(img.getvalue(), ("1", 0, 1))
|
||||
|
||||
return -1, 0
|
||||
|
||||
|
||||
Image.register_decoder("MSP", MspDecoder)
|
||||
|
||||
|
||||
#
|
||||
# write MSP files (uncompressed only)
|
||||
|
||||
|
||||
def _save(im: Image.Image, fp: IO[bytes], filename: str) -> None:
|
||||
if im.mode != "1":
|
||||
msg = f"cannot write mode {im.mode} as MSP"
|
||||
raise OSError(msg)
|
||||
|
||||
# create MSP header
|
||||
header = [0] * 16
|
||||
|
||||
header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1
|
||||
header[2], header[3] = im.size
|
||||
header[4], header[5] = 1, 1
|
||||
header[6], header[7] = 1, 1
|
||||
header[8], header[9] = im.size
|
||||
|
||||
checksum = 0
|
||||
for h in header:
|
||||
checksum = checksum ^ h
|
||||
header[12] = checksum # FIXME: is this the right field?
|
||||
|
||||
# header
|
||||
for h in header:
|
||||
fp.write(o16(h))
|
||||
|
||||
# image body
|
||||
ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 32, ("1", 0, 1))])
|
||||
|
||||
|
||||
#
|
||||
# registry
|
||||
|
||||
Image.register_open(MspImageFile.format, MspImageFile, _accept)
|
||||
Image.register_save(MspImageFile.format, _save)
|
||||
|
||||
Image.register_extension(MspImageFile.format, ".msp")
|
||||
Binary file not shown.
Reference in New Issue
Block a user