Remove 3.10 code for 3.8 code

Silversith 2023-04-01 22:01:08 +02:00
parent 14d9b11a35
commit 8b7505d5bb
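For context: `match`/`case` (structural pattern matching, PEP 634) only parses on Python 3.10 and newer, so a module that contains it fails at import time on 3.8 even if the block is never executed. This commit mechanically rewrites every `match` block in the file into an equivalent `if`/`elif` chain. Below is a minimal, self-contained sketch of that transformation; the `describe_*` helpers and the mode strings are made up for illustration and are not code from this file.

# Hypothetical helpers illustrating the backport pattern used by this commit.

# Python 3.10+ only: the mere presence of `match` is a SyntaxError on 3.8,
# so the whole module refuses to import.
def describe_mode_310(mode: str) -> str:
    match mode:
        case "normal":
            return "no filter"
        case "laplacian":
            return "edge detection"
        case _:
            return "unknown"

# Python 3.8-compatible equivalent; this is the shape applied throughout the diff.
def describe_mode_38(mode: str) -> str:
    if mode == "normal":
        return "no filter"
    elif mode == "laplacian":
        return "edge detection"
    else:
        return "unknown"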


@@ -12,8 +12,8 @@
 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 # THE SOFTWARE.
+import image as image
+import pilgram
 from PIL import Image, ImageFilter, ImageEnhance, ImageOps, ImageDraw, ImageChops, ImageFont
 from PIL.PngImagePlugin import PngInfo
 from io import BytesIO
@@ -698,9 +698,6 @@ class WAS_Image_Style_Filter:
             subprocess.check_call(
                 [sys.executable, '-m', 'pip', '-q', 'install', 'pilgram'])
-        # Import Pilgram module
-        import pilgram
 
         # Convert image to PIL
         image = tensor2pil(image)
@@ -708,68 +705,66 @@ class WAS_Image_Style_Filter:
         WFilter = WAS_Filter_Class()
 
         # Apply blending
-        match style:
-            case "1977":
-                out_image = pilgram._1977(image)
-            case "aden":
-                out_image = pilgram.aden(image)
-            case "brannan":
-                out_image = pilgram.brannan(image)
-            case "brooklyn":
-                out_image = pilgram.brooklyn(image)
-            case "clarendon":
-                out_image = pilgram.clarendon(image)
-            case "earlybird":
-                out_image = pilgram.earlybird(image)
-            case "fairy tale":
-                out_image = WFilter.sparkle(image)
-            case "gingham":
-                out_image = pilgram.gingham(image)
-            case "hudson":
-                out_image = pilgram.hudson(image)
-            case "inkwell":
-                out_image = pilgram.inkwell(image)
-            case "kelvin":
-                out_image = pilgram.kelvin(image)
-            case "lark":
-                out_image = pilgram.lark(image)
-            case "lofi":
-                out_image = pilgram.lofi(image)
-            case "maven":
-                out_image = pilgram.maven(image)
-            case "mayfair":
-                out_image = pilgram.mayfair(image)
-            case "moon":
-                out_image = pilgram.moon(image)
-            case "nashville":
-                out_image = pilgram.nashville(image)
-            case "perpetua":
-                out_image = pilgram.perpetua(image)
-            case "reyes":
-                out_image = pilgram.reyes(image)
-            case "rise":
-                out_image = pilgram.rise(image)
-            case "slumber":
-                out_image = pilgram.slumber(image)
-            case "stinson":
-                out_image = pilgram.stinson(image)
-            case "toaster":
-                out_image = pilgram.toaster(image)
-            case "valencia":
-                out_image = pilgram.valencia(image)
-            case "walden":
-                out_image = pilgram.walden(image)
-            case "willow":
-                out_image = pilgram.willow(image)
-            case "xpro2":
-                out_image = pilgram.xpro2(image)
-            case _:
-                out_image = image
+        if style == "1977":
+            out_image = pilgram._1977(image)
+        elif style == "aden":
+            out_image = pilgram.aden(image)
+        elif style == "brannan":
+            out_image = pilgram.brannan(image)
+        elif style == "brooklyn":
+            out_image = pilgram.brooklyn(image)
+        elif style == "clarendon":
+            out_image = pilgram.clarendon(image)
+        elif style == "earlybird":
+            out_image = pilgram.earlybird(image)
+        elif style == "fairy tale":
+            out_image = WFilter.sparkle(image)
+        elif style == "gingham":
+            out_image = pilgram.gingham(image)
+        elif style == "hudson":
+            out_image = pilgram.hudson(image)
+        elif style == "inkwell":
+            out_image = pilgram.inkwell(image)
+        elif style == "kelvin":
+            out_image = pilgram.kelvin(image)
+        elif style == "lark":
+            out_image = pilgram.lark(image)
+        elif style == "lofi":
+            out_image = pilgram.lofi(image)
+        elif style == "maven":
+            out_image = pilgram.maven(image)
+        elif style == "mayfair":
+            out_image = pilgram.mayfair(image)
+        elif style == "moon":
+            out_image = pilgram.moon(image)
+        elif style == "nashville":
+            out_image = pilgram.nashville(image)
+        elif style == "perpetua":
+            out_image = pilgram.perpetua(image)
+        elif style == "reyes":
+            out_image = pilgram.reyes(image)
+        elif style == "rise":
+            out_image = pilgram.rise(image)
+        elif style == "slumber":
+            out_image = pilgram.slumber(image)
+        elif style == "stinson":
+            out_image = pilgram.stinson(image)
+        elif style == "toaster":
+            out_image = pilgram.toaster(image)
+        elif style == "valencia":
+            out_image = pilgram.valencia(image)
+        elif style == "walden":
+            out_image = pilgram.walden(image)
+        elif style == "willow":
+            out_image = pilgram.willow(image)
+        elif style == "xpro2":
+            out_image = pilgram.xpro2(image)
+        else:
+            out_image = image
 
         out_image = out_image.convert("RGB")
 
-        return (torch.from_numpy(np.array(out_image).astype(np.float32) / 255.0).unsqueeze(0), )
+        return (torch.from_numpy(np.array(out_image).astype(np.float32) / 255.0).unsqueeze(0),)
 
 # COMBINE NODE
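A note on the shape of this backport: every branch above only maps a style name to a one-argument call, so the same Python 3.8-compatible behaviour could also be written as a lookup table. The snippet below is a sketch of that alternative, not what the commit does; it assumes `pilgram` is imported and that `WFilter` and `image` are in scope exactly as in the method above, and most entries are omitted for brevity.

# Hypothetical alternative to the if/elif chain above.
STYLE_FUNCS = {
    "1977": pilgram._1977,
    "aden": pilgram.aden,
    "fairy tale": WFilter.sparkle,  # the one non-pilgram branch
    # ... remaining style names would follow the same pattern
}

# Unknown styles fall back to the untouched image, mirroring the `else` branch.
out_image = STYLE_FUNCS.get(style, lambda img: img)(image)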
@@ -825,37 +820,36 @@ class WAS_Image_Blending_Mode:
         img_b = tensor2pil(image_b)
 
         # Apply blending
-        match mode:
-            case "color":
-                out_image = pilgram.css.blending.color(img_a, img_b)
-            case "color_burn":
-                out_image = pilgram.css.blending.color_burn(img_a, img_b)
-            case "color_dodge":
-                out_image = pilgram.css.blending.color_dodge(img_a, img_b)
-            case "darken":
-                out_image = pilgram.css.blending.darken(img_a, img_b)
-            case "difference":
-                out_image = pilgram.css.blending.difference(img_a, img_b)
-            case "exclusion":
-                out_image = pilgram.css.blending.exclusion(img_a, img_b)
-            case "hard_light":
-                out_image = pilgram.css.blending.hard_light(img_a, img_b)
-            case "hue":
-                out_image = pilgram.css.blending.hue(img_a, img_b)
-            case "lighten":
-                out_image = pilgram.css.blending.lighten(img_a, img_b)
-            case "multiply":
-                out_image = pilgram.css.blending.multiply(img_a, img_b)
-            case "add":
-                out_image = pilgram.css.blending.normal(img_a, img_b)
-            case "overlay":
-                out_image = pilgram.css.blending.overlay(img_a, img_b)
-            case "screen":
-                out_image = pilgram.css.blending.screen(img_a, img_b)
-            case "soft_light":
-                out_image = pilgram.css.blending.soft_light(img_a, img_b)
-            case _:
-                out_image = img_a
+        if mode == "color":
+            out_image = pilgram.css.blending.color(img_a, img_b)
+        elif mode == "color_burn":
+            out_image = pilgram.css.blending.color_burn(img_a, img_b)
+        elif mode == "color_dodge":
+            out_image = pilgram.css.blending.color_dodge(img_a, img_b)
+        elif mode == "darken":
+            out_image = pilgram.css.blending.darken(img_a, img_b)
+        elif mode == "difference":
+            out_image = pilgram.css.blending.difference(img_a, img_b)
+        elif mode == "exclusion":
+            out_image = pilgram.css.blending.exclusion(img_a, img_b)
+        elif mode == "hard_light":
+            out_image = pilgram.css.blending.hard_light(img_a, img_b)
+        elif mode == "hue":
+            out_image = pilgram.css.blending.hue(img_a, img_b)
+        elif mode == "lighten":
+            out_image = pilgram.css.blending.lighten(img_a, img_b)
+        elif mode == "multiply":
+            out_image = pilgram.css.blending.multiply(img_a, img_b)
+        elif mode == "add":
+            out_image = pilgram.css.blending.normal(img_a, img_b)
+        elif mode == "overlay":
+            out_image = pilgram.css.blending.overlay(img_a, img_b)
+        elif mode == "screen":
+            out_image = pilgram.css.blending.screen(img_a, img_b)
+        elif mode == "soft_light":
+            out_image = pilgram.css.blending.soft_light(img_a, img_b)
+        else:
+            out_image = img_a
 
         out_image = out_image.convert("RGB")
@@ -938,13 +932,12 @@ class WAS_Image_Monitor_Distortion_Filter:
         WFilter = WAS_Filter_Class()
 
         # Apply image effect
-        match mode:
-            case 'Digital Distortion':
-                image = WFilter.digital_distortion(image, amplitude, offset)
-            case 'Signal Distortion':
-                image = WFilter.signal_distortion(image, amplitude)
-            case 'TV Distortion':
-                image = WFilter.tv_vhs_distortion(image, amplitude)
+        if mode == 'Digital Distortion':
+            image = WFilter.digital_distortion(image, amplitude, offset)
+        elif mode == 'Signal Distortion':
+            image = WFilter.signal_distortion(image, amplitude)
+        elif mode == 'TV Distortion':
+            image = WFilter.tv_vhs_distortion(image, amplitude)
 
         return (pil2tensor(image), )
@@ -1021,13 +1014,12 @@ class WAS_Image_Analyze:
         WFilter = WAS_Filter_Class()
 
         # Analye Image
-        match mode:
-            case 'Black White Levels':
-                image = WFilter.black_white_levels(image)
-            case 'RGB Levels':
-                image = WFilter.channel_frequency(image)
+        if mode == 'Black White Levels':
+            image = WFilter.black_white_levels(image)
+        elif mode == 'RGB Levels':
+            image = WFilter.channel_frequency(image)
 
         return (pil2tensor(image), )
 
 # IMAGE GENERATE GRADIENT
@@ -1995,13 +1987,14 @@ class WAS_Image_Rotate:
         rotation = int((rotation//90)*90)
 
         # Set Sampler
-        match sampler:
-            case 'nearest':
-                sampler = Image.NEAREST
-            case 'bicubic':
-                sampler = Image.BICUBIC
-            case 'bilinear':
-                sampler = Image.BILINEAR
+        if sampler == 'nearest':
+            sampler = Image.NEAREST
+        elif sampler == 'bicubic':
+            sampler = Image.BICUBIC
+        elif sampler == 'bilinear':
+            sampler = Image.BILINEAR
+        else:
+            sampler = Image.NEAREST  # default to nearest if none of the above
 
         # Rotate Image
         if mode == 'internal':
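For reference, the constants chosen above (`Image.NEAREST`, `Image.BICUBIC`, `Image.BILINEAR`) are PIL resampling filters that are passed on to `Image.rotate`. A minimal, self-contained sketch of that call, with a made-up file name that is not part of this repository:

from PIL import Image

img = Image.open("example.png")  # hypothetical input image
# Rotate by a multiple of 90 degrees, as the node above enforces; `expand=True`
# grows the canvas so nothing is cropped.
rotated = img.rotate(90, resample=Image.NEAREST, expand=True)
rotated.save("example_rotated.png")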
@@ -2244,14 +2237,13 @@ class WAS_Image_Edge:
         image = tensor2pil(image)
 
         # Detect edges
-        match mode:
-            case "normal":
-                image = image.filter(ImageFilter.FIND_EDGES)
-            case "laplacian":
-                image = image.filter(ImageFilter.Kernel((3, 3), (-1, -1, -1, -1, 8,
-                                                                 -1, -1, -1, -1), 1, 0))
-            case _:
-                image = image
+        if mode == "normal":
+            image = image.filter(ImageFilter.FIND_EDGES)
+        elif mode == "laplacian":
+            kernel = (-1, -1, -1, -1, 8, -1, -1, -1, -1)
+            image = image.filter(ImageFilter.Kernel((3, 3), kernel, scale=1, offset=0))
+        else:
+            image = image
 
         return (torch.from_numpy(np.array(image).astype(np.float32) / 255.0).unsqueeze(0), )
@@ -3645,15 +3637,14 @@ class WAS_Random_Number:
         random.seed(seed)
 
         # Return random number
-        match number_type:
-            case 'integer':
-                number = random.randint(minimum, maximum)
-            case 'float':
-                number = random.uniform(minimum, maximum)
-            case 'bool':
-                number = random.random()
-            case _:
-                return
+        if number_type == 'integer':
+            number = random.randint(minimum, maximum)
+        elif number_type == 'float':
+            number = random.uniform(minimum, maximum)
+        elif number_type == 'bool':
+            number = random.random()
+        else:
+            return
 
         # Return number
         return (number, )
@@ -3682,13 +3673,14 @@ class WAS_Constant_Number:
     def return_constant_number(self, number_type, number):
 
         # Return number
-        match number_type:
-            case 'integer':
-                return (int(number), )
-            case 'integer':
-                return (float(number), )
-            case 'bool':
-                return ((1 if int(number) > 0 else 0), )
+        if number_type == 'integer':
+            return (int(number), )
+        elif number_type == 'float':
+            return (float(number), )
+        elif number_type == 'bool':
+            return ((1 if int(number) > 0 else 0), )
+        else:
+            return
 
 # NUMBER TO SEED
@@ -3898,34 +3890,32 @@ class WAS_Number_Operation:
     def math_operations(self, number_a, number_b, operation="addition"):
 
-        # Return random number
-        match operation:
-            case 'addition':
-                return ((number_a + number_b), )
-            case 'subtraction':
-                return ((number_a - number_b), )
-            case 'division':
-                return ((number_a / number_b), )
-            case 'floor division':
-                return ((number_a // number_b), )
-            case 'multiplication':
-                return ((number_a * number_b), )
-            case 'exponentiation':
-                return ((number_a ** number_b), )
-            case 'modulus':
-                return ((number_a % number_b), )
-            case 'greater-than':
-                return (+(number_a > number_b), )
-            case 'greater-than or equals':
-                return (+(number_a >= number_b), )
-            case 'less-than':
-                return (+(number_a < number_b), )
-            case 'less-than or equals':
-                return (+(number_a <= number_b), )
-            case 'equals':
-                return (+(number_a == number_b), )
-            case 'does not equal':
-                return (+(number_a != number_b), )
+        if operation == 'addition':
+            return ((number_a + number_b),)
+        elif operation == 'subtraction':
+            return ((number_a - number_b),)
+        elif operation == 'division':
+            return ((number_a / number_b),)
+        elif operation == 'floor division':
+            return ((number_a // number_b),)
+        elif operation == 'multiplication':
+            return ((number_a * number_b),)
+        elif operation == 'exponentiation':
+            return ((number_a ** number_b),)
+        elif operation == 'modulus':
+            return ((number_a % number_b),)
+        elif operation == 'greater-than':
+            return (+(number_a > number_b),)
+        elif operation == 'greater-than or equals':
+            return (+(number_a >= number_b),)
+        elif operation == 'less-than':
+            return (+(number_a < number_b),)
+        elif operation == 'less-than or equals':
+            return (+(number_a <= number_b),)
+        elif operation == 'equals':
+            return (+(number_a == number_b),)
+        elif operation == 'does not equal':
+            return (+(number_a != number_b),)
 
 #! MISC
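The same backport trade-off applies to the arithmetic node in the hunk above. For comparison only (this is not what the commit does), the dispatch could also be expressed with the standard-library operator module, which is available on Python 3.8; the table mirrors the operation labels used by the node, and the function below is a sketch rather than the file's actual method.

import operator

# Maps the node's operation labels to two-argument callables.
OPS = {
    'addition': operator.add,
    'subtraction': operator.sub,
    'division': operator.truediv,
    'floor division': operator.floordiv,
    'multiplication': operator.mul,
    'exponentiation': operator.pow,
    'modulus': operator.mod,
    'greater-than': operator.gt,
    'greater-than or equals': operator.ge,
    'less-than': operator.lt,
    'less-than or equals': operator.le,
    'equals': operator.eq,
    'does not equal': operator.ne,
}

def math_operations(number_a, number_b, operation="addition"):
    func = OPS.get(operation)
    if func is None:
        return None  # unknown operation, mirroring the fall-through above
    result = func(number_a, number_b)
    # Comparisons yield bools; return 0/1 like the node's `+(a > b)` idiom.
    return (int(result) if isinstance(result, bool) else result, )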