Update main.py
main.py CHANGED
@@ -13,7 +13,6 @@ import numpy as np
 
 processor = SegformerImageProcessor.from_pretrained("mattmdjaga/segformer_b2_clothes")
 model = TFSegformerForSemanticSegmentation.from_pretrained("mattmdjaga/segformer_b2_clothes")
-# d2c2605e-b4a5-4ab6-aa32-fcf81e6d27a2
 # 022ee431-4073-424c-8298-68cb75352785
 # e8346eb2-6187-4748-a42f-7241580ee1f1
 # ae337a18-427c-4035-a73b-a89b97a14b6c
@@ -92,7 +91,7 @@ def generator():
     payload["style_preset"] = style
     payload = json.dumps(payload)
     headers = {
-        'X-Prodia-Key': '
+        'X-Prodia-Key': 'd2c2605e-b4a5-4ab6-aa32-fcf81e6d27a2',
         'accept': 'application/json',
         'content-type': 'application/json'
     }
@@ -112,7 +111,7 @@ def image(jobId):
     url = "https://api.prodia.com/v1/job/"+ jobId.split(".")[0]
 
     headers = {
-        'X-Prodia-Key': '
+        'X-Prodia-Key': 'd2c2605e-b4a5-4ab6-aa32-fcf81e6d27a2',
         'accept': 'application/json'
     }
 
@@ -249,7 +248,7 @@ def nudify():
         "steps": 30
     })
     headers = {
-        'X-Prodia-Key': '
+        'X-Prodia-Key': 'd2c2605e-b4a5-4ab6-aa32-fcf81e6d27a2',
         'accept': 'application/json',
         'content-type': 'application/json'
     }
@@ -356,7 +355,7 @@ def clotheChanger():
         "steps": 30
     })
     headers = {
-        'X-Prodia-Key': '
+        'X-Prodia-Key': 'd2c2605e-b4a5-4ab6-aa32-fcf81e6d27a2',
         'accept': 'application/json',
         'content-type': 'application/json'
     }
@@ -381,7 +380,7 @@ def upscale():
     imgUrl = data["image"]
     url = "https://api.prodia.com/v1/upscale"
     headers = {
-        'X-Prodia-Key': '
+        'X-Prodia-Key': 'd2c2605e-b4a5-4ab6-aa32-fcf81e6d27a2',
         'accept': 'application/json',
         'content-type': 'application/json',
     }
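For reference, a minimal sketch of how the job endpoint touched in image(jobId) above is presumably queried, assuming the requests library, a GET request, and a placeholder key; the HTTP method and the response fields mentioned in the comments are assumptions, not shown in this diff.

import requests

PRODIA_KEY = "YOUR_PRODIA_KEY"  # placeholder; main.py hardcodes the key in each headers dict

def get_job(job_id: str) -> dict:
    # Mirrors image(jobId) above: strip any file extension, then hit the job endpoint.
    url = "https://api.prodia.com/v1/job/" + job_id.split(".")[0]
    headers = {
        "X-Prodia-Key": PRODIA_KEY,
        "accept": "application/json",
    }
    # Assumption: the job is fetched with GET; the diff only shows the URL and headers.
    resp = requests.get(url, headers=headers)
    resp.raise_for_status()
    return resp.json()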