Skip to content

Commit

Permalink
Add coolstuffinc retail provider
Browse files Browse the repository at this point in the history
  • Loading branch information
kodawah committed Jul 14, 2024
1 parent 69c1494 commit 40e34f3
Show file tree
Hide file tree
Showing 2 changed files with 73 additions and 0 deletions.
1 change: 1 addition & 0 deletions scripts/check_product_fuzzy.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
"tcgplayerProductId": "tcgplayer",
"miniaturemarketId": "miniaturemarket",
"scgId": "starcitygames",
"csiId": "coolstuffinc",
}

review_products = []
Expand Down
72 changes: 72 additions & 0 deletions scripts/load_new_products.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@
import base64
import zlib

from bs4 import BeautifulSoup
from urllib.parse import urlparse

def get_cardKingdom():
sealed_url = "https://api.cardkingdom.com/api/sealed_pricelist"
Expand Down Expand Up @@ -497,6 +499,72 @@ def load_starcity_buylist(secret):
return sealed_data


def load_coolstuffinc_retail(secret):
    """Scrape the CoolStuffInc sealed-product search listing.

    Walks the paginated search results at coolstuffinc.com page by page,
    collecting the product title and the site product id (the URL path
    segment after "/p/") for every item whose title does not match one of
    the ``skip_tags`` filters.

    Args:
        secret: Unused; accepted for signature compatibility with the other
            provider ``load_func`` entries dispatched via ``providers_dict``.

    Returns:
        list[dict]: one ``{"name": <title>, "id": <csiId>}`` per kept product.
    """
    sealed_data = []
    # Title substrings (case-insensitive) identifying non-sealed or
    # unwanted listings that should be excluded from the results.
    skip_tags = [
        "Basic Land",
        "Card Box",
        "Complete Set (Mint/Near Mint Condition)",
        "Complete Set (Partially Sealed)",
        "CoolStuffInc.com",
        "D6 Dice",
        "Enamel Pin",
        "Factory Sealed Complete Set",
        "Grab Bag",
        "Japanese Booster",
        "Magic Rares",
        "Magic: The Gathering - New Player Deck",
        "Player's Guide",
        "Random Foil",
        "Russian Booster",
        "Set of 5 Dice",
        "Spindown Life",
        "Spinning Life Counter",
        "Token Pack",
        "Token Set",
        "Variety Pack",
    ]
    # Lowercase once up front instead of re-lowercasing every tag for
    # every product row in the loop below.
    skip_tags_lower = [tag.lower() for tag in skip_tags]

    page = 0
    while True:
        page += 1
        link = "https://www.coolstuffinc.com/sq/1556988?page=" + str(page)
        print(f"Parsing page {page}")

        header = {
            "User-Agent": "curl/8.6",
        }
        # timeout keeps a stalled connection from hanging the while-True
        # loop forever; any network error propagates to the caller.
        r = requests.get(link, headers=header, timeout=30)
        soup = BeautifulSoup(r.content, 'html.parser')

        for div in soup.find_all('div', attrs={"class": "row product-search-row main-container"}):
            try:
                title = div.find('span', attrs={"itemprop": "name"}).get_text()
                productURL = div.find('link', attrs={"itemprop": "url"}).get("content")
            except Exception:
                # Malformed product row (missing name/url nodes): skip it.
                continue

            title_lower = title.lower()
            if any(tag in title_lower for tag in skip_tags_lower):
                continue

            # Product id is the URL path after the "/p/" prefix.
            u = urlparse(productURL)
            csiId = u.path.removeprefix("/p/")

            sealed_data.append({
                "name": title,
                "id": csiId,
            })

        # Exit loop condition, only when the Next field has no future links
        nextPage = soup.find('span', attrs={"id": "nextLink"})
        if not nextPage or not nextPage.find('a'):
            break

    return sealed_data


providers_dict = {
"cardKingdom": {
"identifier": "cardKingdomId",
Expand Down Expand Up @@ -526,6 +594,10 @@ def load_starcity_buylist(secret):
"load_func": load_starcity,
"auth": ["scg_guid", "scg_bearer"],
},
"coolstuffinc": {
"identifier": "csiId",
"load_func": load_coolstuffinc_retail,
},
}


Expand Down

0 comments on commit 40e34f3

Please sign in to comment.