Adding trackdays back to the bot
All checks were successful
Build and push / changes (push) Successful in 4s
Build and push / Lint-Python (push) Successful in 2s
Build and push / Build-and-Push-Docker (push) Successful in 59s
Build and push / post-status-to-discord (push) Successful in 1s
Build and push / sync-argocd-app (push) Successful in 2s
This commit is contained in:
parent 1e84321d90
commit 24fdbd2dd3
app/cogs/trackdays.py (executable file, 119 lines added)
@@ -0,0 +1,119 @@
from discord.ext import commands
import discord
import requests
from bs4 import BeautifulSoup


class TrackDays(commands.Cog):
    def __init__(self, bot):
        self.bot: commands.Bot = bot

    @staticmethod
    def get_tracks():
        """
        returns a dictionary of all tracks and their URLs
        """
        url = "https://www.trackpinata.com/"
        response = requests.get(url).text
        soup = BeautifulSoup(response, "html.parser")

        return {
            x.find("h2", class_="thumbnail-header").text: x["href"]
            for x in soup.find("div", class_="track-list w-dyn-items").find_all("a")
        }

    async def get_all_tracks(ctx: discord.AutocompleteContext):
        """
        returns a list of all the cali tracks for use in auto-complete
        """
        return TrackDays.get_tracks().keys()

    @commands.slash_command(
        guild_ids=None,
        name="trackdays",
        description="Look up upcoming trackdays",
    )
    async def trackdays_lookup(
        self,
        ctx: discord.ApplicationContext,
        track: discord.Option(
            str,
            autocomplete=discord.utils.basic_autocomplete(get_all_tracks),
            description="Track to look up days for",
        ),
    ):
        tracks = self.get_tracks()
        track_url = tracks.get(track)
        if not track_url:
            await ctx.respond(f"Track {track} not found")
            return

        base_url = "https://www.trackpinata.com"
        full_url = f"{base_url}{track_url}"

        await ctx.defer()

        data = {}
        response = requests.get(full_url).text
        soup = BeautifulSoup(response, "html.parser")

        months = soup.find_all("h4", class_="month")

        for month in months:
            month_name = month.text
            data[month_name] = {"events": []}

            events = month.find_next(
                "div", class_="collection-list-wrapper w-dyn-list"
            ).find_all("a", class_="list-item w-inline-block")

            for event in events:
                data[month_name]["events"].append(
                    {
                        "reg_url": base_url + event["href"],
                        "provider": event.find("div", class_="text-block").text,
                        "day": event.find("div", class_="text-block-5").text
                        + " "
                        + month_name
                        + " "
                        + event.find("div", class_="text-block-6").text,
                    }
                )

        # Create a month order mapping
        month_order = {
            "January": 1,
            "February": 2,
            "March": 3,
            "April": 4,
            "May": 5,
            "June": 6,
            "July": 7,
            "August": 8,
            "September": 9,
            "October": 10,
            "November": 11,
            "December": 12,
        }

        embed = discord.Embed(description="", color=discord.Color.blue(), type="rich")

        embed.set_author(name=track)
        embed.set_thumbnail(
            url="https://t4.ftcdn.net/jpg/02/80/57/05/360_F_280570531_y52gDRp2ce9YSno3tfuIqKoRcEbn5Eau.jpg"
        )

        for month in sorted(data.keys(), key=lambda x: month_order[x]):
            embed.add_field(
                name=f"🏁 {month} 🏁\n---------",
                value="\n".join(
                    f"[{event['provider']}: {event['day']}]({event['reg_url']})"
                    for event in data[month]["events"]
                ),
                inline=False,
            )
        await ctx.send_followup(embed=embed)


def setup(bot):
    bot.add_cog(TrackDays(bot))
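For reference, a minimal sketch of how this cog might be loaded by the bot, assuming py-cord (which the slash_command/Option usage suggests) and a hypothetical "app.cogs.trackdays" extension path; the bot entry point itself is not part of this commit:

import discord

bot = discord.Bot()
bot.load_extension("app.cogs.trackdays")  # calls setup(bot), which registers the TrackDays cog
bot.run("BOT_TOKEN")  # placeholder token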
scratchpad/get_trackdays.py (executable file, 63 lines added)
@@ -0,0 +1,63 @@
from bs4 import BeautifulSoup
import datetime
import pprint
import requests
from collections import OrderedDict


pp = pprint.PrettyPrinter(indent=2)

url = "https://www.trackpinata.com/"

data = {}

base_url = "https://www.trackpinata.com"

track_url = f"{base_url}/tracks/buttonwillow-circuit"
response = requests.get(track_url).text
soup = BeautifulSoup(response, "html.parser")

months = soup.find_all("h4", class_="month")

for month in months:
    month_name = month.text
    data[month_name] = {"events": []}

    events = month.find_next(
        "div", class_="collection-list-wrapper w-dyn-list"
    ).find_all("a", class_="list-item w-inline-block")
    for event in events:
        data[month_name]["events"].append(
            {
                "reg_url": base_url + event["href"],
                "provider": event.find("div", class_="text-block").text,
                "day": event.find("div", class_="text-block-5").text
                + " "
                + month_name
                + " "
                + event.find("div", class_="text-block-6").text,
            }
        )

# Create a month order mapping
month_order = {
    "January": 1,
    "February": 2,
    "March": 3,
    "April": 4,
    "May": 5,
    "June": 6,
    "July": 7,
    "August": 8,
    "September": 9,
    "October": 10,
    "November": 11,
    "December": 12,
}

print(soup.find("img", class_="track-map-full")["src"])

# for month in sorted(data.keys(), key=lambda x: month_order[x]):
#     print(f"{month}:")
#     pp.pprint(data[month])
#     print()
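As an aside, the hand-written month_order mapping used in both files could be derived from the standard library's calendar module instead; a small sketch using made-up event data (the dictionary below is illustrative, not scraped):

import calendar

data = {
    "March": {
        "events": [
            {
                "provider": "Example Provider",  # illustrative values only
                "day": "14 March Saturday",
                "reg_url": "https://www.trackpinata.com/events/example",
            }
        ]
    },
    "January": {"events": []},
}

# calendar.month_name maps indices 1-12 to month names, so a name's index is its calendar order
for month in sorted(data, key=list(calendar.month_name).index):
    print(f"{month}: {data[month]['events']}")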
@@ -1,46 +0,0 @@
from discord.ext import commands
import discord
import requests
from bs4 import BeautifulSoup


class TrackDays(commands.Cog):
    def __init__(self, bot):
        self.bot: commands.Bot = bot

    async def get_all_tracks(ctx: discord.AutocompleteContext):
        """
        returns a list of all the cali tracks for use in auto-complete
        """
        url = "https://www.trackpinata.com/"

        response = requests.get(url).text
        soup = BeautifulSoup(response, "html.parser")

        return [x.text for x in soup.find_all("h2", class_="thumbnail-header")]

    @commands.slash_command(
        guild_ids=None,
        name="trackdays",
        description="Look up upcoming trackdays",
    )
    async def trackdays_lookup(
        self,
        ctx: discord.ApplicationContext,
        track: discord.Option(
            str,
            autocomplete=discord.utils.basic_autocomplete(get_all_tracks),
            description="Track to look up days for",
        ),
    ):
        track = track.replace(" ", "-").lower()
        base_url = f"https://www.trackpinata.com/tracks/{track}"

        response = requests.get(base_url)
        soup = BeautifulSoup(response.text, "html.parser")

        soup.find_all("a", class_="list-item w-inline-block")


def setup(bot):
    bot.add_cog(TrackDays(bot))
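For comparison, the removed version above built the track URL by slugifying whatever the user typed, along these lines:

track = "Buttonwillow Circuit"
url = f"https://www.trackpinata.com/tracks/{track.replace(' ', '-').lower()}"
# -> https://www.trackpinata.com/tracks/buttonwillow-circuit

whereas the new cog resolves the chosen name through the href scraped by get_tracks(), so the lookup no longer assumes the display name matches the URL slug.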