Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
189 changes: 189 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,189 @@
# Created by https://www.toptal.com/developers/gitignore/api/python
# Edit at https://www.toptal.com/developers/gitignore?templates=python

### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
poetry.toml

# ruff
.ruff_cache/

# LSP config files
pyrightconfig.json

# End of https://www.toptal.com/developers/gitignore/api/python

# ignore generated output data
/data
/cache

# visual studio remnants
/.vscode

# mac remnants
.DS_Store

# build scripts
*.sh
16 changes: 9 additions & 7 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,22 +9,24 @@ My Twitter: https://twitter.com/MijagoCoding/

# How to Use?
3) Install all required packages
1) `python -m pip install pandas plotly pathos requests pretty_html_table bar-chart-race`
2) If you want to use mp4 instead of gif, also install `python -m pip install python-ffmpeg` and put a [ffmpeg](https://www.ffmpeg.org/download.html) in your PATH variable. Then set the `VIDEO_TYPE` in `main.py` to `mp4`.
1) `python3 -m pip install pandas plotly pathos requests pretty_html_table bar-chart-race tqdm`
2) If you want to use mp4 instead of gif, also install `python3 -m pip install python-ffmpeg` and put a [ffmpeg](https://www.ffmpeg.org/download.html) in your PATH variable. Then set the `VIDEO_TYPE` in `main.py` to `mp4`.

**I highly encourage you to do this, as the gifs tend to be 40mb in size, whereas the mp4 is only around 1.5mb~2mb**.

Download it here: [ffmpeg](https://github.com/BtbN/FFmpeg-Builds/releases) (for Windows, `ffmpeg-n5.0-latest-win64-gpl-5.0.zip`).
4) Add your api key to `main.py`. For this, edit `api = BungieApi("API-KEY")`. Get it [here](https://www.bungie.net/en/Application).
5) Edit your user info in `main.py`.
4) Set your API key as an environment variable `BUNGIE_API_KEY`. Get the key [here](https://www.bungie.net/en/Application).
1) Alternatively: Add your api key to `main.py`. For this, edit `# API_KEY = "123456789"`.
5) Edit your user info in `main.py`. Alternatively, you can also use command line parameters to set this later.
```py
MEMBERSHIP_MIJAGO = (3, 4611686018482684809)
MEMBERSHIP_MYCOOLID = (1, 1231231231231233353) # for example, add this
USED_MEMBERSHIP = MEMBERSHIP_MYCOOLID
api = BungieApi("API-KEY")
```
6) Run! `python3 main.py`
1) May take a while. I need 35~45 seconds for 1000 PGCRs with a download speed of 4.5mb/s.
6) Run the script `python3 main.py`.
1) Complete Example: `BUNGIE_API_KEY=123456789012345 python3 main.py -p 3 -id 4611686018482684809`
2) Alternatively you can also specify the platform and user: `python3 main.py -p 3 -id 4611686018482684809`
3) This may take a while. I need 35~45 seconds for 1000 PGCRs with a download speed of 4.5mb/s.

# Where do I get my user ID?
1) Go to https://www.d2checklist.com (or any other similar page)
Expand Down
28 changes: 14 additions & 14 deletions app/Director.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,27 +5,27 @@
class Director:
    """Resolves the on-disk layout used for a player's downloaded data.

    All artifacts for one player live under ``./data/<displayName>/``:
    raw PGCR files, the combined pgcr.json, rendered results, and the
    chart zip archive.
    """

    @staticmethod
    def ClearResultDirectory(displayName):
        """Remove the player's result directory and everything inside it."""
        shutil.rmtree(Director.GetResultDirectory(displayName))

    @staticmethod
    def GetZipPath(displayName):
        """Path of the zip archive bundling the player's generated charts."""
        return "./data/" + displayName + "/charts_" + displayName + ".zip"

    @staticmethod
    def GetResultDirectory(displayName):
        """Directory that receives the rendered charts/tables."""
        return "./data/" + displayName + "/result/"

    @staticmethod
    def GetPGCRDirectory(displayName):
        """Directory holding one JSON file per downloaded PGCR."""
        return "./data/" + displayName + "/pgcr/"

    @staticmethod
    def GetAllPgcrFilename(displayName):
        """File that combines every PGCR into a single JSON document."""
        return "./data/" + displayName + "/pgcr.json"

    @staticmethod
    def CreateDirectoriesForUser(displayName):
        """Ensure the result and PGCR directories exist for this player."""
        for directory in (Director.GetResultDirectory(displayName),
                          Director.GetPGCRDirectory(displayName)):
            Path(directory).mkdir(parents=True, exist_ok=True)
17 changes: 0 additions & 17 deletions app/DiscordSender.py

This file was deleted.

52 changes: 34 additions & 18 deletions app/PgcrCollector.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,20 +17,41 @@ def __init__(self, membershipType, membershipId, api: BungieApi, pool) -> None:
self.api = api
self.characters = None
self.activities = None
self.displayName = None

def getProfile(self):
    """Fetch the account profile and cache the player's display name.

    Sets ``self.displayName`` to ``Name[CODE]`` where CODE is the
    4-digit, zero-padded Bungie name code (the API returns it as an
    int, so e.g. code 42 must render as "0042", matching how Bungie
    displays names). Returns ``self`` for chaining.
    """
    print("> Get profile")
    account_profile = self.api.getProfile(self.membershipType, self.membershipId)
    userInfo = account_profile['profile']['data']['userInfo']
    bungieGlobalDisplayName = userInfo['bungieGlobalDisplayName']
    bungieGlobalDisplayNameCode = userInfo['bungieGlobalDisplayNameCode']
    # Zero-pad the numeric suffix to 4 digits (fixes e.g. "Mijago[42]").
    self.displayName = f'{bungieGlobalDisplayName}[{bungieGlobalDisplayNameCode:04}]'
    print(f"Found profile: {self.displayName}")
    return self

def getDisplayName(self):
    # Accessor for the cached display name; remains None until
    # getProfile() has been called.
    return self.displayName

def getCharacters(self):
    """Fetch all character IDs for the account and print a summary.

    Stores the IDs in ``self.characters`` (deleted characters included,
    since their PGCRs are still wanted). Prints each character's class
    name where resolvable; deleted characters have no class lookup.
    Returns ``self`` for chaining.
    """
    print("> Get Characters")
    account_stats = self.api.getAccountStats(self.membershipType, self.membershipId)
    allCharacters = account_stats['characters']
    self.characters = [c["characterId"] for c in allCharacters]
    print("Found characters: ", len(self.characters))
    for char in allCharacters:
        if char['deleted']:
            # Class info is unavailable for deleted characters.
            className = None
        else:
            className = self.api.getCharacterClass(self.membershipType, self.membershipId, char['characterId'])
        # PEP 8: compare to None with `is`, not `==`.
        print(f"{char['characterId']}{'' if className is None else ' | ' + className}")
    return self

def getActivities(self, limit=None):
print("> Get Activities")
assert self.characters is not None
assert len(self.characters) > 0

existingPgcrList = [f[5:-5] for f in os.listdir(Director.GetPGCRDirectory(self.membershipType, self.membershipId))]
existingPgcrList = [f[5:-5] for f in os.listdir(Director.GetPGCRDirectory(self.displayName))]

self.activities = []
for k, char_id in enumerate(self.characters):
Expand Down Expand Up @@ -66,7 +87,7 @@ def downloadActivityPage(page):

return self

def getPGCRs(self, pagesize=1000):
def getPGCRs(self):
bungo = self.api

def downloadPGCR(activity):
Expand All @@ -78,29 +99,21 @@ def downloadPGCR(activity):
tries += 1
pgcr = bungo.getPGCR(id)

with open("%s/pgcr_%s.json" % (Director.GetPGCRDirectory(self.membershipType, self.membershipId), pgcr["activityDetails"]["instanceId"]), "w") as f:
with open("%s/pgcr_%s.json" % (Director.GetPGCRDirectory(self.displayName), pgcr["activityDetails"]["instanceId"]), "w", encoding='utf-8') as f:
f.write(json.dumps(pgcr))

stepsize = pagesize
START_PAGE = 0

if len(self.activities) == 0:
print("No activities to grab")
return self

for steps in range(START_PAGE, (len(self.activities) + stepsize - 1) // stepsize):
try:
with Timer("Get PGCRs %d through %d" % (steps * stepsize + 1, min(len(self.activities), (steps + 1) * stepsize))):
# self.processPool.restart(True)
self.processPool.amap(downloadPGCR, self.activities[steps * stepsize:(steps + 1) * stepsize]).get()
except Exception as e:
print(e)
from tqdm.auto import tqdm
list(tqdm(self.processPool.imap(downloadPGCR, self.activities), total=len(self.activities), desc="Downloading PGCRs"))
return self

def combineAllPgcrs(self):
    """Merge every individual PGCR file into one combined pgcr.json.

    Loads all PGCRs via getAllPgcrs() and writes them as a single JSON
    array to Director.GetAllPgcrFilename(self.displayName). Returns
    ``self`` for chaining.
    """
    # Renamed from `all`, which shadowed the builtin of the same name.
    pgcrs = self.getAllPgcrs()
    with Timer("Write all PGCRs to one file"):
        with open(Director.GetAllPgcrFilename(self.displayName), "w", encoding='utf-8') as f:
            json.dump(pgcrs, f, ensure_ascii=False)
    return self

Expand All @@ -111,12 +124,15 @@ def loadJson(fnameList):
for fname in fnameList:
if fname is None:
continue
with open(fname, "r") as f:
r.append( json.load(f))
with open(fname, "r", encoding='utf-8') as f:
try:
r.append(json.load(f))
except Exception:
print('Error on %s' % fname)
return r

with Timer("Get all PGCRs from individual files"):
root = Director.GetPGCRDirectory(self.membershipType, self.membershipId)
root = Director.GetPGCRDirectory(self.displayName)
fileList = ["%s/%s" % (root, f) for f in os.listdir(root)]
chunks = list(zip_longest(*[iter(fileList)] * 100, fillvalue=None))
pgcrs = self.processPool.amap(loadJson, chunks).get()
Expand Down
Loading