Github Projects (#1872)

* github projects

* add project prefixes
This commit is contained in:
Pheenoh 2023-08-09 16:27:37 -06:00 committed by GitHub
parent bb546a286f
commit 244892aeb8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
22 changed files with 15422 additions and 64 deletions

26
.github/workflows/assign.yml vendored Normal file
View File

@ -0,0 +1,26 @@
# Workflow: self-assign an issue by commenting "/assign" on it.
# Fires on every new issue comment; only acts when the body is exactly "/assign".
name: Assign issue to commenter
on:
  issue_comment:
    types: [created]
jobs:
  assign_to_commenter:
    permissions:
      issues: write  # GITHUB_TOKEN needs write access to add assignees
    runs-on: ubuntu-latest
    steps:
      - name: Assign issue to commenter
        # NOTE(review): actions/github-script@v3 is dated; newer majors (v6+)
        # expose the API as github.rest.issues.addAssignees -- confirm before upgrading.
        uses: actions/github-script@v3
        with:
          github-token: ${{secrets.GITHUB_TOKEN}}
          script: |
            const issueComment = context.payload.comment;
            const issue = context.payload.issue;
            // Only self-assign when the comment is exactly "/assign".
            if (issueComment.body === "/assign") {
              github.issues.addAssignees({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: issue.number,
                assignees: [issueComment.user.login]
              });
            }

View File

@ -32,3 +32,33 @@ jobs:
run: | run: |
./tp progress -f JSON > progress-${{ github.run_id }}.json ./tp progress -f JSON > progress-${{ github.run_id }}.json
./tp upload-progress progress-${{ github.run_id }}.json -b https://progress.deco.mp/ -p twilightprincess -v gcn_usa ./tp upload-progress progress-${{ github.run_id }}.json -b https://progress.deco.mp/ -p twilightprincess -v gcn_usa
- name: Get changed .c, .cpp, .inc files
if: github.event_name != 'pull_request'
id: changed-files-specific
uses: tj-actions/changed-files@v36
with:
files: |
**/*.{c,cpp,inc}
- name: Update Status
if: github.event_name != 'pull_request' && steps.changed-files-specific.outputs.any_changed == 'true'
run: |
# Install libclang-16-dev for FunctionChecker
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
sudo add-apt-repository "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-16 main"
sudo apt update
sudo apt install -y libclang-16-dev
FILENAMES="${{ steps.changed-files-specific.outputs.all_changed_files }}"
CMD="./tp github-check-update-status --personal-access-token ${{ secrets.PAT_TOKEN }} --debug"
# If .inc files are found, assume it's for d_a_alink.cpp for now.
# We can come up something better here later if needed.
if echo "$FILENAMES" | grep -q ".inc"; then
CMD="$CMD --filename src/d/a/d_a_alink.cpp"
else
for FILE in $FILENAMES; do
CMD="$CMD --filename $FILE"
done
fi
$CMD

View File

@ -1,3 +1,9 @@
/**
* @file d_a_e_yk.h
* This header contains information about the Shadow Keese actor.
*
*/
#ifndef D_A_E_YK_H #ifndef D_A_E_YK_H
#define D_A_E_YK_H #define D_A_E_YK_H
@ -8,20 +14,28 @@
#include "d/cc/d_cc_uty.h" #include "d/cc/d_cc_uty.h"
#include "d/d_path.h" #include "d/d_path.h"
/**
* \enum daE_YK_Action
* \brief Shadow Keese current action.
*
*/
enum daE_YK_Action { enum daE_YK_Action {
ACT_ROOF, // keese is on roof ACT_ROOF, /**< Keese is on roof. */
ACT_FIGHT_FLY, // keese is transitioning from flying to aggro ACT_FIGHT_FLY, /**< Keese is transitioning from flying to aggro. */
ACT_FIGHT, // keese is aggroed ACT_FIGHT, /**< Keese is aggroed. */
ACT_ATTACK, // keese is attacking ACT_ATTACK, /**< Keese is attacking. */
ACT_RETURN, // keese is returning to roof ACT_RETURN, /**< Keese is returning to roof. */
ACT_FLY, // keese is flying around ACT_FLY, /**< Keese is flying around. */
ACT_PATH_FLY, // Unconfirmed: keese is flying along its path? ACT_PATH_FLY, /**< Unconfirmed: Keese is flying along its path. */
ACT_CHANCE = 10, // keese is stunned/fell down ACT_CHANCE = 10, /**< Keese is stunned/fell down. */
ACT_WOLFBITE = 13, // keese is being bitten by wolf ACT_WOLFBITE = 13, /**< Keese is being bitten by wolf. */
ACT_WIND // keese is in the gale boomerang ACT_WIND /**< Keese is in the gale boomerang. */
}; };
// tmp struct to get HIO to match /**
* \brief Temporary HIO struct to get a match. Remove later.
*
*/
struct daE_YK_HIO_c_tmp { struct daE_YK_HIO_c_tmp {
void* vtable; void* vtable;
/* 0x04 */ s8 field_0x04; /* 0x04 */ s8 field_0x04;
@ -32,6 +46,11 @@ struct daE_YK_HIO_c_tmp {
/* 0x18 */ f32 field_0x18; /* 0x18 */ f32 field_0x18;
}; };
/**
* \brief Shadow Keese Host Input Output class
*
*/
class daE_YK_HIO_c { class daE_YK_HIO_c {
public: public:
/* 8080482C */ daE_YK_HIO_c(); /* 8080482C */ daE_YK_HIO_c();
@ -46,52 +65,55 @@ public:
/* 0x18 */ f32 field_0x18; /* 0x18 */ f32 field_0x18;
}; };
/**
* \brief Shadow Keese actor class
*
*/
class e_yk_class : public fopEn_enemy_c { class e_yk_class : public fopEn_enemy_c {
public: public:
/* 0x5AC */ request_of_phase_process_class mPhase; /* 0x5AC */ request_of_phase_process_class mPhase; ///< @brief Actor phase process class.
/* 0x5B4 */ u8 mParam1; // paramter - used to determine default action? /* 0x5B4 */ u8 mParam1; ///< @brief Actor parameter. Used to determine default action?
/* 0x5B5 */ u8 mPlayerTriggerBase; // parameter - base value used to determine mPlayerTrigger below /* 0x5B5 */ u8 mPlayerTriggerBase; ///< @brief Actor parameter. Used to determine mPlayerTrigger value. @see mPlayerTrigeer
/* 0x5B6 */ u8 field_0x5b6; /* 0x5B6 */ u8 field_0x5b6; ///< @brief ???
/* 0x5B7 */ u8 mPathIdx; // parameter - used to lookup/set mpPath based on the room /* 0x5B7 */ u8 mPathIdx; ///< @brief Path index. Used to lookup/set mpPath based on the room.
/* 0x5B8 */ u8 field_0x5b8; /* 0x5B8 */ u8 field_0x5b8; ///< @brief ???
/* 0x5B9 */ s8 mPathPntIdx; // tracks the index of the points along the keese's path /* 0x5B9 */ s8 mPathPntIdx; ///< @brief Path point index. Tracks the index of the points along the actor's path.
/* 0x5BA */ s8 field_0x5ba; /* 0x5BA */ s8 field_0x5ba; ///< @brief ???
/* 0x5BB */ u8 field_0x5bb; /* 0x5BB */ u8 field_0x5bb; ///< @brief ???
/* 0x5BC */ dPath* mpPath; // flight path for keese to follow? /* 0x5BC */ dPath* mpPath; ///< @brief Flight path. Flight path for the actor to follow?
/* 0x5C0 */ mDoExt_McaMorfSO* mpMorfSO; // last res loaded /* 0x5C0 */ mDoExt_McaMorfSO* mpMorfSO; ///< @brief MorfSO pointer. Tracks the last resource loaded.
/* 0x5C4 */ Z2CreatureEnemy mCreature; // used for playing keese noises /* 0x5C4 */ Z2CreatureEnemy mCreature; ///< @brief Z2 Audio class. Used for playing actor noises.
/* 0x668 */ int mResIdx; // index number of last res loaded /* 0x668 */ int mResIdx; ///< @brief Resource index. Tracks the index of the last resource loaded.
/* 0x66C */ s16 field_0x66c; /* 0x66C */ s16 field_0x66c; ///< @brief ???
/* 0x66E */ s16 mAction; // current action (see daE_YK_Action above) /* 0x66E */ s16 mAction; ///< @brief Current action. Tracks the current action of the actor. @see daE_YK_Action
/* 0x670 */ s16 mActionPhase; // current phase of current action /* 0x670 */ s16 mActionPhase; ///< @brief Current action phase. Tracks the phase of the current action of the actor. @see daE_YK_Action
/* 0x674 */ cXyz mPathPntPos; // tracks the position of the point the keese is along a path /* 0x674 */ cXyz mPathPntPos; ///< @brief Path point position. Tracks the position of the point the actor is along a path.
/* 0x680 */ s16 mAngleFromPlayer; // current keese angle from player /* 0x680 */ s16 mAngleFromPlayer; ///< @brief Angle from player. Tracks the current actor angle from player.
/* 0x684 */ f32 mDistanceXZFromPlayer; // current keese distance from player /* 0x684 */ f32 mDistanceXZFromPlayer; ///< @brief Distance from player. Tracks the current distance the actor is from the player.
/* 0x688 */ f32 mPlayerTrigger; // how close to the keese the player has to be before aggro /* 0x688 */ f32 mPlayerTrigger; ///< @brief Trigger distance from player. Tracks how close to the actor the player has to be before it will begin to attack the player.
/* 0x68C */ f32 field_0x68c; /* 0x68C */ f32 field_0x68c; ///< @brief ???
/* 0x690 */ f32 field_0x690; /* 0x690 */ f32 field_0x690; ///< @brief ???
/* 0x694 */ f32 field_0x694; /* 0x694 */ f32 field_0x694; ///< @brief ???
/* 0x698 */ s16 field_0x698; /* 0x698 */ s16 field_0x698; ///< @brief ???
/* 0x69A */ csXyz field_0x69a; /* 0x69A */ csXyz field_0x69a; ///< @brief ???
/* 0x6A0 */ s8 field_0x6a0; /* 0x6A0 */ s8 field_0x6a0; ///< @brief ???
/* 0x6A1 */ u8 field_0x6a1; /* 0x6A1 */ u8 field_0x6a1; ///< @brief ???
/* 0x6A2 */ s16 field_0x6a2[4]; /* 0x6A2 */ s16 field_0x6a2[4]; ///< @brief ???
/* 0x6AA */ s16 field_0x6aa; /* 0x6AA */ s16 field_0x6aa; ///< @brief ???
/* 0x6AC */ s16 field_0x6ac[6]; /* 0x6AC */ s16 field_0x6ac[6]; ///< @brief ???
/* 0x6B8 */ cXyz mBoomrangPosOffset; // offset position when keese caught in rang /* 0x6B8 */ cXyz mBoomrangPosOffset; ///< @brief Boomerang position offset. Tracks the offset position when the actor is caught in the gale boomerang.
/* 0x6C4 */ s16 mBoomrangXRotOffset; // offset x rotation when keese caught in rang /* 0x6C4 */ s16 mBoomrangXRotOffset; ///< @brief Boomerang rotation offset. Tracks the offset x rotation when the actor is caught in the gale boomerang.
/* 0x6C8 */ dBgS_AcchCir field_0x6c8; /* 0x6C8 */ dBgS_AcchCir field_0x6c8; ///< @brief ???
/* 0x708 */ dBgS_ObjAcch field_0x708; /* 0x708 */ dBgS_ObjAcch field_0x708; ///< @brief ???
/* 0x8E0 */ dCcD_Stts mCollisionStatus; // collision status /* 0x8E0 */ dCcD_Stts mCollisionStatus; ///< @brief Collision status.
/* 0x91C */ dCcD_Sph mCollisionSphere; // collision sphere /* 0x91C */ dCcD_Sph mCollisionSphere; ///< @brief Collision sphere.
/* 0xA54 */ dCcU_AtInfo mAtColliderInfo; // attack collider info (used when keese gets hit) /* 0xA54 */ dCcU_AtInfo mAtColliderInfo; ///< @brief Attack collider info. Used when actor gets hit.
/* 0xA78 */ u32 field_0xa78; /* 0xA78 */ u32 field_0xa78; ///< @brief ???
/* 0xA7C */ u32 field_0xa7c; /* 0xA7C */ u32 field_0xa7c; ///< @brief ???
/* 0xA80 */ u32 field_0xa80; /* 0xA80 */ u32 field_0xa80; ///< @brief ???
/* 0xA84 */ u32 mParticleEmitterIds[2]; /* 0xA84 */ u32 mParticleEmitterIds[2]; ///< @brief ???
/* 0xA8C */ u8 field_0xa8c; /* 0xA8C */ u8 field_0xa8c; ///< @brief ???
}; };
// size: 0xA90 // size: 0xA90
#endif /* D_A_E_YK_H */ #endif /* D_A_E_YK_H */

View File

@ -1,5 +1,5 @@
/** /**
* d_a_e_yk.cpp * @file d_a_e_yk.cpp
* Enemy - Shadow Keese * Enemy - Shadow Keese
*/ */

127
tools/classify_tu.py Normal file
View File

@ -0,0 +1,127 @@
import clang.cindex
import re
import shutil
from logger import LOG
# Regular expression to match #include "asm/..." -- the presence of such an
# include inside a function body marks it as an unported asm stub.
asm_pattern = re.compile(r'#\s*include\s*"asm/.*"')
# Include directories passed to clang
include_dirs = ['include','include/dolphin','src']
class FunctionChecker:
    """Classify the functions of one translation unit by porting status.

    The TU is parsed twice with libclang (without and with -DNONMATCHING) and
    each function's mangled name is bucketed into not-attempted, attempted,
    or completed depending on whether its body is an ``#include "asm/..."``
    stub under each configuration.
    """
    def __init__(self, file_path):
        # Path of the translation unit being classified.
        self.file_path = file_path
        # Set by replace_static_asm(); used by restore_backup().
        self.backup_file_path = None
        self.index = clang.cindex.Index.create()
        self.include_options = []
        # Buckets of mangled function names, filled by find_includes().
        self.not_attempted = []      # asm stubs seen WITHOUT -DNONMATCHING
        self.new_not_attempted = []  # asm stubs still present WITH -DNONMATCHING
        self.attempted = []
        self.completed = []
        for dir in include_dirs:
            self.include_options += ['-I', dir]
    def replace_static_asm(self):
        # Rewrite "static asm" to "asm" in-place so libclang exposes those
        # functions in the AST; the original file is backed up first.
        pattern = r'\bstatic\s+asm\b'
        self.backup_file_path = self.file_path + '.bak'
        # Make a backup copy of the file
        shutil.copy2(self.file_path, self.backup_file_path)
        with open(self.file_path, 'r') as file:
            lines = file.readlines()
            replaced_lines = [re.sub(pattern, 'asm', line) for line in lines]
        with open(self.file_path, 'w') as file:
            file.writelines(replaced_lines)
    def restore_backup(self):
        # Restore the pristine file over the rewritten copy.
        shutil.move(self.backup_file_path, self.file_path)
    def check_function(self,line):
        # True when the tokenized body contains real code between braces:
        # not empty, not just "/* empty function */", and not a bare
        # "return True;"/"return False;" placeholder.
        # Check if there are any characters between { and }
        if re.search(r'\{.+\}', line, re.DOTALL):
            # Check if those characters are not '/* empty function */' or whitespaces or 'return True;' or 'return False;'
            if not re.search(r'\{\s*(/\*\s*empty function\s*\*/)?\s*\}', line) and not re.search(r'\{\s*return (True|False)\s*;\s*\}', line, re.IGNORECASE):
                return True
        return False
    def return_status(self) -> str:
        """Collapse the buckets into a single TU-level status string."""
        unattempted = len(self.new_not_attempted)
        attempted = len(self.attempted)
        completed = len(self.completed)
        if attempted > 0 or (unattempted > 0 and completed > 0):
            return "in progress"
        if unattempted > 0:
            return "todo"
        # NOTE: a TU that exposed no functions at all also counts as "done".
        if completed > 0 or (completed == 0 and attempted == 0 and unattempted == 0):
            return "done"
        return "error"
    def find_includes(self,node, nonmatching: bool):
        """Recursively walk *node*, bucketing function definitions.

        ``nonmatching`` is True on the second pass (parsed with
        -DNONMATCHING) and selects which bucket an asm stub lands in.
        """
        if str(node.location.file) == self.file_path or str(node.location.file).endswith(".inc"):
            # Check if the node is a function
            if node.kind in (clang.cindex.CursorKind.FUNCTION_DECL,
                             clang.cindex.CursorKind.CXX_METHOD,
                             clang.cindex.CursorKind.CONSTRUCTOR,
                             clang.cindex.CursorKind.DESTRUCTOR,
                             clang.cindex.CursorKind.CONVERSION_FUNCTION):
                # Get the function body
                body = [token.spelling for token in node.get_tokens()]
                body_str = ' '.join(body)
                check_body_str = body_str.strip()
                # Skip if the function body doesn't exist or is empty
                if self.check_function(check_body_str):
                    LOG.debug(f"Checking function: {node.mangled_name}")
                    if asm_pattern.search(body_str):
                        if nonmatching:
                            self.new_not_attempted.append(node.mangled_name)
                        else:
                            self.not_attempted.append(node.mangled_name)
                    else:
                        # Present without an asm include on the NONMATCHING
                        # pass and never seen as a stub: it is complete.
                        if nonmatching and node.mangled_name not in self.not_attempted:
                            self.completed.append(node.mangled_name)
        # Recurse for children of this node
        for c in node.get_children():
            self.find_includes(c, nonmatching)
def run(input_filename: str) -> str:
    """Classify the functions of *input_filename* and return a TU status.

    Returns one of "todo", "in progress", "done", or "error". The file is
    temporarily rewritten (``static asm`` -> ``asm``) so libclang exposes
    every function; the original contents are restored before returning.
    """
    checker = FunctionChecker(input_filename)
    try:
        # tiny hack to get all functions to show up correctly in the AST
        checker.replace_static_asm()
        # First pass: without NONMATCHING, asm stubs mark unattempted work.
        tu = checker.index.parse(checker.file_path, checker.include_options)
        checker.find_includes(tu.cursor, False)
        # Second pass: -DNONMATCHING enables in-progress C++ bodies, so any
        # asm stub still present is genuinely not attempted.
        checker.include_options += ['-DNONMATCHING']
        tu = checker.index.parse(checker.file_path, checker.include_options)
        checker.find_includes(tu.cursor, True)
        # Functions that stopped being asm stubs once NONMATCHING was on
        # have been attempted but do not yet match.
        checker.attempted = set(checker.not_attempted) - set(checker.new_not_attempted)
        LOG.debug(f"Not Attempted: {len(set(checker.new_not_attempted))}")
        LOG.debug(f"Attempted: {len(set(checker.attempted))}")
        LOG.debug(f"Complete: {len(set(checker.completed))}")
        LOG.debug(f"Total: {len(set(checker.new_not_attempted)) + len(set(checker.attempted)) + len(set(checker.completed))}")
        return checker.return_status()
    except Exception as e:
        # BUG FIX: previously returned the exception object itself, which
        # violated the declared -> str contract. Callers now always receive
        # a status string.
        LOG.error(f"Exception while classifying {input_filename}: {e}")
        return "error"
    finally:
        # BUG FIX: only restore when the backup was actually created;
        # shutil.move(None, ...) would otherwise raise and mask the
        # original error.
        if checker.backup_file_path is not None:
            checker.restore_backup()

View File

@ -0,0 +1,4 @@
from .issue import *
from .project import *
from .label import *
from .repository import *

85
tools/libgithub/field.py Normal file
View File

@ -0,0 +1,85 @@
import sys
from .option import Option
from .graphql import GraphQLClient
from logger import LOG
class Field:
    """A ProjectV2 field; currently only the single-select "Status" field
    is modeled."""
    def __init__(self, id, name, options):
        self.id = id
        self.name = name
        # List of Option values selectable for this field.
        self.options = options
    @staticmethod
    def get_status_field(project_id: str) -> 'Field':
        """Return the "Status" single-select field of the given ProjectV2.

        Exits the process when the project has no field named "Status".
        Returns None (implicitly) if the GraphQL request itself fails.
        """
        LOG.debug(f'Getting status field for project ID {project_id}')
        query = '''
        query ($projectId: ID!) {
            node(id: $projectId) {
                ... on ProjectV2 {
                    fields(first: 100) {
                        nodes {
                            ... on ProjectV2SingleSelectField {
                                name
                                id
                            }
                        }
                    }
                }
            }
        }
        '''
        variables = {
            "projectId": project_id
        }
        data = GraphQLClient.get_instance().make_request(query, variables)
        if data:
            fields = data['data']['node']['fields']['nodes']
            for field in fields:
                if 'name' in field and field['name'] == 'Status':
                    field_id = field['id']
                    LOG.info(f'Status Field ID: {field_id}')
                    return Field(
                        id=field_id,
                        name='Status',
                        options=Option.get_all_options(field_id)
                    )
            else:
                # for/else: runs only when the loop finished without
                # returning, i.e. no "Status" field was found.
                # BUG FIX: was LOG.critica(...), which raised AttributeError
                # instead of reporting the missing field.
                LOG.critical('No field found with name "Status"!')
                sys.exit(1)
    def create_option(self, option_name: str):
        """Create a new option on this field; returns the Option or None.

        NOTE(review): 'createProjectOption' is not a documented GitHub
        GraphQL mutation -- verify this call against the current API before
        relying on it.
        """
        LOG.debug(f'Creating option with name {option_name} for field {self.name}')
        query = '''
        mutation ($fieldId: ID!, $optionName: String!) {
            createProjectOption(input: {projectId: $fieldId, name: $optionName}) {
                projectOption {
                    id
                    name
                }
            }
        }
        '''
        variables = {
            "fieldId": self.id,
            "optionName": option_name
        }
        data = GraphQLClient.get_instance().make_request(query, variables)
        if data:
            option_id = data['data']['createProjectOption']['projectOption']['id']
            LOG.info(f'Created option with name {option_name} and ID {option_id}')
            return Option(option_id, option_name)
        else:
            LOG.warning(f'Could not create option with name {option_name}')
            return None
    # Finish later if we decide to add more fields other than the default Status field
    def create(self, project_id: str):
        pass
    # Finish later if we decide to add more fields other than the default Status field
    def delete(self):
        pass

View File

@ -0,0 +1,60 @@
import requests
import sys
import time
from typing import Optional
from logger import LOG
class GraphQLClient:
    """Singleton wrapper around GitHub's GraphQL endpoint.

    Call setup() once with a personal access token, then get_instance()
    wherever a client is needed.
    """
    instance = None
    url: str = 'https://api.github.com/graphql'
    headers: dict
    personal_access_token: Optional[str] = None
    @classmethod
    def setup(cls, pat_token: Optional[str] = None):
        """Create the singleton and build the auth headers.

        Exits the process when no token is supplied.
        """
        if cls.instance is None:
            cls.instance = GraphQLClient()
        if pat_token is not None:
            cls.personal_access_token = pat_token
            # BUG FIX: 'Accept' appeared twice in this dict, so the first
            # media type was silently discarded. HTTP allows a single
            # comma-separated Accept header carrying both.
            cls.headers = {
                'Authorization': f'Bearer {cls.personal_access_token}',
                'Accept': 'application/vnd.github.v3+json, application/vnd.github.bane-preview+json',
            }
        else:
            LOG.error('No personal access token provided. Please provide one with the --personal-access-token flag.')
            sys.exit(1)
    @classmethod
    def get_instance(cls):
        """Return the singleton; raises if setup() has not been called."""
        if cls.instance is None:
            raise Exception("The singleton instance has not been set up. Call setup() first.")
        return cls.instance
    def __init__(self):
        # Guard against direct construction once the singleton exists.
        if self.instance is not None:
            raise Exception("This class is a singleton!")
    def make_request(self, query_or_mutation: str, variables: dict):
        """POST a GraphQL query/mutation and return the decoded JSON.

        Retries (with a 30s pause) on rate-limit / transient server errors;
        returns None on any other GraphQL error.
        """
        payload = {
            'query': query_or_mutation,
            'variables': variables
        }
        while True:
            response = requests.post(self.url, headers=self.headers, json=payload)
            data = response.json()
            if 'errors' in data:
                error_message = data['errors'][0]['message']
                if 'was submitted too quickly' in error_message or 'API rate limit exceeded' in error_message or 'Something went wrong while executing your query' in error_message:
                    LOG.warning('Hit rate limit. Sleeping for 30 seconds...')
                    time.sleep(30)
                    continue
                else:
                    LOG.error(f"Fail. Error: {error_message}")
                    return None
            return data

443
tools/libgithub/issue.py Normal file
View File

@ -0,0 +1,443 @@
import sys
from .label import *
from typing import Optional
@dataclass
class Issue:
yaml_data: Optional[dict] = None
def __eq__(self, other):
if isinstance(other, Issue):
return self.unique_id == other.unique_id
return False
def __hash__(self):
return hash(self.unique_id)
def __init__(self,id=None, title=None, body=None, label_ids=None, file_path=None, data=None):
if data is not None:
self.yaml_ctor(data)
else:
self.id = id
self.title = title
self.body = body
self.label_ids = label_ids
self.file_path = file_path
def yaml_ctor(self,data):
for tu, labels, file_path in get_translation_units(data):
self.id = None
self.title = tu
self.body = None
self.file_path = file_path
self.label_ids = []
def set_open(self):
pass
def set_closed(self):
LOG.debug(f'Closing issue {self.id}')
mutation = """
mutation CloseIssue($id: ID!) {
closeIssue(input: {issueId: $id}) {
clientMutationId
}
}
"""
variables = {
"id": self.id
}
data = GraphQLClient.get_instance().make_request(mutation, variables)
if data:
LOG.info(f'Closed issue {self.id}')
else:
LOG.error(f'Failed to close issue {self.id}')
sys.exit(1)
@staticmethod
def delete_all():
LOG.debug(f'Deleting all issues in {RepoInfo.owner.name}/{RepoInfo.name}')
issue_state = StateFile.data["issues"]
if issue_state is not None and len(issue_state) > 0:
for issue in issue_state.copy():
Issue(
file_path=issue["file_path"],
id=issue["id"],
title=issue["file_path"]
).delete()
else:
LOG.warning(f'No issues found in state file, nothing to delete.')
return
@staticmethod
def get_all_from_github() -> list['Issue']:
return Issue.get_open() + Issue.get_closed()
@staticmethod
def get_closed() -> list['Issue']:
return Issue.get_by_state("CLOSED")
@staticmethod
def get_open() -> list['Issue']:
return Issue.get_by_state("OPEN")
@staticmethod
def get_all_from_yaml(data):
ret_issues = []
labels_dict = {label['name']: label['id'] for label in StateFile.data["labels"]}
for d in data:
# Get tu, labels, filepath for current project
tu_info = get_translation_units(d)
# Add in project name as a label
for idx, (tu, labels, path) in enumerate(tu_info):
new_labels = labels + [d['project']['title']]
tu_info[idx] = (tu, new_labels, path)
for tu, labels, file_path in tu_info:
state_label_ids = []
# Fetch label ids from state file
for label in labels:
if label in labels_dict:
LOG.debug(f'Found label {label} for TU {tu} in state file, adding to issue.')
state_label_ids.append(labels_dict[label])
else:
LOG.error(f'Label {label} not found in state file, please run ./tp github-sync-labels first.')
sys.exit(1)
issue = Issue(
id=None, # set in check_and_create or create method
title=tu,
body=None,
label_ids=state_label_ids,
file_path=file_path
)
ret_issues.append(issue)
return ret_issues
@staticmethod
def get_by_unique_id(unique_id: str) -> 'Issue':
LOG.debug(f'Getting issue with unique ID {unique_id} on {RepoInfo.owner.name}/{RepoInfo.name}')
query = '''
query ($owner: String!, $repo: String!, $first: Int!, $after: String) {
repository(owner: $owner, name: $repo) {
issues(first: $first, after: $after) {
pageInfo {
endCursor
hasNextPage
}
nodes {
title
id
body
labels(first: 100) {
nodes {
id
name
}
}
}
}
}
}
'''
has_next_page = True
cursor = None
while has_next_page:
variables = {
"owner": RepoInfo.owner.name,
"repo": RepoInfo.name,
"first": 100,
"after": cursor
}
data = GraphQLClient.get_instance().make_request(query, variables)
if data:
issues = data['data']['repository']['issues']['nodes']
for issue in issues:
labels = sorted([label['name'] for label in issue['labels']['nodes']])
issue_unique_id = issue['title'] + '-' + '-'.join(labels)
if issue_unique_id == unique_id:
return Issue(
id=issue['id'],
title=issue['title'],
body=issue['body'],
labels=[
Label(
id=label['id'],
name=label['name']
) for label in issue['labels']['nodes']
],
unique_id=unique_id
)
page_info = data['data']['repository']['issues']['pageInfo']
has_next_page = page_info['hasNextPage']
cursor = page_info['endCursor']
else:
LOG.warning(f'No issue found with unique ID {unique_id}')
return None
@staticmethod
def get_by_state(state: str) -> list['Issue']:
LOG.debug(f'Getting {state} issues on {RepoInfo.owner.name}/{RepoInfo.name}')
query = '''
query ($owner: String!, $repo: String!, $state: [IssueState!], $cursor: String) {
repository(owner: $owner, name: $repo) {
issues(first: 100, states: $state, after: $cursor) {
pageInfo {
endCursor
hasNextPage
}
nodes {
title
id
body
labels(first: 100) {
nodes {
id
name
}
}
}
}
}
}
'''
all_issues = []
has_next_page = True
cursor = None
while has_next_page:
variables = {
"owner": RepoInfo.owner.name,
"repo": RepoInfo.name,
"state": state,
"cursor": cursor
}
data = GraphQLClient.get_instance().make_request(query, variables)
if data:
issues = data['data']['repository']['issues']['nodes']
for issue in issues:
insert_issue = Issue(
id=issue['id'],
title=issue['title'],
body=issue['body']
)
all_issues.append(insert_issue)
LOG.debug(f'{state} issues retrieved: {issues}')
page_info = data['data']['repository']['issues']['pageInfo']
has_next_page = page_info['hasNextPage']
cursor = page_info['endCursor']
LOG.debug(f'All {state} issues retrieved: {all_issues}')
return all_issues
@staticmethod
def get_labels_by_id(issue_id: str) -> list[Label]:
LOG.debug(f'Getting all labels for issue {issue_id} on {RepoInfo.owner.name}/{RepoInfo.name}')
query = '''
query ($id: ID!) {
node(id: $id) {
... on Issue {
labels(first: 100) {
nodes {
id
name
}
}
}
}
}
'''
variables = {
"id": issue_id
}
data = GraphQLClient.get_instance().make_request(query, variables)
if data:
labels = data['data']['node']['labels']['nodes']
LOG.debug(f'Labels retrieved: {labels} for issue {issue_id}')
ret_labels = []
for label in labels:
label = Label(
id=label["id"],
name=label["name"]
)
ret_labels.append(label)
return ret_labels
else:
LOG.debug(f'No labels found for issue {issue_id}')
return []
def attach_labels(self) -> None:
LOG.debug(f'Attaching labels to issue {self.id} on {RepoInfo.owner.name}/{RepoInfo.name}')
def check_and_create(self) -> None:
issues = StateFile.data.get('issues')
if issues is None:
issue_dict = {}
else:
issue_dict = {issue['file_path']: issue for issue in issues}
if self.file_path in issue_dict:
LOG.info(f"Issue {self.title} from TU {self.file_path} already setup!")
self.id = issue_dict[self.file_path]["id"]
else:
LOG.info(f'Creating missing issue {self.title}.')
self.create()
# def check_and_attach_labels(self) -> None:
# LOG.debug(f'Checking labels for issue {self.title} on {RepoInfo.owner.name}/{RepoInfo.name}')
# issues = StateFile.data.get('issues')
# if issues is None:
# issue_dict = {}
# else:
# issue_dict = {issue['file_path']: issue for issue in issues}
# if self.file_path in issue_dict:
# state_labels = StateFile.data.get('labels')
# label_ids = issue_dict[self.file_path]["label_ids"]
# if label_ids is not None:
# state_label_ids = [label['id'] for label in state_labels]
# for label_id in label_ids:
# if label_id in state_label_ids:
# LOG.debug(f'Label {label_id} exists in state, continuing')
# continue
# else:
# LOG.error(f'Label {label_id} does not exist in state, please run sync-labels first')
# sys.exit(1)
# LOG.info(f'All labels already attached to issue {self.title} on {RepoInfo.owner.name}/{RepoInfo.name}')
# else:
# LOG.info(f'Attaching labels to issue {self.title} on {RepoInfo.owner.name}/{RepoInfo.name}')
# # use yaml data to fetch label names for this issue
# # lookup id from state and attach to issue
# <replace> =
# for label in <replace>:
# self.attach_label_by_id() # finish
# LOG.info(f'Labels attached to issue {self.title} on {RepoInfo.owner.name}/{RepoInfo.name}')
# print(label_ids)
# sys.exit(0)
# else:
# LOG.error(f"Issue {self.title} from TU {self.file_path} is missing")
# sys.exit(1)
def create(self):
repo_id = RepoInfo.id
mutation = """
mutation CreateIssue($input: CreateIssueInput!) {
createIssue(input: $input) {
issue {
id
title
}
}
}
"""
input_dict = {
"repositoryId": repo_id,
"title": self.title,
"body": self.body,
}
if self.label_ids is not None:
input_dict["labelIds"] = self.label_ids
variables = {
"input": input_dict
}
data = GraphQLClient.get_instance().make_request(mutation, variables)
if data:
self.id = data["data"]["createIssue"]["issue"]["id"]
self.title = data["data"]["createIssue"]["issue"]["title"]
self.write_state_to_file()
LOG.info(f'Created Issue {self.title} with ID {self.id}')
return self.id
def delete(self) -> None:
LOG.debug(f'Deleting issue {self.title} with ID {self.id}')
mutation = '''
mutation DeleteIssue($id: ID!) {
deleteIssue(input: {issueId: $id}) {
clientMutationId
}
}
'''
variables = {
"id": self.id
}
data = GraphQLClient.get_instance().make_request(mutation, variables)
if data:
self.write_state_to_file(delete=True)
LOG.info(f'Successfully deleted issue {self.title}.')
else:
LOG.error(f'Failed to delete issue {self.title}')
def write_state_to_file(self, delete: bool = False):
state = {
"id": self.id,
"title": self.title,
"body": self.body,
"label_ids": self.label_ids,
"file_path": self.file_path,
}
curr_state_issues = StateFile.data.get("issues", None)
if curr_state_issues is not None:
for i, issue in enumerate(curr_state_issues):
if issue['id'] == self.id:
if delete:
del StateFile.data['issues'][i]
break
else:
StateFile.data['issues'][i] = state
break
else:
StateFile.data['issues'].append((state))
else:
StateFile.data['issues'] = [state]
with open("tools/pjstate.yml", 'w') as f:
yaml.safe_dump(StateFile.data, f)

229
tools/libgithub/label.py Normal file
View File

@ -0,0 +1,229 @@
from .repository import RepoInfo
from .graphql import GraphQLClient
from .util import *
from .state import *
from logger import LOG
from dataclasses import dataclass
from typing import Optional
import yaml, sys
@dataclass
class Label:
    """A repository issue label, synced between the YAML project
    definition, GitHub, and tools/pjstate.yml.

    Note: @dataclass does not regenerate __init__/__eq__/__hash__ because
    they are defined explicitly below.
    """
    @staticmethod
    def get_all_from_yaml(data):
        """Build Label objects for every project entry in *data*: one per
        sub-label plus one for the project title itself."""
        ret_labels = []
        sub_labels = []
        for d in data:
            sub_labels = get_sub_labels(d)
            for label in sub_labels:
                ret_label = Label(name=label)
                ret_labels.append(ret_label)
            title_label = Label(data=d)
            ret_labels.append(title_label)
        return ret_labels
    def __eq__(self, other):
        # Labels are equal only when both the name and GitHub ID match.
        if isinstance(other, Label):
            return self.name == other.name and self.id == other.id
        return False
    def __hash__(self):
        return hash((self.name, self.id))
    def __init__(self,data=None,id=None,name=None):
        """Construct either from a YAML project entry (data=...) or from an
        explicit id/name pair."""
        if data is not None:
            project_data = data.get('project', {})
            self.yaml_ctor(project_data)
        else:
            self.id = id
            self.name = name
    def yaml_ctor(self,project_data):
        # YAML-sourced labels have no GitHub node ID until synced.
        self.id = None
        self.name = project_data.get('title', 'MISSING_TITLE')
    @staticmethod
    def delete_all():
        """Delete every label recorded in the state file."""
        LOG.debug(f'Deleting all labels in {RepoInfo.owner.name}/{RepoInfo.name}')
        label_state = StateFile.data["labels"]
        # BUG FIX: the guard previously tested len(StateFile.data) -- the
        # whole state mapping -- instead of the label list itself.
        if label_state is not None and len(label_state) > 0:
            # Iterate a copy: delete() mutates the underlying state list.
            for label in label_state.copy():
                Label(
                    id=label["id"],
                    name=label["name"]
                ).delete()
        else:
            LOG.warning('No labels found in state file, nothing to delete.')
            return
    @staticmethod
    def get_all_from_github() -> list['Label']:
        """Fetch the repository's labels (first 100) from GitHub."""
        LOG.debug(f'Fetch all current labels for {RepoInfo.owner.name}/{RepoInfo.name}')
        query = '''
        query ($owner: String!, $repo: String!) {
            repository(owner: $owner, name: $repo) {
                labels(first: 100) {
                    nodes {
                        name
                        id
                    }
                }
            }
        }
        '''
        variables = {
            "owner": RepoInfo.owner.name,
            "repo": RepoInfo.name
        }
        data = GraphQLClient.get_instance().make_request(query, variables)
        if data:
            labels = data['data']['repository']['labels']['nodes']
            LOG.debug(f'Labels retrieved: {labels}')
            return [
                Label(
                    id=label["id"],
                    name=label["name"]
                ) for label in labels
            ]
        else:
            LOG.warning(f'No labels found in {RepoInfo.owner.name}/{RepoInfo.name}')
            return []
    @staticmethod
    def get_id_by_name(label_name: str):
        """Return the node ID of the label named *label_name*, or None."""
        LOG.debug(f'Fetch label ID for label {label_name} in {RepoInfo.owner.name}/{RepoInfo.name}')
        query = '''
        query ($owner: String!, $repo: String!) {
            repository(owner: $owner, name: $repo) {
                labels(first: 100) {
                    nodes {
                        name
                        id
                    }
                }
            }
        }
        '''
        variables = {
            "owner": RepoInfo.owner.name,
            "repo": RepoInfo.name
        }
        data = GraphQLClient.get_instance().make_request(query, variables)
        if data:
            labels = data['data']['repository']['labels']['nodes']
            for label in labels:
                if label['name'] == label_name:
                    LOG.debug(f'Label ID for {label_name} retrieved: {label["id"]}')
                    return label['id']
        LOG.warning(f'Label {label_name} not found in {RepoInfo.owner.name}/{RepoInfo.name}')
        return None
    def check_and_create(self) -> None:
        """Create this label on GitHub unless the state file already has it
        (keyed by name), in which case adopt the recorded ID."""
        labels = StateFile.data.get('labels')
        if labels is None:
            label_dict = {}
        else:
            label_dict = {label['name']: label for label in labels}
        if self.name in label_dict:
            LOG.info(f"Label {self.name} already setup!")
            self.id = label_dict[self.name]["id"]
            self.name = label_dict[self.name]["name"]
        else:
            LOG.debug(f'Creating missing label {self.name}.')
            self.create()
    def create(self) -> None:
        """Create this label on GitHub with a random color and record it in
        the state file; exits the process on failure."""
        LOG.debug(f'Creating issue label: {self.name}')
        mutation = '''
        mutation ($repoId: ID!, $labelName: String!, $color: String!) {
            createLabel(input: { name: $labelName, repositoryId: $repoId, color: $color }) {
                label {
                    name
                    id
                }
            }
        }
        '''
        variables = {
            "repoId": RepoInfo.id,
            "labelName": self.name,
            "color": generate_random_rgb_hex()
        }
        data = GraphQLClient.get_instance().make_request(mutation, variables)
        if data:
            LOG.debug(f'Create label data: {data}')
            self.id = data['data']['createLabel']['label']['id']
            self.write_state_to_file()
            # BUG FIX: closing quote was misplaced, swallowing the ID into
            # the quoted label name.
            LOG.info(f"Successfully created label '{self.name}' with ID {self.id}!")
        else:
            LOG.error(f"Failed to create label {self.name}")
            sys.exit(1)
    def delete(self) -> None:
        """Delete this label on GitHub and drop it from the state file."""
        query = '''
        mutation ($labelId: ID!) {
            deleteLabel(input: {
                id: $labelId
            }) {
                clientMutationId
            }
        }
        '''
        variables = {
            "labelId": self.id
        }
        data = GraphQLClient.get_instance().make_request(query, variables)
        if data:
            self.write_state_to_file(delete=True)
            LOG.info(f'Successfully deleted label {self.name}')
        else:
            LOG.error(f'Failed to delete label {self.name}')
    def write_state_to_file(self, delete: bool = False):
        """Upsert (or remove, when delete=True) this label's record in
        tools/pjstate.yml, matching on the GitHub node ID."""
        state = {
            "id": self.id,
            "name": self.name
        }
        curr_state_labels = StateFile.data.get('labels',None)
        if curr_state_labels is not None:
            for i, label in enumerate(curr_state_labels):
                if label['id'] == self.id:
                    if delete:
                        del StateFile.data['labels'][i]
                        break
                    else:
                        StateFile.data['labels'][i] = state
                        break
            else:
                # for/else: no record matched this ID, so append a new one.
                StateFile.data['labels'].append(state)
        else:
            StateFile.data['labels'] = [state]
        with open("tools/pjstate.yml", 'w') as f:
            yaml.safe_dump(StateFile.data, f)

78
tools/libgithub/option.py Normal file
View File

@ -0,0 +1,78 @@
from .util import *
from .graphql import GraphQLClient
from dataclasses import dataclass
from logger import LOG
class Option:
    """One option of a ProjectV2 single-select field (e.g. a Status value)."""

    def __init__(self, id, name):
        self.id = id
        self.name = name

    @staticmethod
    def get_all_options(field_id: str) -> list['Option']:
        """Fetch every option defined on the given single-select field."""
        LOG.debug(f'Getting all options for field {field_id}')
        query = '''
        query ($fieldId: ID!) {
          node(id: $fieldId) {
            ... on ProjectV2SingleSelectField {
              options {
                id
                name
              }
            }
          }
        }
        '''
        data = GraphQLClient.get_instance().make_request(query, {"fieldId": field_id})
        if not data:
            LOG.warning(f'No options found for field {field_id}')
            return []
        options = data['data']['node']['options']
        LOG.info(f'Options: {options}')
        return [Option(id=raw['id'], name=raw['name']) for raw in options]

    @staticmethod
    def get_id(field_id: str, option_name: str) -> str:
        """Return the ID of the option named ``option_name``, or None."""
        LOG.debug(f'Getting option ID for field {field_id} with name {option_name}')
        query = '''
        query ($fieldId: ID!) {
          node(id: $fieldId) {
            ... on ProjectV2SingleSelectField {
              options {
                id
                name
              }
            }
          }
        }
        '''
        data = GraphQLClient.get_instance().make_request(query, {"fieldId": field_id})
        if data:
            for raw in data['data']['node']['options']:
                if raw['name'] == option_name:
                    option_id = raw['id']
                    LOG.info(f'{option_name} Option ID: {option_id}')
                    return option_id
            LOG.warning(f'No option found with name {option_name}')
            return None

    def delete(self):
        # Options cannot be deleted through the API yet.
        pass

546
tools/libgithub/project.py Normal file
View File

@ -0,0 +1,546 @@
import yaml
from .issue import *
from .field import *
from typing import Optional
class Project:
def __eq__(self, other):
    """Two Projects are equal when both title and node ID match.

    Bug fix: this previously tested ``isinstance(other, Label)`` and read
    ``other.name`` (Label attributes, a copy-paste from the Label class),
    so two equal Projects never compared equal.
    """
    if isinstance(other, Project):
        return self.title == other.title and self.id == other.id
    return False

def __hash__(self):
    """Hash over the same fields __eq__ compares (title, id)."""
    return hash((self.title, self.id))
def __init__(self, id=None, title=None, number=None, items=None, items_to_attach=None, status_field=None, data=None):
    """Build a Project from a parsed YAML mapping (``data``) or explicit fields.

    When ``data`` is given, only the title is known; id/number/items are
    filled in later by create() or the GitHub fetch helpers.
    """
    if data is None:
        self.id = id
        self.title = title
        self.number = number
        self.items = items
        self.items_to_attach = items_to_attach
        self.status_field = status_field
    else:
        self.title = data.get("project").get('title', 'MISSING_TITLE')
        self.id = None
        self.number = None
        self.items = None
        self.items_to_attach = None
        self.status_field = None
@staticmethod
def get_all_from_yaml(data) -> list['Project']:
    """Build Project objects from the parsed projects.yml document.

    Each translation unit under a project must already have an issue
    recorded in the state file; its issue ID becomes an item to attach.
    Exits if any TU has no recorded issue.
    """
    ret_projects = []
    # Map file path -> issue node ID from the previously-synced state file.
    issues_dict = {issue['file_path']: issue['id'] for issue in StateFile.data["issues"]}
    for d in data:
        items = []
        for tu, _, file_path in get_translation_units(d):
            if file_path in issues_dict:
                LOG.debug(f'Issue {tu} found in state file.')
                LOG.debug(f'Adding ID {issues_dict[file_path]} to items.')
                items.append({
                    "issue_id": issues_dict[file_path]
                })
            else:
                LOG.error(f'Issue {tu} not found in state file. Please run ./tp github-sync-issues first.')
                sys.exit(1)
        # id/number are unknown until the project is created or fetched.
        project = Project(
            id=None,
            title=d['project']['title'],
            number=None,
            items=[],
            items_to_attach=items
        )
        ret_projects.append(project)
    return ret_projects
@staticmethod
def get_all_from_github() -> list['Project']:
    """Fetch every ProjectV2 board on the repo, including attached items.

    Returns Project objects whose ``items`` hold ProjectItem entries
    (project item ID plus the underlying issue's node ID).
    """
    LOG.debug(f'Getting projects on {RepoInfo.owner.name}/{RepoInfo.name}')
    query = '''
    query ($owner: String!, $repo: String!, $cursor: String) {
      repository(owner: $owner, name: $repo) {
        projectsV2(first: 20) {
          nodes {
            id
            title
            number
            items(first: 100, after: $cursor) {
              pageInfo {
                endCursor
                hasNextPage
              }
              edges {
                cursor
                node {
                  id
                  content {
                    ... on Issue {
                      id
                      title
                    }
                  }
                }
              }
            }
          }
        }
      }
    }
    '''
    variables = {
        "owner": RepoInfo.owner.name,
        "repo": RepoInfo.name,
        # Items-page cursor; None fetches the first page.
        "cursor": None,
    }
    ret_projects = []
    while True:
        data = GraphQLClient.get_instance().make_request(query, variables)
        if data:
            projects = data['data']['repository']['projectsV2']['nodes']
            LOG.debug(f'Projects retrieved: {projects}')
            for project in projects:
                items = []
                for edge in project['items']['edges']:
                    LOG.debug(f'Item: {edge}')
                    item_id = edge['node']['id']
                    issue_id = edge['node']['content']['id']
                    item = ProjectItem(
                        id=item_id,
                        issue_id = issue_id,
                    )
                    items.append(item)
                ret_project = Project(
                    id=project['id'],
                    title=project['title'],
                    number=project['number'],
                    items=items
                )
                ret_projects.append(ret_project)
            # Check if there are more items to fetch
            # NOTE(review): pagination follows only the FIRST project's
            # pageInfo, and each loop pass re-appends every project — a
            # board with >100 items likely yields duplicate Project
            # entries. Verify against a repo with large boards.
            if len(projects) == 0 or not data['data']['repository']['projectsV2']['nodes'][0]['items']['pageInfo']['hasNextPage']:
                break
            # Update the cursor to the last item's cursor for the next fetch
            variables['cursor'] = data['data']['repository']['projectsV2']['nodes'][0]['items']['pageInfo']['endCursor']
        else:
            LOG.warning("No projects found!")
            break
    return ret_projects
@staticmethod
def get_project_by_name(project_name: str) -> Optional['Project']:
    """Return the repo's project whose title equals ``project_name``.

    Returns None (after a warning if the repo has no projects at all)
    when nothing matches.
    """
    candidates = Project.get_all_from_github()
    if not candidates:
        LOG.warning(f'No projects found in {RepoInfo.owner.name}/{RepoInfo.name}')
        return None
    for candidate in candidates:
        if candidate.title == project_name:
            return candidate
    return None
@staticmethod
def delete_all():
    """Delete every project recorded in the local state file."""
    LOG.debug(f'Deleting all projects in {RepoInfo.owner.name}/{RepoInfo.name}')
    recorded = StateFile.data["projects"]
    if not recorded:
        LOG.warning(f'No projects found in state file. Nothing to delete.')
        return
    # Iterate over a copy: Project.delete() mutates the recorded list.
    for entry in recorded.copy():
        Project(
            id=entry["id"],
            title=entry["title"]
        ).delete()
def create(self) -> None:
    """Create this project on GitHub, record it, and make it public.

    Requires RepoInfo ids to be resolved already. Exits on API failure.
    """
    owner_id = RepoInfo.owner.id
    repo_id = RepoInfo.id
    # Without both ids the mutation cannot be built.
    # NOTE(review): this returns silently — consider logging, since it
    # hides a misconfigured RepoInfo.
    if not owner_id or not repo_id:
        return
    LOG.debug(f'Creating Github project {self.title}')
    mutation = '''
    mutation ($ownerId: ID!, $repoId: ID!, $projectName: String!) {
      createProjectV2(input: { ownerId: $ownerId, repositoryId: $repoId, title: $projectName }) {
        projectV2 {
          id
          number
          title
        }
      }
    }
    '''
    variables = {
        "ownerId": owner_id,
        "repoId": repo_id,
        "projectName": self.title
    }
    data = GraphQLClient.get_instance().make_request(mutation, variables)
    if data:
        self.id = data['data']['createProjectV2']['projectV2']['id']
        self.number = data['data']['createProjectV2']['projectV2']['number']
        # Cache the board's single-select "Status" field for later updates.
        self.status_field = Field.get_status_field(self.id)
        self.write_state_to_file()
        # New ProjectV2 boards default to private; flip to public.
        self.set_public()
        LOG.info(f"Successfully created project '{self.title}' with ID {self.id} and number {self.number}")
    else:
        LOG.error(f'Failed to create project {self.title}')
        sys.exit(1)
def check_and_create(self) -> None:
    """Create the project if missing; otherwise attach any missing issues.

    Existence is decided from the local state file, not the GitHub API.
    """
    projects = StateFile.data.get('projects')
    if projects is None:
        project_dict = {}
    else:
        # Index recorded projects by title for O(1) lookup.
        project_dict = {project['title']: project for project in projects}
    if self.title in project_dict:
        LOG.info(f'Project {self.title} already exists')
        # Rehydrate this object from the recorded state.
        self.id = project_dict[self.title]["id"]
        self.number = project_dict[self.title]["number"]
        self.items = project_dict[self.title]["items"]
        self.status_field = project_dict[self.title]["status_field"]
        # Issues requested in YAML that are not yet attached to the board.
        missing_issue_ids = [item['issue_id'] for item in self.items_to_attach if item['issue_id'] not in {item['issue_id'] for item in self.items}]
        LOG.info(f'Attaching missing issues to project {self.title}')
        if len(missing_issue_ids) > 0:
            for id in missing_issue_ids:
                self.attach_issue(id)
        else:
            LOG.info(f'All issues already attached to project {self.title}')
    else:
        LOG.info(f'Creating missing project {self.title}')
        self.create()
        for item in self.items_to_attach:
            self.attach_issue(item["issue_id"])
def attach_issue(self, issue_id) -> None:
    """Add an existing issue to this board and record the new project item.

    Exits on API failure.
    """
    LOG.debug(f'Attaching issue {issue_id} to project {self.title}')
    mutation = """
    mutation AddProjectV2ItemById($input: AddProjectV2ItemByIdInput!) {
      addProjectV2ItemById(input: $input) {
        clientMutationId
        item {
          id
        }
      }
    }
    """
    payload = {
        "input": {
            "projectId": self.id,
            "contentId": issue_id,
            "clientMutationId": "UNIQUE_ID"
        }
    }
    response = GraphQLClient.get_instance().make_request(mutation, payload)
    if not response:
        LOG.error(f'Failed to attach issue {issue_id} to project {self.title}')
        sys.exit(1)
    LOG.debug(f'Issue {issue_id} attached to project {self.title}')
    created_item_id = response['data']['addProjectV2ItemById']['item']['id']
    self.items.append({
        "issue_id": issue_id,
        "item_id": created_item_id
    })
    self.write_state_to_file()
def get_item_id_from_issue(self, issue: Issue) -> str:
    """Resolve the project-item node ID that wraps ``issue`` on this board.

    Exits on request failure.
    NOTE(review): ``projectV2Item`` does not look like a root query field
    in GitHub's GraphQL schema — verify this query against the current
    API before relying on it.
    """
    LOG.debug(f'Getting item ID for issue {issue.title} in project {self.title}')
    query = """
    query ($projectId: ID!, $issueId: ID!) {
      projectV2Item(projectId: $projectId, contentId: $issueId) {
        id
      }
    }
    """
    variables = {
        "projectId": self.id,
        "issueId": issue.id
    }
    data = GraphQLClient.get_instance().make_request(query, variables)
    if data:
        LOG.debug(f'Got item ID for issue {issue.title} in project {self.title}')
        return data['data']['projectV2Item']['id']
    else:
        LOG.error(f'Failed to get item ID for issue {issue.title} in project {self.title}')
        sys.exit(1)
def delete(self) -> None:
    """Delete this project board on GitHub and drop it from the state file.

    Exits on API failure.
    """
    mutation = '''
    mutation ($projectId: ID!) {
      deleteProjectV2(input: {
        projectId: $projectId
      }) {
        clientMutationId
      }
    }
    '''
    response = GraphQLClient.get_instance().make_request(mutation, {"projectId": self.id})
    if not response:
        LOG.error(f'Failed to delete project {self.title}')
        sys.exit(1)
    self.write_state_to_file(delete=True)
    LOG.info(f'Successfully deleted project {self.title}.')
def set_public(self) -> None:
    """Flip this board's visibility to public (new boards default private).

    Exits on API failure.
    """
    mutation = '''
    mutation UpdateProjectVisibility($input: UpdateProjectV2Input!) {
      updateProjectV2(input: $input) {
        projectV2 {
          id
          title
          public
        }
      }
    }
    '''
    payload = {
        "input": {
            "projectId": self.id,
            "public": True
        }
    }
    if GraphQLClient.get_instance().make_request(mutation, payload):
        LOG.info(f'Successfully set project {self.title} to public.')
    else:
        LOG.error(f'Failed to set project {self.title} to public.')
        sys.exit(1)
def set_status_for_item(self, item_id: str, status: str) -> None:
    """Set the single-select Status field of a project item.

    ``status`` is matched case-insensitively against the field's options.
    Exits with an error if the status is unknown or the API call fails.
    """
    query = '''
    mutation updateProjectV2ItemFieldValue($input: UpdateProjectV2ItemFieldValueInput!) {
      updateProjectV2ItemFieldValue(input: $input) {
        projectV2Item {
          databaseId
        }
      }
    }
    '''
    options = self.status_field.options
    option = next((option for option in options if option.name.lower() == status.lower()), None)
    # Fix: previously a status that matched no option fell through and
    # crashed with AttributeError on ``option.id``; fail with a clear
    # message instead, consistent with the other error paths here.
    if option is None:
        LOG.error(f"Unknown status '{status}'; no matching option on the status field")
        sys.exit(1)
    variables = {
        "input": {
            "projectId": self.id,
            "itemId": item_id,
            "fieldId": self.status_field.id,
            "value": {
                "singleSelectOptionId": option.id
            }
        }
    }
    data = GraphQLClient.get_instance().make_request(query, variables)
    if data:
        LOG.info(f'Successfully set status for item {item_id} to {status}')
    else:
        LOG.error(f'Failed to set status for item {item_id} to {status}')
        sys.exit(1)
def set_id(self) -> None:
    """Look up this project's node ID on GitHub by title and store it.

    Exits if the query fails or no project with this title exists.
    """
    LOG.debug(f'Getting ID for project {self.title}')
    query = '''
    query ($owner: String!, $name: String!, $projectName: String!) {
      repository(owner: $owner, name: $name) {
        projectsV2(query: $projectName, first: 1) {
          nodes {
            id
            title
          }
        }
      }
    }
    '''
    variables = {
        "owner": RepoInfo.owner.name,
        "name": RepoInfo.name,
        "projectName": self.title
    }
    data = GraphQLClient.get_instance().make_request(query, variables)
    if not data:
        LOG.critical(f'Query failed.')
        sys.exit(1)
    projects = data['data']['repository']['projectsV2']['nodes']
    for project in projects:
        if project['title'] == self.title:
            project_id = project['id']
            LOG.info(f'Project ID for {self.title}: {project_id}')
            self.id = project_id
            # Fix: stop on the first exact title match. The original loop
            # never broke out, so its "not found" for-else path could fire
            # (and exit) even after a match was recorded.
            return
    LOG.critical(f'No project found with title {self.title}')
    sys.exit(1)
def set_items(self) -> None:
    """Populate ``self.items`` with items (id, issue_id, issue_title).

    NOTE(review): the query is not filtered to THIS project — items from
    every board on the repo are collected. Confirm whether it should
    filter by ``self.id``/``self.number``.
    """
    query = '''
    query ($owner: String!, $repo: String!, $cursor: String) {
      repository(owner: $owner, name: $repo) {
        projectsV2(first: 20) {
          nodes {
            items(first: 100, after: $cursor) {
              pageInfo {
                endCursor
                hasNextPage
              }
              edges {
                cursor
                node {
                  id
                  content {
                    ... on Issue {
                      id
                      title
                    }
                  }
                }
              }
            }
          }
        }
      }
    }
    '''
    variables = {
        "owner": RepoInfo.owner.name,
        "repo": RepoInfo.name,
        # Items-page cursor; None fetches the first page.
        "cursor": None,
    }
    self.items = []
    while True:
        data = GraphQLClient.get_instance().make_request(query, variables)
        if data:
            projects = data['data']['repository']['projectsV2']['nodes']
            for project in projects:
                for edge in project['items']['edges']:
                    item_id = edge['node']['id']
                    issue_id = edge['node']['content']['id']
                    issue_title = edge['node']['content']['title']
                    item = {
                        'id': item_id,
                        'issue_id': issue_id,
                        'issue_title': issue_title
                    }
                    self.items.append(item)
            # Check if there are more items to fetch
            # NOTE(review): as in get_all_from_github, pagination follows
            # only nodes[0]; later pages re-append other projects' items.
            if not data['data']['repository']['projectsV2']['nodes'][0]['items']['pageInfo']['hasNextPage']:
                break
            # Update the cursor to the last item's cursor for the next fetch
            variables['cursor'] = data['data']['repository']['projectsV2']['nodes'][0]['items']['pageInfo']['endCursor']
        else:
            break
def write_state_to_file(self, delete: bool = False):
    """Sync this project's entry in StateFile and rewrite tools/pjstate.yml.

    Replaces the matching entry (by id), removes it when ``delete`` is
    True, or appends/creates the list when the project is not recorded.
    """
    entry = {
        "title": self.title,
        "id": self.id,
        "number": self.number,
        "items": self.items,
        "status_field": self.status_field
    }
    existing = StateFile.data.get("projects", None)
    if existing is None:
        StateFile.data['projects'] = [entry]
    else:
        for idx, recorded in enumerate(existing):
            if recorded['id'] != self.id:
                continue
            if delete:
                del StateFile.data['projects'][idx]
            else:
                StateFile.data['projects'][idx] = entry
            break
        else:
            # No recorded entry matched this id: record a new one.
            StateFile.data['projects'].append(entry)
    with open("tools/pjstate.yml", 'w') as f:
        yaml.safe_dump(StateFile.data, f)
# Custom representer for Option
def option_representer(dumper, data):
    """Serialize an Option instance as a '!Option' YAML mapping."""
    fields = {
        'id': data.id,
        'name': data.name
    }
    return dumper.represent_mapping('!Option', fields)
# Custom constructor for Option
def option_constructor(loader, node):
    """Rebuild an Option instance from a '!Option' YAML node."""
    mapping = loader.construct_mapping(node)
    return Option(mapping['id'], mapping['name'])
# Custom representer for Field
def field_representer(dumper, data):
    """Serialize a Field instance as a '!Field' YAML mapping."""
    fields = {
        'id': data.id,
        'name': data.name,
        'options': data.options
    }
    return dumper.represent_mapping('!Field', fields)
# Custom constructor for Field
def field_constructor(loader, node):
    """Rebuild a Field instance from a '!Field' YAML node."""
    mapping = loader.construct_mapping(node)
    return Field(mapping['id'], mapping['name'], mapping['options'])
# Register the custom representers and constructors with SafeDumper
# (and SafeLoader) so Option/Field instances survive the
# yaml.safe_dump / yaml.safe_load round-trip of the state file.
yaml.add_representer(Option, option_representer, Dumper=yaml.SafeDumper)
yaml.add_constructor('!Option', option_constructor, Loader=yaml.SafeLoader)
yaml.add_representer(Field, field_representer, Dumper=yaml.SafeDumper)
yaml.add_constructor('!Field', field_constructor, Loader=yaml.SafeLoader)

View File

@ -0,0 +1,41 @@
import sys
from .graphql import GraphQLClient
from typing import ClassVar
from logger import LOG
class OwnerInfo:
    """Identity of the repository owner; populated by RepoInfo.set_ids."""
    # GraphQL node ID of the owner (set by RepoInfo.set_ids).
    id: ClassVar[str]
    # Owner login, e.g. "zeldaret"; set by the CLI before set_ids runs.
    name: ClassVar[str]
class RepoInfo:
    """Process-wide identity of the target repository."""
    # GraphQL node ID of the repository (filled in by set_ids).
    id: ClassVar[str]
    # Repository name, e.g. "tp"; set by the CLI before set_ids runs.
    name: ClassVar[str]
    # Owner identity; ``owner.name`` must be set before calling set_ids.
    owner: ClassVar[OwnerInfo]

    @classmethod
    def set_ids(cls):
        """Resolve and cache the repo and owner node IDs from GitHub.

        Exits if the repository cannot be queried (missing or private).
        """
        LOG.debug(f'Fetching repo ID for {cls.name}')
        query = '''
        query ($owner: String!, $repo: String!) {
          repository(owner: $owner, name: $repo) {
            owner {
              id
            }
            id
          }
        }
        '''
        variables = {
            "owner": cls.owner.name,
            "repo": cls.name
        }
        data = GraphQLClient.get_instance().make_request(query, variables)
        if data:
            cls.id = data['data']['repository']['id']
            cls.owner.id = data['data']['repository']['owner']['id']
        else:
            LOG.error(f"Failed to fetch repo ID! Make sure {cls.owner.name}/{cls.name} exists and isn't private.")
            sys.exit(1)

9
tools/libgithub/state.py Normal file
View File

@ -0,0 +1,9 @@
import yaml, pathlib
class StateFile:
    """Process-wide holder for the parsed tools/pjstate.yml contents."""

    # Parsed YAML document; None until load() is called.
    data = None

    @classmethod
    def load(cls, file_name: pathlib.Path):
        """Read and parse the given YAML state file into ``cls.data``.

        Fix: the classmethod's first parameter was named ``self``; per
        convention (PEP 8) a classmethod's implicit first argument is
        ``cls`` — it receives the class, not an instance.
        """
        with open(file_name, 'r') as f:
            cls.data = yaml.safe_load(f)

86
tools/libgithub/util.py Normal file
View File

@ -0,0 +1,86 @@
import random, os
from typing import List, Tuple
from logger import LOG
def generate_random_rgb_hex() -> str:
    """Return a random 6-digit uppercase hex color string (no leading '#')."""
    # Uniform over 0x000000..0xFFFFFF, identical in distribution to
    # choosing six hex digits independently.
    return '%06X' % random.randrange(0x1000000)
def get_translation_units(data) -> list[Tuple[str, List[str], str]]:
    """Collect (tu_name, sub_labels, file_path) triples for one project entry.

    ``data`` is one project mapping from projects.yml: explicit ``files``
    are taken as-is, ``dirs`` are walked recursively, and ``notFiles`` /
    ``notDirs`` (plus a few hard-coded names) are excluded.
    """
    dirs = [{'path': dir_data['path'], 'sub_labels': dir_data.get('sub_labels', [])} for dir_data in data.get('dirs', [])]
    not_dirs = [{'path': dir_data['path'], 'sub_labels': dir_data.get('sub_labels', [])} for dir_data in data.get('notDirs', [])]
    files = [{'path': file_data['path'], 'sub_labels': file_data.get('sub_labels', [])} for file_data in data.get('files', [])]
    not_files = [{'path': file_data['path'], 'sub_labels': file_data.get('sub_labels', [])} for file_data in data.get('notFiles', [])]
    # Generated TUs that never get issues, plus user-excluded files.
    ignore_files = [
        "ctx.c",
        "unknown_translation_unit.cpp",
        "unknown_translation_unit_bss.cpp",
    ] + [file['path'] for file in not_files]
    ignore_dirs = [
        "build",
        "tools",
        "expected"
    ] + [directory['path'] for directory in not_dirs]
    tus = []
    LOG.debug('Adding include files directly to tu list')
    for file in files:
        if file['path'] not in ignore_files and file['path'].endswith((".c", ".cpp")):
            tus.append((file['path'].split("/")[-1], file['sub_labels'], file['path']))  # Use sub_labels from file and include file path
            LOG.debug(f'TU name: {file["path"]}')
    LOG.debug('Adding files from include dirs directly to tu list')
    for directory in dirs:
        # Renamed the walk variables so they no longer shadow the outer
        # ``files`` list from the YAML entry.
        for root, _, walk_files in os.walk(directory['path']):
            if any(ignore_dir in root for ignore_dir in ignore_dirs):
                continue
            for walk_file in walk_files:
                if not walk_file.endswith((".c", ".cpp")):
                    continue
                full_file_path = os.path.join(root, walk_file)
                # Fix: notFiles entries are repo-relative paths, so check
                # the full path as well as the bare filename — the bare
                # name alone could never match an entry like
                # "src/d/a/d_a_foo.cpp".
                if walk_file in ignore_files or full_file_path in ignore_files:
                    continue
                # Use sub_labels from directory and include file path.
                tus.append((walk_file, directory['sub_labels'], full_file_path))
    return tus
def get_sub_labels(data) -> set[str]:
    """Collect the distinct sub_labels used by one project entry.

    Mirrors get_translation_units' traversal. Returns a set — the
    original annotation said list[str], but the final value is built
    with set() below.
    """
    dirs = [{'path': dir_data['path'], 'sub_labels': dir_data.get('sub_labels', [])} for dir_data in data.get('dirs', [])]
    not_dirs = [{'path': dir_data['path'], 'sub_labels': dir_data.get('sub_labels', [])} for dir_data in data.get('notDirs', [])]
    files = [{'path': file_data['path'], 'sub_labels': file_data.get('sub_labels', [])} for file_data in data.get('files', [])]
    not_files = [{'path': file_data['path'], 'sub_labels': file_data.get('sub_labels', [])} for file_data in data.get('notFiles', [])]
    # Generated TUs that never get issues, plus user-excluded files.
    # NOTE(review): notFiles entries are repo-relative paths but the walk
    # below compares bare filenames — path-style exclusions may never
    # match here. Confirm against get_translation_units' behavior.
    ignore_files = [
        "ctx.c",
        "unknown_translation_unit.cpp",
        "unknown_translation_unit_bss.cpp",
    ] + [file['path'] for file in not_files]
    ignore_dirs = [
        "build",
        "tools",
        "expected"
    ] + [directory['path'] for directory in not_dirs]
    sub_labels = []
    for file in files:
        if file['path'] not in ignore_files and file['path'].endswith((".c", ".cpp")):
            sub_labels.append(file['sub_labels'])
    for directory in dirs:
        for root, _, files in os.walk(directory['path']):
            if any(ignore_dir in root for ignore_dir in ignore_dirs):
                continue
            for file in files:
                if file not in ignore_files and file.endswith((".c", ".cpp")):
                    # One copy of the directory's labels per matching file.
                    sub_labels.append(directory['sub_labels'])
    # Flatten the per-entry label lists, then deduplicate.
    sub_labels = [item for sublist in sub_labels for item in sublist]
    # Convert the list of strings to a set
    sub_labels = set(sub_labels)
    return sub_labels

51
tools/libgithub/view.py Normal file
View File

@ -0,0 +1,51 @@
# This is mostly useless until Github extends their API to allow for view creation.
from .graphql import GraphQLClient
from dataclasses import dataclass
from logger import LOG
@dataclass
class View:
    """A ProjectV2 view (board/table tab).

    NOTE(review): declared as a dataclass but defines no fields —
    ``name``, ``number`` and ``layout`` are presumably assigned by the
    caller before use. Confirm and declare them as fields.
    """
    def set_layoutout(self, layout: str):
        # Record the desired view layout.
        # NOTE(review): method name looks like a typo for set_layout —
        # kept as-is since callers may already use it.
        self.layout = layout

    # Doesn't actually work (yet)
    def check_and_create(self):
        """Create the view unless one with the same name already exists."""
        LOG.debug(f'Checking if view {self.name} exists')
        query = '''
        query ($projectNumber: String!) {
          node(id: $projectNumber) {
            ... on ProjectV2 {
              views {
                nodes {
                  name
                  number
                }
              }
            }
          }
        }
        '''
        variables = {
            "projectNumber": self.number
        }
        data = GraphQLClient.get_instance().make_request(query, variables)
        if data:
            views = data['data']['node']['views']['nodes']
            LOG.info(f'Views: {views}')
            for view in views:
                if view['name'] == self.name:
                    LOG.info(f'View {self.name} exists')
                    return
            LOG.info(f'View {self.name} does not exist, creating')
            self.create()
        else:
            LOG.warning(f'No views found for project {self.number}')

    # Waiting on Github to update their API
    def create(self):
        """Not implemented: the GraphQL API cannot create views yet."""
        pass

21
tools/logger.py Normal file
View File

@ -0,0 +1,21 @@
# ``sys`` is needed by the ImportError handler below, so it must be
# imported unconditionally first — inside the try, a failure before
# ``import sys`` would turn the handler itself into a NameError.
import sys

try:
    import logging
    from rich.logging import RichHandler
    from rich.console import Console
except ImportError as e:
    print(f"Failed to import module in {__file__}: {e}")
    sys.exit(1)

# Shared console so log output and other rich rendering interleave cleanly.
CONSOLE = Console()

# Route all logging through rich for colorized output and rich tracebacks.
logging.basicConfig(
    level="NOTSET",
    format="%(message)s",
    datefmt="[%X]",
    handlers=[RichHandler(console=CONSOLE, rich_tracebacks=True)],
)

# Project-wide logger; commands raise this to DEBUG via --debug.
LOG = logging.getLogger("rich")
LOG.setLevel(logging.INFO)

13005
tools/pjstate.yml Normal file

File diff suppressed because it is too large Load Diff

243
tools/projects.yml Normal file
View File

@ -0,0 +1,243 @@
# This file controls what projects, issues and labels will be created in Github
#
#
#
# title - Title of the project. Titles are also used as labels on issues
# views - Not supported yet (thanks github)
# dirs - Includes any TUs inside the listed dirs, recursively
# notDirs - Excludes specific dirs from the dirs selection
# files - Includes any TUs manually specified
# notFiles - Excludes any TUs manually specified
# sub_labels - Additional labels used for sub categorization on large projects
# so that views can be built later, and also so identically named TUs can
# be distinguished
- project:
title: tp_TRK_MINNOW_DOLPHIN
dirs:
- path: libs/TRK_MINNOW_DOLPHIN/GCN/EXI2_DDH_GCN
sub_labels: ["TRK_MINNOW_DOLPHIN/EXI2_DDH_GCN"]
- path: libs/TRK_MINNOW_DOLPHIN/GCN/EXI2_GDEV_GCN
sub_labels: ["TRK_MINNOW_DOLPHIN/EXI2_GDEV_GCN"]
- path: libs/TRK_MINNOW_DOLPHIN/MetroTRK
sub_labels: ["TRK_MINNOW_DOLPHIN/MetroTRK"]
- path: libs/TRK_MINNOW_DOLPHIN/ppc
sub_labels: ["TRK_MINNOW_DOLPHIN/ppc"]
- path: libs/TRK_MINNOW_DOLPHIN/Os
sub_labels: ["TRK_MINNOW_DOLPHIN/dolphin"]
- path: libs/TRK_MINNOW_DOLPHIN/utils
sub_labels: ["TRK_MINNOW_DOLPHIN/utils"]
- project:
title: tp_Actors
dirs:
- path: rel/d/a/d_a_alldie
- path: rel/d/a/d_a_andsw
- path: rel/d/a/d_a_andsw2
- path: rel/d/a/d_a_arrow
- path: rel/d/a/d_a_balloon_2D
- path: rel/d/a/d_a_bd
- path: rel/d/a/d_a_bg
- path: rel/d/a/d_a_bg_obj
- path: rel/d/a/d_a_boomerang
- path: rel/d/a/d_a_bullet
- path: rel/d/a/d_a_canoe
- path: rel/d/a/d_a_coach_2D
- path: rel/d/a/d_a_coach_fire
- path: rel/d/a/d_a_cow
- path: rel/d/a/d_a_crod
- path: rel/d/a/d_a_cstaF
- path: rel/d/a/d_a_cstatue
- path: rel/d/a/d_a_demo00
- path: rel/d/a/d_a_demo_item
- path: rel/d/a/d_a_disappear
- path: rel/d/a/d_a_dmidna
- path: rel/d/a/d_a_do
- path: rel/d/a/d_a_dshutter
- path: rel/d/a/d_a_econt
- path: rel/d/a/d_a_ep
- path: rel/d/a/d_a_formation_mng
- path: rel/d/a/d_a_fr
- path: rel/d/a/d_a_grass
- path: rel/d/a/d_a_guard_mng
- path: rel/d/a/d_a_hitobj
- path: rel/d/a/d_a_horse
- path: rel/d/a/d_a_hozelda
- path: rel/d/a/d_a_izumi_gate
- path: rel/d/a/d_a_kago
- path: rel/d/a/d_a_L7demo_dr
- path: rel/d/a/d_a_L7low_dr
- path: rel/d/a/d_a_L7op_demo_dr
- path: rel/d/a/d_a_mant
- path: rel/d/a/d_a_mg_fish
- path: rel/d/a/d_a_mg_fshop
- path: rel/d/a/d_a_mg_rod
- path: rel/d/a/d_a_midna
- path: rel/d/a/d_a_mirror
- path: rel/d/a/d_a_movie_player
- path: rel/d/a/d_a_myna
- path: rel/d/a/d_a_nbomb
- path: rel/d/a/d_a_ni
- path: rel/d/a/d_a_passer_mng
- path: rel/d/a/d_a_path_line
- path: rel/d/a/d_a_peru
- path: rel/d/a/d_a_ppolamp
- path: rel/d/a/d_a_scene_exit
- path: rel/d/a/d_a_scene_exit2
- path: rel/d/a/d_a_set_bgobj
- path: rel/d/a/d_a_shop_item
- path: rel/d/a/d_a_skip_2D
- path: rel/d/a/d_a_spinner
- path: rel/d/a/d_a_sq
- path: rel/d/a/d_a_startAndGoal
- path: rel/d/a/d_a_suspend
- path: rel/d/a/d_a_swBall
- path: rel/d/a/d_a_swc00
- path: rel/d/a/d_a_swhit0
- path: rel/d/a/d_a_swLBall
- path: rel/d/a/d_a_swTime
- path: rel/d/a/d_a_talk
- path: rel/d/a/d_a_tbox
- path: rel/d/a/d_a_tbox2
- path: rel/d/a/d_a_tboxSw
- path: rel/d/a/d_a_title
- path: rel/d/a/d_a_vrbox
- path: rel/d/a/d_a_vrbox2
- path: rel/d/a/d_a_warp_bug
- path: rel/d/a/d_a_ykgr
- path: rel/d/a/door
sub_labels: ["Actors/Door"]
- path: rel/d/a/e
sub_labels: ["Actors/Enemy"]
- path: rel/d/a/b
sub_labels: ["Actors/Boss"]
- path: rel/d/a/kytag
sub_labels: ["Actors/Kytag"]
- path: rel/d/a/npc
sub_labels: ["Actors/NPC"]
- path: rel/d/a/obj
sub_labels: ["Actors/Object"]
- path: rel/d/a/tag
sub_labels: ["Actors/Tag"]
files:
- path: src/d/a/d_a_alink.cpp
- path: src/d/a/d_a_horse_static.cpp
- path: src/d/a/d_a_item_static.cpp
- path: src/d/a/d_a_itembase.cpp
- path: src/d/a/d_a_itembase_static.cpp
- path: src/d/a/d_a_player.cpp
- path: src/d/a/d_a_no_chg_room.cpp
- path: src/d/a/d_a_shop_item_static.cpp
- path: src/d/a/d_a_npc.cpp
sub_labels: ["Actors/NPC"]
- path: src/d/a/d_a_npc_cd.cpp
sub_labels: ["Actors/NPC"]
- path: src/d/a/d_a_npc_cd2.cpp
sub_labels: ["Actors/NPC"]
- path: src/d/a/d_a_obj.cpp
sub_labels: ["Actors/Object"]
- path: src/d/a/d_a_obj_item.cpp
sub_labels: ["Actors/Object"]
- path: src/d/a/d_a_obj_ss_base.cpp
sub_labels: ["Actors/Object"]
- project:
title: tp_dolphin
dirs:
- path: libs/dolphin
- project:
title: tp_dolzel
dirs:
- path: src/d
- path: src/msg
notDirs:
- path: src/d/a
files:
- path: src/c/c_damagereaction.cpp
- project:
title: tp_Framework
dirs:
- path: src/f_ap
- path: src/f_op
- path: src/f_pc
- project:
title: tp_JSystem
dirs:
- path: libs/JSystem/J2DGraph
sub_labels: ["JSystem/J2DGraph"]
- path: libs/JSystem/J3DGraphAnimator
sub_labels: ["JSystem/J3DGraphAnimator"]
- path: libs/JSystem/J3DGraphBase
sub_labels: ["JSystem/J3DGraphBase"]
- path: libs/JSystem/J3DGraphLoader
sub_labels: ["JSystem/J3DGraphLoader"]
- path: libs/JSystem/J3DU
sub_labels: ["JSystem/J3DU"]
- path: libs/JSystem/JAudio2
sub_labels: ["JSystem/JAudio2"]
- path: libs/JSystem/JFramework
sub_labels: ["JSystem/JFramework"]
- path: libs/JSystem/JGadget
sub_labels: ["JSystem/JGadget"]
- path: libs/JSystem/JKernel
sub_labels: ["JSystem/JKernel"]
- path: libs/JSystem/JMath
sub_labels: ["JSystem/JMath"]
- path: libs/JSystem/JMessage
sub_labels: ["JSystem/JMessage"]
- path: libs/JSystem/JParticle
sub_labels: ["JSystem/JParticle"]
- path: libs/JSystem/JStage
sub_labels: ["JSystem/JStage"]
- path: libs/JSystem/JStudio/JStudio_JAudio2
sub_labels: ["JSystem/JStudio_JAudio2"]
- path: libs/JSystem/JStudio/JStudio_JStage
sub_labels: ["JSystem/JStudio_JStage"]
- path: libs/JSystem/JStudio/JStudio_JParticle
sub_labels: ["JSystem/JStudio_JParticle"]
- path: libs/JSystem/JStudio/JStudio
sub_labels: ["JSystem/JStudio"]
- path: libs/JSystem/JSupport
sub_labels: ["JSystem/JSupport"]
- path: libs/JSystem/JUtility
sub_labels: ["JSystem/JUtility"]
- project:
title: tp_m_Do
dirs:
- path: src/m_Do
- project:
title: tp_Misc
dirs:
- path: libs/odenotstub
- path: libs/odemuexi2
- path: libs/amcstubs
files:
- path: src/init.c
- path: src/__start.c
- path: src/DynamicLink.cpp
- path: src/c/c_dylink.cpp
- project:
title: tp_MSL_C
dirs:
- path: libs/MSL_C
- project:
title: tp_Runtime.PPCEABI.H
dirs:
- path: libs/Runtime.PPCEABI.H
- project:
title: tp_SSystem
dirs:
- path: libs/SSystem
- project:
title: tp_Z2AudioLib
dirs:
- path: libs/Z2AudioLib

View File

@ -10,3 +10,4 @@ cxxfilt
pyelftools pyelftools
requests requests
GitPython GitPython
clang

View File

@ -35,10 +35,9 @@ def _handle_import_error(ex: ImportError):
try: try:
import click import click
import libdol import libdol
import libarc import libgithub
import requests import requests
import glob import glob
import git
from rich.logging import RichHandler from rich.logging import RichHandler
from rich.console import Console from rich.console import Console
@ -738,7 +737,6 @@ def calculate_progress(build_path: Path, matching: bool, format: str, print_rels
LOG.error("unknown format: '{format}'") LOG.error("unknown format: '{format}'")
def find_function_range(asm: Path) -> Tuple[int, int]: def find_function_range(asm: Path) -> Tuple[int, int]:
with asm.open("r", encoding="utf-8") as file: with asm.open("r", encoding="utf-8") as file:
lines = file.readlines() lines = file.readlines()
@ -1030,7 +1028,6 @@ def find_includes(lines: List[str], non_matching: bool, ext: str = ".s") -> Set[
def find_used_asm_files(non_matching: bool, use_progress_bar: bool = True) -> Set[Path]: def find_used_asm_files(non_matching: bool, use_progress_bar: bool = True) -> Set[Path]:
cpp_files = find_all_files() cpp_files = find_all_files()
includes = set() includes = set()
@ -1200,6 +1197,250 @@ def check_sha1(game_path: Path, build_path: Path, include_rels: bool):
return True return True
#
# Github Command Helpers
#
import functools
def common_github_options(func):
    """Decorator bundling the CLI options shared by every github-* command.

    Adds --debug, --personal-access-token, --owner and --repo. The click
    option decorators attach their parameters to ``wrapper``, which
    forwards all arguments straight through to ``func``.
    """
    @click.option("--debug/--no-debug")
    @click.option(
        "--personal-access-token",
        help="Github Personal Access Token for authorizing API calls.",
        required=False,
        # Fall back to the conventional environment variable when set.
        default=os.environ.get('GITHUB_TOKEN')
    )
    @click.option(
        "--owner",
        help="Github repo owner",
        required=False,
        default="zeldaret"
    )
    @click.option(
        "--repo",
        help="Github repository name",
        required=False,
        default="tp"
    )
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper
def prereqs(owner: str, repo: str, personal_access_token: str):
    """Initialize the shared GitHub state every github-* command needs."""
    # GraphQL client singleton used by all API calls.
    libgithub.GraphQLClient.setup(personal_access_token)
    # Repo/owner identity classvars, then resolve their node IDs.
    owner_info = libgithub.OwnerInfo()
    owner_info.name = owner
    libgithub.RepoInfo.owner = owner_info
    libgithub.RepoInfo.name = repo
    libgithub.RepoInfo.set_ids()
    # Cached project/issue/label state from previous syncs.
    libgithub.StateFile.load("tools/pjstate.yml")
def load_from_yaml(type: str) -> list:
    """Parse tools/projects.yml into libgithub objects of the given kind.

    type: one of "labels", "issues" or "projects"; anything else exits.
    (Return annotation fixed: ``any`` is the builtin function, not a type;
    each get_all_from_yaml branch yields a list.)
    """
    with open("./tools/projects.yml", 'r') as stream:
        try:
            # Imported lazily so the CLI still starts when PyYAML is absent.
            import yaml
            projects_data = yaml.safe_load(stream)
            LOG.debug(f"Loaded projects.yml data: {projects_data}")
            match type:
                case "labels":
                    ret_data = libgithub.Label.get_all_from_yaml(projects_data)
                case "issues":
                    ret_data = libgithub.Issue.get_all_from_yaml(projects_data)
                case "projects":
                    ret_data = libgithub.Project.get_all_from_yaml(projects_data)
                case _:
                    LOG.error(f"Invalid type: {type}")
                    sys.exit(1)
            return ret_data
        except ImportError:
            LOG.error("Can't import yaml, exiting.")
            sys.exit(1)
        except yaml.YAMLError as error:
            LOG.error(f"Error loading YAML: {error}")
            sys.exit(1)
#
# Github Sync Commands
#
@tp.command(name="github-sync-labels", help="Creates all labels based on tools/projects.yml")
@common_github_options
def github_sync_labels(debug: bool, personal_access_token: str, owner: str, repo: str):
    """Ensure every label declared in projects.yml exists on GitHub."""
    if debug:
        LOG.setLevel(logging.DEBUG)
    prereqs(owner, repo, personal_access_token)
    declared_labels = load_from_yaml("labels")
    LOG.info("Syncing up labels")
    for label in declared_labels:
        label.check_and_create()
@tp.command(name="github-sync-issues", help="Creates all issues and labels based on tools/projects.yml")
@common_github_options
def github_sync_issues(debug: bool, personal_access_token: str, owner: str, repo: str):
    """Ensure every issue declared in projects.yml exists on GitHub."""
    if debug:
        LOG.setLevel(logging.DEBUG)
    prereqs(owner, repo, personal_access_token)
    declared_issues = load_from_yaml("issues")
    LOG.info("Syncing up issues")
    for issue in declared_issues:
        issue.check_and_create()
@tp.command(name="github-sync-projects", help="Creates all projects, issues and labels based on tools/projects.yml")
@common_github_options
def github_sync_projects(debug: bool, personal_access_token: str, owner: str, repo: str):
    """Ensure every project declared in projects.yml exists on GitHub."""
    if debug:
        LOG.setLevel(logging.DEBUG)
    prereqs(owner, repo, personal_access_token)
    declared_projects = load_from_yaml("projects")
    LOG.info("Syncing up projects")
    for project in declared_projects:
        project.check_and_create()
@tp.command(name="github-check-update-status", help="Checks all issues and updates their status based on their local file path.")
@common_github_options
@click.option(
    '--filename','filenames',
    multiple=True,
    type=click.Path(exists=True)
)
@click.option(
    '--all',
    help="Check all items in every project and update their status.",
    is_flag=True,
    default=False
)
@click.option(
    '--clang-lib-path',
    help="Path to libclang.so",
    default="/usr/lib/x86_64-linux-gnu/libclang-16.so"
)
def github_check_update_status(debug: bool, personal_access_token: str, owner: str, repo: str, filenames: Tuple[click.Path], all: bool, clang_lib_path: str):
    """Classify each TU with libclang and push its status to the project board.

    NOTE(review): several log messages below print the literal "(unknown)"
    where the filename was presumably intended — confirm and restore the
    f-string placeholders in a follow-up.
    """
    if debug:
        LOG.setLevel("DEBUG")
    prereqs(owner, repo, personal_access_token)
    issues = libgithub.StateFile.data.get('issues')
    projects = libgithub.StateFile.data.get('projects')
    filenames_list = list(filenames)
    # If all flag is set, check all issue file paths in state file
    if all:
        for issue in issues:
            filenames_list.append(issue["file_path"])
    # Imported lazily: only this command needs libclang.
    import classify_tu, clang
    # Set the clang library file
    clang.cindex.Config.set_library_file(clang_lib_path)
    for filename in filenames_list:
        LOG.info(f"Classifying TU (unknown)")
        status = classify_tu.run(filename)
        LOG.debug(f"Classification result: {status}")
        if status == "error":
            LOG.error(f"Error classifying TU (unknown)")
            sys.exit(1)
        # Find the matching issue_id for the filename
        issue_id = None
        for issue in issues:
            if issue["file_path"] == filename:
                issue_id = issue["id"]
                break
        if issue_id is None:
            LOG.error(f"Couldn't find issue_id for (unknown). Run github-sync-issues first.")
            sys.exit(1)
        # Find the matching project_id, item_id and status_field for the issue_id
        # NOTE(review): only the inner loop breaks — the outer project loop
        # keeps scanning after a match (harmless but wasteful). item_id and
        # status_field stay unbound when nothing matches, which the
        # project_id guard below protects against.
        project_id = None
        for project in projects:
            for item in project["items"]:
                if item["issue_id"] == issue_id:
                    project_id = project["id"]
                    item_id = item["item_id"]
                    status_field = project["status_field"]
                    break
        if project_id is None:
            LOG.error(f"Couldn't find project_id associated with (unknown). Run github-sync-projects first.")
            sys.exit(1)
        libgithub.Project(id=project_id,status_field=status_field).set_status_for_item(item_id, status)
        # A "done" TU also closes its issue.
        if status == "done":
            libgithub.Issue(id=issue_id).set_closed()
#
# Github Clean Commands
#
@tp.command(name="github-clean-labels", help="Delete all labels for a given owner/repository.")
@common_github_options
def github_clean_labels(debug: bool, personal_access_token: str, owner: str, repo: str) -> None:
    """Interactively delete every label on the target repository.

    Prompts for confirmation; anything other than an explicit 'y' aborts
    without touching the repository.
    """
    if debug:
        LOG.setLevel("DEBUG")
    LOG.warning(f"This command will completely delete all labels for {owner}/{repo}. Are you sure you want to do this? (y/n)")
    answer = input().lower()
    if answer != 'y':
        sys.exit(0)
    prereqs(owner, repo, personal_access_token)
    libgithub.Label.delete_all()
@tp.command(name="github-clean-issues", help="Delete all issues for a given owner/repository.")
@common_github_options
def github_clean_issues(debug: bool, personal_access_token: str, owner: str, repo: str):
    """Interactively delete every issue on the target repository.

    Prompts for confirmation; anything other than an explicit 'y' aborts
    without touching the repository.
    """
    if debug:
        LOG.setLevel("DEBUG")
    LOG.warning(f"This command will completely delete all issues for {owner}/{repo}. Are you sure you want to do this? (y/n)")
    answer = input().lower()
    if answer != 'y':
        sys.exit(0)
    prereqs(owner, repo, personal_access_token)
    libgithub.Issue.delete_all()
@tp.command(name="github-clean-projects", help="Delete all projects for a given owner/repository.")
@common_github_options
def github_clean_projects(debug: bool, personal_access_token: str, owner: str, repo: str):
    """Interactively delete every project on the target repository.

    Prompts for confirmation; anything other than an explicit 'y' aborts
    without touching the repository.
    """
    if debug:
        LOG.setLevel("DEBUG")
    LOG.warning(f"This command will completely delete all projects for {owner}/{repo}. Are you sure you want to do this? (y/n)")
    answer = input().lower()
    if answer != 'y':
        sys.exit(0)
    prereqs(owner, repo, personal_access_token)
    libgithub.Project.delete_all()
#
# Progress Command Helpers
#
def copy_progress_script() -> None: def copy_progress_script() -> None:
file_path = './tools/tp.py' file_path = './tools/tp.py'
destination_path = './tools/tp_copy.py' destination_path = './tools/tp_copy.py'
@ -1254,8 +1495,14 @@ def generate_progress(commit: str, wibo_path: Optional[str] = None) -> None:
LOG.debug(f"stdout: {stdout.decode()}") LOG.debug(f"stdout: {stdout.decode()}")
def checkout_and_run(repo_path: str, start_commit_hash: str, wibo_path: Optional[str] = None) -> None: def checkout_and_run(repo_path: str, start_commit_hash: str, wibo_path: Optional[str] = None) -> None:
try:
import git
repo = git.Repo(repo_path) repo = git.Repo(repo_path)
head_commit = repo.head.commit head_commit = repo.head.commit
except ImportError:
LOG.error("Can't import git, exiting.")
sys.exit(1)
copy_progress_script() copy_progress_script()
make_progress_dir() make_progress_dir()
@ -1274,6 +1521,10 @@ def checkout_and_run(repo_path: str, start_commit_hash: str, wibo_path: Optional
LOG.debug(f"Checking out origin head commit: {head_commit.hexsha}") LOG.debug(f"Checking out origin head commit: {head_commit.hexsha}")
repo.git.checkout(head_commit.hexsha) repo.git.checkout(head_commit.hexsha)
#
# Progress Commands
#
@tp.command(name="progress-history") @tp.command(name="progress-history")
@click.option("--debug/--no-debug", default=False) @click.option("--debug/--no-debug", default=False)
@click.option("--repo-path", default=".", required=False, help="Path to your git repository. Defaults to current directory.") @click.option("--repo-path", default=".", required=False, help="Path to your git repository. Defaults to current directory.")