Compare commits

2 Commits

3 changed files with 15 additions and 2 deletions

@@ -3,9 +3,13 @@ from utils.inspector import generate_hashes_gradebook, generate_duplicate_hashes
 def main():
-    gradebook_dir_name = ' '.join(sys.argv[1:]) if len(sys.argv) > 1 else exit(f'\nNo gradebook dir name given. Provide the name as an argument.\n\nUsage: python {sys.argv[0]} [gradebook dir name]\nExample: python {sys.argv[0]} AssignmentX\n')
+    gradebook_dir_name = ' '.join(sys.argv[1:]) if len(sys.argv) > 1 else exit(f'\nNo gradebook directory name given. Provide the name as an argument.\n\nUsage: python {sys.argv[0]} [gradebook dir name]\nExample: python {sys.argv[0]} AssignmentX\n')
     gradebook_dir_path = os.path.join('BB_gradebooks', gradebook_dir_name)
+    if not os.path.exists(gradebook_dir_path):
+        exit('[Info] Gradebook directory does not exist - nothing to inspect')
+    if not os.listdir(gradebook_dir_path):  # if no files in gradebook dir
+        exit('[Info] No files found in this gradebook - nothing to inspect')
     # generate CSV file with hashes for all files in gradebook & return path to CSV file for finding duplicate hashes
     hashes_csv_file_path = generate_hashes_gradebook(gradebook_dir_path)
     # generate CSV file with files having duplicate hashes
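Note: the `' '.join(sys.argv[1:])` pattern above lets a gradebook directory name containing spaces be passed without quoting. A quick illustration (the script name is hypothetical):

```python
import sys

# invoked as: python inspect_gradebook.py Assignment X
# sys.argv == ['inspect_gradebook.py', 'Assignment', 'X']
gradebook_dir_name = ' '.join(sys.argv[1:])  # -> 'Assignment X'
```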

@@ -3,9 +3,13 @@ from utils.inspector import generate_hashes_submissions, generate_duplicate_hashes
 def main():
-    submissions_dir_name = ' '.join(sys.argv[1:]) if len(sys.argv) > 1 else exit(f'\nNo submissions dir name given. Provide the name as an argument.\n\nUsage: python {sys.argv[0]} [submissions dir name]\nExample: python {sys.argv[0]} AssignmentX\n')
+    submissions_dir_name = ' '.join(sys.argv[1:]) if len(sys.argv) > 1 else exit(f'\nNo submissions directory name given. Provide the name as an argument.\n\nUsage: python {sys.argv[0]} [submissions dir name]\nExample: python {sys.argv[0]} AssignmentX\n')
     submissions_dir_path = os.path.join('BB_submissions', submissions_dir_name)
+    if not os.path.exists(submissions_dir_path):
+        exit('[Info] Directory does not exist - nothing to inspect')
+    if not os.listdir(submissions_dir_path):  # if no files in dir
+        exit('[Info] No files found in this submissions directory - nothing to inspect')
     # generate CSV file with hashes for all files in submissions (except for any 'excluded') & return path to CSV file for finding duplicate hashes
     hashes_csv_file_path = generate_hashes_submissions(submissions_dir_path)
     # generate CSV file with files having duplicate hashes
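Both entry points delegate the actual hashing to helpers in `utils/inspector.py`, only fragments of which appear in the hunks below. A minimal sketch of the per-file hashing such a helper presumably performs; the hash algorithm and the exact record shape are assumptions inferred from the keys used further down:

```python
import hashlib
import os

def get_hashes_in_dir(dir_path: str) -> list[dict]:
    """Sketch only: walk dir_path and hash every file found."""
    records = []
    for root, _dirs, files in os.walk(dir_path):
        for filename in files:
            filepath = os.path.join(root, filename)
            with open(filepath, 'rb') as f:
                digest = hashlib.md5(f.read()).hexdigest()  # MD5 assumed, not confirmed by this diff
            records.append({'filename': filename, 'filepath': filepath, 'hash': digest})
    return records
```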

@@ -47,6 +47,8 @@ def generate_hashes_gradebook(gradebook_dir_path: str) -> str:  # main function
     dicts_with_hashes_list = get_hashes_in_dir(gradebook_dir_path)
     for hash_dict in dicts_with_hashes_list:
         student_id = hash_dict['filename'].split('_attempt_')[0].split('_')[-1]
+        full_path = os.path.join(os.getcwd(), hash_dict["filepath"])
+        hash_dict['filename'] = f'=HYPERLINK("{full_path}", "{hash_dict["filename"]}")'
         del hash_dict['filepath']
         hash_dict.update({'Student ID': student_id})
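The student ID here is recovered from Blackboard's gradebook export naming convention (`<assignment>_<student id>_attempt_<timestamp>_<original name>`). A worked example of the split above, using an illustrative filename not taken from the diff:

```python
filename = 'AssignmentX_jdoe3_attempt_2024-01-15-10-30-00_report.pdf'
student_id = filename.split('_attempt_')[0].split('_')[-1]
print(student_id)  # -> jdoe3
```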
@@ -76,6 +78,9 @@ def generate_hashes_submissions(submissions_dir_path: str) -> str:  # main function
     student_dicts_list = []
     for hash_dict in student_dicts_with_hashes_list:
         hash_dict.update({'Student ID': student_dir_name})  # update hash records with student ID
+        full_path = os.path.join(os.getcwd(), hash_dict["filepath"])
+        hash_dict['filepath'] = f'=HYPERLINK("{full_path}", "{hash_dict["filepath"]}")'
+        hash_dict['filename'] = f'=HYPERLINK("{full_path}", "{hash_dict["filename"]}")'
         student_dicts_list.append(hash_dict)  # append file dict to student list of dicts for CSV export
     dicts_with_hashes_list.append(student_dicts_list)  # append student hashes to main list with all submissions
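The point of the `=HYPERLINK("target", "label")` cells is that spreadsheet applications such as Excel or LibreOffice evaluate them as clickable links when the CSV is opened. A minimal sketch of the export step, assuming the per-student lists are flattened and written with `csv.DictWriter`; the actual writer code is not shown in this diff, and the sample row is made up:

```python
import csv

# One per-student list with one illustrative record, shaped like the dicts built above
dicts_with_hashes_list = [
    [{'filename': '=HYPERLINK("/abs/path/hw.py", "hw.py")',
      'filepath': '=HYPERLINK("/abs/path/hw.py", "jdoe3/hw.py")',
      'hash': 'd41d8cd98f00b204e9800998ecf8427e',
      'Student ID': 'jdoe3'}],
]
rows = [row for student_rows in dicts_with_hashes_list for row in student_rows]  # flatten
with open('hashes.csv', 'w', newline='') as f:
    writer = csv.DictWriter(f, fieldnames=list(rows[0].keys()))
    writer.writeheader()
    writer.writerows(rows)
```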