INIT: first commit
25
README.md
Normal file
@@ -0,0 +1,25 @@
# Summary

This folder contains a Python script that automatically computes the size of projects in bytes, as well as the number of files they contain. This information matches the statistics Windows shows when you right-click a folder and open Properties.
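For reference, the core of the computation is a recursive directory walk that sums file sizes and counts files, as `main.py` below does. A minimal standalone sketch of that idea (the helper name and the path are placeholders):

```
import os

def folder_stats(folder_path):
    # Walk the folder tree, summing file sizes in bytes and counting files,
    # which is what Windows Properties reports for "Size" and file count.
    size, files = 0, 0
    for path, dirs, filenames in os.walk(folder_path):
        for name in filenames:
            size += os.path.getsize(os.path.join(path, name))
            files += 1
    return size, files

print(folder_stats("path\\to\\your\\folder_1"))  # placeholder path
```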
# Instructions

To get started, you need to provide a list of folders to scan. Run the following command:
```.\venv\Scripts\python.exe init_args.py```
A new file, `args.json`, is created in the current folder. It contains the structure to follow for listing the project folders to scan (the same placeholder content that `init_args.py` writes):

```
{
    "folder_paths": [
        "path\\to\\your\\folder_1",
        "path\\to\\your\\folder_2",
        "...",
        "path\\to\\your\\folder_n"
    ]
}
```
You can fill it in manually, or take the automatic approach by running the following command:
```.\venv\Scripts\python.exe populate_args_with_all_project_folders.py```
Then run the following command:
```.\venv\Scripts\python.exe main.py```
When the script finishes, a CSV file `output.csv` is created, containing the list of projects, their size and their number of files.
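For reference, a minimal sketch of what `output.csv` may look like, using the header that `main.py` writes (`folder_path`, `size_B`, `files`); the row below contains purely illustrative placeholder values:

```
folder_path,size_B,files
G:\_PRO\CLIPS\EXAMPLE_PROJECT,1073741824,245
```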
13
init_args.py
Normal file
@@ -0,0 +1,13 @@
# This script generates a brand new args.json file
import json

data = {}
data['folder_paths'] = [
    "path\\to\\your\\folder_1",
    "path\\to\\your\\folder_2",
    "...",
    "path\\to\\your\\folder_n"
]

with open('args.json', 'w') as file:
    file.write(json.dumps(data, indent=4))
58
main.py
Normal file
@@ -0,0 +1,58 @@
# This script computes the sizes of the folders listed in args.json

import os
import json
import csv
from tqdm import tqdm


class Folder:
    def __init__(self, folder_path, desc_prefix=""):
        self.folder_path = folder_path
        self.size = 0
        self.files = 0
        self.desc_prefix = desc_prefix

    def desc(self) -> str:
        return f"{self.desc_prefix}{self.folder_path} - {self.size}B"

    def get_size(self) -> int:
        """Return the size in Bytes of the folder"""
        p_bar = tqdm(desc=self.desc(), unit=' file')
        for path, dirs, files in os.walk(self.folder_path):
            for f in files:
                fp = os.path.join(path, f)
                self.size += os.path.getsize(fp)
                self.files += 1
                p_bar.set_description(self.desc(), refresh=False)
                p_bar.update()

        p_bar.set_description(self.desc(), refresh=True)
        p_bar.close()
        return self.size


if __name__ == "__main__":
    # Initialize CSV
    csv_file = open('output.csv', 'w', newline='')
    spamwriter = csv.writer(csv_file, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
    spamwriter.writerow(['folder_path', 'size_B', 'files'])

    folders = []

    with open('args.json', 'r') as file:
        data = json.load(file)

    nb_folders = len(data['folder_paths'])

    for i, folder_path in enumerate(data['folder_paths']):
        folders.append(Folder(folder_path, desc_prefix=f"[{i+1: >3}/{nb_folders}] "))

    # Calculate the size of all folders
    for folder in folders:
        folder.get_size()
        # Write info in CSV
        spamwriter.writerow([folder.folder_path, folder.size, folder.files])

    csv_file.close()
49
populate_args_with_all_project_folders.py
Normal file
@@ -0,0 +1,49 @@
# This script parses 'TRAVAIL_VIDEO' to write all the project folders into args.json
import os
import json

folders_list = []

### CLIPS
root_path = os.path.join("G:\\_PRO\\CLIPS")
folders_list += [ f.path for f in os.scandir(root_path) if f.is_dir() and not f.name.startswith('_')]
folders_list += [ f.path for f in os.scandir(
    os.path.join(root_path, "_ARCHIVES (EXPORTS SEULS)")
) if f.is_dir()]


### COMMERCIAL
root_path = os.path.join("G:\\_PRO\\COMMERCIAL")
prods = [ f.name for f in os.scandir(root_path) if f.is_dir() and not f.name.startswith('_')]

for prod_name in prods:
    folders_list += [f.path for f in os.scandir(os.path.join(root_path, prod_name)) if f.is_dir()]


### DOCU
root_path = os.path.join("G:\\_PRO\\DOCU")
prods = [ f.name for f in os.scandir(root_path) if f.is_dir() and not f.name.startswith('_')]

for prod_name in prods:
    folders_list += [f.path for f in os.scandir(os.path.join(root_path, prod_name)) if f.is_dir()]


### FICTIONS
root_path = os.path.join("G:\\_PRO\\FICTIONS")
prods = [ f.name for f in os.scandir(root_path) if f.is_dir() and not f.name.startswith('_')]

for prod_name in prods:
    folders_list += [f.path for f in os.scandir(os.path.join(root_path, prod_name)) if f.is_dir()]


### WEB
root_path = os.path.join("G:\\_PRO\\WEB")
folders_list += [ f.path for f in os.scandir(root_path) if f.is_dir() and not f.name.startswith('_')]


# Write args.json
data = {}
data['folder_paths'] = folders_list

with open('args.json', 'w') as file:
    file.write(json.dumps(data, indent=4))