Remove terminal args parameters
Heizenburg committed May 20, 2024
1 parent 9f25f64 commit 8c4a4a9
Showing 7 changed files with 408 additions and 4,858 deletions.
Binary file not shown.
2 changes: 1 addition & 1 deletion notebooks/ESTs/devland.ipynb
@@ -155,7 +155,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.9"
"version": "3.11.3"
},
"orig_nbformat": 4
},
84 changes: 33 additions & 51 deletions notebooks/ESTs/woermann brock.ipynb
@@ -11,18 +11,9 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"c:\\Users\\tsello01\\AppData\\Local\\anaconda3\\lib\\site-packages\\paramiko\\transport.py:219: CryptographyDeprecationWarning: Blowfish has been deprecated\n",
" \"class\": algorithms.Blowfish,\n"
]
}
],
"outputs": [],
"source": [
"import pandas as pd\n",
"import re\n",
@@ -35,40 +26,45 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"folder_path = r'R:\\RawData\\Elite Star\\\\Woermann Brock\\ToUpload'\n",
"done_path = r'R:\\RawData\\Elite Star\\\\Woermann Brock\\\\Done'"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"folder_path = r'R:\\RawData\\Elite Star\\\\Woermann Brock\\ToUpload'"
"def sort_files_by_date(file_path):\n",
" files = os.listdir(file_path)\n",
" files.sort(key=lambda x: os.path.getmtime(os.path.join(file_path, x)))\n",
" return files"
]
},
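The cell above adds sort_files_by_date, which returns the directory listing sorted by modification time in ascending order, so the last element is the most recently modified file. A minimal usage sketch, assuming the folder_path defined in the previous cell contains at least one file:

files = sort_files_by_date(folder_path)
if files:
    newest = files[-1]  # ascending mtime sort puts the newest file last
    print(f"Most recently modified file: {newest}")
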
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 14,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" File Name Date\n",
"0 ESTexport-16042024.txt 2024-04-16 00:00:00\n",
"1 ESTexport-15042024.txt 2024-04-15 00:00:00\n",
"2 ESTexport-14042024.txt 2024-04-14 00:00:00\n",
"3 ESTexport-13042024.txt 2024-04-13 00:00:00\n",
"4 ESTexport-12042024.txt 2024-04-12 00:00:00\n",
"5 ESTexport-11042024.txt 2024-04-11 00:00:00\n",
"6 ESTexport-10042024.txt 2024-04-10 00:00:00\n",
"7 ESTexport-09042024.txt 2024-04-09 00:00:00\n",
"8 ESTexport-08042024.txt 2024-04-08 00:00:00\n",
"9 ESTexport-07042024.txt 2024-04-07 00:00:00\n",
"10 ESTexport-06042024.txt 2024-04-06 00:00:00\n",
"11 ESTexport-05042024.txt 2024-04-05 00:00:00\n",
"12 ESTexport-04042024.txt 2024-04-04 00:00:00\n",
"13 ESTexport-03042024.txt 2024-04-03 00:00:00\n",
"14 ESTexport-02042024.txt 2024-04-02 00:00:00\n",
"15 ESTexport-01042024.txt 2024-04-01 00:00:00\n",
"16 Back up 0001-01-01 00:00:00\n"
" File Name Date Latest\n",
"0 ESTexport-14052024 .txt 2024-05-14 00:00:00 ESTexport-02052024 .txt\n",
"1 ESTexport-13052024 .txt 2024-05-13 00:00:00 ESTexport-02052024 .txt\n",
"2 ESTexport-12052024 .txt 2024-05-12 00:00:00 ESTexport-02052024 .txt\n",
"3 ESTexport-11052024 .txt 2024-05-11 00:00:00 ESTexport-02052024 .txt\n",
"4 ESTexport-10052024 .txt 2024-05-10 00:00:00 ESTexport-02052024 .txt\n",
"5 ESTexport-09052024 .txt 2024-05-09 00:00:00 ESTexport-02052024 .txt\n",
"6 ESTexport-08052024 .txt 2024-05-08 00:00:00 ESTexport-02052024 .txt\n",
"7 ESTexport-07052024 .txt 2024-05-07 00:00:00 ESTexport-02052024 .txt\n",
"8 Back up 0001-01-01 00:00:00 ESTexport-02052024 .txt\n"
]
}
],
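The cell that prints this table is collapsed in the diff, so the following is only a hedged reconstruction of how the File Name / Date columns could be derived, assuming the DDMMYYYY token in names like 'ESTexport-14052024 .txt' encodes the file date and that entries without a date (such as 'Back up') fall back to datetime.min; the 'Latest' column is not reconstructed here.

import re
from datetime import datetime

import pandas as pd

def extract_date(name):
    # Pull a DDMMYYYY token out of the filename; non-matching names get datetime.min.
    match = re.search(r'(\d{2})(\d{2})(\d{4})', name)
    if not match:
        return datetime.min  # renders as 0001-01-01, like the 'Back up' row
    day, month, year = map(int, match.groups())
    return datetime(year, month, day)

names = sort_files_by_date(folder_path)
df = pd.DataFrame({'File Name': names})
df['Date'] = df['File Name'].apply(extract_date)
df = df.sort_values('Date', ascending=False).reset_index(drop=True)
print(df)
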
@@ -93,30 +89,16 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" Initial Filename Final Filename\n",
"0 ESTexport-01042024.txt ESTexport-01042024 .txt\n",
"1 ESTexport-02042024.txt ESTexport-02042024 .txt\n",
"2 ESTexport-03042024.txt ESTexport-03042024 .txt\n",
"3 ESTexport-04042024.txt ESTexport-04042024 .txt\n",
"4 ESTexport-05042024.txt ESTexport-05042024 .txt\n",
"5 ESTexport-06042024.txt ESTexport-06042024 .txt\n",
"6 ESTexport-07042024.txt ESTexport-07042024 .txt\n",
"7 ESTexport-08042024.txt ESTexport-08042024 .txt\n",
"8 ESTexport-09042024.txt ESTexport-09042024 .txt\n",
"9 ESTexport-10042024.txt ESTexport-10042024 .txt\n",
"10 ESTexport-11042024.txt ESTexport-11042024 .txt\n",
"11 ESTexport-12042024.txt ESTexport-12042024 .txt\n",
"12 ESTexport-13042024.txt ESTexport-13042024 .txt\n",
"13 ESTexport-14042024.txt ESTexport-14042024 .txt\n",
"14 ESTexport-15042024.txt ESTexport-15042024 .txt\n",
"15 ESTexport-16042024.txt ESTexport-16042024 .txt\n"
"Empty DataFrame\n",
"Columns: [Initial Filename, Final Filename]\n",
"Index: []\n"
]
}
],
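The output removed in this hunk showed each export renamed with a space inserted before '.txt'; the new run prints an empty frame because nothing was left to rename. The renaming cell itself is collapsed, so this is only a hypothetical sketch of that pass, assuming os and pandas are already imported as in the cells above:

renames = []
for name in os.listdir(folder_path):
    if name.endswith('.txt') and not name.endswith(' .txt'):
        new_name = name[:-len('.txt')] + ' .txt'  # insert a space before the extension
        os.rename(os.path.join(folder_path, name),
                  os.path.join(folder_path, new_name))
        renames.append({'Initial Filename': name, 'Final Filename': new_name})
print(pd.DataFrame(renames, columns=['Initial Filename', 'Final Filename']))
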
@@ -158,7 +140,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.9"
"version": "3.11.3"
}
},
"nbformat": 4,
113 changes: 37 additions & 76 deletions notebooks/Massmart/massmart_placement.ipynb
@@ -2,48 +2,25 @@
"cells": [
{
"cell_type": "code",
"execution_count": 14,
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"No files with the keyword 'MassCashWholesale' found in R:\\RawData\\Massmart Data. Skipping move operation.\n",
"No files with the keyword 'MassCashRetail' found in R:\\RawData\\Massmart Data. Skipping move operation.\n",
"No files with the keyword 'Makro' found in R:\\RawData\\Massmart Data. Skipping move operation.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"No files with the keyword 'MassDiscounters' found in R:\\RawData\\Massmart Data. Skipping move operation.\n",
"No files with the keyword 'KitKat' found in R:\\RawData\\Massmart Data. Skipping move operation.\n",
"No files with the keyword 'MassBuild' found in R:\\RawData\\Massmart Data. Skipping move operation.\n",
"Monitoring R:\\RawData\\Massmart Data for new files...\n",
"No new files detected for the past 5 minutes. Terminating script.\n",
"No new files detected for the past 5 minutes. Terminating script.\n"
]
}
],
"outputs": [],
"source": [
"import os\n",
"import shutil\n",
"import time\n",
"from watchdog.observers import Observer\n",
"from watchdog.events import FileSystemEventHandler\n",
"import argparse\n",
"import threading\n",
"\n",
"# Define constants\n",
"SOURCE_FOLDER = r'R:\\RawData\\Massmart Data'\n",
"PARAMETERS = [\n",
" {'target_dir': r'R:\\RawData\\Masscash\\\\Wholesale\\ToUpload', 'keyword': 'MassCashWholesale'},\n",
" {'target_dir': r'R:\\RawData\\Masscash\\Retail\\ToUpload', 'keyword': 'MassCashRetail'},\n",
" {'target_dir': r'R:\\RawData\\Makro Updated\\\\ToUpload', 'keyword': 'Makro'},\n",
" {'target_dir': r'R:\\RawData\\Game\\ToUpload', 'keyword': 'MassDiscounters'},\n",
" {'target_dir': r'R:\\RawData\\KitKat\\ToUpload', 'keyword': 'KitKat'},\n",
" {'target_dir': r'R:\\RawData\\Massbuild\\ToUpload', 'keyword': 'MassBuild'}\n",
" {'target_dir': r'R:\\RawData\\Masscash\\Retail\\ToUpload', 'keyword': 'MassCashRetail'},\n",
" {'target_dir': r'R:\\RawData\\Makro Updated\\\\ToUpload', 'keyword': 'Makro'},\n",
" {'target_dir': r'R:\\RawData\\Game\\ToUpload', 'keyword': 'MassDiscounters'},\n",
" {'target_dir': r'R:\\RawData\\KitKat\\ToUpload', 'keyword': 'KitKat'},\n",
" {'target_dir': r'R:\\RawData\\Massbuild\\ToUpload', 'keyword': 'Massbuild'}\n",
"]\n",
"EXCLUDED_KEYWORDS = ['ExportSummary', 'Stock']\n",
"MAX_ALLOWED_TIME_LAPSE = 60 * 5 # Five minutes\n",
@@ -56,44 +33,40 @@
" def on_created(self, event):\n",
" # Check if the created file matches any of the parameters\n",
" for params in self.parameters:\n",
" if params['keyword'].lower() in event.src_path.lower():\n",
" if params['keyword'] in event.src_path:\n",
" # Move the file based on the keyword\n",
" move_files_based_on_keyword(self.source_dir, params['target_dir'], params['keyword'])\n",
"\n",
"def move_files_based_on_keyword(source_dir, target_dir, keyword):\n",
" try:\n",
" # Get a list of files in the source directory that match the keyword and are not excluded\n",
" files_to_move = [file for file in os.listdir(source_dir) \n",
" if keyword.lower() in file.lower() \n",
" and file.endswith('.txt') \n",
" and not any(excluded_keyword.lower() in file.lower() for excluded_keyword in EXCLUDED_KEYWORDS)\n",
" ]\n",
" # Get a list of files in the source directory that match the keyword and are not excluded\n",
" files_to_move = [file for file in os.listdir(source_dir) \n",
" if keyword in file \n",
" and file.endswith('.txt') \n",
" and not any(excluded_keyword in file for excluded_keyword in EXCLUDED_KEYWORDS)\n",
" ]\n",
" \n",
" if not files_to_move:\n",
" print(f\"No files with the keyword '{keyword}' found in {source_dir}. Skipping move operation.\")\n",
" return\n",
" if not files_to_move:\n",
" print(f\"No files with the keyword '{keyword}' found in {source_dir}. Skipping move operation.\")\n",
" return\n",
"\n",
" # Move each file to the target directory\n",
" for file in files_to_move:\n",
" source = os.path.join(source_dir, file)\n",
" destination = os.path.join(target_dir, file)\n",
" shutil.move(source, destination)\n",
" print(f\"Moved {file} to {destination}\")\n",
" except Exception as e:\n",
" print(f\"Error moving files: {str(e)}\")\n",
" # Move each file to the target directory\n",
" for file in files_to_move:\n",
" source = os.path.join(source_dir, file)\n",
" destination = os.path.join(target_dir, file)\n",
" shutil.move(source, destination)\n",
" print(f\"Moved {file} to {destination}\")\n",
"\n",
"def monitor_files(source_folder, parameters, max_inactive_time):\n",
" try:\n",
" # Move any existing files based on the parameters\n",
" for params in parameters:\n",
" move_files_based_on_keyword(source_folder, params['target_dir'], params['keyword'])\n",
" for params in parameters:\n",
" move_files_based_on_keyword(source_folder, params['target_dir'], params['keyword'])\n",
"\n",
" # Create a file handler and observer to monitor the source folder\n",
" event_handler = FileHandler(source_folder, parameters)\n",
" observer = Observer()\n",
" observer.schedule(event_handler, source_folder, recursive=True)\n",
" observer.start()\n",
" # Create a file handler and observer to monitor the source folder\n",
" event_handler = FileHandler(source_folder, parameters)\n",
" observer = Observer()\n",
" observer.schedule(event_handler, source_folder, recursive=True)\n",
" observer.start()\n",
"\n",
" try:\n",
" print(f\"Monitoring {source_folder} for new files...\")\n",
" start_time = time.time()\n",
" while True:\n",
@@ -103,24 +76,12 @@
" break\n",
"\n",
" time.sleep(10)\n",
" except Exception as e:\n",
" print(f\"Error monitoring files: {str(e)}\")\n",
" finally:\n",
" observer.join()\n",
"\n",
"def main():\n",
" parser = argparse.ArgumentParser()\n",
" parser.add_argument('--source_folder', type=str, default=SOURCE_FOLDER)\n",
" parser.add_argument('--parameters', type=str, default=PARAMETERS)\n",
" parser.add_argument('--time', type=int, default=MAX_ALLOWED_TIME_LAPSE)\n",
" args, unknown_args = parser.parse_known_args()\n",
"\n",
" # Start monitoring in a separate thread\n",
" monitoring_thread = threading.Thread(target=monitor_files, args=(args.source_folder, args.parameters, args.time))\n",
" monitoring_thread.start()\n",
" except KeyboardInterrupt:\n",
" observer.stop()\n",
" observer.join()\n",
"\n",
"if __name__ == \"__main__\":\n",
" main()"
" monitor_files(SOURCE_FOLDER, PARAMETERS, MAX_ALLOWED_TIME_LAPSE)"
]
}
],
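With the argparse parser and the monitoring thread removed, the notebook now calls monitor_files directly with the module-level constants, so a run no longer takes terminal arguments. A one-off run against a different folder only needs different arguments; the paths below are hypothetical:

test_params = [{'target_dir': r'C:\temp\ToUpload', 'keyword': 'Makro'}]
monitor_files(r'C:\temp\incoming', test_params, 60)  # give up after 60 idle seconds
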
@@ -140,7 +101,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
"version": "3.10.9"
},
"orig_nbformat": 4
},
2 changes: 1 addition & 1 deletion notebooks/PnP/config.yaml
@@ -11,7 +11,7 @@
- path: R:\RawData\PNP B2B\Daily Client File Downloads
age_threshold_days: 3
- path: R:\RawData\PNP SAP\Client File Downloads
age_threshold_days: 30
age_threshold_days: 60
- path: R:\RawData\OKFOODS\Done
age_threshold_days: 7
- path: R:\RawData\Engen\EngenWeekly
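Each entry in config.yaml pairs a folder with an age_threshold_days value. How the notebook consumes it is not shown in this diff, so the sketch below is only one plausible reading, assuming PyYAML is available, the entries sit under a top-level key (named folders here as an assumption), and files older than the threshold should merely be reported:

import os
import time

import yaml

with open('config.yaml') as fh:
    config = yaml.safe_load(fh)

now = time.time()
for entry in config.get('folders', []):  # top-level key name is an assumption
    cutoff = now - entry['age_threshold_days'] * 86400
    for name in os.listdir(entry['path']):
        full = os.path.join(entry['path'], name)
        if os.path.isfile(full) and os.path.getmtime(full) < cutoff:
            print(f"Older than threshold: {full}")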