author    | Sigma-Ohio <crt@teleco.ch> | 2025-06-09 00:42:33 +0200
committer | Sigma-Ohio <crt@teleco.ch> | 2025-06-09 00:42:33 +0200
commit    | 24462f8cdcd280fad3d444fa7dc5278497e831f4 (patch)
tree      | c51e3a6a9f93683eb1d048f0529590cd810dfc6a /executables
parent    | 7ceba5a0f3cbeb6ab15bc7f99aec87741ef177ea (diff)
send help
Diffstat (limited to 'executables')
-rw-r--r-- | executables/raus.py              | 29
-rw-r--r-- | executables/remove_duplicates.py | 29
2 files changed, 58 insertions, 0 deletions
diff --git a/executables/raus.py b/executables/raus.py
new file mode 100644
index 0000000..2480fce
--- /dev/null
+++ b/executables/raus.py
@@ -0,0 +1,29 @@
+import os
+import sys
+
+def remove_duplicate_lines(filepath):
+    # Read the file and split it into lines
+    with open(filepath, 'r') as file:
+        lines = file.readlines()
+
+    # Remove duplicates by converting the list of lines to a set, then back to a list
+    # Note: sets do not preserve order, so the original line order is lost here
+    unique_lines = list(set(lines))
+
+    # Sort the lines (optional, depending on whether you want a deterministic order)
+    unique_lines.sort()
+
+    # Write the unique lines back to the file
+    with open(filepath, 'w') as file:
+        file.writelines(unique_lines)
+
+if __name__ == "__main__":
+    # Get the filepath from the command line arguments
+    if len(sys.argv) < 2:
+        print("Usage: python remove_duplicates.py <path_to_file>")
+        sys.exit(1)
+
+    file_to_process = sys.argv[1]
+
+    print(f"Processing file: {file_to_process}")
+    remove_duplicate_lines(file_to_process)
\ No newline at end of file
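Both new files deduplicate by round-tripping the lines through a set, which discards the original order before the explicit sort. For reference only (not part of this commit, and the function name is made up), an order-preserving variant could use `dict.fromkeys`, which keeps the first occurrence of each line:

```python
# Hypothetical order-preserving variant; not part of the committed files.
import sys

def remove_duplicate_lines_keep_order(filepath):
    with open(filepath, 'r') as file:
        lines = file.readlines()

    # dict.fromkeys preserves insertion order (Python 3.7+), so the first
    # occurrence of each line survives and the original order is kept.
    unique_lines = list(dict.fromkeys(lines))

    with open(filepath, 'w') as file:
        file.writelines(unique_lines)

if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: python remove_duplicates_keep_order.py <path_to_file>")
        sys.exit(1)
    remove_duplicate_lines_keep_order(sys.argv[1])
```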
diff --git a/executables/remove_duplicates.py b/executables/remove_duplicates.py
new file mode 100644
index 0000000..2480fce
--- /dev/null
+++ b/executables/remove_duplicates.py
@@ -0,0 +1,29 @@
+import os
+import sys
+
+def remove_duplicate_lines(filepath):
+    # Read the file and split it into lines
+    with open(filepath, 'r') as file:
+        lines = file.readlines()
+
+    # Remove duplicates by converting the list of lines to a set, then back to a list
+    # Note: sets do not preserve order, so the original line order is lost here
+    unique_lines = list(set(lines))
+
+    # Sort the lines (optional, depending on whether you want a deterministic order)
+    unique_lines.sort()
+
+    # Write the unique lines back to the file
+    with open(filepath, 'w') as file:
+        file.writelines(unique_lines)
+
+if __name__ == "__main__":
+    # Get the filepath from the command line arguments
+    if len(sys.argv) < 2:
+        print("Usage: python remove_duplicates.py <path_to_file>")
+        sys.exit(1)
+
+    file_to_process = sys.argv[1]
+
+    print(f"Processing file: {file_to_process}")
+    remove_duplicate_lines(file_to_process)
\ No newline at end of file
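A minimal way to exercise the committed script, assuming it is run from the repository root and using a scratch file name (`sample.txt`) that does not exist in the repo:

```python
# Hypothetical smoke test for the committed script; the file name is made up.
from pathlib import Path
import subprocess
import sys

sample = Path("sample.txt")
sample.write_text("b\na\nb\nc\na\n")

# Run the committed script on the scratch file.
subprocess.run([sys.executable, "executables/remove_duplicates.py", str(sample)], check=True)

print(sample.read_text())  # expected: "a\nb\nc\n" (deduplicated and sorted)
```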