Norquinal committed on
Commit 00f8c9a
1 Parent(s): 9c37ed5

Upload 2 files

Files changed (2)
  1. token_counter.py +43 -0
  2. token_filter.py +24 -0
token_counter.py ADDED
@@ -0,0 +1,43 @@
+ import json
+ import tiktoken
+
+ # Load the JSON data from your file
+ with open('your_json_file_here.json', 'r') as json_file:
+     data = json.load(json_file)
+
+ # Initialize a list to store token lengths
+ token_lengths = []
+
+ # Iterate through items in the JSON file
+ for item in data:
+     if 'conversations' in item:
+         all_content = ""
+         for conversation in item['conversations']:
+             if 'message' in conversation:
+                 all_content += conversation['message']
+
+         # Calculate token length for all content
+         encoding = tiktoken.get_encoding("cl100k_base")
+         num_tokens = len(encoding.encode(all_content))
+
+         # Add token length as a property to the item
+         item['token_length'] = num_tokens
+         token_lengths.append(num_tokens)
+
+ # Calculate the average token length for each item
+ for i, item in enumerate(data):
+     num_contents = len(item.get('conversations', []))
+     if num_contents > 0:
+         item['average_token_length'] = item['token_length'] / num_contents
+
+ # Save the updated data with average token lengths to a new JSON file
+ with open('json_file_w_token_counts.json', 'w') as updated_json_file:
+     json.dump(data, updated_json_file, indent=4)
+
+ # Optionally, you can print the average token length for each item
+ for i, item in enumerate(data):
+     print(f"Item {i + 1} - Average Token Length: {item.get('average_token_length')}")
+
+ # Optionally, you can print the total average token length for all items
+ total_average_token_length = sum(item.get('average_token_length', 0) for item in data) / len(data)
+ print(f"Total Average Token Length for all items: {total_average_token_length}")
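token_counter.py assumes a specific input shape: a top-level JSON array whose items each contain a 'conversations' list of objects with a 'message' string ('your_json_file_here.json' is a placeholder for your own file). As a minimal sketch of that assumed format, the snippet below writes a tiny compatible file; the 'user' field is hypothetical and is ignored by the counter:

import json

# Hypothetical sample data matching the schema token_counter.py reads:
# a list of items, each with a 'conversations' list of {'message': str} objects.
sample = [
    {
        "conversations": [
            {"user": "alice", "message": "Hello, how are you?"},
            {"user": "bob", "message": "Doing well, thanks for asking."},
        ]
    },
    {
        "conversations": [
            {"user": "alice", "message": "Short one."},
        ]
    },
]

with open('your_json_file_here.json', 'w') as f:
    json.dump(sample, f, indent=4)

Running token_counter.py against such a file produces json_file_w_token_counts.json, where each item gains 'token_length' (total cl100k_base tokens across its messages) and 'average_token_length' (that total divided by the number of conversations).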
token_filter.py ADDED
@@ -0,0 +1,24 @@
+ import json
+
+ # Load the JSON data from your file
+ with open('your_json_file_here.json', 'r') as json_file:
+     data = json.load(json_file)
+
+ # Define the minimum values for filtering
+ minimum_average_token_length = 125  # Set your desired minimum average token length
+ minimum_conversations_length = 10  # Set your desired minimum length of the 'conversations' array
+
+ # Filter the items that meet the minimum criteria
+ filtered_items = [
+     item for item in data
+     if item.get('average_token_length', 0) >= minimum_average_token_length
+     and len(item.get('conversations', [])) >= minimum_conversations_length
+ ]
+
+ # Save the filtered items to a new JSON file
+ with open('filtered_json_file.json', 'w') as filtered_json_file:
+     json.dump(filtered_items, filtered_json_file, indent=4)
+
+ # Optionally, you can print the filtered items
+ for i, item in enumerate(filtered_items):
+     print(f"Filtered Item {i + 1} - Average Token Length: {item.get('average_token_length')}, Conversations Length: {len(item.get('conversations', []))}")
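token_filter.py keys its first criterion on 'average_token_length', a field that only exists after token_counter.py has annotated the data, so in practice the placeholder input here should be the counter's output (json_file_w_token_counts.json) rather than the raw file. For reference, an annotated item looks roughly like the sketch below; the field values are illustrative, not computed:

# Illustrative shape of one item after token_counter.py has run
# (numbers are made up for the example):
annotated_item = {
    "conversations": [
        {"message": "Hello, how are you?"},
        {"message": "Doing well, thanks for asking."},
    ],
    "token_length": 14,           # total tokens across all messages
    "average_token_length": 7.0,  # token_length / len(conversations)
}

Items that never received an 'average_token_length' (for example, ones with no 'conversations' key) fall back to 0 in the filter's item.get(...) call and are therefore dropped.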