# Tunisian_Language_Dataset / create_data.py
import ast
import os
import pickle
import time

from datasets import load_dataset

# Helper function to load a dataset with a simple retry mechanism:
# on failure, wait a few seconds and try again, re-raising the last error.
def load_dataset_with_retries(dataset_name, *args, retries=3, wait=5, **kwargs):
    for attempt in range(retries):
        try:
            return load_dataset(dataset_name, *args, **kwargs)
        except Exception as e:
            print(f"Attempt {attempt + 1} failed for {dataset_name}. Error: {e}")
            if attempt < retries - 1:
                time.sleep(wait)
            else:
                raise
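
# Example usage (mirrors the calls below; any load_dataset arguments pass through):
#     ds = load_dataset_with_retries('facebook/flores', 'aeb_Arab')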

# Checkpoint files used to resume after a crash or interruption.
checkpoint_file = 'tunisian_data_checkpoint.txt'
dataset_count_path = 'data_count.pkl'

# Load the collected sentences if a checkpoint exists.
if os.path.exists(checkpoint_file):
    with open(checkpoint_file, 'r', encoding='utf-8') as f:
        # ast.literal_eval safely parses the repr() of the saved list;
        # plain eval() would execute arbitrary code from the file.
        final_data = ast.literal_eval(f.read())
else:
    final_data = []

# Restore how many datasets were already processed, if recorded.
if os.path.exists(dataset_count_path):
    with open(dataset_count_path, 'rb') as f:
        loaded_data = pickle.load(f)
    datasets_completed = loaded_data['datasets_completed']
else:
    datasets_completed = 0

# Helper function to save the collected sentences to the checkpoint file.
def save_checkpoint(data):
    with open(checkpoint_file, 'w', encoding='utf-8') as f:
        f.write(str(data))


# Helper function to persist the dataset counter, so a resumed run
# skips the steps that already finished.
def save_datasets_completed(num):
    with open(dataset_count_path, 'wb') as f:
        pickle.dump({'datasets_completed': num}, f)
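
# Resume flow, for reference: on a fresh run neither checkpoint file exists,
# so final_data starts empty and datasets_completed is 0; after a crash,
# rerunning skips every block whose index is below the saved count.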

# Load and process the datasets one by one, checkpointing after each step.
try:
    if datasets_completed < 1:
        # xP3x: multilingual prompt/completion pairs; keep the 'targets' text.
        ds_xp3x = load_dataset_with_retries("Muennighoff/xP3x", "aeb_Arab", trust_remote_code=True)
        final_data.extend(sentence['targets'] for sentence in ds_xp3x['train'])
        save_checkpoint(final_data)
        datasets_completed += 1
        save_datasets_completed(datasets_completed)

    if datasets_completed < 2:
        # GlotCC: web-crawled monolingual corpus; 'content' holds the text.
        ds_glotcc = load_dataset_with_retries("cis-lmu/glotcc-v1", name="aeb-Arab", split="train")
        final_data.extend(sentence['content'] for sentence in ds_glotcc)
        save_checkpoint(final_data)
        datasets_completed += 1
        save_datasets_completed(datasets_completed)
if datasets_completed < 3:
ds_flores = load_dataset_with_retries('facebook/flores', 'aeb_Arab')
final_data.extend(list(sentence['sentence'] for sentence in ds_flores['dev']))
final_data.extend(list(sentence['sentence'] for sentence in ds_flores['devtest']))
save_checkpoint(final_data)
datasets_completed += 1

    if datasets_completed < 4:
        # GlotStoryBook: keep only Tunisian Arabic ('aeb') rows. Note that
        # this appends whole records (dicts), not plain text, so they are
        # written as their repr() in the final file.
        ds_glotstory = load_dataset_with_retries('cis-lmu/GlotStoryBook', 'default', split='train')
        glotstory_sentences = [sentence for sentence in ds_glotstory if sentence["Language"] == 'aeb']
        final_data.extend(glotstory_sentences)
        save_checkpoint(final_data)
        datasets_completed += 1
        save_datasets_completed(datasets_completed)

    if datasets_completed < 5:
        # SIB-200: topic-classification sentences; take 'text' from all splits.
        ds_sib200 = load_dataset_with_retries('Davlan/sib200', 'aeb_Arab')
        for split in ('train', 'validation', 'test'):
            final_data.extend(sentence['text'] for sentence in ds_sib200[split])
        save_checkpoint(final_data)
        datasets_completed += 1
        save_datasets_completed(datasets_completed)

    if datasets_completed < 6:
        # xSIM++: keep the 'query' sentences from both evaluation splits.
        ds_xsimplus = load_dataset_with_retries("jaygala24/xsimplusplus", "aeb_Arab")
        final_data.extend(sentence['query'] for sentence in ds_xsimplus['dev'])
        final_data.extend(sentence['query'] for sentence in ds_xsimplus['devtest'])
        save_checkpoint(final_data)
        datasets_completed += 1
        save_datasets_completed(datasets_completed)

    if datasets_completed < 7:
        # Mined bitext: 'sentence2' is the aeb_Arab side of the eng-aeb pairs.
        ds_gentai = load_dataset_with_retries("gentaiscool/bitext_sib200_miners", "eng_Latn-aeb_Arab")
        final_data.extend(sentence['sentence2'] for sentence in ds_gentai['train'])
        save_checkpoint(final_data)
        datasets_completed += 1
        save_datasets_completed(datasets_completed)
if datasets_completed < 8:
dataset_reddit = load_dataset_with_retries('dataverse-scraping/reddit_dataset_219', split='train',
streaming=True)
def filter_function(example):
return example['communityName'] == 'r/Tunisia' # Replace with your filter condition
filtered_dataset = dataset_reddit.filter(filter_function)
final_data.extend(list(sentence['text'] for sentence in filtered_dataset))
save_checkpoint(final_data)
datasets_completed += 1

    # Final save: one entry per line in a plain-text corpus file.
    with open('tunisian_data.txt', 'w', encoding='utf-8') as f:
        for line in final_data:
            f.write(f"{line}\n")
except Exception as e:
    print(f"An error occurred: {e}. Progress saved.")
    save_checkpoint(final_data)
    save_datasets_completed(datasets_completed)
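
# To reproduce (assuming the Hugging Face `datasets` library is installed):
#     pip install datasets
#     python create_data.py
# Re-running after an interruption resumes from the saved checkpoints.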