How to write individual txt files utilising multithreading
Question:
Hey all trust that you’re well, I’m trying to get each thread to create separate .txt files and write to each txt file individually.
Example: process 1 opens pri0.txt and writes 10 names, whilst process two opens pri1.txt and write 10 names
Outcome I received: pri0.txt as well as pri1.txt were created, process 1 wrote 1 line to pri0.txt and the rest in pri1.txt and process 2 wrote to pri1.txt.
Below is the code
import time
import json
import threading
import itertools
limit = 10
def test(process):
z = 0
for _ in itertools.count(start=1):
if z == limit:
break
z += 1
time.sleep(1)
with open(f'pri{i}.txt', 'a') as file:
file.write(process[f'user{z}'] + 'n')
process = open('user.json',)
pro = json.load(process)
process.close()
process_count = 2
thread_list = []
for i in range(process_count):
t = threading.Thread(name=f'Account {i}', target=test, args=[process[i]])
t.start()
time.sleep(1)
print(t.name + ' started')
thread_list.append(t)
for thread in thread_list:
thread.join()
Answers:
This code writes in two different files individually.
As you can see in the output, the two threads are writing at the same time.
import threading
def writeToFile(fileNumber):
with open(f'File{fileNumber}.txt', 'w') as file:
for i in range(5):
print(f"wroteToFile: {fileNumber}")
file.write("hellon")
thread_list = []
thread_list.append(threading.Thread(target=writeToFile, args=[0]))
thread_list.append(threading.Thread(target=writeToFile, args=[1]))
# start the threads
for index, thread in enumerate(thread_list):
thread.start()
print(f"ThreadNumber {index} started")
# wait until all threads are terminated
for thread in thread_list:
thread.join()
Appreciate Philipps contribution
Edit: removed the time.sleep(1)
from the threads and added an extra argument t = threading.Thread(name=f'Account {i}', target=test, args=[usr[i]]+[i])
Result: both pri0.txt & pri1.txt were created, process 1 & process 2 wrote 10 unique values to each txt file.
Final Code:
import time
import json
import threading
import itertools
limit = 10
def test(usr, number):
z = 0
for _ in itertools.count(start=1):
if z == limit:
break
z += 1
time.sleep(1)
with open(f'pri{number}.txt', 'a') as file:
file.write(usr[f'user{z}'] + 'n')
us = open('followers.json',)
usr = json.load(us)
us.close()
process_count = 2
thread_list = []
for i in range(process_count):
t = threading.Thread(name=f'Account {i}', target=test, args=[usr[i]]+[i])
t.start()
print(t.name + ' started')
thread_list.append(t)
for thread in thread_list:
thread.join()
Here’s a variation on the original code that prints all of the key/values from the input JSON. It shows how the loop counter needs to be passed to the individual threads rather than being used globally.
from threading import Thread
import json
NTHREADS = 5
def func(d, i):
with open(f'{i}.txt', 'w') as output:
for user, name in d.items():
print(f'{user=}, {name=}', file=output)
with open('user.json') as j:
users = json.load(j)
threads = []
for i in range(NTHREADS):
(thread := Thread(target=func, args=(users, i))).start()
threads.append(thread)
for thread in threads:
thread.join()
The assumption here is that user.json looks something like this:-
{
"user1": "John",
"user2": "Peter",
"user3": "Michael"
}
This code will produce 5 files each with identical content:-
user='user1', name='John'
user='user2', name='Peter'
user='user3', name='Michael'
Hey all trust that you’re well, I’m trying to get each thread to create separate .txt files and write to each txt file individually.
Example: process 1 opens pri0.txt and writes 10 names, whilst process two opens pri1.txt and write 10 names
Outcome I received: pri0.txt as well as pri1.txt were created, process 1 wrote 1 line to pri0.txt and the rest in pri1.txt and process 2 wrote to pri1.txt.
Below is the code
import time
import json
import threading
import itertools
limit = 10
def test(process):
z = 0
for _ in itertools.count(start=1):
if z == limit:
break
z += 1
time.sleep(1)
with open(f'pri{i}.txt', 'a') as file:
file.write(process[f'user{z}'] + 'n')
process = open('user.json',)
pro = json.load(process)
process.close()
process_count = 2
thread_list = []
for i in range(process_count):
t = threading.Thread(name=f'Account {i}', target=test, args=[process[i]])
t.start()
time.sleep(1)
print(t.name + ' started')
thread_list.append(t)
for thread in thread_list:
thread.join()
This code writes in two different files individually.
As you can see in the output, the two threads are writing at the same time.
import threading
def writeToFile(fileNumber):
with open(f'File{fileNumber}.txt', 'w') as file:
for i in range(5):
print(f"wroteToFile: {fileNumber}")
file.write("hellon")
thread_list = []
thread_list.append(threading.Thread(target=writeToFile, args=[0]))
thread_list.append(threading.Thread(target=writeToFile, args=[1]))
# start the threads
for index, thread in enumerate(thread_list):
thread.start()
print(f"ThreadNumber {index} started")
# wait until all threads are terminated
for thread in thread_list:
thread.join()
Appreciate Philipps contribution
Edit: removed the time.sleep(1)
from the threads and added an extra argument t = threading.Thread(name=f'Account {i}', target=test, args=[usr[i]]+[i])
Result: both pri0.txt & pri1.txt were created, process 1 & process 2 wrote 10 unique values to each txt file.
Final Code:
import time
import json
import threading
import itertools
limit = 10
def test(usr, number):
z = 0
for _ in itertools.count(start=1):
if z == limit:
break
z += 1
time.sleep(1)
with open(f'pri{number}.txt', 'a') as file:
file.write(usr[f'user{z}'] + 'n')
us = open('followers.json',)
usr = json.load(us)
us.close()
process_count = 2
thread_list = []
for i in range(process_count):
t = threading.Thread(name=f'Account {i}', target=test, args=[usr[i]]+[i])
t.start()
print(t.name + ' started')
thread_list.append(t)
for thread in thread_list:
thread.join()
Here’s a variation on the original code that prints all of the key/values from the input JSON. It shows how the loop counter needs to be passed to the individual threads rather than being used globally.
from threading import Thread
import json
NTHREADS = 5
def func(d, i):
with open(f'{i}.txt', 'w') as output:
for user, name in d.items():
print(f'{user=}, {name=}', file=output)
with open('user.json') as j:
users = json.load(j)
threads = []
for i in range(NTHREADS):
(thread := Thread(target=func, args=(users, i))).start()
threads.append(thread)
for thread in threads:
thread.join()
The assumption here is that user.json looks something like this:-
{
"user1": "John",
"user2": "Peter",
"user3": "Michael"
}
This code will produce 5 files each with identical content:-
user='user1', name='John'
user='user2', name='Peter'
user='user3', name='Michael'